From 436824b508c63bd5f30083a938597ff21fc4014c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 13:19:50 +0530 Subject: [PATCH 001/190] feat: add local registry dev mode and develop deploy pipeline --- .github/AGENTS.md | 31 ++++++++ .github/workflows/deploy-develop.yml | 115 +++++++++++++++++++++++++++ AGENTS.md | 1 + apps/registry/AGENTS.md | 29 +++++++ apps/registry/package.json | 1 + apps/registry/wrangler.toml | 8 ++ package.json | 3 +- 7 files changed, 187 insertions(+), 1 deletion(-) create mode 100644 .github/AGENTS.md create mode 100644 .github/workflows/deploy-develop.yml create mode 100644 apps/registry/AGENTS.md diff --git a/.github/AGENTS.md b/.github/AGENTS.md new file mode 100644 index 0000000..5f71f8f --- /dev/null +++ b/.github/AGENTS.md @@ -0,0 +1,31 @@ +# GitHub Actions Guardrails + +## Purpose +- Make workflow automation predictable, auditable, and safe for this repo's Cloudflare Worker deployments. +- Surface the canonical deploy pipeline for merges into `develop` so every engineer knows which workflow runs migrations, deploys, and verifies the dev Worker. + +## Deployment-first practices +- Keep CI (`ci.yml`) focused on lint/typecheck/test/build so shorter feedback loops run on every push/pull request. +- Keep deployment logic in dedicated workflows whose triggers and permissions are explicit (e.g., `deploy-develop.yml` on `develop`). +- Always run migrations before `wrangler deploy` and verify `/health` after the deploy completes. + +## Secrets and permissions +- Required secrets for the Cloudflare workflow: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`. +- Mirror the token into `CF_API_TOKEN`/`CF_ACCOUNT_ID` so both `wrangler`/`pnpm` commands and Cloudflare tooling can resolve the IDs. +- Scope the token down to the least privileges needed: `Workers Scripts:Write`, `Workers Routes:Write`, `Zone:Read`, `D1:Database:Admin + Migrate`, `D1:Database:Read/Write`, and minimal `Account:Read`. 
+ +## Workflow expectations +- Each deploy workflow must run `pnpm install --frozen-lockfile`, `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, and `pnpm -r build` before touching production assets. +- Use `pnpm --filter @clawdentity/registry run deploy:dev` (or equivalent) to keep migration/deploy scripts centralized under `apps/registry`. +- After a deploy, hit the branded health endpoint (`https://dev.api.clawdentity.com/health`) and ensure the response reports `status: "ok"` and `environment: "development"` before marking the job complete. +- Deploy workflows should use concurrency groups to avoid overlapping deploys for the same environment. + +## Migration Rollback Strategy (Develop) +- Before migrations/deploy, capture rollback artifacts: + - `wrangler deployments list --env dev --json` (current Worker versions) + - `wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` + - `wrangler d1 export clawdentity-db-dev --remote --output ` +- Upload artifacts from every run (success or failure) so operators can recover quickly. 
+- On failed deploy: + - Worker rollback: `wrangler rollback --env dev` + - DB rollback: `wrangler d1 time-travel restore clawdentity-db-dev --env dev --timestamp ` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml new file mode 100644 index 0000000..35715f8 --- /dev/null +++ b/.github/workflows/deploy-develop.yml @@ -0,0 +1,115 @@ +name: Deploy develop +on: + push: + branches: + - develop + workflow_dispatch: {} + +concurrency: + group: deploy-develop + cancel-in-progress: true + +permissions: + contents: read +jobs: + deploy: + name: Lint, test, migrate, and deploy to dev + runs-on: ubuntu-latest + timeout-minutes: 30 + env: + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CF_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.23.0 + + - name: Validate required secrets + run: | + test -n "${CLOUDFLARE_API_TOKEN}" + test -n "${CLOUDFLARE_ACCOUNT_ID}" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Lint + run: pnpm lint + + - name: Typecheck + run: pnpm -r typecheck + + - name: Run tests + run: pnpm -r test + + - name: Build + run: pnpm -r build + + - name: Capture pre-deploy rollback artifacts + run: | + mkdir -p artifacts + PREDEPLOY_TS=$(date -u +%Y-%m-%dT%H:%M:%SZ) + echo "PREDEPLOY_TS=${PREDEPLOY_TS}" >> "${GITHUB_ENV}" + printf "%s\n" "${PREDEPLOY_TS}" > artifacts/predeploy.timestamp + wrangler --cwd apps/registry deployments list --env dev --json > artifacts/worker-deployments-pre.json + wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > 
artifacts/d1-time-travel-pre.json + wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql + + - name: Apply dev migrations and deploy + run: pnpm --filter @clawdentity/registry run deploy:dev + + - name: Verify health endpoint + run: | + python3 - <<'PY' + import json, sys, urllib.request, urllib.error + url = "https://dev.api.clawdentity.com/health" + try: + resp = urllib.request.urlopen(url, timeout=10) + except urllib.error.HTTPError as exc: + sys.stderr.write(f"health check failed ({exc.code} {exc.reason})\n") + sys.exit(1) + data = json.load(resp) + if data.get("status") != "ok" or data.get("environment") != "development": + raise SystemExit(f"unexpected health payload: {data}") + print("healthcheck passed", data) + PY + + - name: Capture post-deploy state + if: always() + run: | + mkdir -p artifacts + wrangler --cwd apps/registry deployments list --env dev --json > artifacts/worker-deployments-post.json || true + wrangler --cwd apps/registry d1 migrations list clawdentity-db-dev --remote --env dev > artifacts/d1-migrations-post.txt || true + + - name: Rollback instructions on failure + if: failure() + run: | + echo "Worker rollback:" + echo " wrangler --cwd apps/registry deployments list --env dev --json" + echo " wrangler --cwd apps/registry rollback --env dev -y -m \"ci rollback\"" + echo "" + echo "D1 rollback:" + echo " wrangler --cwd apps/registry d1 time-travel restore clawdentity-db-dev --env dev --timestamp \"${PREDEPLOY_TS}\"" + echo " # or restore via bookmark from artifacts/d1-time-travel-pre.json" + + - name: Upload rollback artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: deploy-develop-rollback-${{ github.run_id }} + path: artifacts/ + if-no-files-found: warn + retention-days: 14 diff --git a/AGENTS.md b/AGENTS.md index fa859f5..33f6560 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -34,6 +34,7 @@ - `--env dev` for development (Worker: 
`clawdentity-registry-dev`, D1: `clawdentity-db-dev`) - `--env production` for production (Worker: `clawdentity-registry`, D1: `clawdentity-db`) - **Local dev** uses `wrangler dev --env dev` with local SQLite. Override vars via `apps/registry/.dev.vars` (gitignored). +- Use `pnpm -F @clawdentity/registry run dev:local` (or root alias `pnpm dev:registry:local`) to apply local D1 migrations before starting dev server. - **One-touch deploy** scripts in `apps/registry/package.json`: - `deploy:dev` — migrates remote dev D1 + deploys dev Worker - `deploy:production` — migrates remote prod D1 + deploys prod Worker diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md new file mode 100644 index 0000000..30535e3 --- /dev/null +++ b/apps/registry/AGENTS.md @@ -0,0 +1,29 @@ +# Registry Agent Notes + +## Purpose +- Keep registry deployment and domain configuration consistent across environments. + +## Domain Rules +- Public endpoints must use branded custom domains, not `*.workers.dev`. +- Development custom domain: `dev.api.clawdentity.com`. +- Production custom domain: `api.clawdentity.com`. +- `workers.dev` is currently disabled by custom-domain routing unless `workers_dev = true` is explicitly set. + +## Deployment Rules +- Always deploy with explicit environment: `--env dev` or `--env production`. +- For deploy scripts, run D1 migrations before Worker deploy. +- Verify `GET /health` returns: + - `status: "ok"` + - expected environment value (`development` or `production`). +- For CI deploys, capture a pre-migration D1 export and time-travel point-in-time marker for rollback. +- Local development should run migrations against the local D1 alias before `wrangler dev --env dev`, e.g. `pnpm -F @clawdentity/registry dev:local`. + +## Database Authorization Rules +- Cloudflare D1 (SQLite) does not provide PostgreSQL-style row-level security (RLS) policies. +- Enforce per-actor access in application queries and handlers (e.g., `owner_id` / `human_id` filters). 
+- Treat authorization as fail-closed: no actor context means no data access. + +## Change Safety +- When changing routes/domains, validate no overlap with existing zone routes. +- Do not store secrets in repo; use `wrangler secret put`. +- If deploy fails after migrations, rollback DB with D1 Time Travel and rollback Worker to the previous version. diff --git a/apps/registry/package.json b/apps/registry/package.json index ea0ed59..19309dd 100644 --- a/apps/registry/package.json +++ b/apps/registry/package.json @@ -14,6 +14,7 @@ "scripts": { "build": "tsup", "dev": "wrangler dev --env dev", + "dev:local": "wrangler d1 migrations apply clawdentity-db-dev --local --env dev && wrangler dev --env dev", "deploy:dev": "wrangler d1 migrations apply clawdentity-db-dev --remote --env dev && wrangler deploy --env dev", "deploy:production": "wrangler d1 migrations apply clawdentity-db --remote --env production && wrangler deploy --env production", "db:generate": "drizzle-kit generate", diff --git a/apps/registry/wrangler.toml b/apps/registry/wrangler.toml index 70e7400..55a158c 100644 --- a/apps/registry/wrangler.toml +++ b/apps/registry/wrangler.toml @@ -14,6 +14,10 @@ migrations_dir = "drizzle" [env.dev.vars] ENVIRONMENT = "development" +[[env.dev.routes]] +pattern = "dev.api.clawdentity.com" +custom_domain = true + # ── Production (wrangler deploy --env production) ── [[env.production.d1_databases]] @@ -24,3 +28,7 @@ migrations_dir = "drizzle" [env.production.vars] ENVIRONMENT = "production" + +[[env.production.routes]] +pattern = "api.clawdentity.com" +custom_domain = true diff --git a/package.json b/package.json index 3e5dedc..f349e5c 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,8 @@ "lint": "biome check .", "test": "nx run-many -t test", "build": "nx run-many -t build", - "typecheck": "nx run-many -t typecheck" + "typecheck": "nx run-many -t typecheck", + "dev:registry:local": "pnpm -F @clawdentity/registry run dev:local" }, "devDependencies": { 
"@biomejs/biome": "^2.3.14", From ee94b2bffaa01177293179de44e4a8dfc91afd94 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 13:31:13 +0530 Subject: [PATCH 002/190] ci: avoid duplicate runs on PR updates --- .github/AGENTS.md | 1 + .github/workflows/ci.yml | 14 +++++++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index fb11bae..53d7525 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -9,6 +9,7 @@ - Use `fetch-depth: 0` when running `nx affected`. - Compute and export `NX_BASE` and `NX_HEAD` before invoking affected commands. - Run root lint (`pnpm lint`) before affected tasks to keep style checks global. +- Avoid duplicate CI runs for PR updates by limiting `push` triggers to long-lived branches (`main`, `develop`) and using `pull_request` for feature branches. ## Quality Gates - CI command order: install -> base/head setup -> lint -> affected checks. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9ee96cf..879a5fc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,5 +1,17 @@ name: CI -on: [push, pull_request] +on: + push: + branches: + - main + - develop + pull_request: + branches: + - main + - develop + +concurrency: + group: ci-${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: check: runs-on: ubuntu-latest From b6283f1ec612c3396151d4574cf4bc99e2fc497f Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 13:34:43 +0530 Subject: [PATCH 003/190] ci(nx): enforce dependency builds before tests --- nx.json | 1 + 1 file changed, 1 insertion(+) diff --git a/nx.json b/nx.json index f1db26f..31fdcf9 100644 --- a/nx.json +++ b/nx.json @@ -25,6 +25,7 @@ "cache": true }, "test": { + "dependsOn": ["^build"], "inputs": [ "default", "^production", From 329f242c173a172f88f69fb3bf580a6bbed2d3a9 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 14:31:31 +0530 Subject: [PATCH 004/190] Implement canonical 
request string --- apps/registry/AGENTS.md | 1 + apps/registry/vitest.config.ts | 11 +++ packages/protocol/AGENTS.md | 3 + packages/protocol/src/http-signing.test.ts | 91 ++++++++++++++++++++++ packages/protocol/src/http-signing.ts | 20 +++++ packages/protocol/src/index.test.ts | 24 ++++++ packages/protocol/src/index.ts | 5 ++ 7 files changed, 155 insertions(+) create mode 100644 packages/protocol/src/http-signing.test.ts create mode 100644 packages/protocol/src/http-signing.ts diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 5921878..5d34d67 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -26,6 +26,7 @@ ## Validation - Validate config changes with `wrangler check` before deployment. - Run `pnpm -F @clawdentity/registry run test` and `pnpm -F @clawdentity/registry run typecheck` for app-level safety. +- Keep Vitest path aliases pointed at workspace source (`packages/*/src/index.ts`) so tests do not depend on stale package `dist` outputs. ## Database Authorization - Cloudflare D1 (SQLite) does not provide PostgreSQL-style RLS policies. diff --git a/apps/registry/vitest.config.ts b/apps/registry/vitest.config.ts index e2ec332..a42bac4 100644 --- a/apps/registry/vitest.config.ts +++ b/apps/registry/vitest.config.ts @@ -1,6 +1,17 @@ +import { fileURLToPath } from "node:url"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + alias: { + "@clawdentity/protocol": fileURLToPath( + new URL("../../packages/protocol/src/index.ts", import.meta.url), + ), + "@clawdentity/sdk": fileURLToPath( + new URL("../../packages/sdk/src/index.ts", import.meta.url), + ), + }, + }, test: { globals: true, }, diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index f5179e5..5bd1ac0 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -8,6 +8,9 @@ - Keep protocol APIs small and explicit; avoid leaking third-party library types into public exports. 
- Parse functions should throw `ProtocolParseError` with stable codes for caller-safe branching. - Maintain Cloudflare Worker portability: avoid Node-only globals in protocol helpers. +- Keep HTTP signing canonical strings deterministic: canonicalize method, normalized path (path + query), timestamp, nonce, and body hash exactly as `README.md`, `PRD.md`, and the policy docs describe (see `CLAW-PROOF-V1\n\n\n\n\n`). +- Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). +- Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). ## Testing - Add focused Vitest tests per helper module and one root export test in `src/index.test.ts`. diff --git a/packages/protocol/src/http-signing.test.ts b/packages/protocol/src/http-signing.test.ts new file mode 100644 index 0000000..2391bc1 --- /dev/null +++ b/packages/protocol/src/http-signing.test.ts @@ -0,0 +1,91 @@ +import { describe, expect, it } from "vitest"; +import { + CLAW_PROOF_CANONICAL_VERSION, + canonicalizeRequest, +} from "./http-signing.js"; + +describe("http signing canonicalization", () => { + it("uses the expected canonical version prefix", () => { + expect(CLAW_PROOF_CANONICAL_VERSION).toBe("CLAW-PROOF-V1"); + }); + + it("matches a representative canonical output snapshot", () => { + const canonical = canonicalizeRequest({ + method: "post", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_abc123", + bodyHash: "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", + }); + + expect(canonical).toMatchInlineSnapshot(` + "CLAW-PROOF-V1 + POST + /v1/messages?b=2&a=1 + 1739364000 + nonce_abc123 + 47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU" + `); + }); + + it("returns identical output for identical input across runs", () => { + const input = { + method: "patch", + pathWithQuery: 
"/v1/agents/01ARZ3NDEKTSV4RRFFQ69G5FAV?view=full", + timestamp: "1739364123", + nonce: "nonce_repeatable", + bodyHash: "xvYb4zVfQ0jM2fN4Yg0J-9g8F0M9Qz2jQ8J6w0kM1oA", + }; + + const first = canonicalizeRequest(input); + const second = canonicalizeRequest(input); + const third = canonicalizeRequest(input); + + expect(second).toBe(first); + expect(third).toBe(first); + }); + + it("uppercases HTTP method in canonical output", () => { + const canonical = canonicalizeRequest({ + method: "pAtCh", + pathWithQuery: "/v1/ping", + timestamp: "1739364300", + nonce: "nonce_method", + bodyHash: "hash_method", + }); + + expect(canonical).toContain("\nPATCH\n"); + }); + + it("preserves query ordering exactly as provided", () => { + const canonical = canonicalizeRequest({ + method: "GET", + pathWithQuery: "/v1/search?z=9&b=2&a=1", + timestamp: "1739364400", + nonce: "nonce_query", + bodyHash: "hash_query", + }); + + expect(canonical).toContain("\n/v1/search?z=9&b=2&a=1\n"); + expect(canonical).not.toContain("\n/v1/search?a=1&b=2&z=9\n"); + }); + + it("keeps precomputed empty-body hash unchanged in canonical output", () => { + const canonical = canonicalizeRequest({ + method: "GET", + pathWithQuery: "/v1/health", + timestamp: "1739364500", + nonce: "nonce_empty_body", + bodyHash: "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", + }); + + expect(canonical).toMatchInlineSnapshot(` + "CLAW-PROOF-V1 + GET + /v1/health + 1739364500 + nonce_empty_body + 47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU" + `); + }); +}); diff --git a/packages/protocol/src/http-signing.ts b/packages/protocol/src/http-signing.ts new file mode 100644 index 0000000..3b07db0 --- /dev/null +++ b/packages/protocol/src/http-signing.ts @@ -0,0 +1,20 @@ +export const CLAW_PROOF_CANONICAL_VERSION = "CLAW-PROOF-V1"; + +export interface CanonicalRequestInput { + method: string; + pathWithQuery: string; + timestamp: string; + nonce: string; + bodyHash: string; +} + +export function canonicalizeRequest(input: 
CanonicalRequestInput): string { + return [ + CLAW_PROOF_CANONICAL_VERSION, + input.method.toUpperCase(), + input.pathWithQuery, + input.timestamp, + input.nonce, + input.bodyHash, + ].join("\n"); +} diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index 2ba0236..30edd06 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -1,5 +1,7 @@ import { describe, expect, it } from "vitest"; import { + CLAW_PROOF_CANONICAL_VERSION, + canonicalizeRequest, decodeBase64url, encodeBase64url, generateUlid, @@ -29,4 +31,26 @@ describe("protocol", () => { expect(parseDid(agentDid)).toEqual({ kind: "agent", ulid }); expect(ProtocolParseError).toBeTypeOf("function"); }); + + it("exports http signing canonicalization helpers", () => { + const canonical = canonicalizeRequest({ + method: "post", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_abc123", + bodyHash: "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", + }); + + expect(CLAW_PROOF_CANONICAL_VERSION).toBe("CLAW-PROOF-V1"); + expect(canonical).toBe( + [ + "CLAW-PROOF-V1", + "POST", + "/v1/messages?b=2&a=1", + "1739364000", + "nonce_abc123", + "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", + ].join("\n"), + ); + }); }); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 78cdd5d..c56e7fb 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -5,4 +5,9 @@ export type { ClawDidKind } from "./did.js"; export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export type { ProtocolParseErrorCode } from "./errors.js"; export { ProtocolParseError } from "./errors.js"; +export type { CanonicalRequestInput } from "./http-signing.js"; +export { + CLAW_PROOF_CANONICAL_VERSION, + canonicalizeRequest, +} from "./http-signing.js"; export { generateUlid, parseUlid } from "./ulid.js"; From e05f89c6d5053e4c19d6672322094fdcc6a85da1 Mon Sep 17 00:00:00 2001 From: vrknetha 
Date: Thu, 12 Feb 2026 15:01:48 +0530 Subject: [PATCH 005/190] feat(sdk): implement T03 ed25519 keypair/sign/verify utilities --- packages/sdk/AGENTS.md | 5 ++ packages/sdk/package.json | 1 + packages/sdk/src/crypto/ed25519.test.ts | 110 ++++++++++++++++++++++++ packages/sdk/src/crypto/ed25519.ts | 61 +++++++++++++ packages/sdk/src/index.test.ts | 27 ++++++ packages/sdk/src/index.ts | 13 +++ packages/sdk/tsconfig.json | 3 +- pnpm-lock.yaml | 8 ++ 8 files changed, 226 insertions(+), 2 deletions(-) create mode 100644 packages/sdk/src/crypto/ed25519.test.ts create mode 100644 packages/sdk/src/crypto/ed25519.ts diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 9e3922f..09f7d39 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -10,14 +10,19 @@ - `datetime`: UTC-only helpers for expiry and date arithmetic. - `config`: schema-validated runtime config parsing. - `request-context`: request ID extraction/generation and propagation. +- `crypto/ed25519`: byte-first keypair/sign/verify helpers for PoP and token workflows. ## Design Rules - Keep helpers Cloudflare-compatible and local-runtime-compatible. - Prefer small wrappers with explicit contracts over heavy framework abstractions. - Avoid leaking secrets in logs and error payloads. - Keep all parse/validation errors explicit and deterministic. +- Keep cryptography APIs byte-first (`Uint8Array`) and runtime-portable. +- Reuse protocol base64url helpers as the single source of truth; do not duplicate encoding logic in SDK. +- Never log secret keys or raw signature material. ## Testing Rules - Unit test each shared module. - Validate error codes/envelopes and request ID behavior. - Keep tests deterministic and offline. +- Crypto tests must include explicit negative verification cases (wrong message/signature/key). 
diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 04b95da..64dab77 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -20,6 +20,7 @@ }, "dependencies": { "@clawdentity/protocol": "workspace:*", + "@noble/ed25519": "^3.0.0", "hono": "^4.11.9", "zod": "^4.1.12" } diff --git a/packages/sdk/src/crypto/ed25519.test.ts b/packages/sdk/src/crypto/ed25519.test.ts new file mode 100644 index 0000000..eb593d4 --- /dev/null +++ b/packages/sdk/src/crypto/ed25519.test.ts @@ -0,0 +1,110 @@ +import { ProtocolParseError } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { + decodeEd25519KeypairBase64url, + decodeEd25519SignatureBase64url, + encodeEd25519KeypairBase64url, + encodeEd25519SignatureBase64url, + generateEd25519Keypair, + signEd25519, + verifyEd25519, +} from "./ed25519.js"; + +const encoder = new TextEncoder(); + +describe("ed25519 crypto helpers", () => { + it("generates keypairs with expected key lengths", async () => { + const keypair = await generateEd25519Keypair(); + + expect(keypair.publicKey).toBeInstanceOf(Uint8Array); + expect(keypair.secretKey).toBeInstanceOf(Uint8Array); + expect(keypair.publicKey).toHaveLength(32); + expect(keypair.secretKey).toHaveLength(32); + }); + + it("signs and verifies successfully with matching message and keypair", async () => { + const keypair = await generateEd25519Keypair(); + const message = encoder.encode("t03-happy-path"); + + const signature = await signEd25519(message, keypair.secretKey); + const isValid = await verifyEd25519(signature, message, keypair.publicKey); + + expect(isValid).toBe(true); + }); + + it("fails verification with the wrong message", async () => { + const keypair = await generateEd25519Keypair(); + const message = encoder.encode("t03-original-message"); + const wrongMessage = encoder.encode("t03-tampered-message"); + const signature = await signEd25519(message, keypair.secretKey); + + const isValid = await verifyEd25519( + 
signature, + wrongMessage, + keypair.publicKey, + ); + + expect(isValid).toBe(false); + }); + + it("fails verification with a tampered signature", async () => { + const keypair = await generateEd25519Keypair(); + const message = encoder.encode("t03-signature-tamper"); + const signature = await signEd25519(message, keypair.secretKey); + const tamperedSignature = Uint8Array.from(signature); + tamperedSignature[0] ^= 0xff; + + const isValid = await verifyEd25519( + tamperedSignature, + message, + keypair.publicKey, + ); + + expect(isValid).toBe(false); + }); + + it("fails verification with a different public key", async () => { + const keypair = await generateEd25519Keypair(); + const otherKeypair = await generateEd25519Keypair(); + const message = encoder.encode("t03-wrong-public-key"); + const signature = await signEd25519(message, keypair.secretKey); + + const isValid = await verifyEd25519( + signature, + message, + otherKeypair.publicKey, + ); + + expect(isValid).toBe(false); + }); + + it("roundtrips keypairs and signatures through base64url wrappers", async () => { + const keypair = await generateEd25519Keypair(); + const message = encoder.encode("t03-base64url-roundtrip"); + const signature = await signEd25519(message, keypair.secretKey); + + const encodedKeypair = encodeEd25519KeypairBase64url(keypair); + const decodedKeypair = decodeEd25519KeypairBase64url(encodedKeypair); + + expect(Array.from(decodedKeypair.publicKey)).toEqual( + Array.from(keypair.publicKey), + ); + expect(Array.from(decodedKeypair.secretKey)).toEqual( + Array.from(keypair.secretKey), + ); + + const encodedSignature = encodeEd25519SignatureBase64url(signature); + const decodedSignature = decodeEd25519SignatureBase64url(encodedSignature); + expect(Array.from(decodedSignature)).toEqual(Array.from(signature)); + }); + + it("throws protocol parse errors when decoding invalid base64url signature", () => { + try { + decodeEd25519SignatureBase64url("invalid+base64url"); + throw new Error("expected 
decode to throw"); + } catch (error) { + expect(error).toBeInstanceOf(ProtocolParseError); + expect((error as ProtocolParseError).code).toBe("INVALID_BASE64URL"); + } + }); +}); diff --git a/packages/sdk/src/crypto/ed25519.ts b/packages/sdk/src/crypto/ed25519.ts new file mode 100644 index 0000000..d590ef3 --- /dev/null +++ b/packages/sdk/src/crypto/ed25519.ts @@ -0,0 +1,61 @@ +import { decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; +import * as ed25519 from "@noble/ed25519"; + +export type Ed25519KeypairBytes = { + publicKey: Uint8Array; + secretKey: Uint8Array; +}; + +export type Ed25519KeypairBase64url = { + publicKey: string; + secretKey: string; +}; + +export async function generateEd25519Keypair(): Promise { + const keypair = await ed25519.keygenAsync(); + return { + publicKey: keypair.publicKey, + secretKey: keypair.secretKey, + }; +} + +export async function signEd25519( + message: Uint8Array, + secretKey: Uint8Array, +): Promise { + return ed25519.signAsync(message, secretKey); +} + +export async function verifyEd25519( + signature: Uint8Array, + message: Uint8Array, + publicKey: Uint8Array, +): Promise { + return ed25519.verifyAsync(signature, message, publicKey); +} + +export function encodeEd25519KeypairBase64url( + keypair: Ed25519KeypairBytes, +): Ed25519KeypairBase64url { + return { + publicKey: encodeBase64url(keypair.publicKey), + secretKey: encodeBase64url(keypair.secretKey), + }; +} + +export function decodeEd25519KeypairBase64url( + keypair: Ed25519KeypairBase64url, +): Ed25519KeypairBytes { + return { + publicKey: decodeBase64url(keypair.publicKey), + secretKey: decodeBase64url(keypair.secretKey), + }; +} + +export function encodeEd25519SignatureBase64url(signature: Uint8Array): string { + return encodeBase64url(signature); +} + +export function decodeEd25519SignatureBase64url(signature: string): Uint8Array { + return decodeBase64url(signature); +} diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts 
index 90076ae..3151f17 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -2,10 +2,17 @@ import { describe, expect, it } from "vitest"; import { AppError, addSeconds, + decodeEd25519KeypairBase64url, + decodeEd25519SignatureBase64url, + encodeEd25519KeypairBase64url, + encodeEd25519SignatureBase64url, + generateEd25519Keypair, parseRegistryConfig, REQUEST_ID_HEADER, resolveRequestId, SDK_VERSION, + signEd25519, + verifyEd25519, } from "./index.js"; describe("sdk", () => { @@ -24,4 +31,24 @@ describe("sdk", () => { expect(REQUEST_ID_HEADER).toBe("x-request-id"); expect(AppError).toBeTypeOf("function"); }); + + it("exports Ed25519 helpers from package root", async () => { + const keypair = await generateEd25519Keypair(); + const message = new TextEncoder().encode("root-export-crypto-test"); + const signature = await signEd25519(message, keypair.secretKey); + + expect(await verifyEd25519(signature, message, keypair.publicKey)).toBe( + true, + ); + + const encodedKeypair = encodeEd25519KeypairBase64url(keypair); + const decodedKeypair = decodeEd25519KeypairBase64url(encodedKeypair); + expect(Array.from(decodedKeypair.publicKey)).toEqual( + Array.from(keypair.publicKey), + ); + + const encodedSignature = encodeEd25519SignatureBase64url(signature); + const decodedSignature = decodeEd25519SignatureBase64url(encodedSignature); + expect(Array.from(decodedSignature)).toEqual(Array.from(signature)); + }); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 8a7ada3..5101996 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -2,6 +2,19 @@ export const SDK_VERSION = "0.0.0"; export type { RegistryConfig } from "./config.js"; export { parseRegistryConfig, registryConfigSchema } from "./config.js"; +export type { + Ed25519KeypairBase64url, + Ed25519KeypairBytes, +} from "./crypto/ed25519.js"; +export { + decodeEd25519KeypairBase64url, + decodeEd25519SignatureBase64url, + encodeEd25519KeypairBase64url, 
+ encodeEd25519SignatureBase64url, + generateEd25519Keypair, + signEd25519, + verifyEd25519, +} from "./crypto/ed25519.js"; export { addSeconds, isExpired, nowIso } from "./datetime.js"; export { AppError, diff --git a/packages/sdk/tsconfig.json b/packages/sdk/tsconfig.json index 0335380..c7b841f 100644 --- a/packages/sdk/tsconfig.json +++ b/packages/sdk/tsconfig.json @@ -2,8 +2,7 @@ "extends": "../../tsconfig.base.json", "compilerOptions": { "lib": ["ES2022", "DOM"], - "outDir": "./dist", - "rootDir": "./src" + "outDir": "./dist" }, "include": ["src"] } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e96e727..e18fd6d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -87,6 +87,9 @@ importers: '@clawdentity/protocol': specifier: workspace:* version: link:../protocol + '@noble/ed25519': + specifier: ^3.0.0 + version: 3.0.0 hono: specifier: ^4.11.9 version: 4.11.9 @@ -839,6 +842,9 @@ packages: '@napi-rs/wasm-runtime@0.2.4': resolution: {integrity: sha512-9zESzOO5aDByvhIAsOy9TbpZ0Ur2AJbUI7UT73kcUTS2mxAMHOBaa1st/jAymNoCtvrit99kkzT1FZuXVcgfIQ==} + '@noble/ed25519@3.0.0': + resolution: {integrity: sha512-QyteqMNm0GLqfa5SoYbSC3+Pvykwpn95Zgth4MFVSMKBB75ELl9tX1LAVsN4c3HXOrakHsF2gL4zWDAYCcsnzg==} + '@nx/nx-darwin-arm64@22.5.0': resolution: {integrity: sha512-MHnzv6tzucvLsh4oS9FTepj+ct/o8/DPXrQow+9Jid7GSgY59xrDX/8CleJOrwL5lqKEyGW7vv8TR+4wGtEWTA==} cpu: [arm64] @@ -2728,6 +2734,8 @@ snapshots: '@emnapi/runtime': 1.8.1 '@tybys/wasm-util': 0.9.0 + '@noble/ed25519@3.0.0': {} + '@nx/nx-darwin-arm64@22.5.0': optional: true From 4d1932c318e96a2746b94cd44c11d4b3c5045ca6 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 15:29:08 +0530 Subject: [PATCH 006/190] Implement T04 AIT schema --- packages/protocol/AGENTS.md | 3 + packages/protocol/package.json | 3 +- packages/protocol/src/ait.test.ts | 150 +++++++++++++++++++++++++ packages/protocol/src/ait.ts | 163 ++++++++++++++++++++++++++++ packages/protocol/src/errors.ts | 1 + packages/protocol/src/index.test.ts | 37 
+++++++ packages/protocol/src/index.ts | 9 ++ pnpm-lock.yaml | 3 + 8 files changed, 368 insertions(+), 1 deletion(-) create mode 100644 packages/protocol/src/ait.test.ts create mode 100644 packages/protocol/src/ait.ts diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index 5bd1ac0..9a04113 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -8,6 +8,9 @@ - Keep protocol APIs small and explicit; avoid leaking third-party library types into public exports. - Parse functions should throw `ProtocolParseError` with stable codes for caller-safe branching. - Maintain Cloudflare Worker portability: avoid Node-only globals in protocol helpers. +- Keep AIT schema parsing strict (`.strict()` objects) so unknown claims are rejected by default. +- Validate risky identity fields (`name`, `description`) with explicit allowlists/length caps; never pass through raw control characters. +- Reuse existing protocol validators/parsers (`parseDid`, `parseUlid`, base64url helpers) instead of duplicating claim validation logic. - Keep HTTP signing canonical strings deterministic: canonicalize method, normalized path (path + query), timestamp, nonce, and body hash exactly as `README.md`, `PRD.md`, and the policy docs describe (see `CLAW-PROOF-V1\n\n\n\n\n`). - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). 
diff --git a/packages/protocol/package.json b/packages/protocol/package.json index e9dcf2d..6aa0ab5 100644 --- a/packages/protocol/package.json +++ b/packages/protocol/package.json @@ -20,6 +20,7 @@ }, "dependencies": { "@scure/base": "^2.0.0", - "ulid": "^3.0.1" + "ulid": "^3.0.1", + "zod": "^4.3.6" } } diff --git a/packages/protocol/src/ait.test.ts b/packages/protocol/src/ait.test.ts new file mode 100644 index 0000000..949c47f --- /dev/null +++ b/packages/protocol/src/ait.test.ts @@ -0,0 +1,150 @@ +import { describe, expect, it } from "vitest"; +import { parseAitClaims, validateAgentName } from "./ait.js"; +import { encodeBase64url } from "./base64url.js"; +import { makeAgentDid, makeHumanDid } from "./did.js"; +import { ProtocolParseError } from "./errors.js"; +import { generateUlid } from "./ulid.js"; + +function makeValidClaims() { + const agentUlid = generateUlid(1700000000000); + const ownerUlid = generateUlid(1700000000100); + const now = 1700000000; + + return { + iss: "https://registry.clawdentity.dev", + sub: makeAgentDid(agentUlid), + ownerDid: makeHumanDid(ownerUlid), + name: "agent_name.v1", + framework: "openclaw", + description: "Safe agent description.", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(Uint8Array.from({ length: 32 }, (_, i) => i + 1)), + }, + }, + iat: now, + nbf: now, + exp: now + 3600, + jti: generateUlid(1700000000200), + }; +} + +describe("AIT name validation", () => { + it("accepts valid names", () => { + expect(validateAgentName("agent-1")).toBe(true); + expect(validateAgentName("Agent Name")).toBe(true); + expect(validateAgentName("agent_name.v1")).toBe(true); + expect(validateAgentName("A")).toBe(true); + }); + + it("rejects invalid names", () => { + expect(validateAgentName("")).toBe(false); + expect(validateAgentName("a".repeat(65))).toBe(false); + expect(validateAgentName("agent\nname")).toBe(false); + expect(validateAgentName("agent\tname")).toBe(false); + 
expect(validateAgentName(`agent${String.fromCharCode(0)}name`)).toBe(false); + expect(validateAgentName("agent🙂")).toBe(false); + expect(validateAgentName("agent/name")).toBe(false); + }); +}); + +describe("AIT claims schema", () => { + it("accepts valid MVP claims", () => { + const parsed = parseAitClaims(makeValidClaims()); + expect(parsed.sub).toMatch(/^did:claw:agent:/); + expect(parsed.ownerDid).toMatch(/^did:claw:human:/); + expect(parsed.cnf.jwk.kty).toBe("OKP"); + expect(parsed.cnf.jwk.crv).toBe("Ed25519"); + }); + + it("rejects missing required claims", () => { + const claims = makeValidClaims(); + delete (claims as Record).ownerDid; + + expect(() => parseAitClaims(claims)).toThrow(ProtocolParseError); + }); + + it("rejects wrong DID kinds for sub and ownerDid", () => { + const claimsWithHumanSub = makeValidClaims(); + claimsWithHumanSub.sub = claimsWithHumanSub.ownerDid; + + const claimsWithAgentOwner = makeValidClaims(); + claimsWithAgentOwner.ownerDid = claimsWithAgentOwner.sub; + + expect(() => parseAitClaims(claimsWithHumanSub)).toThrow( + ProtocolParseError, + ); + expect(() => parseAitClaims(claimsWithAgentOwner)).toThrow( + ProtocolParseError, + ); + }); + + it("rejects invalid cnf.jwk fields", () => { + const badKty = makeValidClaims(); + badKty.cnf.jwk.kty = "EC"; + + const badCrv = makeValidClaims(); + badCrv.cnf.jwk.crv = "P-256"; + + const badX = makeValidClaims(); + badX.cnf.jwk.x = "invalid+base64url"; + + expect(() => parseAitClaims(badKty)).toThrow(ProtocolParseError); + expect(() => parseAitClaims(badCrv)).toThrow(ProtocolParseError); + expect(() => parseAitClaims(badX)).toThrow(ProtocolParseError); + }); + + it("rejects invalid temporal ordering", () => { + const expBeforeNbf = makeValidClaims(); + expBeforeNbf.exp = expBeforeNbf.nbf; + + const expBeforeIat = makeValidClaims(); + expBeforeIat.exp = expBeforeIat.iat; + + expect(() => parseAitClaims(expBeforeNbf)).toThrow(ProtocolParseError); + expect(() => 
parseAitClaims(expBeforeIat)).toThrow(ProtocolParseError); + }); + + it("rejects invalid jti", () => { + const claims = makeValidClaims(); + claims.jti = "not-a-ulid"; + expect(() => parseAitClaims(claims)).toThrow(ProtocolParseError); + }); + + it("rejects invalid name and description", () => { + const badName = makeValidClaims(); + badName.name = "bad/name"; + + const badDescriptionControl = makeValidClaims(); + badDescriptionControl.description = "line one\nline two"; + + const badDescriptionLong = makeValidClaims(); + badDescriptionLong.description = "x".repeat(281); + + expect(() => parseAitClaims(badName)).toThrow(ProtocolParseError); + expect(() => parseAitClaims(badDescriptionControl)).toThrow( + ProtocolParseError, + ); + expect(() => parseAitClaims(badDescriptionLong)).toThrow( + ProtocolParseError, + ); + }); + + it("accepts omitted description", () => { + const claims = makeValidClaims(); + delete (claims as Record).description; + + expect(parseAitClaims(claims).description).toBeUndefined(); + }); + + it("rejects unknown top-level claims", () => { + const claims = { + ...makeValidClaims(), + unknownClaim: "should-fail", + }; + + expect(() => parseAitClaims(claims)).toThrow(ProtocolParseError); + }); +}); diff --git a/packages/protocol/src/ait.ts b/packages/protocol/src/ait.ts new file mode 100644 index 0000000..8c00814 --- /dev/null +++ b/packages/protocol/src/ait.ts @@ -0,0 +1,163 @@ +import { z } from "zod"; +import { decodeBase64url } from "./base64url.js"; +import { parseDid } from "./did.js"; +import { ProtocolParseError } from "./errors.js"; +import { parseUlid } from "./ulid.js"; + +export const MAX_AGENT_NAME_LENGTH = 64; +export const MAX_AGENT_DESCRIPTION_LENGTH = 280; +export const AGENT_NAME_REGEX = /^[A-Za-z0-9._ -]{1,64}$/; + +const MAX_FRAMEWORK_LENGTH = 32; + +export type AitCnfJwk = { + kty: "OKP"; + crv: "Ed25519"; + x: string; +}; + +function hasControlChars(value: string): boolean { + for (const char of value) { + const code = 
char.charCodeAt(0); + if (code <= 0x1f || code === 0x7f) { + return true; + } + } + return false; +} + +function invalidAitClaims(message: string): ProtocolParseError { + return new ProtocolParseError("INVALID_AIT_CLAIMS", message); +} + +export function validateAgentName(name: string): boolean { + return AGENT_NAME_REGEX.test(name); +} + +export const aitClaimsSchema = z + .object({ + iss: z.string().min(1, "iss is required"), + sub: z.string().min(1, "sub is required"), + ownerDid: z.string().min(1, "ownerDid is required"), + name: z + .string() + .refine(validateAgentName, "name contains invalid characters or length"), + framework: z + .string() + .min(1, "framework is required") + .max(MAX_FRAMEWORK_LENGTH) + .refine( + (value) => !hasControlChars(value), + "framework contains control characters", + ), + description: z + .string() + .max(MAX_AGENT_DESCRIPTION_LENGTH) + .refine( + (value) => !hasControlChars(value), + "description contains control characters", + ) + .optional(), + cnf: z + .object({ + jwk: z + .object({ + kty: z.literal("OKP"), + crv: z.literal("Ed25519"), + x: z.string().min(1), + }) + .strict(), + }) + .strict(), + iat: z.number().int().nonnegative(), + nbf: z.number().int().nonnegative(), + exp: z.number().int().nonnegative(), + jti: z.string().min(1), + }) + .strict() + .superRefine((claims, ctx) => { + try { + const parsedSub = parseDid(claims.sub); + if (parsedSub.kind !== "agent") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "sub must be an agent DID", + path: ["sub"], + }); + } + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "sub must be a valid DID", + path: ["sub"], + }); + } + + try { + const parsedOwnerDid = parseDid(claims.ownerDid); + if (parsedOwnerDid.kind !== "human") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "ownerDid must be a human DID", + path: ["ownerDid"], + }); + } + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "ownerDid must be a valid 
DID", + path: ["ownerDid"], + }); + } + + try { + decodeBase64url(claims.cnf.jwk.x); + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "cnf.jwk.x must be valid base64url", + path: ["cnf", "jwk", "x"], + }); + } + + try { + parseUlid(claims.jti); + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "jti must be a valid ULID", + path: ["jti"], + }); + } + + if (claims.exp <= claims.nbf) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "exp must be greater than nbf", + path: ["exp"], + }); + } + + if (claims.exp <= claims.iat) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "exp must be greater than iat", + path: ["exp"], + }); + } + }); + +export type AitClaims = z.infer; + +export function parseAitClaims(input: unknown): AitClaims { + const parsed = aitClaimsSchema.safeParse(input); + if (!parsed.success) { + const message = parsed.error.issues + .map((issue) => issue.message) + .join("; "); + throw invalidAitClaims( + message.length > 0 ? 
message : "Invalid AIT claims payload", + ); + } + return parsed.data; +} diff --git a/packages/protocol/src/errors.ts b/packages/protocol/src/errors.ts index f92c6b4..9369751 100644 --- a/packages/protocol/src/errors.ts +++ b/packages/protocol/src/errors.ts @@ -1,4 +1,5 @@ export type ProtocolParseErrorCode = + | "INVALID_AIT_CLAIMS" | "INVALID_BASE64URL" | "INVALID_ULID" | "INVALID_DID"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index 30edd06..f07d09a 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -1,16 +1,22 @@ import { describe, expect, it } from "vitest"; import { + AGENT_NAME_REGEX, + aitClaimsSchema, CLAW_PROOF_CANONICAL_VERSION, canonicalizeRequest, decodeBase64url, encodeBase64url, generateUlid, + MAX_AGENT_DESCRIPTION_LENGTH, + MAX_AGENT_NAME_LENGTH, makeAgentDid, makeHumanDid, PROTOCOL_VERSION, ProtocolParseError, + parseAitClaims, parseDid, parseUlid, + validateAgentName, } from "./index.js"; describe("protocol", () => { @@ -53,4 +59,35 @@ describe("protocol", () => { ].join("\n"), ); }); + + it("exports AIT helpers from package root", () => { + const agentUlid = generateUlid(1700000000000); + const ownerUlid = generateUlid(1700000000100); + const parsed = parseAitClaims({ + iss: "https://registry.clawdentity.dev", + sub: makeAgentDid(agentUlid), + ownerDid: makeHumanDid(ownerUlid), + name: "agent_01", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(Uint8Array.from({ length: 32 }, (_, i) => i + 1)), + }, + }, + iat: 1700000000, + nbf: 1700000000, + exp: 1700003600, + jti: generateUlid(1700000000200), + }); + + expect(validateAgentName("agent_01")).toBe(true); + expect(validateAgentName("bad/name")).toBe(false); + expect(parsed.name).toBe("agent_01"); + expect(MAX_AGENT_NAME_LENGTH).toBe(64); + expect(MAX_AGENT_DESCRIPTION_LENGTH).toBe(280); + expect(AGENT_NAME_REGEX.test("agent_01")).toBe(true); + 
expect(aitClaimsSchema).toBeDefined(); + }); }); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index c56e7fb..bb68771 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -1,5 +1,14 @@ export const PROTOCOL_VERSION = "0.0.0"; +export type { AitClaims, AitCnfJwk } from "./ait.js"; +export { + AGENT_NAME_REGEX, + aitClaimsSchema, + MAX_AGENT_DESCRIPTION_LENGTH, + MAX_AGENT_NAME_LENGTH, + parseAitClaims, + validateAgentName, +} from "./ait.js"; export { decodeBase64url, encodeBase64url } from "./base64url.js"; export type { ClawDidKind } from "./did.js"; export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e18fd6d..779fd5c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -81,6 +81,9 @@ importers: ulid: specifier: ^3.0.1 version: 3.0.2 + zod: + specifier: ^4.3.6 + version: 4.3.6 packages/sdk: dependencies: From 9f7e331377d6e186bd3d7b87ebb70b29565d9649 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 15:47:48 +0530 Subject: [PATCH 007/190] feat(sdk): implement T05 AIT JWS sign/verify with kid lookup --- packages/sdk/AGENTS.md | 3 + packages/sdk/package.json | 1 + packages/sdk/src/index.test.ts | 47 +++++++++++ packages/sdk/src/index.ts | 6 ++ packages/sdk/src/jwt/ait-jwt.test.ts | 121 +++++++++++++++++++++++++++ packages/sdk/src/jwt/ait-jwt.ts | 100 ++++++++++++++++++++++ pnpm-lock.yaml | 8 ++ 7 files changed, 286 insertions(+) create mode 100644 packages/sdk/src/jwt/ait-jwt.test.ts create mode 100644 packages/sdk/src/jwt/ait-jwt.ts diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 09f7d39..082b5eb 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -11,6 +11,7 @@ - `config`: schema-validated runtime config parsing. - `request-context`: request ID extraction/generation and propagation. - `crypto/ed25519`: byte-first keypair/sign/verify helpers for PoP and token workflows. 
+- `jwt/ait-jwt`: AIT JWS signing and verification with strict header and issuer checks. ## Design Rules - Keep helpers Cloudflare-compatible and local-runtime-compatible. @@ -20,9 +21,11 @@ - Keep cryptography APIs byte-first (`Uint8Array`) and runtime-portable. - Reuse protocol base64url helpers as the single source of truth; do not duplicate encoding logic in SDK. - Never log secret keys or raw signature material. +- Enforce AIT JWT security invariants in verification: `alg=EdDSA`, `typ=AIT`, and `kid` lookup against registry keys. ## Testing Rules - Unit test each shared module. - Validate error codes/envelopes and request ID behavior. - Keep tests deterministic and offline. - Crypto tests must include explicit negative verification cases (wrong message/signature/key). +- JWT tests must include sign/verify happy path and failure paths for issuer mismatch and missing/unknown `kid`. diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 64dab77..799f425 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -22,6 +22,7 @@ "@clawdentity/protocol": "workspace:*", "@noble/ed25519": "^3.0.0", "hono": "^4.11.9", + "jose": "^6.1.3", "zod": "^4.1.12" } } diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index 3151f17..4a11969 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "vitest"; import { + AitJwtError, AppError, addSeconds, decodeEd25519KeypairBase64url, @@ -11,7 +12,9 @@ import { REQUEST_ID_HEADER, resolveRequestId, SDK_VERSION, + signAIT, signEd25519, + verifyAIT, verifyEd25519, } from "./index.js"; @@ -51,4 +54,48 @@ describe("sdk", () => { const decodedSignature = decodeEd25519SignatureBase64url(encodedSignature); expect(Array.from(decodedSignature)).toEqual(Array.from(signature)); }); + + it("exports AIT JWT helpers from package root", async () => { + const keypair = await generateEd25519Keypair(); + const token = 
await signAIT({ + claims: { + iss: "https://registry.clawdentity.dev", + sub: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + ownerDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + name: "jwt-root-test", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeEd25519KeypairBase64url(keypair).publicKey, + }, + }, + iat: 1700100000, + nbf: 1700099995, + exp: 4700100000, + jti: "01HF7YAT4TXP6AW5QNXA2Y9K43", + }, + signerKid: "reg-key-root", + signerKeypair: keypair, + }); + + const verified = await verifyAIT({ + token, + registryKeys: [ + { + kid: "reg-key-root", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeEd25519KeypairBase64url(keypair).publicKey, + }, + }, + ], + expectedIssuer: "https://registry.clawdentity.dev", + }); + + expect(verified.name).toBe("jwt-root-test"); + expect(AitJwtError).toBeTypeOf("function"); + }); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 5101996..40232fb 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -21,6 +21,12 @@ export { createHonoErrorHandler, toErrorEnvelope, } from "./exceptions.js"; +export type { + RegistryAitVerificationKey, + SignAitInput, + VerifyAitInput, +} from "./jwt/ait-jwt.js"; +export { AitJwtError, signAIT, verifyAIT } from "./jwt/ait-jwt.js"; export type { Logger } from "./logging.js"; export { createLogger, createRequestLoggingMiddleware } from "./logging.js"; export type { RequestContextVariables } from "./request-context.js"; diff --git a/packages/sdk/src/jwt/ait-jwt.test.ts b/packages/sdk/src/jwt/ait-jwt.test.ts new file mode 100644 index 0000000..6caa3bf --- /dev/null +++ b/packages/sdk/src/jwt/ait-jwt.test.ts @@ -0,0 +1,121 @@ +import { + type AitClaims, + encodeBase64url, + generateUlid, + makeAgentDid, + makeHumanDid, +} from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { generateEd25519Keypair } from "../crypto/ed25519.js"; +import { signAIT, verifyAIT } from "./ait-jwt.js"; + 
+function makeClaims(overrides: Partial = {}): AitClaims { + const agentUlid = generateUlid(1700100000000); + const ownerUlid = generateUlid(1700100001000); + const now = Math.floor(Date.now() / 1000); + + return { + iss: "https://registry.clawdentity.dev", + sub: makeAgentDid(agentUlid), + ownerDid: makeHumanDid(ownerUlid), + name: "agent-jwt-01", + framework: "openclaw", + description: "AIT JWT test payload", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url( + Uint8Array.from({ length: 32 }, (_, index) => index + 1), + ), + }, + }, + iat: now, + nbf: now - 5, + exp: now + 3600, + jti: generateUlid(1700100002000), + ...overrides, + }; +} + +describe("AIT JWT helpers", () => { + it("signs and verifies an AIT with matching registry key + kid", async () => { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: keypair, + }); + + const verified = await verifyAIT({ + token, + registryKeys: [ + { + kid: "reg-key-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + expectedIssuer: claims.iss, + }); + + expect(verified).toEqual(claims); + }); + + it("fails verification when issuer does not match expected issuer", async () => { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: keypair, + }); + + await expect( + verifyAIT({ + token, + registryKeys: [ + { + kid: "reg-key-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + expectedIssuer: "https://wrong-issuer.example", + }), + ).rejects.toThrow(); + }); + + it("fails verification when token kid cannot be found in registry key set", async () => { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signAIT({ + claims, + signerKid: 
"reg-key-1", + signerKeypair: keypair, + }); + + await expect( + verifyAIT({ + token, + registryKeys: [ + { + kid: "reg-key-2", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + }), + ).rejects.toThrow(/kid/i); + }); +}); diff --git a/packages/sdk/src/jwt/ait-jwt.ts b/packages/sdk/src/jwt/ait-jwt.ts new file mode 100644 index 0000000..db48107 --- /dev/null +++ b/packages/sdk/src/jwt/ait-jwt.ts @@ -0,0 +1,100 @@ +import type { AitClaims, AitCnfJwk } from "@clawdentity/protocol"; +import { parseAitClaims } from "@clawdentity/protocol"; +import type { JWTVerifyOptions } from "jose"; +import { decodeProtectedHeader, importJWK, jwtVerify, SignJWT } from "jose"; +import { + type Ed25519KeypairBytes, + encodeEd25519KeypairBase64url, +} from "../crypto/ed25519.js"; + +type AitPrivateJwk = AitCnfJwk & { + d: string; +}; + +export type RegistryAitVerificationKey = { + kid: string; + jwk: AitCnfJwk; +}; + +export type SignAitInput = { + claims: AitClaims; + signerKid: string; + signerKeypair: Ed25519KeypairBytes; +}; + +export type VerifyAitInput = { + token: string; + registryKeys: RegistryAitVerificationKey[]; + expectedIssuer?: string; +}; + +export class AitJwtError extends Error { + readonly code: "INVALID_AIT_HEADER" | "UNKNOWN_AIT_KID"; + + constructor(code: "INVALID_AIT_HEADER" | "UNKNOWN_AIT_KID", message: string) { + super(message); + this.name = "AitJwtError"; + this.code = code; + } +} + +function invalidAitHeader(message: string): AitJwtError { + return new AitJwtError("INVALID_AIT_HEADER", message); +} + +function unknownAitKid(kid: string): AitJwtError { + return new AitJwtError("UNKNOWN_AIT_KID", `Unknown AIT signing kid: ${kid}`); +} + +export async function signAIT(input: SignAitInput): Promise { + const claims = parseAitClaims(input.claims); + const encodedKeypair = encodeEd25519KeypairBase64url(input.signerKeypair); + const privateJwk: AitPrivateJwk = { + kty: "OKP", + crv: "Ed25519", + x: 
encodedKeypair.publicKey, + d: encodedKeypair.secretKey, + }; + const privateKey = await importJWK(privateJwk, "EdDSA"); + + return new SignJWT(claims) + .setProtectedHeader({ + alg: "EdDSA", + typ: "AIT", + kid: input.signerKid, + }) + .sign(privateKey); +} + +export async function verifyAIT(input: VerifyAitInput): Promise { + const header = decodeProtectedHeader(input.token); + if (header.alg !== "EdDSA") { + throw invalidAitHeader("AIT token must use alg=EdDSA"); + } + + if (header.typ !== "AIT") { + throw invalidAitHeader("AIT token must use typ=AIT"); + } + + if (typeof header.kid !== "string" || header.kid.length === 0) { + throw invalidAitHeader("AIT token missing protected kid header"); + } + + const key = input.registryKeys.find((item) => item.kid === header.kid); + if (!key) { + throw unknownAitKid(header.kid); + } + + const publicKey = await importJWK(key.jwk, "EdDSA"); + const options: JWTVerifyOptions = { + algorithms: ["EdDSA"], + typ: "AIT", + }; + + if (input.expectedIssuer !== undefined) { + options.issuer = input.expectedIssuer; + } + + const { payload } = await jwtVerify(input.token, publicKey, options); + return parseAitClaims(payload); +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 779fd5c..12347f2 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -96,6 +96,9 @@ importers: hono: specifier: ^4.11.9 version: 4.11.9 + jose: + specifier: ^6.1.3 + version: 6.1.3 zod: specifier: ^4.1.12 version: 4.3.6 @@ -1663,6 +1666,9 @@ packages: resolution: {integrity: sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==} engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + jose@6.1.3: + resolution: {integrity: sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==} + joycon@3.1.1: resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} engines: {node: '>=10'} @@ -3402,6 +3408,8 @@ snapshots: chalk: 
4.1.2 pretty-format: 30.2.0 + jose@6.1.3: {} + joycon@3.1.1: {} js-yaml@3.14.2: From ddb5608e346e963354a7752d4f452617b5a310b5 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 16:05:28 +0530 Subject: [PATCH 008/190] feat(protocol,sdk): implement T06 signed CRL schema and JWT helpers --- packages/protocol/AGENTS.md | 3 + packages/protocol/src/ait.ts | 11 +- packages/protocol/src/crl.test.ts | 87 +++++++++++++ packages/protocol/src/crl.ts | 91 ++++++++++++++ packages/protocol/src/errors.ts | 3 +- packages/protocol/src/index.test.ts | 26 ++++ packages/protocol/src/index.ts | 2 + packages/protocol/src/text.ts | 9 ++ packages/sdk/AGENTS.md | 3 + packages/sdk/src/index.test.ts | 44 +++++++ packages/sdk/src/index.ts | 7 ++ packages/sdk/src/jwt/crl-jwt.test.ts | 179 +++++++++++++++++++++++++++ packages/sdk/src/jwt/crl-jwt.ts | 104 ++++++++++++++++ 13 files changed, 558 insertions(+), 11 deletions(-) create mode 100644 packages/protocol/src/crl.test.ts create mode 100644 packages/protocol/src/crl.ts create mode 100644 packages/protocol/src/text.ts create mode 100644 packages/sdk/src/jwt/crl-jwt.test.ts create mode 100644 packages/sdk/src/jwt/crl-jwt.ts diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index 9a04113..b2848f1 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -12,6 +12,8 @@ - Validate risky identity fields (`name`, `description`) with explicit allowlists/length caps; never pass through raw control characters. - Reuse existing protocol validators/parsers (`parseDid`, `parseUlid`, base64url helpers) instead of duplicating claim validation logic. - Keep HTTP signing canonical strings deterministic: canonicalize method, normalized path (path + query), timestamp, nonce, and body hash exactly as `README.md`, `PRD.md`, and the policy docs describe (see `CLAW-PROOF-V1\n\n\n\n\n`). 
+- Mirror the AIT guardrails for CRL payloads: `crl.ts` keeps `.strict()` definitions, requires at least one revocation entry, enforces `agentDid` is a `did:claw:agent`, `revocation.jti` is a ULID, `exp > iat`, and surfaces `INVALID_CRL_CLAIMS` via `ProtocolParseError`. +- Reuse cross-module helpers (e.g., `text.ts`’s `hasControlChars`) so control-character checks stay consistent across AIT and CRL validation. - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). @@ -19,3 +21,4 @@ - Add focused Vitest tests per helper module and one root export test in `src/index.test.ts`. - Roundtrip tests must cover empty inputs, known vectors, and invalid inputs for parse failures. - Error tests must assert `ProtocolParseError` code values, not just message strings. +- CRL helpers specifically need coverage for valid payloads, missing or empty revocation entries, invalid `agentDid`/`jti` values, and `exp <= iat`, all verifying the `INVALID_CRL_CLAIMS` code. 
diff --git a/packages/protocol/src/ait.ts b/packages/protocol/src/ait.ts index 8c00814..39ec673 100644 --- a/packages/protocol/src/ait.ts +++ b/packages/protocol/src/ait.ts @@ -2,6 +2,7 @@ import { z } from "zod"; import { decodeBase64url } from "./base64url.js"; import { parseDid } from "./did.js"; import { ProtocolParseError } from "./errors.js"; +import { hasControlChars } from "./text.js"; import { parseUlid } from "./ulid.js"; export const MAX_AGENT_NAME_LENGTH = 64; @@ -16,16 +17,6 @@ export type AitCnfJwk = { x: string; }; -function hasControlChars(value: string): boolean { - for (const char of value) { - const code = char.charCodeAt(0); - if (code <= 0x1f || code === 0x7f) { - return true; - } - } - return false; -} - function invalidAitClaims(message: string): ProtocolParseError { return new ProtocolParseError("INVALID_AIT_CLAIMS", message); } diff --git a/packages/protocol/src/crl.test.ts b/packages/protocol/src/crl.test.ts new file mode 100644 index 0000000..a2b9a1e --- /dev/null +++ b/packages/protocol/src/crl.test.ts @@ -0,0 +1,87 @@ +import { describe, expect, it } from "vitest"; +import { parseCrlClaims } from "./crl.js"; +import { makeAgentDid, makeHumanDid } from "./did.js"; +import { ProtocolParseError } from "./errors.js"; +import { generateUlid } from "./ulid.js"; + +function makeValidCrlClaims() { + const now = 1700000000; + const agentUlid = generateUlid(1700000000000); + + return { + iss: "https://registry.clawdentity.dev", + jti: generateUlid(1700000100000), + iat: now, + exp: now + 3600, + revocations: [ + { + jti: generateUlid(1700000200000), + agentDid: makeAgentDid(agentUlid), + reason: "key compromise", + revokedAt: now + 1000, + }, + ], + }; +} + +function expectInvalidCrl(payload: unknown) { + try { + parseCrlClaims(payload); + throw new Error("parseCrlClaims was expected to throw"); + } catch (error) { + expect(error).toBeInstanceOf(ProtocolParseError); + if (error instanceof ProtocolParseError) { + 
expect(error.code).toBe("INVALID_CRL_CLAIMS"); + } + } +} + +describe("CRL claims schema", () => { + it("accepts valid CRL payloads", () => { + const parsed = parseCrlClaims(makeValidCrlClaims()); + expect(parsed.revocations).toHaveLength(1); + expect(parsed.revocations[0].agentDid).toMatch(/^did:claw:agent:/); + }); + + it("rejects missing required fields", () => { + const claims = makeValidCrlClaims(); + delete (claims as Record).revocations; + + expectInvalidCrl(claims); + }); + + it("rejects empty revocation arrays", () => { + const claims = makeValidCrlClaims(); + claims.revocations = []; + + expectInvalidCrl(claims); + }); + + it("rejects non-agent DIDs for revocations", () => { + const claims = makeValidCrlClaims(); + claims.revocations[0].agentDid = makeHumanDid(generateUlid(1700000000000)); + + expectInvalidCrl(claims); + }); + + it("rejects invalid ULIDs in revocation entries", () => { + const claims = makeValidCrlClaims(); + claims.revocations[0].jti = "not-a-ulid"; + + expectInvalidCrl(claims); + }); + + it("rejects exp <= iat", () => { + const claims = makeValidCrlClaims(); + claims.exp = claims.iat; + + expectInvalidCrl(claims); + }); + + it("rejects unknown top-level claims", () => { + const claims = makeValidCrlClaims() as Record; + claims.extra = "unexpected"; + + expectInvalidCrl(claims); + }); +}); diff --git a/packages/protocol/src/crl.ts b/packages/protocol/src/crl.ts new file mode 100644 index 0000000..ecb1cbb --- /dev/null +++ b/packages/protocol/src/crl.ts @@ -0,0 +1,91 @@ +import { z } from "zod"; +import { parseDid } from "./did.js"; +import { ProtocolParseError } from "./errors.js"; +import { hasControlChars } from "./text.js"; +import { parseUlid } from "./ulid.js"; + +const INVALID_CRL_CLAIMS = "INVALID_CRL_CLAIMS" as const; + +export const crlClaimsSchema = z + .object({ + iss: z.string().min(1, "iss is required"), + jti: z.string().min(1, "jti is required"), + iat: z.number().int().nonnegative(), + exp: z.number().int().nonnegative(), 
+ revocations: z + .array( + z + .object({ + jti: z.string().min(1, "revocation.jti is required"), + agentDid: z.string().min(1, "agentDid is required"), + reason: z.string().max(280).optional(), + revokedAt: z.number().int().nonnegative(), + }) + .strict() + .superRefine((revocation, ctx) => { + if (hasControlChars(revocation.agentDid)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "revocation.agentDid contains control characters", + path: ["agentDid"], + }); + } + }), + ) + .min(1, "revocations must include at least one entry"), + }) + .strict() + .superRefine((claims, ctx) => { + if (claims.exp <= claims.iat) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "exp must be greater than iat", + path: ["exp"], + }); + } + + for (const [index, revocation] of claims.revocations.entries()) { + try { + const parsedAgentDid = parseDid(revocation.agentDid); + if (parsedAgentDid.kind !== "agent") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "revocation.agentDid must refer to an agent DID", + path: ["revocations", index, "agentDid"], + }); + } + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "revocation.agentDid must be a valid DID", + path: ["revocations", index, "agentDid"], + }); + } + + try { + parseUlid(revocation.jti); + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "revocation.jti must be a valid ULID", + path: ["revocations", index, "jti"], + }); + } + } + }); + +export type CrlClaims = z.infer; + +export function parseCrlClaims(input: unknown): CrlClaims { + const parsed = crlClaimsSchema.safeParse(input); + if (!parsed.success) { + const message = parsed.error.issues + .map((issue) => issue.message) + .join("; "); + throw new ProtocolParseError( + INVALID_CRL_CLAIMS, + message.length > 0 ? 
message : "Invalid CRL claims payload", + ); + } + return parsed.data; +} diff --git a/packages/protocol/src/errors.ts b/packages/protocol/src/errors.ts index 9369751..2f8f8c3 100644 --- a/packages/protocol/src/errors.ts +++ b/packages/protocol/src/errors.ts @@ -2,7 +2,8 @@ export type ProtocolParseErrorCode = | "INVALID_AIT_CLAIMS" | "INVALID_BASE64URL" | "INVALID_ULID" - | "INVALID_DID"; + | "INVALID_DID" + | "INVALID_CRL_CLAIMS"; export class ProtocolParseError extends Error { readonly code: ProtocolParseErrorCode; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index f07d09a..8720d17 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -4,6 +4,7 @@ import { aitClaimsSchema, CLAW_PROOF_CANONICAL_VERSION, canonicalizeRequest, + crlClaimsSchema, decodeBase64url, encodeBase64url, generateUlid, @@ -14,6 +15,7 @@ import { PROTOCOL_VERSION, ProtocolParseError, parseAitClaims, + parseCrlClaims, parseDid, parseUlid, validateAgentName, @@ -90,4 +92,28 @@ describe("protocol", () => { expect(AGENT_NAME_REGEX.test("agent_01")).toBe(true); expect(aitClaimsSchema).toBeDefined(); }); + + it("exports CRL helpers from package root", () => { + const now = 1700000000; + const agentUlid = generateUlid(now); + const agentDid = makeAgentDid(agentUlid); + + const parsed = parseCrlClaims({ + iss: "https://registry.clawdentity.dev", + jti: generateUlid(now + 1000), + iat: now, + exp: now + 3600, + revocations: [ + { + jti: generateUlid(now + 2000), + agentDid, + reason: "manual revoke", + revokedAt: now + 100, + }, + ], + }); + + expect(parsed.revocations[0].agentDid).toBe(agentDid); + expect(crlClaimsSchema).toBeDefined(); + }); }); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index bb68771..3144b31 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -10,6 +10,8 @@ export { validateAgentName, } from "./ait.js"; export { decodeBase64url, 
encodeBase64url } from "./base64url.js"; +export type { CrlClaims } from "./crl.js"; +export { crlClaimsSchema, parseCrlClaims } from "./crl.js"; export type { ClawDidKind } from "./did.js"; export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export type { ProtocolParseErrorCode } from "./errors.js"; diff --git a/packages/protocol/src/text.ts b/packages/protocol/src/text.ts new file mode 100644 index 0000000..2a2aa96 --- /dev/null +++ b/packages/protocol/src/text.ts @@ -0,0 +1,9 @@ +export function hasControlChars(value: string): boolean { + for (const char of value) { + const code = char.charCodeAt(0); + if (code <= 0x1f || code === 0x7f) { + return true; + } + } + return false; +} diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 082b5eb..8dd0f3b 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -12,6 +12,8 @@ - `request-context`: request ID extraction/generation and propagation. - `crypto/ed25519`: byte-first keypair/sign/verify helpers for PoP and token workflows. - `jwt/ait-jwt`: AIT JWS signing and verification with strict header and issuer checks. +- `jwt/crl-jwt`: CRL JWT helpers with EdDSA signing, header consistency checks, and tamper-detection test coverage. +- Tests should prove tamper cases (payload change, header kid swap, signature corruption). ## Design Rules - Keep helpers Cloudflare-compatible and local-runtime-compatible. @@ -20,6 +22,7 @@ - Keep all parse/validation errors explicit and deterministic. - Keep cryptography APIs byte-first (`Uint8Array`) and runtime-portable. - Reuse protocol base64url helpers as the single source of truth; do not duplicate encoding logic in SDK. +- Keep CRL claim schema authority in `@clawdentity/protocol` (`crl.ts`); SDK JWT helpers should avoid duplicating claim-validation rules. - Never log secret keys or raw signature material. - Enforce AIT JWT security invariants in verification: `alg=EdDSA`, `typ=AIT`, and `kid` lookup against registry keys. 
diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index 4a11969..66286fc 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -3,6 +3,7 @@ import { AitJwtError, AppError, addSeconds, + CrlJwtError, decodeEd25519KeypairBase64url, decodeEd25519SignatureBase64url, encodeEd25519KeypairBase64url, @@ -13,8 +14,10 @@ import { resolveRequestId, SDK_VERSION, signAIT, + signCRL, signEd25519, verifyAIT, + verifyCRL, verifyEd25519, } from "./index.js"; @@ -98,4 +101,45 @@ describe("sdk", () => { expect(verified.name).toBe("jwt-root-test"); expect(AitJwtError).toBeTypeOf("function"); }); + + it("exports CRL JWT helpers from package root", async () => { + const keypair = await generateEd25519Keypair(); + const now = Math.floor(Date.now() / 1000); + const token = await signCRL({ + claims: { + iss: "https://registry.clawdentity.dev", + jti: "01HF7YAT4TXP6AW5QNXA2Y9K43", + iat: now, + exp: now + 3600, + revocations: [ + { + jti: "01HF7YAT31JZHSMW1CG6Q6MHB7", + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + reason: "manual revoke", + revokedAt: now, + }, + ], + }, + signerKid: "reg-crl-root", + signerKeypair: keypair, + }); + + const verified = await verifyCRL({ + token, + registryKeys: [ + { + kid: "reg-crl-root", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeEd25519KeypairBase64url(keypair).publicKey, + }, + }, + ], + expectedIssuer: "https://registry.clawdentity.dev", + }); + + expect(verified.revocations).toHaveLength(1); + expect(CrlJwtError).toBeTypeOf("function"); + }); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 40232fb..5cf521c 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -27,6 +27,13 @@ export type { VerifyAitInput, } from "./jwt/ait-jwt.js"; export { AitJwtError, signAIT, verifyAIT } from "./jwt/ait-jwt.js"; +export type { + CrlClaims, + RegistryCrlVerificationKey, + SignCrlInput, + VerifyCrlInput, +} from "./jwt/crl-jwt.js"; +export { 
CrlJwtError, signCRL, verifyCRL } from "./jwt/crl-jwt.js"; export type { Logger } from "./logging.js"; export { createLogger, createRequestLoggingMiddleware } from "./logging.js"; export type { RequestContextVariables } from "./request-context.js"; diff --git a/packages/sdk/src/jwt/crl-jwt.test.ts b/packages/sdk/src/jwt/crl-jwt.test.ts new file mode 100644 index 0000000..b168775 --- /dev/null +++ b/packages/sdk/src/jwt/crl-jwt.test.ts @@ -0,0 +1,179 @@ +import { + decodeBase64url, + encodeBase64url, + generateUlid, + makeAgentDid, +} from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { generateEd25519Keypair } from "../crypto/ed25519.js"; +import { type CrlClaims, signCRL, verifyCRL } from "./crl-jwt.js"; + +const textEncoder = new TextEncoder(); +const textDecoder = new TextDecoder(); + +function makeClaims(overrides: Partial = {}): CrlClaims { + const now = Math.floor(Date.now() / 1000); + const agentId = generateUlid(1700105002000); + + return { + iss: "https://registry.clawdentity.dev", + jti: generateUlid(1700105000000), + iat: now, + exp: now + 3600, + revocations: [ + { + jti: generateUlid(1700105001000), + agentDid: makeAgentDid(agentId), + reason: "compromised key", + revokedAt: now, + }, + ], + ...overrides, + }; +} + +function patchTokenSegment( + token: string, + segmentIndex: 0 | 1, + patch: (payload: Record) => Record, +): string { + const parts = token.split("."); + const bytes = decodeBase64url(parts[segmentIndex]); + const parsed = JSON.parse(textDecoder.decode(bytes)); + const patched = patch(parsed); + const encoded = encodeBase64url(textEncoder.encode(JSON.stringify(patched))); + parts[segmentIndex] = encoded; + return parts.join("."); +} + +function swapSignature(token: string): string { + const parts = token.split("."); + parts[2] = "A".repeat(parts[2].length); + return parts.join("."); +} + +describe("CRL JWT helpers", () => { + it("signs and verifies a valid CRL token", async () => { + const keypair = await 
generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signCRL({ + claims, + signerKid: "reg-crl-1", + signerKeypair: keypair, + }); + + const verified = await verifyCRL({ + token, + registryKeys: [ + { + kid: "reg-crl-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + expectedIssuer: "https://registry.clawdentity.dev", + }); + + expect(verified.jti).toBe(claims.jti); + expect(verified.revocations).toHaveLength(1); + }); + + it("rejects a payload change after signing", async () => { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signCRL({ + claims, + signerKid: "reg-crl-1", + signerKeypair: keypair, + }); + + const tampered = patchTokenSegment(token, 1, (payload) => { + const base = payload as Record & { + revocations?: unknown[]; + }; + const existing = Array.isArray(base.revocations) ? base.revocations : []; + return { + ...base, + revocations: [...existing, "tampered"], + }; + }); + + await expect( + verifyCRL({ + token: tampered, + registryKeys: [ + { + kid: "reg-crl-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + }), + ).rejects.toThrow(); + }); + + it("rejects a header kid tampering attempt", async () => { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signCRL({ + claims, + signerKid: "reg-crl-1", + signerKeypair: keypair, + }); + + const tampered = patchTokenSegment(token, 0, (header) => ({ + ...header, + kid: "tamper-kid", + })); + + await expect( + verifyCRL({ + token: tampered, + registryKeys: [ + { + kid: "reg-crl-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + }), + ).rejects.toThrow(/kid/i); + }); + + it("rejects tampered signature bytes", async () => { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await 
signCRL({ + claims, + signerKid: "reg-crl-1", + signerKeypair: keypair, + }); + + const tampered = swapSignature(token); + + await expect( + verifyCRL({ + token: tampered, + registryKeys: [ + { + kid: "reg-crl-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + }), + ).rejects.toThrow(); + }); +}); diff --git a/packages/sdk/src/jwt/crl-jwt.ts b/packages/sdk/src/jwt/crl-jwt.ts new file mode 100644 index 0000000..da3d5d3 --- /dev/null +++ b/packages/sdk/src/jwt/crl-jwt.ts @@ -0,0 +1,104 @@ +import type { CrlClaims as ProtocolCrlClaims } from "@clawdentity/protocol"; +import type { JWTVerifyOptions } from "jose"; +import { decodeProtectedHeader, importJWK, jwtVerify, SignJWT } from "jose"; +import type { Ed25519KeypairBytes } from "../crypto/ed25519.js"; +import { encodeEd25519KeypairBase64url } from "../crypto/ed25519.js"; + +export type CrlClaims = ProtocolCrlClaims; + +type CrlPublicJwk = { + kty: "OKP"; + crv: "Ed25519"; + x: string; +}; + +type CrlPrivateJwk = CrlPublicJwk & { + d: string; +}; + +export type RegistryCrlVerificationKey = { + kid: string; + jwk: CrlPublicJwk; +}; + +export type SignCrlInput = { + claims: CrlClaims; + signerKid: string; + signerKeypair: Ed25519KeypairBytes; +}; + +export type VerifyCrlInput = { + token: string; + registryKeys: RegistryCrlVerificationKey[]; + expectedIssuer?: string; +}; + +export class CrlJwtError extends Error { + readonly code: "INVALID_CRL_HEADER" | "UNKNOWN_CRL_KID"; + + constructor(code: "INVALID_CRL_HEADER" | "UNKNOWN_CRL_KID", message: string) { + super(message); + this.name = "CrlJwtError"; + this.code = code; + } +} + +function invalidCrlHeader(message: string): CrlJwtError { + return new CrlJwtError("INVALID_CRL_HEADER", message); +} + +function unknownCrlKid(kid: string): CrlJwtError { + return new CrlJwtError("UNKNOWN_CRL_KID", `Unknown CRL signing kid: ${kid}`); +} + +export async function signCRL(input: SignCrlInput): Promise { + const encodedKeypair = 
encodeEd25519KeypairBase64url(input.signerKeypair); + const privateJwk: CrlPrivateJwk = { + kty: "OKP", + crv: "Ed25519", + x: encodedKeypair.publicKey, + d: encodedKeypair.secretKey, + }; + const privateKey = await importJWK(privateJwk, "EdDSA"); + + return new SignJWT(input.claims) + .setProtectedHeader({ + alg: "EdDSA", + typ: "CRL", + kid: input.signerKid, + }) + .sign(privateKey); +} + +export async function verifyCRL(input: VerifyCrlInput): Promise { + const header = decodeProtectedHeader(input.token); + if (header.alg !== "EdDSA") { + throw invalidCrlHeader("CRL token must use alg=EdDSA"); + } + + if (header.typ !== "CRL") { + throw invalidCrlHeader("CRL token must use typ=CRL"); + } + + if (typeof header.kid !== "string" || header.kid.length === 0) { + throw invalidCrlHeader("CRL token missing protected kid header"); + } + + const key = input.registryKeys.find((entry) => entry.kid === header.kid); + if (!key) { + throw unknownCrlKid(header.kid); + } + + const publicKey = await importJWK(key.jwk, "EdDSA"); + const options: JWTVerifyOptions = { + algorithms: ["EdDSA"], + typ: "CRL", + }; + + if (input.expectedIssuer !== undefined) { + options.issuer = input.expectedIssuer; + } + + const { payload } = await jwtVerify(input.token, publicKey, options); + return payload as CrlClaims; +} From b3ac564d763af37ad2f6923a0037c4bc44975119 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 16:32:20 +0530 Subject: [PATCH 009/190] feat(sdk): implement T07 HTTP PoP signing and verification --- packages/sdk/AGENTS.md | 3 + packages/sdk/src/http/constants.ts | 4 + packages/sdk/src/http/sign.test.ts | 65 ++++++++++++ packages/sdk/src/http/sign.ts | 57 +++++++++++ packages/sdk/src/http/types.ts | 41 ++++++++ packages/sdk/src/http/utils.ts | 148 +++++++++++++++++++++++++++ packages/sdk/src/http/verify.test.ts | 107 +++++++++++++++++++ packages/sdk/src/http/verify.ts | 92 +++++++++++++++++ packages/sdk/src/index.test.ts | 26 +++++ packages/sdk/src/index.ts | 9 ++ 10 
files changed, 552 insertions(+) create mode 100644 packages/sdk/src/http/constants.ts create mode 100644 packages/sdk/src/http/sign.test.ts create mode 100644 packages/sdk/src/http/sign.ts create mode 100644 packages/sdk/src/http/types.ts create mode 100644 packages/sdk/src/http/utils.ts create mode 100644 packages/sdk/src/http/verify.test.ts create mode 100644 packages/sdk/src/http/verify.ts diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 8dd0f3b..516b8e7 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -13,6 +13,7 @@ - `crypto/ed25519`: byte-first keypair/sign/verify helpers for PoP and token workflows. - `jwt/ait-jwt`: AIT JWS signing and verification with strict header and issuer checks. - `jwt/crl-jwt`: CRL JWT helpers with EdDSA signing, header consistency checks, and tamper-detection test coverage. +- `http/sign` + `http/verify`: PoP request signing and verification that binds method, path+query, timestamp, nonce, and body hash. - Tests should prove tamper cases (payload change, header kid swap, signature corruption). ## Design Rules @@ -25,6 +26,7 @@ - Keep CRL claim schema authority in `@clawdentity/protocol` (`crl.ts`); SDK JWT helpers should avoid duplicating claim-validation rules. - Never log secret keys or raw signature material. - Enforce AIT JWT security invariants in verification: `alg=EdDSA`, `typ=AIT`, and `kid` lookup against registry keys. +- For HTTP signing errors, keep user-facing messages static and send extra context through `AppError.details`. ## Testing Rules - Unit test each shared module. @@ -32,3 +34,4 @@ - Keep tests deterministic and offline. - Crypto tests must include explicit negative verification cases (wrong message/signature/key). - JWT tests must include sign/verify happy path and failure paths for issuer mismatch and missing/unknown `kid`. +- HTTP signing tests must include sign/verify happy path and explicit failures when method, path, body, or timestamp are altered. 
diff --git a/packages/sdk/src/http/constants.ts b/packages/sdk/src/http/constants.ts new file mode 100644 index 0000000..d59a41b --- /dev/null +++ b/packages/sdk/src/http/constants.ts @@ -0,0 +1,4 @@ +export const X_CLAW_TIMESTAMP = "X-Claw-Timestamp"; +export const X_CLAW_NONCE = "X-Claw-Nonce"; +export const X_CLAW_BODY_SHA256 = "X-Claw-Body-SHA256"; +export const X_CLAW_PROOF = "X-Claw-Proof"; diff --git a/packages/sdk/src/http/sign.test.ts b/packages/sdk/src/http/sign.test.ts new file mode 100644 index 0000000..20ba5e2 --- /dev/null +++ b/packages/sdk/src/http/sign.test.ts @@ -0,0 +1,65 @@ +import { canonicalizeRequest } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { + decodeEd25519SignatureBase64url, + generateEd25519Keypair, + verifyEd25519, +} from "../crypto/ed25519.js"; +import { signHttpRequest } from "./sign.js"; + +const textEncoder = new TextEncoder(); + +describe("signHttpRequest", () => { + it("hashes body and returns proof-bound signature headers", async () => { + const keypair = await generateEd25519Keypair(); + const body = textEncoder.encode('{"ok":true}'); + + const signed = await signHttpRequest({ + method: "post", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_abc123", + body, + secretKey: keypair.secretKey, + }); + + expect(signed.headers["X-Claw-Timestamp"]).toBe("1739364000"); + expect(signed.headers["X-Claw-Nonce"]).toBe("nonce_abc123"); + expect(signed.headers["X-Claw-Body-SHA256"]).toBeTruthy(); + expect(signed.headers["X-Claw-Proof"]).toBeTruthy(); + + expect(signed.canonicalRequest).toBe( + canonicalizeRequest({ + method: "post", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_abc123", + bodyHash: signed.headers["X-Claw-Body-SHA256"], + }), + ); + + const signature = decodeEd25519SignatureBase64url(signed.proof); + const verified = await verifyEd25519( + signature, + textEncoder.encode(signed.canonicalRequest), + 
keypair.publicKey, + ); + expect(verified).toBe(true); + }); + + it("uses SHA-256 base64url hash for empty body by default", async () => { + const keypair = await generateEd25519Keypair(); + + const signed = await signHttpRequest({ + method: "GET", + pathWithQuery: "/v1/health", + timestamp: "1739364500", + nonce: "nonce_empty_body", + secretKey: keypair.secretKey, + }); + + expect(signed.headers["X-Claw-Body-SHA256"]).toBe( + "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", + ); + }); +}); diff --git a/packages/sdk/src/http/sign.ts b/packages/sdk/src/http/sign.ts new file mode 100644 index 0000000..12eafa0 --- /dev/null +++ b/packages/sdk/src/http/sign.ts @@ -0,0 +1,57 @@ +import { canonicalizeRequest } from "@clawdentity/protocol"; +import { + encodeEd25519SignatureBase64url, + signEd25519, +} from "../crypto/ed25519.js"; +import { + X_CLAW_BODY_SHA256, + X_CLAW_NONCE, + X_CLAW_PROOF, + X_CLAW_TIMESTAMP, +} from "./constants.js"; +import type { SignHttpRequestInput, SignHttpRequestResult } from "./types.js"; +import { + ensureBodyBytes, + ensureSecretKey, + ensureString, + hashBodySha256Base64url, + textEncoder, +} from "./utils.js"; + +export async function signHttpRequest( + input: SignHttpRequestInput, +): Promise { + ensureSecretKey(input.secretKey); + + const method = ensureString(input.method, "method"); + const pathWithQuery = ensureString(input.pathWithQuery, "pathWithQuery"); + const timestamp = ensureString(input.timestamp, "timestamp"); + const nonce = ensureString(input.nonce, "nonce"); + const body = ensureBodyBytes(input.body); + const bodyHash = await hashBodySha256Base64url(body); + + const canonicalRequest = canonicalizeRequest({ + method, + pathWithQuery, + timestamp, + nonce, + bodyHash, + }); + + const signature = await signEd25519( + textEncoder.encode(canonicalRequest), + input.secretKey, + ); + const proof = encodeEd25519SignatureBase64url(signature); + + return { + canonicalRequest, + proof, + headers: { + [X_CLAW_TIMESTAMP]: timestamp, + 
[X_CLAW_NONCE]: nonce, + [X_CLAW_BODY_SHA256]: bodyHash, + [X_CLAW_PROOF]: proof, + }, + }; +} diff --git a/packages/sdk/src/http/types.ts b/packages/sdk/src/http/types.ts new file mode 100644 index 0000000..d16f8eb --- /dev/null +++ b/packages/sdk/src/http/types.ts @@ -0,0 +1,41 @@ +import { + X_CLAW_BODY_SHA256, + X_CLAW_NONCE, + X_CLAW_PROOF, + X_CLAW_TIMESTAMP, +} from "./constants.js"; + +export type ClawSignatureHeaders = { + [X_CLAW_TIMESTAMP]: string; + [X_CLAW_NONCE]: string; + [X_CLAW_BODY_SHA256]: string; + [X_CLAW_PROOF]: string; +}; + +export interface SignHttpRequestInput { + method: string; + pathWithQuery: string; + timestamp: string; + nonce: string; + body?: Uint8Array; + secretKey: Uint8Array; +} + +export interface SignHttpRequestResult { + canonicalRequest: string; + proof: string; + headers: ClawSignatureHeaders; +} + +export interface VerifyHttpRequestInput { + method: string; + pathWithQuery: string; + headers: Record; + body?: Uint8Array; + publicKey: Uint8Array; +} + +export interface VerifyHttpRequestResult { + canonicalRequest: string; + proof: string; +} diff --git a/packages/sdk/src/http/utils.ts b/packages/sdk/src/http/utils.ts new file mode 100644 index 0000000..ac61ecd --- /dev/null +++ b/packages/sdk/src/http/utils.ts @@ -0,0 +1,148 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { AppError } from "../exceptions.js"; +import { + X_CLAW_BODY_SHA256, + X_CLAW_NONCE, + X_CLAW_PROOF, + X_CLAW_TIMESTAMP, +} from "./constants.js"; +import type { ClawSignatureHeaders } from "./types.js"; + +export const textEncoder = new TextEncoder(); + +type SubtleCryptoLike = { + digest: (algorithm: string, data: Uint8Array) => Promise; +}; + +type CryptoLike = { + subtle?: SubtleCryptoLike; +}; + +function getCrypto(): CryptoLike | undefined { + return (globalThis as { crypto?: CryptoLike }).crypto; +} + +export function ensureString(value: unknown, label: string): string { + if (typeof value !== "string" || value.trim() === "") { 
+ throw new AppError({ + code: "HTTP_SIGNATURE_INVALID_INPUT", + message: "Input must be a non-empty string", + status: 400, + details: { + field: label, + }, + }); + } + + return value; +} + +export function ensureBodyBytes(body: Uint8Array | undefined): Uint8Array { + if (body === undefined) { + return new Uint8Array(); + } + + if (!(body instanceof Uint8Array)) { + throw new AppError({ + code: "HTTP_SIGNATURE_INVALID_INPUT", + message: "body must be a Uint8Array when provided", + status: 400, + details: { + field: "body", + }, + }); + } + + return body; +} + +export function ensureSecretKey(key: Uint8Array): void { + if (!(key instanceof Uint8Array) || key.length === 0) { + throw new AppError({ + code: "HTTP_SIGNATURE_MISSING_SECRET", + message: "Secret key is required to sign HTTP requests", + status: 500, + details: { + keyLength: key instanceof Uint8Array ? key.length : null, + }, + }); + } +} + +export function ensurePublicKey(key: Uint8Array): void { + if (!(key instanceof Uint8Array) || key.length === 0) { + throw new AppError({ + code: "HTTP_SIGNATURE_MISSING_PUBLIC", + message: "Public key is required to verify HTTP requests", + status: 500, + details: { + keyLength: key instanceof Uint8Array ? 
key.length : null, + }, + }); + } +} + +export async function hashBodySha256Base64url( + body: Uint8Array, +): Promise { + const cryptoObject = getCrypto(); + + if ( + typeof cryptoObject !== "object" || + typeof cryptoObject?.subtle !== "object" || + typeof cryptoObject?.subtle?.digest !== "function" + ) { + throw new AppError({ + code: "HTTP_SIGNATURE_CRYPTO_UNAVAILABLE", + message: "Web Crypto API is required for HTTP signing", + status: 500, + details: { + runtime: typeof cryptoObject, + }, + }); + } + + const digest = await cryptoObject.subtle.digest("SHA-256", body); + return encodeBase64url(new Uint8Array(digest)); +} + +function readHeader( + headers: Record, + headerName: string, +): string | undefined { + if (headerName in headers) { + return headers[headerName]; + } + + const normalizedName = headerName.toLowerCase(); + for (const [key, value] of Object.entries(headers)) { + if (key.toLowerCase() === normalizedName) { + return value; + } + } + + return undefined; +} + +export function normalizeSignatureHeaders( + headers: Record, +): ClawSignatureHeaders { + return { + [X_CLAW_TIMESTAMP]: ensureString( + readHeader(headers, X_CLAW_TIMESTAMP), + X_CLAW_TIMESTAMP, + ), + [X_CLAW_NONCE]: ensureString( + readHeader(headers, X_CLAW_NONCE), + X_CLAW_NONCE, + ), + [X_CLAW_BODY_SHA256]: ensureString( + readHeader(headers, X_CLAW_BODY_SHA256), + X_CLAW_BODY_SHA256, + ), + [X_CLAW_PROOF]: ensureString( + readHeader(headers, X_CLAW_PROOF), + X_CLAW_PROOF, + ), + }; +} diff --git a/packages/sdk/src/http/verify.test.ts b/packages/sdk/src/http/verify.test.ts new file mode 100644 index 0000000..dc3ebf3 --- /dev/null +++ b/packages/sdk/src/http/verify.test.ts @@ -0,0 +1,107 @@ +import { describe, expect, it } from "vitest"; +import { generateEd25519Keypair } from "../crypto/ed25519.js"; +import { signHttpRequest } from "./sign.js"; +import { verifyHttpRequest } from "./verify.js"; + +const textEncoder = new TextEncoder(); + +async function makeSignedFixture() { + const 
keypair = await generateEd25519Keypair(); + const body = textEncoder.encode('{"hello":"world"}'); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_abc123", + body, + secretKey: keypair.secretKey, + }); + + return { keypair, body, signed }; +} + +describe("verifyHttpRequest", () => { + it("verifies a signed request successfully", async () => { + const { keypair, body, signed } = await makeSignedFixture(); + + const verified = await verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: signed.headers, + body, + publicKey: keypair.publicKey, + }); + + expect(verified.proof).toBe(signed.proof); + expect(verified.canonicalRequest).toBe(signed.canonicalRequest); + }); + + it("fails verification when method is altered", async () => { + const { keypair, body, signed } = await makeSignedFixture(); + + await expect( + verifyHttpRequest({ + method: "PATCH", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: signed.headers, + body, + publicKey: keypair.publicKey, + }), + ).rejects.toMatchObject({ + code: "HTTP_SIGNATURE_INVALID_PROOF", + }); + }); + + it("fails verification when path is altered", async () => { + const { keypair, body, signed } = await makeSignedFixture(); + + await expect( + verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?a=1&b=2", + headers: signed.headers, + body, + publicKey: keypair.publicKey, + }), + ).rejects.toMatchObject({ + code: "HTTP_SIGNATURE_INVALID_PROOF", + }); + }); + + it("fails verification when body is altered", async () => { + const { keypair, signed } = await makeSignedFixture(); + const alteredBody = textEncoder.encode('{"hello":"tampered"}'); + + await expect( + verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: signed.headers, + body: alteredBody, + publicKey: keypair.publicKey, + }), + ).rejects.toMatchObject({ + code: 
"HTTP_SIGNATURE_BODY_HASH_MISMATCH", + }); + }); + + it("fails verification when timestamp header is altered", async () => { + const { keypair, body, signed } = await makeSignedFixture(); + const tamperedHeaders = { + ...signed.headers, + "X-Claw-Timestamp": "1739364999", + }; + + await expect( + verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: tamperedHeaders, + body, + publicKey: keypair.publicKey, + }), + ).rejects.toMatchObject({ + code: "HTTP_SIGNATURE_INVALID_PROOF", + }); + }); +}); diff --git a/packages/sdk/src/http/verify.ts b/packages/sdk/src/http/verify.ts new file mode 100644 index 0000000..24d8572 --- /dev/null +++ b/packages/sdk/src/http/verify.ts @@ -0,0 +1,92 @@ +import { canonicalizeRequest } from "@clawdentity/protocol"; +import { + decodeEd25519SignatureBase64url, + verifyEd25519, +} from "../crypto/ed25519.js"; +import { AppError } from "../exceptions.js"; +import { + X_CLAW_BODY_SHA256, + X_CLAW_NONCE, + X_CLAW_PROOF, + X_CLAW_TIMESTAMP, +} from "./constants.js"; +import type { + VerifyHttpRequestInput, + VerifyHttpRequestResult, +} from "./types.js"; +import { + ensureBodyBytes, + ensurePublicKey, + ensureString, + hashBodySha256Base64url, + normalizeSignatureHeaders, + textEncoder, +} from "./utils.js"; + +export async function verifyHttpRequest( + input: VerifyHttpRequestInput, +): Promise { + ensurePublicKey(input.publicKey); + + const method = ensureString(input.method, "method"); + const pathWithQuery = ensureString(input.pathWithQuery, "pathWithQuery"); + const headers = normalizeSignatureHeaders(input.headers); + const body = ensureBodyBytes(input.body); + const expectedBodyHash = await hashBodySha256Base64url(body); + + if (headers[X_CLAW_BODY_SHA256] !== expectedBodyHash) { + throw new AppError({ + code: "HTTP_SIGNATURE_BODY_HASH_MISMATCH", + message: "Body hash does not match X-Claw-Body-SHA256 header", + status: 401, + details: { + expectedBodyHash, + receivedBodyHash: 
headers[X_CLAW_BODY_SHA256], + }, + }); + } + + const canonicalRequest = canonicalizeRequest({ + method, + pathWithQuery, + timestamp: headers[X_CLAW_TIMESTAMP], + nonce: headers[X_CLAW_NONCE], + bodyHash: headers[X_CLAW_BODY_SHA256], + }); + + let signature: Uint8Array; + try { + signature = decodeEd25519SignatureBase64url(headers[X_CLAW_PROOF]); + } catch { + throw new AppError({ + code: "HTTP_SIGNATURE_INVALID_PROOF", + message: "X-Claw-Proof is not a valid base64url signature", + status: 401, + details: { + header: X_CLAW_PROOF, + }, + }); + } + + const isValid = await verifyEd25519( + signature, + textEncoder.encode(canonicalRequest), + input.publicKey, + ); + + if (!isValid) { + throw new AppError({ + code: "HTTP_SIGNATURE_INVALID_PROOF", + message: "X-Claw-Proof verification failed", + status: 401, + details: { + reason: "signature_mismatch", + }, + }); + } + + return { + canonicalRequest, + proof: headers[X_CLAW_PROOF], + }; +} diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index 66286fc..ccba998 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -16,9 +16,11 @@ import { signAIT, signCRL, signEd25519, + signHttpRequest, verifyAIT, verifyCRL, verifyEd25519, + verifyHttpRequest, } from "./index.js"; describe("sdk", () => { @@ -142,4 +144,28 @@ describe("sdk", () => { expect(verified.revocations).toHaveLength(1); expect(CrlJwtError).toBeTypeOf("function"); }); + + it("exports HTTP signing helpers from package root", async () => { + const keypair = await generateEd25519Keypair(); + const body = new TextEncoder().encode('{"ok":true}'); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_root_http", + body, + secretKey: keypair.secretKey, + }); + + const verified = await verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: signed.headers, + body, + publicKey: 
keypair.publicKey, + }); + + expect(verified.proof).toBe(signed.proof); + expect(verified.canonicalRequest).toBe(signed.canonicalRequest); + }); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 5cf521c..f5c1c1e 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -21,6 +21,15 @@ export { createHonoErrorHandler, toErrorEnvelope, } from "./exceptions.js"; +export { signHttpRequest } from "./http/sign.js"; +export type { + ClawSignatureHeaders, + SignHttpRequestInput, + SignHttpRequestResult, + VerifyHttpRequestInput, + VerifyHttpRequestResult, +} from "./http/types.js"; +export { verifyHttpRequest } from "./http/verify.js"; export type { RegistryAitVerificationKey, SignAitInput, From 42b1a2bbd604d4ef36cb7cc62395b82b566fcf29 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 16:48:20 +0530 Subject: [PATCH 010/190] fix(sdk): harden PoP proof and key length validation --- packages/sdk/AGENTS.md | 2 ++ packages/sdk/src/http/sign.test.ts | 19 ++++++++++++ packages/sdk/src/http/utils.ts | 14 +++++++-- packages/sdk/src/http/verify.test.ts | 44 ++++++++++++++++++++++++++++ packages/sdk/src/http/verify.ts | 4 +++ 5 files changed, 81 insertions(+), 2 deletions(-) diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 516b8e7..082a9fc 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -27,6 +27,8 @@ - Never log secret keys or raw signature material. - Enforce AIT JWT security invariants in verification: `alg=EdDSA`, `typ=AIT`, and `kid` lookup against registry keys. - For HTTP signing errors, keep user-facing messages static and send extra context through `AppError.details`. +- Enforce Ed25519 key lengths at SDK boundaries (`secretKey` 32 bytes, `publicKey` 32 bytes) so misconfiguration returns stable `AppError` codes. +- Treat any decoded PoP proof that is not 64 bytes as `HTTP_SIGNATURE_INVALID_PROOF`. ## Testing Rules - Unit test each shared module. 
diff --git a/packages/sdk/src/http/sign.test.ts b/packages/sdk/src/http/sign.test.ts index 20ba5e2..3abbc34 100644 --- a/packages/sdk/src/http/sign.test.ts +++ b/packages/sdk/src/http/sign.test.ts @@ -62,4 +62,23 @@ describe("signHttpRequest", () => { "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", ); }); + + it("rejects wrong-length secret keys", async () => { + await expect( + signHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + timestamp: "1739364000", + nonce: "nonce_bad_secret", + body: textEncoder.encode("bad"), + secretKey: new Uint8Array([1]), + }), + ).rejects.toMatchObject({ + code: "HTTP_SIGNATURE_MISSING_SECRET", + details: { + keyLength: 1, + expectedKeyLength: 32, + }, + }); + }); }); diff --git a/packages/sdk/src/http/utils.ts b/packages/sdk/src/http/utils.ts index ac61ecd..706025d 100644 --- a/packages/sdk/src/http/utils.ts +++ b/packages/sdk/src/http/utils.ts @@ -9,6 +9,8 @@ import { import type { ClawSignatureHeaders } from "./types.js"; export const textEncoder = new TextEncoder(); +const ED25519_PUBLIC_KEY_LENGTH = 32; +const ED25519_SECRET_KEY_LENGTH = 32; type SubtleCryptoLike = { digest: (algorithm: string, data: Uint8Array) => Promise; @@ -57,26 +59,34 @@ export function ensureBodyBytes(body: Uint8Array | undefined): Uint8Array { } export function ensureSecretKey(key: Uint8Array): void { - if (!(key instanceof Uint8Array) || key.length === 0) { + if ( + !(key instanceof Uint8Array) || + key.length !== ED25519_SECRET_KEY_LENGTH + ) { throw new AppError({ code: "HTTP_SIGNATURE_MISSING_SECRET", message: "Secret key is required to sign HTTP requests", status: 500, details: { keyLength: key instanceof Uint8Array ? 
key.length : null, + expectedKeyLength: ED25519_SECRET_KEY_LENGTH, }, }); } } export function ensurePublicKey(key: Uint8Array): void { - if (!(key instanceof Uint8Array) || key.length === 0) { + if ( + !(key instanceof Uint8Array) || + key.length !== ED25519_PUBLIC_KEY_LENGTH + ) { throw new AppError({ code: "HTTP_SIGNATURE_MISSING_PUBLIC", message: "Public key is required to verify HTTP requests", status: 500, details: { keyLength: key instanceof Uint8Array ? key.length : null, + expectedKeyLength: ED25519_PUBLIC_KEY_LENGTH, }, }); } diff --git a/packages/sdk/src/http/verify.test.ts b/packages/sdk/src/http/verify.test.ts index dc3ebf3..2aec69a 100644 --- a/packages/sdk/src/http/verify.test.ts +++ b/packages/sdk/src/http/verify.test.ts @@ -104,4 +104,48 @@ describe("verifyHttpRequest", () => { code: "HTTP_SIGNATURE_INVALID_PROOF", }); }); + + it("fails verification when proof decodes to non-64-byte signature", async () => { + const { keypair, body, signed } = await makeSignedFixture(); + const tamperedHeaders = { + ...signed.headers, + "X-Claw-Proof": "AA", + }; + + await expect( + verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: tamperedHeaders, + body, + publicKey: keypair.publicKey, + }), + ).rejects.toMatchObject({ + code: "HTTP_SIGNATURE_INVALID_PROOF", + status: 401, + details: { + reason: "invalid_base64url_or_signature_length", + }, + }); + }); + + it("rejects wrong-length public keys", async () => { + const { body, signed } = await makeSignedFixture(); + + await expect( + verifyHttpRequest({ + method: "POST", + pathWithQuery: "/v1/messages?b=2&a=1", + headers: signed.headers, + body, + publicKey: new Uint8Array([1]), + }), + ).rejects.toMatchObject({ + code: "HTTP_SIGNATURE_MISSING_PUBLIC", + details: { + keyLength: 1, + expectedKeyLength: 32, + }, + }); + }); }); diff --git a/packages/sdk/src/http/verify.ts b/packages/sdk/src/http/verify.ts index 24d8572..0c04a07 100644 --- a/packages/sdk/src/http/verify.ts +++ 
b/packages/sdk/src/http/verify.ts @@ -57,6 +57,9 @@ export async function verifyHttpRequest( let signature: Uint8Array; try { signature = decodeEd25519SignatureBase64url(headers[X_CLAW_PROOF]); + if (signature.length !== 64) { + throw new Error("invalid_signature_length"); + } } catch { throw new AppError({ code: "HTTP_SIGNATURE_INVALID_PROOF", @@ -64,6 +67,7 @@ export async function verifyHttpRequest( status: 401, details: { header: X_CLAW_PROOF, + reason: "invalid_base64url_or_signature_length", }, }); } From 86e3218c2d4bd5ae664b3dcf06db01ec23118c03 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 16:59:42 +0530 Subject: [PATCH 011/190] fix(sdk): validate CRL payload schema after jwt verify --- packages/sdk/AGENTS.md | 1 + packages/sdk/src/jwt/crl-jwt.test.ts | 34 ++++++++++++++++++++++++++++ packages/sdk/src/jwt/crl-jwt.ts | 7 ++++-- 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 082a9fc..42b2c56 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -26,6 +26,7 @@ - Keep CRL claim schema authority in `@clawdentity/protocol` (`crl.ts`); SDK JWT helpers should avoid duplicating claim-validation rules. - Never log secret keys or raw signature material. - Enforce AIT JWT security invariants in verification: `alg=EdDSA`, `typ=AIT`, and `kid` lookup against registry keys. +- Always parse CRL JWT payloads through protocol `parseCrlClaims` after signature verification so schema invariants (revocations non-empty, DID/ULID checks) are enforced. - For HTTP signing errors, keep user-facing messages static and send extra context through `AppError.details`. - Enforce Ed25519 key lengths at SDK boundaries (`secretKey` 32 bytes, `publicKey` 32 bytes) so misconfiguration returns stable `AppError` codes. - Treat any decoded PoP proof that is not 64 bytes as `HTTP_SIGNATURE_INVALID_PROOF`. 
diff --git a/packages/sdk/src/jwt/crl-jwt.test.ts b/packages/sdk/src/jwt/crl-jwt.test.ts index b168775..ac7fe75 100644 --- a/packages/sdk/src/jwt/crl-jwt.test.ts +++ b/packages/sdk/src/jwt/crl-jwt.test.ts @@ -176,4 +176,38 @@ describe("CRL JWT helpers", () => { }), ).rejects.toThrow(); }); + + it("rejects schema-invalid but correctly signed payloads", async () => { + const keypair = await generateEd25519Keypair(); + const now = Math.floor(Date.now() / 1000); + const token = await signCRL({ + claims: { + iss: "https://registry.clawdentity.dev", + jti: generateUlid(1700105000000), + iat: now, + exp: now + 3600, + revocations: [], + } as unknown as CrlClaims, + signerKid: "reg-crl-1", + signerKeypair: keypair, + }); + + await expect( + verifyCRL({ + token, + registryKeys: [ + { + kid: "reg-crl-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(keypair.publicKey), + }, + }, + ], + }), + ).rejects.toMatchObject({ + code: "INVALID_CRL_CLAIMS", + }); + }); }); diff --git a/packages/sdk/src/jwt/crl-jwt.ts b/packages/sdk/src/jwt/crl-jwt.ts index da3d5d3..7bf2ee1 100644 --- a/packages/sdk/src/jwt/crl-jwt.ts +++ b/packages/sdk/src/jwt/crl-jwt.ts @@ -1,4 +1,7 @@ -import type { CrlClaims as ProtocolCrlClaims } from "@clawdentity/protocol"; +import { + type CrlClaims as ProtocolCrlClaims, + parseCrlClaims, +} from "@clawdentity/protocol"; import type { JWTVerifyOptions } from "jose"; import { decodeProtectedHeader, importJWK, jwtVerify, SignJWT } from "jose"; import type { Ed25519KeypairBytes } from "../crypto/ed25519.js"; @@ -100,5 +103,5 @@ export async function verifyCRL(input: VerifyCrlInput): Promise { } const { payload } = await jwtVerify(input.token, publicKey, options); - return payload as CrlClaims; + return parseCrlClaims(payload); } From 9fcf8f20fefebe088b616dcec8ffe0a39eedfa25 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 17:02:22 +0530 Subject: [PATCH 012/190] fix(sdk): validate CRL claims before signing --- 
packages/sdk/src/jwt/crl-jwt.test.ts | 61 ++++++++++++++++++++++------ packages/sdk/src/jwt/crl-jwt.ts | 3 +- 2 files changed, 51 insertions(+), 13 deletions(-) diff --git a/packages/sdk/src/jwt/crl-jwt.test.ts b/packages/sdk/src/jwt/crl-jwt.test.ts index ac7fe75..f8adf43 100644 --- a/packages/sdk/src/jwt/crl-jwt.test.ts +++ b/packages/sdk/src/jwt/crl-jwt.test.ts @@ -4,8 +4,12 @@ import { generateUlid, makeAgentDid, } from "@clawdentity/protocol"; +import { importJWK, SignJWT } from "jose"; import { describe, expect, it } from "vitest"; -import { generateEd25519Keypair } from "../crypto/ed25519.js"; +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, +} from "../crypto/ed25519.js"; import { type CrlClaims, signCRL, verifyCRL } from "./crl-jwt.js"; const textEncoder = new TextEncoder(); @@ -180,17 +184,29 @@ describe("CRL JWT helpers", () => { it("rejects schema-invalid but correctly signed payloads", async () => { const keypair = await generateEd25519Keypair(); const now = Math.floor(Date.now() / 1000); - const token = await signCRL({ - claims: { - iss: "https://registry.clawdentity.dev", - jti: generateUlid(1700105000000), - iat: now, - exp: now + 3600, - revocations: [], - } as unknown as CrlClaims, - signerKid: "reg-crl-1", - signerKeypair: keypair, - }); + const encodedKeypair = encodeEd25519KeypairBase64url(keypair); + const privateKey = await importJWK( + { + kty: "OKP", + crv: "Ed25519", + x: encodedKeypair.publicKey, + d: encodedKeypair.secretKey, + }, + "EdDSA", + ); + const token = await new SignJWT({ + iss: "https://registry.clawdentity.dev", + jti: generateUlid(1700105000000), + iat: now, + exp: now + 3600, + revocations: [], + }) + .setProtectedHeader({ + alg: "EdDSA", + typ: "CRL", + kid: "reg-crl-1", + }) + .sign(privateKey); await expect( verifyCRL({ @@ -210,4 +226,25 @@ describe("CRL JWT helpers", () => { code: "INVALID_CRL_CLAIMS", }); }); + + it("rejects invalid CRL claims before signing", async () => { + const keypair = await 
generateEd25519Keypair(); + const now = Math.floor(Date.now() / 1000); + + await expect( + signCRL({ + claims: { + iss: "https://registry.clawdentity.dev", + jti: generateUlid(1700105000000), + iat: now, + exp: now + 3600, + revocations: [], + } as unknown as CrlClaims, + signerKid: "reg-crl-1", + signerKeypair: keypair, + }), + ).rejects.toMatchObject({ + code: "INVALID_CRL_CLAIMS", + }); + }); }); diff --git a/packages/sdk/src/jwt/crl-jwt.ts b/packages/sdk/src/jwt/crl-jwt.ts index 7bf2ee1..fb24be4 100644 --- a/packages/sdk/src/jwt/crl-jwt.ts +++ b/packages/sdk/src/jwt/crl-jwt.ts @@ -55,6 +55,7 @@ function unknownCrlKid(kid: string): CrlJwtError { } export async function signCRL(input: SignCrlInput): Promise { + const claims = parseCrlClaims(input.claims); const encodedKeypair = encodeEd25519KeypairBase64url(input.signerKeypair); const privateJwk: CrlPrivateJwk = { kty: "OKP", @@ -64,7 +65,7 @@ export async function signCRL(input: SignCrlInput): Promise { }; const privateKey = await importJWK(privateJwk, "EdDSA"); - return new SignJWT(input.claims) + return new SignJWT(claims) .setProtectedHeader({ alg: "EdDSA", typ: "CRL", From bea09c83b515ae28ad681f9105676f689a9bc24e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 17:04:30 +0530 Subject: [PATCH 013/190] fix(protocol): enforce 32-byte Ed25519 key in AIT claims --- packages/protocol/AGENTS.md | 1 + packages/protocol/src/ait.test.ts | 4 ++++ packages/protocol/src/ait.ts | 10 +++++++++- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index b2848f1..237c7f3 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -10,6 +10,7 @@ - Maintain Cloudflare Worker portability: avoid Node-only globals in protocol helpers. - Keep AIT schema parsing strict (`.strict()` objects) so unknown claims are rejected by default. 
- Validate risky identity fields (`name`, `description`) with explicit allowlists/length caps; never pass through raw control characters. +- Enforce `cnf.jwk.x` semantics for AIT parsing: value must be base64url and decode to exactly 32 bytes for Ed25519 (`kty=OKP`, `crv=Ed25519`). - Reuse existing protocol validators/parsers (`parseDid`, `parseUlid`, base64url helpers) instead of duplicating claim validation logic. - Keep HTTP signing canonical strings deterministic: canonicalize method, normalized path (path + query), timestamp, nonce, and body hash exactly as `README.md`, `PRD.md`, and the policy docs describe (see `CLAW-PROOF-V1\n\n\n\n\n`). - Mirror the AIT guardrails for CRL payloads: `crl.ts` keeps `.strict()` definitions, requires at least one revocation entry, enforces `agentDid` is a `did:claw:agent`, `revocation.jti` is a ULID, `exp > iat`, and surfaces `INVALID_CRL_CLAIMS` via `ProtocolParseError`. diff --git a/packages/protocol/src/ait.test.ts b/packages/protocol/src/ait.test.ts index 949c47f..729ff9e 100644 --- a/packages/protocol/src/ait.test.ts +++ b/packages/protocol/src/ait.test.ts @@ -91,9 +91,13 @@ describe("AIT claims schema", () => { const badX = makeValidClaims(); badX.cnf.jwk.x = "invalid+base64url"; + const shortX = makeValidClaims(); + shortX.cnf.jwk.x = encodeBase64url(Uint8Array.from([1])); + expect(() => parseAitClaims(badKty)).toThrow(ProtocolParseError); expect(() => parseAitClaims(badCrv)).toThrow(ProtocolParseError); expect(() => parseAitClaims(badX)).toThrow(ProtocolParseError); + expect(() => parseAitClaims(shortX)).toThrow(ProtocolParseError); }); it("rejects invalid temporal ordering", () => { diff --git a/packages/protocol/src/ait.ts b/packages/protocol/src/ait.ts index 39ec673..7d52a42 100644 --- a/packages/protocol/src/ait.ts +++ b/packages/protocol/src/ait.ts @@ -10,6 +10,7 @@ export const MAX_AGENT_DESCRIPTION_LENGTH = 280; export const AGENT_NAME_REGEX = /^[A-Za-z0-9._ -]{1,64}$/; const MAX_FRAMEWORK_LENGTH = 32; +const 
ED25519_PUBLIC_KEY_LENGTH = 32; export type AitCnfJwk = { kty: "OKP"; @@ -102,7 +103,14 @@ export const aitClaimsSchema = z } try { - decodeBase64url(claims.cnf.jwk.x); + const decodedPublicKey = decodeBase64url(claims.cnf.jwk.x); + if (decodedPublicKey.length !== ED25519_PUBLIC_KEY_LENGTH) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "cnf.jwk.x must decode to 32-byte Ed25519 public key", + path: ["cnf", "jwk", "x"], + }); + } } catch { ctx.addIssue({ code: z.ZodIssueCode.custom, From bb0ba1a4fe036018c52190e7f659d10664b2d60f Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 17:10:28 +0530 Subject: [PATCH 014/190] feat(sdk): implement T08 nonce replay cache --- packages/sdk/AGENTS.md | 2 + packages/sdk/src/index.test.ts | 26 ++++ packages/sdk/src/index.ts | 10 ++ packages/sdk/src/security/nonce-cache.test.ts | 88 ++++++++++++ packages/sdk/src/security/nonce-cache.ts | 131 ++++++++++++++++++ 5 files changed, 257 insertions(+) create mode 100644 packages/sdk/src/security/nonce-cache.test.ts create mode 100644 packages/sdk/src/security/nonce-cache.ts diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 42b2c56..f4f50cc 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -14,6 +14,7 @@ - `jwt/ait-jwt`: AIT JWS signing and verification with strict header and issuer checks. - `jwt/crl-jwt`: CRL JWT helpers with EdDSA signing, header consistency checks, and tamper-detection test coverage. - `http/sign` + `http/verify`: PoP request signing and verification that binds method, path+query, timestamp, nonce, and body hash. +- `security/nonce-cache`: in-memory TTL nonce replay protection keyed by `agentDid + nonce`. - Tests should prove tamper cases (payload change, header kid swap, signature corruption). ## Design Rules @@ -38,3 +39,4 @@ - Crypto tests must include explicit negative verification cases (wrong message/signature/key). 
- JWT tests must include sign/verify happy path and failure paths for issuer mismatch and missing/unknown `kid`. - HTTP signing tests must include sign/verify happy path and explicit failures when method, path, body, or timestamp are altered. +- Nonce cache tests must include duplicate nonce rejection within TTL and acceptance after TTL expiry. diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index ccba998..81fb583 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -4,6 +4,8 @@ import { AppError, addSeconds, CrlJwtError, + createNonceCache, + DEFAULT_NONCE_TTL_MS, decodeEd25519KeypairBase64url, decodeEd25519SignatureBase64url, encodeEd25519KeypairBase64url, @@ -168,4 +170,28 @@ describe("sdk", () => { expect(verified.proof).toBe(signed.proof); expect(verified.canonicalRequest).toBe(signed.canonicalRequest); }); + + it("exports nonce cache helpers from package root", () => { + const now = 5_000; + const cache = createNonceCache({ + ttlMs: 100, + clock: () => now, + }); + + const first = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-root", + }); + const second = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-root", + }); + + expect(first.accepted).toBe(true); + expect(second).toMatchObject({ + accepted: false, + reason: "replay", + }); + expect(DEFAULT_NONCE_TTL_MS).toBe(300000); + }); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index f5c1c1e..ba92322 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -51,3 +51,13 @@ export { REQUEST_ID_HEADER, resolveRequestId, } from "./request-context.js"; +export type { + NonceCache, + NonceCacheInput, + NonceCacheOptions, + NonceCacheResult, +} from "./security/nonce-cache.js"; +export { + createNonceCache, + DEFAULT_NONCE_TTL_MS, +} from "./security/nonce-cache.js"; diff --git 
a/packages/sdk/src/security/nonce-cache.test.ts b/packages/sdk/src/security/nonce-cache.test.ts new file mode 100644 index 0000000..2ac4312 --- /dev/null +++ b/packages/sdk/src/security/nonce-cache.test.ts @@ -0,0 +1,88 @@ +import { describe, expect, it } from "vitest"; +import { createNonceCache, DEFAULT_NONCE_TTL_MS } from "./nonce-cache.js"; + +describe("nonce cache", () => { + it("rejects duplicate nonce for same agent within ttl", () => { + const now = 1_000; + const cache = createNonceCache({ + ttlMs: DEFAULT_NONCE_TTL_MS, + clock: () => now, + }); + + const first = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-1", + }); + const second = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-1", + }); + + expect(first.accepted).toBe(true); + expect(second).toMatchObject({ + accepted: false, + reason: "replay", + }); + }); + + it("treats expired nonces as unseen", () => { + let now = 2_000; + const ttlMs = 100; + const cache = createNonceCache({ + ttlMs, + clock: () => now, + }); + + const first = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-2", + }); + now += ttlMs + 1; + const second = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-2", + }); + + expect(first.accepted).toBe(true); + expect(second.accepted).toBe(true); + }); + + it("isolates nonce tracking per agent did", () => { + const cache = createNonceCache({ + clock: () => 3_000, + }); + + const first = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-shared", + }); + const second = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + nonce: "nonce-shared", + }); + + expect(first.accepted).toBe(true); + expect(second.accepted).toBe(true); + }); + + it("purges expired entries so future checks are accepted", () => { + let now = 4_000; + const 
cache = createNonceCache({ + ttlMs: 100, + clock: () => now, + }); + + cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-3", + }); + now += 101; + cache.purgeExpired(); + + const next = cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: "nonce-3", + }); + expect(next.accepted).toBe(true); + }); +}); diff --git a/packages/sdk/src/security/nonce-cache.ts b/packages/sdk/src/security/nonce-cache.ts new file mode 100644 index 0000000..14c2d37 --- /dev/null +++ b/packages/sdk/src/security/nonce-cache.ts @@ -0,0 +1,131 @@ +import { AppError } from "../exceptions.js"; + +export const DEFAULT_NONCE_TTL_MS = 5 * 60 * 1000; + +export type NonceCacheOptions = { + ttlMs?: number; + clock?: () => number; +}; + +export type NonceCacheInput = { + agentDid: string; + nonce: string; +}; + +export type NonceCacheResult = + | { + accepted: true; + seenAt: number; + expiresAt: number; + } + | { + accepted: false; + reason: "replay"; + seenAt: number; + expiresAt: number; + }; + +export interface NonceCache { + tryAcceptNonce(input: NonceCacheInput): NonceCacheResult; + purgeExpired(): void; +} + +type NonceRecord = { + seenAt: number; + expiresAt: number; +}; + +function ensureNonEmptyString(value: unknown, field: string): string { + if (typeof value !== "string" || value.trim() === "") { + throw new AppError({ + code: "NONCE_CACHE_INVALID_INPUT", + message: "Nonce cache input must be a non-empty string", + status: 400, + details: { field }, + }); + } + + return value; +} + +function resolveTtlMs(ttlMs: number | undefined): number { + const ttl = ttlMs ?? 
DEFAULT_NONCE_TTL_MS; + if (!Number.isFinite(ttl) || ttl <= 0) { + throw new AppError({ + code: "NONCE_CACHE_INVALID_TTL", + message: "Nonce cache ttl must be a positive number", + status: 500, + details: { ttlMs: ttl }, + }); + } + return ttl; +} + +function pruneExpiredFromAgent( + agentMap: Map, + now: number, +) { + for (const [nonce, record] of agentMap.entries()) { + if (record.expiresAt <= now) { + agentMap.delete(nonce); + } + } +} + +export function createNonceCache(options: NonceCacheOptions = {}): NonceCache { + const ttlMs = resolveTtlMs(options.ttlMs); + const clock = options.clock ?? Date.now; + const seenByAgent = new Map>(); + + function purgeExpired(): void { + const now = clock(); + for (const [agentDid, agentMap] of seenByAgent.entries()) { + pruneExpiredFromAgent(agentMap, now); + if (agentMap.size === 0) { + seenByAgent.delete(agentDid); + } + } + } + + function tryAcceptNonce(input: NonceCacheInput): NonceCacheResult { + const agentDid = ensureNonEmptyString(input.agentDid, "agentDid"); + const nonce = ensureNonEmptyString(input.nonce, "nonce"); + const now = clock(); + + let agentMap = seenByAgent.get(agentDid); + if (!agentMap) { + agentMap = new Map(); + seenByAgent.set(agentDid, agentMap); + } + + pruneExpiredFromAgent(agentMap, now); + + const existing = agentMap.get(nonce); + if (existing) { + return { + accepted: false, + reason: "replay", + seenAt: existing.seenAt, + expiresAt: existing.expiresAt, + }; + } + + const seenAt = now; + const expiresAt = now + ttlMs; + agentMap.set(nonce, { + seenAt, + expiresAt, + }); + + return { + accepted: true, + seenAt, + expiresAt, + }; + } + + return { + tryAcceptNonce, + purgeExpired, + }; +} From 3cb6947bffbbe77e72b37d02690cf321a071c528 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 17:23:03 +0530 Subject: [PATCH 015/190] fix(sdk): harden nonce cache pruning and input guards --- packages/sdk/AGENTS.md | 2 + packages/sdk/src/security/nonce-cache.test.ts | 57 +++++++++++++++++++ 
packages/sdk/src/security/nonce-cache.ts | 32 +++++++++-- 3 files changed, 85 insertions(+), 6 deletions(-) diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index f4f50cc..434db81 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -31,6 +31,8 @@ - For HTTP signing errors, keep user-facing messages static and send extra context through `AppError.details`. - Enforce Ed25519 key lengths at SDK boundaries (`secretKey` 32 bytes, `publicKey` 32 bytes) so misconfiguration returns stable `AppError` codes. - Treat any decoded PoP proof that is not 64 bytes as `HTTP_SIGNATURE_INVALID_PROOF`. +- Nonce cache accept path must prune expired entries across all agent buckets to keep memory bounded under high-cardinality agent traffic. +- Nonce cache must validate the top-level input shape before reading fields so invalid JS callers receive structured `AppError`s instead of runtime `TypeError`s. ## Testing Rules - Unit test each shared module. diff --git a/packages/sdk/src/security/nonce-cache.test.ts b/packages/sdk/src/security/nonce-cache.test.ts index 2ac4312..024259d 100644 --- a/packages/sdk/src/security/nonce-cache.test.ts +++ b/packages/sdk/src/security/nonce-cache.test.ts @@ -2,6 +2,24 @@ import { describe, expect, it } from "vitest"; import { createNonceCache, DEFAULT_NONCE_TTL_MS } from "./nonce-cache.js"; describe("nonce cache", () => { + it("rejects non-object input with structured app error", () => { + const cache = createNonceCache(); + + try { + cache.tryAcceptNonce( + undefined as unknown as { agentDid: string; nonce: string }, + ); + throw new Error("expected tryAcceptNonce to throw"); + } catch (error) { + expect(error).toMatchObject({ + code: "NONCE_CACHE_INVALID_INPUT", + details: { + field: "input", + }, + }); + } + }); + it("rejects duplicate nonce for same agent within ttl", () => { const now = 1_000; const cache = createNonceCache({ @@ -85,4 +103,43 @@ describe("nonce cache", () => { }); expect(next.accepted).toBe(true); }); + + 
it("prunes expired entries across all agents during accept", () => { + let now = 10_000; + const targetNonce = "nonce-expired-other-agent"; + const cache = createNonceCache({ + ttlMs: 100, + clock: () => now, + }); + + cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + nonce: targetNonce, + }); + now += 101; + + const originalDelete = Map.prototype.delete; + let removedExpiredNonce = false; + + Map.prototype.delete = function patchedDelete( + this: Map, + key: unknown, + ): boolean { + if (key === targetNonce) { + removedExpiredNonce = true; + } + return originalDelete.call(this, key); + }; + + try { + cache.tryAcceptNonce({ + agentDid: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + nonce: "nonce-fresh", + }); + } finally { + Map.prototype.delete = originalDelete; + } + + expect(removedExpiredNonce).toBe(true); + }); }); diff --git a/packages/sdk/src/security/nonce-cache.ts b/packages/sdk/src/security/nonce-cache.ts index 14c2d37..c293a62 100644 --- a/packages/sdk/src/security/nonce-cache.ts +++ b/packages/sdk/src/security/nonce-cache.ts @@ -48,6 +48,23 @@ function ensureNonEmptyString(value: unknown, field: string): string { return value; } +function ensureNonceCacheInput(input: unknown): NonceCacheInput { + if (typeof input !== "object" || input === null) { + throw new AppError({ + code: "NONCE_CACHE_INVALID_INPUT", + message: "Nonce cache input must be an object", + status: 400, + details: { field: "input" }, + }); + } + + const parsed = input as Partial; + return { + agentDid: ensureNonEmptyString(parsed.agentDid, "agentDid"), + nonce: ensureNonEmptyString(parsed.nonce, "nonce"), + }; +} + function resolveTtlMs(ttlMs: number | undefined): number { const ttl = ttlMs ?? DEFAULT_NONCE_TTL_MS; if (!Number.isFinite(ttl) || ttl <= 0) { @@ -77,8 +94,7 @@ export function createNonceCache(options: NonceCacheOptions = {}): NonceCache { const clock = options.clock ?? 
Date.now; const seenByAgent = new Map>(); - function purgeExpired(): void { - const now = clock(); + function purgeExpiredAt(now: number): void { for (const [agentDid, agentMap] of seenByAgent.entries()) { pruneExpiredFromAgent(agentMap, now); if (agentMap.size === 0) { @@ -87,10 +103,16 @@ export function createNonceCache(options: NonceCacheOptions = {}): NonceCache { } } + function purgeExpired(): void { + purgeExpiredAt(clock()); + } + function tryAcceptNonce(input: NonceCacheInput): NonceCacheResult { - const agentDid = ensureNonEmptyString(input.agentDid, "agentDid"); - const nonce = ensureNonEmptyString(input.nonce, "nonce"); + const parsed = ensureNonceCacheInput(input); + const agentDid = parsed.agentDid; + const nonce = parsed.nonce; const now = clock(); + purgeExpiredAt(now); let agentMap = seenByAgent.get(agentDid); if (!agentMap) { @@ -98,8 +120,6 @@ export function createNonceCache(options: NonceCacheOptions = {}): NonceCache { seenByAgent.set(agentDid, agentMap); } - pruneExpiredFromAgent(agentMap, now); - const existing = agentMap.get(nonce); if (existing) { return { From 18ebf21853b0c8afc524d93b56c80562ac2846e6 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 17:26:01 +0530 Subject: [PATCH 016/190] Validate CRL claims before use --- .DS_Store | Bin 0 -> 6148 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 .DS_Store diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..5008ddfcf53c02e82d7eee2e57c38e5672ef89f6 GIT binary patch literal 6148 zcmeH~Jr2S!425mzP>H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0 Date: Thu, 12 Feb 2026 20:54:18 +0530 Subject: [PATCH 017/190] feat(sdk): implement T09 CRL cache refresh and staleness policy --- packages/sdk/AGENTS.md | 4 + packages/sdk/src/crl/cache.test.ts | 131 +++++++++++++++ packages/sdk/src/crl/cache.ts | 253 
+++++++++++++++++++++++++++++ packages/sdk/src/index.test.ts | 32 ++++ packages/sdk/src/index.ts | 12 ++ 5 files changed, 432 insertions(+) create mode 100644 packages/sdk/src/crl/cache.test.ts create mode 100644 packages/sdk/src/crl/cache.ts diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 434db81..419c590 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -13,6 +13,7 @@ - `crypto/ed25519`: byte-first keypair/sign/verify helpers for PoP and token workflows. - `jwt/ait-jwt`: AIT JWS signing and verification with strict header and issuer checks. - `jwt/crl-jwt`: CRL JWT helpers with EdDSA signing, header consistency checks, and tamper-detection test coverage. +- `crl/cache`: in-memory CRL cache with periodic refresh, staleness reporting, and configurable stale behavior. - `http/sign` + `http/verify`: PoP request signing and verification that binds method, path+query, timestamp, nonce, and body hash. - `security/nonce-cache`: in-memory TTL nonce replay protection keyed by `agentDid + nonce`. - Tests should prove tamper cases (payload change, header kid swap, signature corruption). @@ -28,6 +29,8 @@ - Never log secret keys or raw signature material. - Enforce AIT JWT security invariants in verification: `alg=EdDSA`, `typ=AIT`, and `kid` lookup against registry keys. - Always parse CRL JWT payloads through protocol `parseCrlClaims` after signature verification so schema invariants (revocations non-empty, DID/ULID checks) are enforced. +- CRL cache must parse fetched payloads through protocol `parseCrlClaims` before replacing cache state. +- CRL cache stale behavior must be explicit and configurable (`fail-open` or `fail-closed`), with warnings surfaced on refresh failures. - For HTTP signing errors, keep user-facing messages static and send extra context through `AppError.details`. - Enforce Ed25519 key lengths at SDK boundaries (`secretKey` 32 bytes, `publicKey` 32 bytes) so misconfiguration returns stable `AppError` codes. 
- Treat any decoded PoP proof that is not 64 bytes as `HTTP_SIGNATURE_INVALID_PROOF`. @@ -42,3 +45,4 @@ - JWT tests must include sign/verify happy path and failure paths for issuer mismatch and missing/unknown `kid`. - HTTP signing tests must include sign/verify happy path and explicit failures when method, path, body, or timestamp are altered. - Nonce cache tests must include duplicate nonce rejection within TTL and acceptance after TTL expiry. +- CRL cache tests must cover revoked lookup, refresh-on-stale, and stale-path behavior in both `fail-open` and `fail-closed` modes. diff --git a/packages/sdk/src/crl/cache.test.ts b/packages/sdk/src/crl/cache.test.ts new file mode 100644 index 0000000..88db372 --- /dev/null +++ b/packages/sdk/src/crl/cache.test.ts @@ -0,0 +1,131 @@ +import { describe, expect, it } from "vitest"; +import type { CrlClaims } from "../jwt/crl-jwt.js"; +import { + createCrlCache, + DEFAULT_CRL_MAX_AGE_MS, + DEFAULT_CRL_REFRESH_INTERVAL_MS, +} from "./cache.js"; + +const REGISTRY_ISSUER = "https://registry.clawdentity.dev"; +const AGENT_DID = "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"; +const CRL_JTI = "01HF7YAT4TXP6AW5QNXA2Y9K43"; +const REVOCATION_JTI_A = "01HF7YAT31JZHSMW1CG6Q6MHB7"; +const REVOCATION_JTI_B = "01HF7YAT5QJ4K3YVQJ6Q2F9M1N"; + +function makeClaims(revocationJti: string): CrlClaims { + return { + iss: REGISTRY_ISSUER, + jti: CRL_JTI, + iat: 1_700_000_000, + exp: 1_700_003_600, + revocations: [ + { + jti: revocationJti, + agentDid: AGENT_DID, + reason: "manual revoke", + revokedAt: 1_700_000_100, + }, + ], + }; +} + +describe("crl cache", () => { + it("uses sensible default timing values", () => { + expect(DEFAULT_CRL_REFRESH_INTERVAL_MS).toBe(300000); + expect(DEFAULT_CRL_MAX_AGE_MS).toBe(900000); + }); + + it("checks revoked jti after loading claims", async () => { + const cache = createCrlCache({ + fetchLatest: async () => makeClaims(REVOCATION_JTI_A), + clock: () => 1_000, + }); + + await 
expect(cache.isRevoked(REVOCATION_JTI_A)).resolves.toBe(true); + await expect(cache.isRevoked(REVOCATION_JTI_B)).resolves.toBe(false); + }); + + it("attempts refresh for stale cache and surfaces warnings when refresh fails in fail-open mode", async () => { + let now = 500; + let fetchCalls = 0; + const cache = createCrlCache({ + fetchLatest: async () => { + fetchCalls += 1; + throw new Error("network down"); + }, + staleBehavior: "fail-open", + refreshIntervalMs: 100, + maxAgeMs: 200, + initialClaims: makeClaims(REVOCATION_JTI_A), + initialFetchedAtMs: 0, + clock: () => now, + }); + + const result = await cache.refreshIfStale(); + + expect(fetchCalls).toBe(1); + expect(result.refreshed).toBe(false); + expect(result.stale).toBe(true); + expect(result.warnings.map((warning) => warning.code)).toEqual([ + "CRL_REFRESH_FAILED", + "CRL_STALE", + ]); + + now += 50; + await cache.refreshIfStale(); + expect(fetchCalls).toBe(1); + }); + + it("throws in fail-closed mode when stale cache cannot refresh", async () => { + const cache = createCrlCache({ + fetchLatest: async () => { + throw new Error("registry unavailable"); + }, + staleBehavior: "fail-closed", + refreshIntervalMs: 100, + maxAgeMs: 200, + clock: () => 1_000, + }); + + await expect(cache.refreshIfStale()).rejects.toMatchObject({ + code: "CRL_CACHE_STALE", + }); + }); + + it("refreshes when interval elapsed and uses latest revocation list", async () => { + const now = 150; + let currentClaims = makeClaims(REVOCATION_JTI_A); + const cache = createCrlCache({ + fetchLatest: async () => currentClaims, + refreshIntervalMs: 100, + maxAgeMs: 1_000, + initialClaims: makeClaims(REVOCATION_JTI_A), + initialFetchedAtMs: 0, + clock: () => now, + }); + + currentClaims = makeClaims(REVOCATION_JTI_B); + + const result = await cache.refreshIfStale(); + expect(result.refreshed).toBe(true); + expect(result.stale).toBe(false); + expect(result.warnings).toEqual([]); + + await 
expect(cache.isRevoked(REVOCATION_JTI_A)).resolves.toBe(false); + await expect(cache.isRevoked(REVOCATION_JTI_B)).resolves.toBe(true); + }); + + it("rejects invalid jti input with structured app error", async () => { + const cache = createCrlCache({ + fetchLatest: async () => makeClaims(REVOCATION_JTI_A), + clock: () => 0, + }); + + await expect(cache.isRevoked(" ")).rejects.toMatchObject({ + code: "CRL_CACHE_INVALID_INPUT", + details: { + field: "jti", + }, + }); + }); +}); diff --git a/packages/sdk/src/crl/cache.ts b/packages/sdk/src/crl/cache.ts new file mode 100644 index 0000000..083c338 --- /dev/null +++ b/packages/sdk/src/crl/cache.ts @@ -0,0 +1,253 @@ +import { parseCrlClaims } from "@clawdentity/protocol"; +import { AppError } from "../exceptions.js"; +import type { CrlClaims } from "../jwt/crl-jwt.js"; + +export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; +export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; + +export type CrlCacheStaleBehavior = "fail-open" | "fail-closed"; + +export type CrlCacheWarning = { + code: "CRL_REFRESH_FAILED" | "CRL_STALE"; + message: string; + details?: Record<string, unknown>; +}; + +export type CrlCacheRefreshResult = { + refreshed: boolean; + stale: boolean; + warnings: CrlCacheWarning[]; + fetchedAtMs: number | null; +}; + +export type CrlCacheOptions = { + fetchLatest: () => Promise<unknown>; + refreshIntervalMs?: number; + maxAgeMs?: number; + staleBehavior?: CrlCacheStaleBehavior; + clock?: () => number; + initialClaims?: unknown; + initialFetchedAtMs?: number; +}; + +export interface CrlCache { + refreshIfStale(): Promise<CrlCacheRefreshResult>; + isRevoked(jti: string): Promise<boolean>; +} + +function invalidConfig(field: string, message: string): AppError { + return new AppError({ + code: "CRL_CACHE_INVALID_CONFIG", + message, + status: 500, + details: { field }, + }); +} + +function invalidInput(field: string): AppError { + return new AppError({ + code: "CRL_CACHE_INVALID_INPUT", + message: "CRL cache input must be a non-empty string", + status: 400, +
details: { field }, + }); +} + +function ensureNonEmptyString(value: unknown, field: string): string { + if (typeof value !== "string" || value.trim() === "") { + throw invalidInput(field); + } + return value; +} + +function ensurePositiveNumber( + value: number | undefined, + fallback: number, + field: string, +): number { + const resolved = value ?? fallback; + if (!Number.isFinite(resolved) || resolved <= 0) { + throw invalidConfig( + field, + "CRL cache timing values must be positive numbers", + ); + } + return resolved; +} + +function ensureOptionalTimestamp(value: number | undefined): number | null { + if (value === undefined) { + return null; + } + + if (!Number.isFinite(value) || value < 0) { + throw invalidConfig( + "initialFetchedAtMs", + "initialFetchedAtMs must be a non-negative number", + ); + } + + return value; +} + +function ensureStaleBehavior(value: unknown): CrlCacheStaleBehavior { + if (value === "fail-open" || value === "fail-closed") { + return value; + } + + throw invalidConfig( + "staleBehavior", + "staleBehavior must be either fail-open or fail-closed", + ); +} + +function staleCacheError(details: { + maxAgeMs: number; + lastSuccessfulRefreshAtMs: number | null; + lastRefreshAttemptAtMs: number | null; +}): AppError { + return new AppError({ + code: "CRL_CACHE_STALE", + message: "CRL cache is stale and cannot be refreshed", + status: 503, + details, + }); +} + +export function createCrlCache(options: CrlCacheOptions): CrlCache { + if (typeof options !== "object" || options === null) { + throw invalidConfig("options", "CRL cache options must be an object"); + } + + if (typeof options.fetchLatest !== "function") { + throw invalidConfig( + "fetchLatest", + "CRL cache requires a fetchLatest function", + ); + } + + const refreshIntervalMs = ensurePositiveNumber( + options.refreshIntervalMs, + DEFAULT_CRL_REFRESH_INTERVAL_MS, + "refreshIntervalMs", + ); + const maxAgeMs = ensurePositiveNumber( + options.maxAgeMs, + DEFAULT_CRL_MAX_AGE_MS, + 
"maxAgeMs", + ); + const staleBehavior = ensureStaleBehavior( + options.staleBehavior ?? "fail-open", + ); + const clock = options.clock ?? Date.now; + + let claims: CrlClaims | null = + options.initialClaims === undefined + ? null + : parseCrlClaims(options.initialClaims); + let lastSuccessfulRefreshAtMs = ensureOptionalTimestamp( + options.initialFetchedAtMs, + ); + + if (claims !== null && lastSuccessfulRefreshAtMs === null) { + lastSuccessfulRefreshAtMs = clock(); + } + + if (claims === null && lastSuccessfulRefreshAtMs !== null) { + throw invalidConfig( + "initialFetchedAtMs", + "initialFetchedAtMs requires initialClaims", + ); + } + + let lastRefreshAttemptAtMs: number | null = null; + + function ageMs(now: number): number { + if (lastSuccessfulRefreshAtMs === null) { + return Number.POSITIVE_INFINITY; + } + return now - lastSuccessfulRefreshAtMs; + } + + function isStale(now: number): boolean { + return claims === null || ageMs(now) > maxAgeMs; + } + + function shouldRefresh(now: number): boolean { + return claims === null || ageMs(now) >= refreshIntervalMs || isStale(now); + } + + function canAttemptRefresh(now: number): boolean { + return ( + lastRefreshAttemptAtMs === null || + now - lastRefreshAttemptAtMs >= refreshIntervalMs + ); + } + + async function refreshIfStale(): Promise { + const warnings: CrlCacheWarning[] = []; + let refreshed = false; + const now = clock(); + + if (shouldRefresh(now) && canAttemptRefresh(now)) { + lastRefreshAttemptAtMs = now; + try { + const nextClaims = parseCrlClaims(await options.fetchLatest()); + claims = nextClaims; + lastSuccessfulRefreshAtMs = now; + refreshed = true; + } catch (error) { + warnings.push({ + code: "CRL_REFRESH_FAILED", + message: "CRL refresh attempt failed", + details: { + reason: error instanceof Error ? 
error.message : "unknown", + }, + }); + } + } + + const stale = isStale(now); + if (stale) { + warnings.push({ + code: "CRL_STALE", + message: "CRL cache is stale", + details: { + ageMs: Number.isFinite(ageMs(now)) ? ageMs(now) : null, + maxAgeMs, + }, + }); + + if (staleBehavior === "fail-closed") { + throw staleCacheError({ + maxAgeMs, + lastSuccessfulRefreshAtMs, + lastRefreshAttemptAtMs, + }); + } + } + + return { + refreshed, + stale, + warnings, + fetchedAtMs: lastSuccessfulRefreshAtMs, + }; + } + + async function isRevoked(jtiInput: string): Promise { + const jti = ensureNonEmptyString(jtiInput, "jti"); + await refreshIfStale(); + + if (claims === null) { + return false; + } + + return claims.revocations.some((revocation) => revocation.jti === jti); + } + + return { + refreshIfStale, + isRevoked, + }; +} diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index 81fb583..d4d7e9f 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -4,7 +4,10 @@ import { AppError, addSeconds, CrlJwtError, + createCrlCache, createNonceCache, + DEFAULT_CRL_MAX_AGE_MS, + DEFAULT_CRL_REFRESH_INTERVAL_MS, DEFAULT_NONCE_TTL_MS, decodeEd25519KeypairBase64url, decodeEd25519SignatureBase64url, @@ -194,4 +197,33 @@ describe("sdk", () => { }); expect(DEFAULT_NONCE_TTL_MS).toBe(300000); }); + + it("exports CRL cache helpers from package root", async () => { + const cache = createCrlCache({ + fetchLatest: async () => ({ + iss: "https://registry.clawdentity.dev", + jti: "01HF7YAT4TXP6AW5QNXA2Y9K43", + iat: 1700100000, + exp: 1700103600, + revocations: [ + { + jti: "01HF7YAT31JZHSMW1CG6Q6MHB7", + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + reason: "manual revoke", + revokedAt: 1700100010, + }, + ], + }), + clock: () => 1_000, + }); + + await expect(cache.isRevoked("01HF7YAT31JZHSMW1CG6Q6MHB7")).resolves.toBe( + true, + ); + await expect(cache.isRevoked("01HF7YAT5QJ4K3YVQJ6Q2F9M1N")).resolves.toBe( + false, + ); + 
expect(DEFAULT_CRL_REFRESH_INTERVAL_MS).toBe(300000); + expect(DEFAULT_CRL_MAX_AGE_MS).toBe(900000); + }); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index ba92322..1b33291 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -2,6 +2,18 @@ export const SDK_VERSION = "0.0.0"; export type { RegistryConfig } from "./config.js"; export { parseRegistryConfig, registryConfigSchema } from "./config.js"; +export type { + CrlCache, + CrlCacheOptions, + CrlCacheRefreshResult, + CrlCacheStaleBehavior, + CrlCacheWarning, +} from "./crl/cache.js"; +export { + createCrlCache, + DEFAULT_CRL_MAX_AGE_MS, + DEFAULT_CRL_REFRESH_INTERVAL_MS, +} from "./crl/cache.js"; export type { Ed25519KeypairBase64url, Ed25519KeypairBytes, From 88a27d12b48fcbc626c54627815217568bd1d118 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 21:16:05 +0530 Subject: [PATCH 018/190] fix(sdk): bypass refresh throttle when CRL cache is stale --- packages/sdk/AGENTS.md | 1 + packages/sdk/src/crl/cache.test.ts | 36 ++++++++++++++++++++++++++++-- packages/sdk/src/crl/cache.ts | 3 ++- 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 419c590..a268310 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -31,6 +31,7 @@ - Always parse CRL JWT payloads through protocol `parseCrlClaims` after signature verification so schema invariants (revocations non-empty, DID/ULID checks) are enforced. - CRL cache must parse fetched payloads through protocol `parseCrlClaims` before replacing cache state. - CRL cache stale behavior must be explicit and configurable (`fail-open` or `fail-closed`), with warnings surfaced on refresh failures. +- CRL cache refresh throttling must not block stale recovery attempts; once stale, refresh should be attempted immediately. - For HTTP signing errors, keep user-facing messages static and send extra context through `AppError.details`. 
- Enforce Ed25519 key lengths at SDK boundaries (`secretKey` 32 bytes, `publicKey` 32 bytes) so misconfiguration returns stable `AppError` codes. - Treat any decoded PoP proof that is not 64 bytes as `HTTP_SIGNATURE_INVALID_PROOF`. diff --git a/packages/sdk/src/crl/cache.test.ts b/packages/sdk/src/crl/cache.test.ts index 88db372..8f68208 100644 --- a/packages/sdk/src/crl/cache.test.ts +++ b/packages/sdk/src/crl/cache.test.ts @@ -72,8 +72,12 @@ describe("crl cache", () => { ]); now += 50; - await cache.refreshIfStale(); - expect(fetchCalls).toBe(1); + const second = await cache.refreshIfStale(); + expect(fetchCalls).toBe(2); + expect(second.warnings.map((warning) => warning.code)).toEqual([ + "CRL_REFRESH_FAILED", + "CRL_STALE", + ]); }); it("throws in fail-closed mode when stale cache cannot refresh", async () => { @@ -92,6 +96,34 @@ describe("crl cache", () => { }); }); + it("attempts refresh immediately when cache is stale even before refresh interval", async () => { + let now = 0; + let fetchCalls = 0; + const cache = createCrlCache({ + fetchLatest: async () => { + fetchCalls += 1; + if (fetchCalls === 1) { + throw new Error("temporary outage"); + } + return makeClaims(REVOCATION_JTI_A); + }, + staleBehavior: "fail-open", + refreshIntervalMs: 1000, + maxAgeMs: 100, + clock: () => now, + }); + + const first = await cache.refreshIfStale(); + expect(first.stale).toBe(true); + expect(fetchCalls).toBe(1); + + now = 150; + const second = await cache.refreshIfStale(); + expect(fetchCalls).toBe(2); + expect(second.refreshed).toBe(true); + expect(second.stale).toBe(false); + }); + it("refreshes when interval elapsed and uses latest revocation list", async () => { const now = 150; let currentClaims = makeClaims(REVOCATION_JTI_A); diff --git a/packages/sdk/src/crl/cache.ts b/packages/sdk/src/crl/cache.ts index 083c338..57c2147 100644 --- a/packages/sdk/src/crl/cache.ts +++ b/packages/sdk/src/crl/cache.ts @@ -188,8 +188,9 @@ export function createCrlCache(options: 
CrlCacheOptions): CrlCache { const warnings: CrlCacheWarning[] = []; let refreshed = false; const now = clock(); + const staleBeforeRefresh = isStale(now); - if (shouldRefresh(now) && canAttemptRefresh(now)) { + if (shouldRefresh(now) && (staleBeforeRefresh || canAttemptRefresh(now))) { lastRefreshAttemptAtMs = now; try { const nextClaims = parseCrlClaims(await options.fetchLatest()); From a2abf7497e07cdfdc4402ab176264ab8672f9ef4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 22:14:09 +0530 Subject: [PATCH 019/190] test(registry): add T10 schema contract verification --- apps/registry/AGENTS.md | 12 ++++++ apps/registry/src/db/AGENTS.md | 25 +++++++++++++ apps/registry/src/db/schema.contract.test.ts | 39 ++++++++++++++++++++ apps/registry/src/raw-imports.d.ts | 4 ++ 4 files changed, 80 insertions(+) create mode 100644 apps/registry/src/db/AGENTS.md create mode 100644 apps/registry/src/db/schema.contract.test.ts create mode 100644 apps/registry/src/raw-imports.d.ts diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 5d34d67..be929df 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -33,6 +33,18 @@ - Enforce per-actor authorization in handlers and queries (for example `owner_id`/`human_id` filters). - Fail closed when actor context is missing. +## T10 Schema Contract +- Source of truth for registry schema is `src/db/schema.ts`. +- Baseline migration for T10 verification is `drizzle/0000_common_marrow.sql`. +- T10 required tables: `humans`, `agents`, `revocations`, `api_keys`. +- T10 required indexes: + - `idx_agents_owner_status` on `agents(owner_id, status)` + - revocations `jti` lookup index (`revocations_jti_unique` satisfies this as a unique index) +- Keep schema, migration SQL, and `src/db/schema.contract.test.ts` synchronized in the same change. 
+- Migration verification command path: + - local apply: `pnpm -F @clawdentity/registry run db:migrate:local` + - fresh local smoke (non-destructive): `pnpm -F @clawdentity/registry exec wrangler d1 migrations apply clawdentity-db-dev --local --env dev --persist-to .wrangler/t10-fresh-smoke` + ## Rollback and Safety - For CI deploys, capture pre-deploy artifacts (deployments list, D1 time-travel marker, D1 export). - If deploy fails after migrations: diff --git a/apps/registry/src/db/AGENTS.md b/apps/registry/src/db/AGENTS.md new file mode 100644 index 0000000..e0cccdd --- /dev/null +++ b/apps/registry/src/db/AGENTS.md @@ -0,0 +1,25 @@ +# AGENTS.md (apps/registry/src/db) + +## Purpose +- Keep the registry database contract explicit and testable for T10 and follow-up tickets. + +## Source of Truth +- Define schema in `schema.ts`. +- Keep generated SQL migrations in `../../drizzle/`. +- Treat `schema.contract.test.ts` as the executable contract for required table/index coverage. + +## T10 Baseline Requirements +- Required tables: `humans`, `agents`, `revocations`, `api_keys`. +- Required index: `idx_agents_owner_status` on `agents(owner_id, status)`. +- Required revocations `jti` lookup index may be unique or non-unique; current baseline is `revocations_jti_unique`. + +## Change Rules +- When changing table/index names, update all of: + - `schema.ts` + - affected SQL migration files under `../../drizzle/` + - `schema.contract.test.ts` +- Avoid duplicate index definitions across schema and migration outputs. 
+ +## Verification +- Migration apply: `pnpm -F @clawdentity/registry run db:migrate:local` +- Fresh migration smoke: `pnpm -F @clawdentity/registry exec wrangler d1 migrations apply clawdentity-db-dev --local --env dev --persist-to .wrangler/t10-fresh-smoke` diff --git a/apps/registry/src/db/schema.contract.test.ts b/apps/registry/src/db/schema.contract.test.ts new file mode 100644 index 0000000..7400249 --- /dev/null +++ b/apps/registry/src/db/schema.contract.test.ts @@ -0,0 +1,39 @@ +import { getTableName } from "drizzle-orm"; +import { describe, expect, it } from "vitest"; +import migrationSql from "../../drizzle/0000_common_marrow.sql?raw"; +import { agents, api_keys, humans, revocations } from "./schema.js"; + +const t10RequiredTables = [ + "humans", + "agents", + "revocations", + "api_keys", +] as const; +describe("T10 schema contract", () => { + it("defines required table names in schema source", () => { + expect(getTableName(humans)).toBe("humans"); + expect(getTableName(agents)).toBe("agents"); + expect(getTableName(revocations)).toBe("revocations"); + expect(getTableName(api_keys)).toBe("api_keys"); + }); + + it("contains required tables in baseline migration SQL", () => { + for (const tableName of t10RequiredTables) { + expect(migrationSql).toMatch( + new RegExp(String.raw`CREATE TABLE \`${tableName}\``), + ); + } + }); + + it("creates the required owner/status index for agents", () => { + expect(migrationSql).toMatch( + /CREATE INDEX `idx_agents_owner_status` ON `agents` \(`owner_id`,`status`\);/, + ); + }); + + it("creates a revocations jti lookup index (unique or non-unique)", () => { + expect(migrationSql).toMatch( + /CREATE (?:UNIQUE )?INDEX `[^`]+` ON `revocations` \(`jti`\);/, + ); + }); +}); diff --git a/apps/registry/src/raw-imports.d.ts b/apps/registry/src/raw-imports.d.ts new file mode 100644 index 0000000..ce1e43f --- /dev/null +++ b/apps/registry/src/raw-imports.d.ts @@ -0,0 +1,4 @@ +declare module "*?raw" { + const content: string; + 
export default content; +} From 0866d99ebe574cce635c4f3f2c161f2800d1d861 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 22:34:45 +0530 Subject: [PATCH 020/190] feat(registry): bootstrap T11 server entry and health config tests --- apps/registry/AGENTS.md | 6 ++++ apps/registry/src/AGENTS.md | 16 ++++++++++ apps/registry/src/index.test.ts | 21 -------------- apps/registry/src/index.ts | 43 +-------------------------- apps/registry/src/server.test.ts | 36 +++++++++++++++++++++++ apps/registry/src/server.ts | 50 ++++++++++++++++++++++++++++++++ apps/registry/wrangler.jsonc | 2 +- 7 files changed, 110 insertions(+), 64 deletions(-) create mode 100644 apps/registry/src/AGENTS.md delete mode 100644 apps/registry/src/index.test.ts create mode 100644 apps/registry/src/server.test.ts create mode 100644 apps/registry/src/server.ts diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 5d34d67..8935d5d 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -21,6 +21,7 @@ ## Runtime and API - Preserve `/health` response contract: `{ status, version, environment }`. +- Keep the worker entrypoint in `src/server.ts`; use `src/index.ts` only as the package export wrapper. - Keep environment variables non-secret in `wrangler.jsonc` and secret values out of git. ## Validation @@ -28,6 +29,11 @@ - Run `pnpm -F @clawdentity/registry run test` and `pnpm -F @clawdentity/registry run typecheck` for app-level safety. - Keep Vitest path aliases pointed at workspace source (`packages/*/src/index.ts`) so tests do not depend on stale package `dist` outputs. +## Health & Config Readiness +- Treat `/health` as the release verification surface: return `status`, the build `version`, and the live `environment`. Prefer sourcing `version` from build metadata or an environment override rather than hard-coded `0.0.0` so deployments can be differentiated. 
+- Rely on `parseRegistryConfig` early and cache it once per worker—fail-fast with `CONFIG_VALIDATION_FAILED` errors when the schema rejects the runtime bindings. +- Cover both happy and failure paths in Vitest (status/headers plus config validation) so downstream tickets can rely on this contract without reintroducing regressions. + ## Database Authorization - Cloudflare D1 (SQLite) does not provide PostgreSQL-style RLS policies. - Enforce per-actor authorization in handlers and queries (for example `owner_id`/`human_id` filters). diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md new file mode 100644 index 0000000..d523d19 --- /dev/null +++ b/apps/registry/src/AGENTS.md @@ -0,0 +1,16 @@ +# AGENTS.md (apps/registry/src) + +## Purpose +- Keep runtime entrypoints and route contracts consistent for the registry worker. + +## Entrypoints +- `server.ts` is the Cloudflare Worker runtime entrypoint. +- `index.ts` should re-export `server.ts` for package build/test compatibility. + +## Health Contract +- `/health` must return HTTP 200 with `{ status, version, environment }` on valid config. +- Invalid runtime config must fail through the shared error handler and return `CONFIG_VALIDATION_FAILED`. + +## Validation +- Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. +- Run `pnpm -F @clawdentity/registry run typecheck` before commit. 
diff --git a/apps/registry/src/index.test.ts b/apps/registry/src/index.test.ts deleted file mode 100644 index 09d872a..0000000 --- a/apps/registry/src/index.test.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { REQUEST_ID_HEADER } from "@clawdentity/sdk"; -import { describe, expect, it } from "vitest"; -import app from "./index.js"; - -describe("GET /health", () => { - it("returns status ok", async () => { - const res = await app.request( - "/health", - {}, - { DB: {}, ENVIRONMENT: "test" }, - ); - expect(res.status).toBe(200); - const body = await res.json(); - expect(body).toEqual({ - status: "ok", - version: "0.0.0", - environment: "test", - }); - expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); - }); -}); diff --git a/apps/registry/src/index.ts b/apps/registry/src/index.ts index a609226..5f3a71f 100644 --- a/apps/registry/src/index.ts +++ b/apps/registry/src/index.ts @@ -1,42 +1 @@ -import { - createHonoErrorHandler, - createLogger, - createRequestContextMiddleware, - createRequestLoggingMiddleware, - parseRegistryConfig, - type RegistryConfig, -} from "@clawdentity/sdk"; -import { Hono } from "hono"; - -type Bindings = { DB: D1Database; ENVIRONMENT: string }; -const logger = createLogger({ service: "registry" }); -let cachedConfig: RegistryConfig | undefined; - -function getConfig(bindings: Bindings): RegistryConfig { - if (cachedConfig) { - return cachedConfig; - } - - cachedConfig = parseRegistryConfig(bindings); - return cachedConfig; -} - -const app = new Hono<{ - Bindings: Bindings; - Variables: { requestId: string }; -}>(); - -app.use("*", createRequestContextMiddleware()); -app.use("*", createRequestLoggingMiddleware(logger)); -app.onError(createHonoErrorHandler(logger)); - -app.get("/health", (c) => { - const config = getConfig(c.env); - return c.json({ - status: "ok", - version: "0.0.0", - environment: config.ENVIRONMENT, - }); -}); - -export default app; +export { default } from "./server.js"; diff --git a/apps/registry/src/server.test.ts 
b/apps/registry/src/server.test.ts new file mode 100644 index 0000000..1687c61 --- /dev/null +++ b/apps/registry/src/server.test.ts @@ -0,0 +1,36 @@ +import { REQUEST_ID_HEADER } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import app, { createRegistryApp } from "./server.js"; + +describe("GET /health", () => { + it("returns status ok", async () => { + const res = await app.request( + "/health", + {}, + { DB: {}, ENVIRONMENT: "test" }, + ); + expect(res.status).toBe(200); + const body = await res.json(); + expect(body).toEqual({ + status: "ok", + version: "0.0.0", + environment: "test", + }); + expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); + }); + + it("returns config validation error for invalid environment", async () => { + const res = await createRegistryApp().request( + "/health", + {}, + { DB: {}, ENVIRONMENT: "local" }, + ); + expect(res.status).toBe(500); + expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); + const body = (await res.json()) as { + error: { code: string; message: string }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is invalid"); + }); +}); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts new file mode 100644 index 0000000..618470e --- /dev/null +++ b/apps/registry/src/server.ts @@ -0,0 +1,50 @@ +import { + createHonoErrorHandler, + createLogger, + createRequestContextMiddleware, + createRequestLoggingMiddleware, + parseRegistryConfig, + type RegistryConfig, +} from "@clawdentity/sdk"; +import { Hono } from "hono"; + +type Bindings = { DB: D1Database; ENVIRONMENT: string }; +const logger = createLogger({ service: "registry" }); + +function createRegistryApp() { + let cachedConfig: RegistryConfig | undefined; + + function getConfig(bindings: Bindings): RegistryConfig { + if (cachedConfig) { + return cachedConfig; + } + + cachedConfig = parseRegistryConfig(bindings); + return cachedConfig; + } + + 
const app = new Hono<{ + Bindings: Bindings; + Variables: { requestId: string }; + }>(); + + app.use("*", createRequestContextMiddleware()); + app.use("*", createRequestLoggingMiddleware(logger)); + app.onError(createHonoErrorHandler(logger)); + + app.get("/health", (c) => { + const config = getConfig(c.env); + return c.json({ + status: "ok", + version: "0.0.0", + environment: config.ENVIRONMENT, + }); + }); + + return app; +} + +const app = createRegistryApp(); + +export { createRegistryApp }; +export default app; diff --git a/apps/registry/wrangler.jsonc b/apps/registry/wrangler.jsonc index acc90b8..f60b6bb 100644 --- a/apps/registry/wrangler.jsonc +++ b/apps/registry/wrangler.jsonc @@ -1,7 +1,7 @@ { "$schema": "../../node_modules/wrangler/config-schema.json", "name": "clawdentity-registry", - "main": "src/index.ts", + "main": "src/server.ts", "compatibility_date": "2025-09-01", "compatibility_flags": ["nodejs_compat"], "env": { From 6abb814703b73ba34326ad92d139333219ee0a87 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 12 Feb 2026 23:15:52 +0530 Subject: [PATCH 021/190] Update registry AGENTS auth rules --- apps/registry/AGENTS.md | 7 ++ apps/registry/src/auth/AGENTS.md | 17 +++ apps/registry/src/auth/apiKeyAuth.ts | 162 ++++++++++++++++++++++++ apps/registry/src/db/AGENTS.md | 18 +++ apps/registry/src/db/client.ts | 17 +++ apps/registry/src/server.test.ts | 182 +++++++++++++++++++++++++++ apps/registry/src/server.ts | 10 +- 7 files changed, 412 insertions(+), 1 deletion(-) create mode 100644 apps/registry/src/auth/AGENTS.md create mode 100644 apps/registry/src/auth/apiKeyAuth.ts create mode 100644 apps/registry/src/db/AGENTS.md create mode 100644 apps/registry/src/db/client.ts diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 8935d5d..8f322b0 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -45,3 +45,10 @@ - Roll back Worker to previous version. - Restore D1 from time-travel checkpoint. 
- When changing routes/domains, validate there is no overlap with existing zone routes. + +## Auth & API Keys +- Treat `Authorization: Bearer clw_pat_` as the default registry entry point for human- or CLI-issued requests and assume the value is SHA-256 hashed before persistence (see `api_keys.key_hash`). +- Prefer Drizzle ORM (`src/db/client.ts`) for API-key lookup and `last_used_at` updates; keep raw D1 SQL only for unsupported query shapes. +- Use constant-time comparison when checking the header-derived hash against `api_keys.key_hash`, only allow `status = 'active'`, and surface failures through `AppError` codes such as `API_KEY_MISSING`, `API_KEY_INVALID`, or `API_KEY_REVOKED` so the shared SDK error handler can produce consistent envelopes. +- Enrich the request context with `humanId`, `apiKeyId`, and `apiKeyName` for downstream handlers and update `last_used_at` as part of the auth middleware/handler so analytics and revocation tooling stay honest. +- Keep the middleware reversible: a no-auth `GET /health` can stay open but any future `/v1/*` endpoints should extend this middleware so unauthorized access never reaches the DB layer. diff --git a/apps/registry/src/auth/AGENTS.md b/apps/registry/src/auth/AGENTS.md new file mode 100644 index 0000000..f2fbc6c --- /dev/null +++ b/apps/registry/src/auth/AGENTS.md @@ -0,0 +1,17 @@ +# AGENTS.md (apps/registry/src/auth) + +## Purpose +- Keep registry authentication middleware consistent, testable, and fail-closed. + +## API Key Auth Rules +- Parse `Authorization` strictly as `Bearer `. +- Hash incoming PAT values with SHA-256 before lookup; never persist raw PATs. +- Use constant-time comparison for hash matching. +- Use Drizzle through `src/db/client.ts` for lookup/update queries so auth code stays schema-driven. +- Only allow `api_keys.status = "active"` and `humans.status = "active"`. +- On success, inject `ctx.human` for downstream handlers. 
+- Return auth failures through `AppError` with 401 status and stable codes. + +## Verification +- Cover valid, invalid, and missing PAT paths in `server.test.ts`. +- Verify middleware updates `api_keys.last_used_at` on successful auth. diff --git a/apps/registry/src/auth/apiKeyAuth.ts b/apps/registry/src/auth/apiKeyAuth.ts new file mode 100644 index 0000000..74f2816 --- /dev/null +++ b/apps/registry/src/auth/apiKeyAuth.ts @@ -0,0 +1,162 @@ +import { AppError, nowIso } from "@clawdentity/sdk"; +import { eq } from "drizzle-orm"; +import { createMiddleware } from "hono/factory"; +import { createDb } from "../db/client.js"; +import { api_keys, humans } from "../db/schema.js"; + +type ApiKeyQueryRow = { + api_key_id: string; + key_hash: string; + api_key_status: "active" | "revoked"; + api_key_name: string; + human_id: string; + human_did: string; + human_display_name: string; + human_role: "admin" | "user"; + human_status: "active" | "suspended"; +}; + +export type AuthenticatedHuman = { + id: string; + did: string; + displayName: string; + role: "admin" | "user"; + apiKey: { + id: string; + name: string; + }; +}; + +function parseBearerPat(authorization?: string): string { + if (!authorization) { + throw new AppError({ + code: "API_KEY_MISSING", + message: "Authorization header is required", + status: 401, + expose: true, + }); + } + + const [scheme, token] = authorization.trim().split(/\s+/, 2); + if (scheme !== "Bearer" || !token) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "Authorization must be in the format 'Bearer '", + status: 401, + expose: true, + }); + } + + if (!token.startsWith("clw_pat_")) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "Authorization must contain a PAT token", + status: 401, + expose: true, + }); + } + + return token; +} + +function constantTimeEqual(left: string, right: string): boolean { + const maxLength = Math.max(left.length, right.length); + let mismatch = left.length ^ right.length; + + for (let 
index = 0; index < maxLength; index += 1) { + const leftCode = index < left.length ? left.charCodeAt(index) : 0; + const rightCode = index < right.length ? right.charCodeAt(index) : 0; + mismatch |= leftCode ^ rightCode; + } + + return mismatch === 0; +} + +export async function hashApiKeyToken(token: string): Promise { + const digest = await crypto.subtle.digest( + "SHA-256", + new TextEncoder().encode(token), + ); + + return Array.from(new Uint8Array(digest)) + .map((value) => value.toString(16).padStart(2, "0")) + .join(""); +} + +export function createApiKeyAuth() { + return createMiddleware<{ + Bindings: { DB: D1Database }; + Variables: { human: AuthenticatedHuman }; + }>(async (c, next) => { + const db = createDb(c.env.DB); + const token = parseBearerPat(c.req.header("authorization")); + const tokenHash = await hashApiKeyToken(token); + const tokenPrefix = token.slice(0, 8); + + const lookupResult = await db + .select({ + api_key_id: api_keys.id, + key_hash: api_keys.key_hash, + api_key_status: api_keys.status, + api_key_name: api_keys.name, + human_id: humans.id, + human_did: humans.did, + human_display_name: humans.display_name, + human_role: humans.role, + human_status: humans.status, + }) + .from(api_keys) + .innerJoin(humans, eq(humans.id, api_keys.human_id)) + .where(eq(api_keys.key_prefix, tokenPrefix)); + + const matchedRow = + lookupResult.find((row: ApiKeyQueryRow) => + constantTimeEqual(row.key_hash, tokenHash), + ) ?? 
undefined; + + if (!matchedRow) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "API key is invalid", + status: 401, + expose: true, + }); + } + + if (matchedRow.api_key_status !== "active") { + throw new AppError({ + code: "API_KEY_REVOKED", + message: "API key is revoked", + status: 401, + expose: true, + }); + } + + if (matchedRow.human_status !== "active") { + throw new AppError({ + code: "API_KEY_INVALID", + message: "API key is invalid", + status: 401, + expose: true, + }); + } + + await db + .update(api_keys) + .set({ last_used_at: nowIso() }) + .where(eq(api_keys.id, matchedRow.api_key_id)); + + c.set("human", { + id: matchedRow.human_id, + did: matchedRow.human_did, + displayName: matchedRow.human_display_name, + role: matchedRow.human_role, + apiKey: { + id: matchedRow.api_key_id, + name: matchedRow.api_key_name, + }, + }); + + await next(); + }); +} diff --git a/apps/registry/src/db/AGENTS.md b/apps/registry/src/db/AGENTS.md new file mode 100644 index 0000000..a96893a --- /dev/null +++ b/apps/registry/src/db/AGENTS.md @@ -0,0 +1,18 @@ +# AGENTS.md (apps/registry/src/db) + +## Purpose +- Keep registry database access consistent for Cloudflare D1 and Drizzle ORM. + +## Source Of Truth +- Define tables and indexes only in `schema.ts`. +- Keep migration SQL in `apps/registry/drizzle/` synchronized with schema changes. +- Add/adjust tests whenever schema contracts or indexes change. + +## Query Rules +- Prefer Drizzle (`createDb`) for application reads/writes. +- Keep raw SQL only for cases Drizzle cannot express cleanly; document why inline. +- For auth/security paths, keep constant-time comparisons in application code when matching secrets/hashes. + +## Verification +- Run `pnpm -F @clawdentity/registry run db:migrate:local` for migration smoke checks. +- Run `pnpm -F @clawdentity/registry run test` and `pnpm -F @clawdentity/registry run typecheck` after DB-layer changes. 
diff --git a/apps/registry/src/db/client.ts b/apps/registry/src/db/client.ts new file mode 100644 index 0000000..8c6b939 --- /dev/null +++ b/apps/registry/src/db/client.ts @@ -0,0 +1,17 @@ +import { type DrizzleD1Database, drizzle } from "drizzle-orm/d1"; +import * as schema from "./schema.js"; + +type RegistryDb = DrizzleD1Database; + +const databaseCache = new WeakMap(); + +export function createDb(database: D1Database): RegistryDb { + const cachedDatabase = databaseCache.get(database); + if (cachedDatabase) { + return cachedDatabase; + } + + const db = drizzle(database, { schema }); + databaseCache.set(database, db); + return db; +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 1687c61..9b3c040 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,7 +1,92 @@ import { REQUEST_ID_HEADER } from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; +import { hashApiKeyToken } from "./auth/apiKeyAuth.js"; import app, { createRegistryApp } from "./server.js"; +type FakeD1Row = { + apiKeyId: string; + keyHash: string; + apiKeyStatus: "active" | "revoked"; + apiKeyName: string; + humanId: string; + humanDid: string; + humanDisplayName: string; + humanRole: "admin" | "user"; + humanStatus: "active" | "suspended"; +}; + +function createFakeDb(rows: FakeD1Row[]) { + const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; + + const database: D1Database = { + prepare(query: string) { + let params: unknown[] = []; + const normalizedQuery = query.toLowerCase(); + + return { + bind(...values: unknown[]) { + params = values; + return this; + }, + async all() { + if ( + normalizedQuery.includes('from "api_keys"') || + normalizedQuery.includes("from api_keys") + ) { + return { + results: rows.map((row) => ({ + api_key_id: row.apiKeyId, + key_hash: row.keyHash, + api_key_status: row.apiKeyStatus, + api_key_name: row.apiKeyName, + human_id: row.humanId, + human_did: row.humanDid, 
+ human_display_name: row.humanDisplayName, + human_role: row.humanRole, + human_status: row.humanStatus, + })), + }; + } + return { results: [] }; + }, + async raw() { + if ( + normalizedQuery.includes('from "api_keys"') || + normalizedQuery.includes("from api_keys") + ) { + return rows.map((row) => [ + row.apiKeyId, + row.keyHash, + row.apiKeyStatus, + row.apiKeyName, + row.humanId, + row.humanDid, + row.humanDisplayName, + row.humanRole, + row.humanStatus, + ]); + } + return []; + }, + async run() { + if ( + normalizedQuery.includes('update "api_keys"') || + normalizedQuery.includes("update api_keys") + ) { + updates.push({ + lastUsedAt: String(params[0] ?? ""), + apiKeyId: String(params[1] ?? ""), + }); + } + return { success: true } as D1Result; + }, + } as D1PreparedStatement; + }, + } as D1Database; + + return { database, updates }; +} + describe("GET /health", () => { it("returns status ok", async () => { const res = await app.request( @@ -34,3 +119,100 @@ describe("GET /health", () => { expect(body.error.message).toBe("Registry configuration is invalid"); }); }); + +describe("GET /v1/me", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/me", + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 401 for invalid PAT", async () => { + const validToken = "clw_pat_valid-token-value"; + const validHash = await hashApiKeyToken(validToken); + const { database } = createFakeDb([ + { + apiKeyId: "key-1", + keyHash: validHash, + apiKeyStatus: "active", + apiKeyName: "ci", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }, + ]); + + const res = await createRegistryApp().request( + "/v1/me", + { + headers: { 
Authorization: "Bearer clw_pat_invalid-token-value" }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_INVALID"); + }); + + it("authenticates valid PAT and injects ctx.human", async () => { + const validToken = "clw_pat_valid-token-value"; + const validHash = await hashApiKeyToken(validToken); + const { database, updates } = createFakeDb([ + { + apiKeyId: "key-1", + keyHash: validHash, + apiKeyStatus: "active", + apiKeyName: "ci", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }, + ]); + + const res = await createRegistryApp().request( + "/v1/me", + { + headers: { Authorization: `Bearer ${validToken}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + apiKey: { id: string; name: string }; + }; + }; + expect(body.human).toEqual({ + id: "human-1", + did: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + displayName: "Ravi", + role: "admin", + apiKey: { + id: "key-1", + name: "ci", + }, + }); + expect(updates).toHaveLength(1); + expect(updates[0]?.apiKeyId).toBe("key-1"); + }); +}); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 618470e..b341166 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -7,6 +7,10 @@ import { type RegistryConfig, } from "@clawdentity/sdk"; import { Hono } from "hono"; +import { + type AuthenticatedHuman, + createApiKeyAuth, +} from "./auth/apiKeyAuth.js"; type Bindings = { DB: D1Database; ENVIRONMENT: string }; const logger = createLogger({ service: "registry" }); @@ -25,7 +29,7 @@ function createRegistryApp() { const app = new Hono<{ Bindings: Bindings; - Variables: { requestId: 
string }; + Variables: { requestId: string; human: AuthenticatedHuman }; }>(); app.use("*", createRequestContextMiddleware()); @@ -41,6 +45,10 @@ function createRegistryApp() { }); }); + app.get("/v1/me", createApiKeyAuth(), (c) => { + return c.json({ human: c.get("human") }); + }); + return app; } From f3f58a43aa25886614668823cf64beb7cc17cbed Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 11:24:56 +0530 Subject: [PATCH 022/190] Review PAT auth updates --- apps/registry/src/AGENTS.md | 1 + apps/registry/src/auth/AGENTS.md | 2 ++ apps/registry/src/auth/apiKeyAuth.ts | 25 +++++++++++++++-- apps/registry/src/server.test.ts | 40 +++++++++++++++++++++++++--- 4 files changed, 63 insertions(+), 5 deletions(-) diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index d523d19..51d1cc3 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -14,3 +14,4 @@ ## Validation - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. +- When using fake D1 adapters in route tests, make select responses honor bound parameters so query-shape regressions are caught. diff --git a/apps/registry/src/auth/AGENTS.md b/apps/registry/src/auth/AGENTS.md index f2fbc6c..022e243 100644 --- a/apps/registry/src/auth/AGENTS.md +++ b/apps/registry/src/auth/AGENTS.md @@ -5,7 +5,9 @@ ## API Key Auth Rules - Parse `Authorization` strictly as `Bearer `. +- Reject marker-only PATs (for example, `clw_pat_` without entropy). - Hash incoming PAT values with SHA-256 before lookup; never persist raw PATs. +- Derive `api_keys.key_prefix` lookup keys from the PAT marker plus token entropy (not the static marker alone), and keep derivation logic in one shared helper. - Use constant-time comparison for hash matching. - Use Drizzle through `src/db/client.ts` for lookup/update queries so auth code stays schema-driven. 
- Only allow `api_keys.status = "active"` and `humans.status = "active"`. diff --git a/apps/registry/src/auth/apiKeyAuth.ts b/apps/registry/src/auth/apiKeyAuth.ts index 74f2816..af7e518 100644 --- a/apps/registry/src/auth/apiKeyAuth.ts +++ b/apps/registry/src/auth/apiKeyAuth.ts @@ -16,6 +16,9 @@ type ApiKeyQueryRow = { human_status: "active" | "suspended"; }; +const PAT_TOKEN_MARKER = "clw_pat_"; +const PAT_LOOKUP_ENTROPY_LENGTH = 8; + export type AuthenticatedHuman = { id: string; did: string; @@ -47,7 +50,16 @@ function parseBearerPat(authorization?: string): string { }); } - if (!token.startsWith("clw_pat_")) { + if (!token.startsWith(PAT_TOKEN_MARKER)) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "Authorization must contain a PAT token", + status: 401, + expose: true, + }); + } + + if (token.length <= PAT_TOKEN_MARKER.length) { throw new AppError({ code: "API_KEY_INVALID", message: "Authorization must contain a PAT token", @@ -59,6 +71,15 @@ function parseBearerPat(authorization?: string): string { return token; } +export function deriveApiKeyLookupPrefix(token: string): string { + const entropyPrefix = token.slice( + PAT_TOKEN_MARKER.length, + PAT_TOKEN_MARKER.length + PAT_LOOKUP_ENTROPY_LENGTH, + ); + + return `${PAT_TOKEN_MARKER}${entropyPrefix}`; +} + function constantTimeEqual(left: string, right: string): boolean { const maxLength = Math.max(left.length, right.length); let mismatch = left.length ^ right.length; @@ -91,7 +112,7 @@ export function createApiKeyAuth() { const db = createDb(c.env.DB); const token = parseBearerPat(c.req.header("authorization")); const tokenHash = await hashApiKeyToken(token); - const tokenPrefix = token.slice(0, 8); + const tokenPrefix = deriveApiKeyLookupPrefix(token); const lookupResult = await db .select({ diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 9b3c040..fecfb62 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,10 +1,14 
@@ import { REQUEST_ID_HEADER } from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; -import { hashApiKeyToken } from "./auth/apiKeyAuth.js"; +import { + deriveApiKeyLookupPrefix, + hashApiKeyToken, +} from "./auth/apiKeyAuth.js"; import app, { createRegistryApp } from "./server.js"; type FakeD1Row = { apiKeyId: string; + keyPrefix: string; keyHash: string; apiKeyStatus: "active" | "revoked"; apiKeyName: string; @@ -33,8 +37,14 @@ function createFakeDb(rows: FakeD1Row[]) { normalizedQuery.includes('from "api_keys"') || normalizedQuery.includes("from api_keys") ) { + const requestedKeyPrefix = + typeof params[0] === "string" ? params[0] : ""; + const matchingRows = rows.filter( + (row) => row.keyPrefix === requestedKeyPrefix, + ); + return { - results: rows.map((row) => ({ + results: matchingRows.map((row) => ({ api_key_id: row.apiKeyId, key_hash: row.keyHash, api_key_status: row.apiKeyStatus, @@ -54,7 +64,13 @@ function createFakeDb(rows: FakeD1Row[]) { normalizedQuery.includes('from "api_keys"') || normalizedQuery.includes("from api_keys") ) { - return rows.map((row) => [ + const requestedKeyPrefix = + typeof params[0] === "string" ? 
params[0] : ""; + const matchingRows = rows.filter( + (row) => row.keyPrefix === requestedKeyPrefix, + ); + + return matchingRows.map((row) => [ row.apiKeyId, row.keyHash, row.apiKeyStatus, @@ -141,6 +157,7 @@ describe("GET /v1/me", () => { const { database } = createFakeDb([ { apiKeyId: "key-1", + keyPrefix: deriveApiKeyLookupPrefix(validToken), keyHash: validHash, apiKeyStatus: "active", apiKeyName: "ci", @@ -167,12 +184,29 @@ describe("GET /v1/me", () => { expect(body.error.code).toBe("API_KEY_INVALID"); }); + it("returns 401 when PAT contains only marker", async () => { + const res = await createRegistryApp().request( + "/v1/me", + { + headers: { Authorization: "Bearer clw_pat_" }, + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_INVALID"); + }); + it("authenticates valid PAT and injects ctx.human", async () => { const validToken = "clw_pat_valid-token-value"; const validHash = await hashApiKeyToken(validToken); const { database, updates } = createFakeDb([ { apiKeyId: "key-1", + keyPrefix: deriveApiKeyLookupPrefix(validToken), keyHash: validHash, apiKeyStatus: "active", apiKeyName: "ci", From 748c62d327f0ecee13727e63662679c4aca34a62 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 12:53:22 +0530 Subject: [PATCH 023/190] feat(registry): add well-known signing keyset endpoint --- apps/registry/.env.example | 1 + apps/registry/AGENTS.md | 5 + apps/registry/src/AGENTS.md | 5 + apps/registry/src/server.test.ts | 193 ++++++++++++++++++++++++++++++- apps/registry/src/server.ts | 21 +++- packages/sdk/AGENTS.md | 1 + packages/sdk/src/config.test.ts | 59 ++++++++++ packages/sdk/src/config.ts | 43 +++++++ 8 files changed, 326 insertions(+), 2 deletions(-) diff --git a/apps/registry/.env.example b/apps/registry/.env.example index fdce52f..8c8ec6d 100644 --- a/apps/registry/.env.example +++ 
b/apps/registry/.env.example @@ -1,6 +1,7 @@ # Cloudflare (set via wrangler secret) # BOOTSTRAP_SECRET= # REGISTRY_SIGNING_KEY= +# REGISTRY_SIGNING_KEYS=","status":"active"}]> # wrangler.jsonc vars (non-secret) # ENVIRONMENT=production diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 8f322b0..dc0e2c9 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -52,3 +52,8 @@ - Use constant-time comparison when checking the header-derived hash against `api_keys.key_hash`, only allow `status = 'active'`, and surface failures through `AppError` codes such as `API_KEY_MISSING`, `API_KEY_INVALID`, or `API_KEY_REVOKED` so the shared SDK error handler can produce consistent envelopes. - Enrich the request context with `humanId`, `apiKeyId`, and `apiKeyName` for downstream handlers and update `last_used_at` as part of the auth middleware/handler so analytics and revocation tooling stay honest. - Keep the middleware reversible: a no-auth `GET /health` can stay open but any future `/v1/*` endpoints should extend this middleware so unauthorized access never reaches the DB layer. + +## Public Key Discovery +- `GET /.well-known/claw-keys.json` is the canonical public key discovery endpoint for offline AIT verification. +- Source key material from validated runtime config (`REGISTRY_SIGNING_KEYS` JSON) and return entries with `kid`, `alg`, `crv`, `x`, and `status`. +- Keep cache headers explicit (`max-age=300` + `stale-while-revalidate`) to reduce client fetch load while allowing key rotation to propagate predictably. diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 51d1cc3..fd3b187 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -11,6 +11,11 @@ - `/health` must return HTTP 200 with `{ status, version, environment }` on valid config. - Invalid runtime config must fail through the shared error handler and return `CONFIG_VALIDATION_FAILED`. 
+## Registry Keyset Contract +- `/.well-known/claw-keys.json` is a public endpoint and must remain unauthenticated. +- Return `keys[]` entries with `kid`, `alg`, `crv`, `x`, and `status` so SDK/offline verifiers can consume directly. +- Keep cache headers explicit and short-lived (`max-age=300` + `stale-while-revalidate`) to balance key rotation with client efficiency. + ## Validation - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index fecfb62..8ca448b 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,4 +1,16 @@ -import { REQUEST_ID_HEADER } from "@clawdentity/sdk"; +import { + type AitClaims, + encodeBase64url, + generateUlid, + makeAgentDid, + makeHumanDid, +} from "@clawdentity/protocol"; +import { + generateEd25519Keypair, + REQUEST_ID_HEADER, + signAIT, + verifyAIT, +} from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; import { deriveApiKeyLookupPrefix, @@ -6,6 +18,29 @@ import { } from "./auth/apiKeyAuth.js"; import app, { createRegistryApp } from "./server.js"; +function makeAitClaims(publicKey: Uint8Array): AitClaims { + const now = Math.floor(Date.now() / 1000); + return { + iss: "https://registry.clawdentity.dev", + sub: makeAgentDid(generateUlid(1700100000000)), + ownerDid: makeHumanDid(generateUlid(1700100001000)), + name: "agent-registry-01", + framework: "openclaw", + description: "registry key publishing verification path", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(publicKey), + }, + }, + iat: now, + nbf: now - 5, + exp: now + 3600, + jti: generateUlid(1700100002000), + }; +} + type FakeD1Row = { apiKeyId: string; keyPrefix: string; @@ -136,6 +171,162 @@ describe("GET /health", () => { }); }); +describe("GET /.well-known/claw-keys.json", () => { + it("returns configured 
registry signing keys with cache headers", async () => { + const res = await createRegistryApp().request( + "/.well-known/claw-keys.json", + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(200); + expect(res.headers.get("Cache-Control")).toBe( + "public, max-age=300, s-maxage=300, stale-while-revalidate=60", + ); + + const body = (await res.json()) as { + keys: Array<{ + kid: string; + alg: string; + crv: string; + x: string; + status: string; + }>; + }; + expect(body.keys).toEqual([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]); + }); + + it("supports fetch-and-verify AIT flow using published keys", async () => { + const signer = await generateEd25519Keypair(); + const claims = makeAitClaims(signer.publicKey); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: signer, + }); + + const keysResponse = await createRegistryApp().request( + "/.well-known/claw-keys.json", + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const verifiedClaims = await verifyAIT({ + token, + expectedIssuer: claims.iss, + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(verifiedClaims).toEqual(claims); + }); + + it("does not verify AIT 
when published key status is revoked", async () => { + const signer = await generateEd25519Keypair(); + const claims = makeAitClaims(signer.publicKey); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: signer, + }); + + const keysResponse = await createRegistryApp().request( + "/.well-known/claw-keys.json", + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "revoked", + }, + ]), + }, + ); + + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + await expect( + verifyAIT({ + token, + expectedIssuer: claims.iss, + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }), + ).rejects.toThrow(/kid/i); + }); +}); + describe("GET /v1/me", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index b341166..3328c33 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -12,8 +12,14 @@ import { createApiKeyAuth, } from "./auth/apiKeyAuth.js"; -type Bindings = { DB: D1Database; ENVIRONMENT: string }; +type Bindings = { + DB: D1Database; + ENVIRONMENT: string; + REGISTRY_SIGNING_KEYS?: string; +}; const logger = createLogger({ service: "registry" }); +const REGISTRY_KEY_CACHE_CONTROL = + "public, max-age=300, s-maxage=300, stale-while-revalidate=60"; function createRegistryApp() { let cachedConfig: RegistryConfig | undefined; @@ -45,6 +51,19 @@ function createRegistryApp() { }); }); + app.get("/.well-known/claw-keys.json", (c) => { + const config = getConfig(c.env); + return c.json( + { + keys: 
config.REGISTRY_SIGNING_KEYS ?? [], + }, + 200, + { + "Cache-Control": REGISTRY_KEY_CACHE_CONTROL, + }, + ); + }); + app.get("/v1/me", createApiKeyAuth(), (c) => { return c.json({ human: c.get("human") }); }); diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index a268310..ff8f72f 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -37,6 +37,7 @@ - Treat any decoded PoP proof that is not 64 bytes as `HTTP_SIGNATURE_INVALID_PROOF`. - Nonce cache accept path must prune expired entries across all agent buckets to keep memory bounded under high-cardinality agent traffic. - Nonce cache must validate the top-level input shape before reading fields so invalid JS callers receive structured `AppError`s instead of runtime `TypeError`s. +- Registry config parsing must validate `REGISTRY_SIGNING_KEYS` as JSON before runtime use so keyset endpoints fail fast with `CONFIG_VALIDATION_FAILED` on malformed key documents. ## Testing Rules - Unit test each shared module. diff --git a/packages/sdk/src/config.test.ts b/packages/sdk/src/config.test.ts index 71730f5..4269007 100644 --- a/packages/sdk/src/config.test.ts +++ b/packages/sdk/src/config.test.ts @@ -9,6 +9,31 @@ describe("config helpers", () => { }); }); + it("parses REGISTRY_SIGNING_KEYS into validated key entries", () => { + const config = parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }); + + expect(config.REGISTRY_SIGNING_KEYS).toEqual([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]); + }); + it("throws AppError on invalid registry config", () => { try { parseRegistryConfig({ ENVIRONMENT: "local" }); @@ -18,4 +43,38 @@ describe("config helpers", () => { expect((error as 
AppError).code).toBe("CONFIG_VALIDATION_FAILED"); } }); + + it("throws AppError when REGISTRY_SIGNING_KEYS is invalid JSON", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_SIGNING_KEYS: "not-json", + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); + + it("throws AppError when REGISTRY_SIGNING_KEYS entries violate schema", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "", + alg: "ES256", + crv: "Ed25519", + x: "", + status: "active", + }, + ]), + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); }); diff --git a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index ab7253c..bd8f556 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -2,11 +2,54 @@ import { z } from "zod"; import { AppError } from "./exceptions.js"; const environmentSchema = z.enum(["development", "production", "test"]); +const registrySigningKeyStatusSchema = z.enum(["active", "revoked"]); + +const registrySigningPublicKeySchema = z.object({ + kid: z.string().min(1), + alg: z.literal("EdDSA"), + crv: z.literal("Ed25519"), + x: z.string().min(1), + status: registrySigningKeyStatusSchema, +}); + +const registrySigningKeysSchema = z.array(registrySigningPublicKeySchema); + +const registrySigningKeysEnvSchema = z + .string() + .min(1) + .transform((value, ctx) => { + let parsed: unknown; + + try { + parsed = JSON.parse(value); + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "REGISTRY_SIGNING_KEYS must be valid JSON", + }); + return z.NEVER; + } + + const keys = registrySigningKeysSchema.safeParse(parsed); + if (!keys.success) { + for 
(const issue of keys.error.issues) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: issue.message, + path: issue.path, + }); + } + return z.NEVER; + } + + return keys.data; + }); export const registryConfigSchema = z.object({ ENVIRONMENT: environmentSchema, BOOTSTRAP_SECRET: z.string().min(1).optional(), REGISTRY_SIGNING_KEY: z.string().min(1).optional(), + REGISTRY_SIGNING_KEYS: registrySigningKeysEnvSchema.optional(), }); export type RegistryConfig = z.infer; From af59790dd15afabeb4c1cccc1dfed9731518f2ec Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 13:03:23 +0530 Subject: [PATCH 024/190] fix(sdk): harden registry signing keyset validation --- packages/sdk/AGENTS.md | 1 + packages/sdk/src/config.test.ts | 70 +++++++++++++++++++++++++++++++++ packages/sdk/src/config.ts | 56 ++++++++++++++++++++++---- 3 files changed, 119 insertions(+), 8 deletions(-) diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index ff8f72f..82bcdff 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -38,6 +38,7 @@ - Nonce cache accept path must prune expired entries across all agent buckets to keep memory bounded under high-cardinality agent traffic. - Nonce cache must validate the top-level input shape before reading fields so invalid JS callers receive structured `AppError`s instead of runtime `TypeError`s. - Registry config parsing must validate `REGISTRY_SIGNING_KEYS` as JSON before runtime use so keyset endpoints fail fast with `CONFIG_VALIDATION_FAILED` on malformed key documents. +- Registry keyset validation must reject duplicate `kid` values and malformed `x` key material (non-base64url or non-32-byte Ed25519) so verifier behavior cannot become order-dependent. ## Testing Rules - Unit test each shared module. 
diff --git a/packages/sdk/src/config.test.ts b/packages/sdk/src/config.test.ts index 4269007..9633f6e 100644 --- a/packages/sdk/src/config.test.ts +++ b/packages/sdk/src/config.test.ts @@ -77,4 +77,74 @@ describe("config helpers", () => { expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); } }); + + it("throws AppError when REGISTRY_SIGNING_KEYS contains duplicate kids", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICE", + status: "revoked", + }, + ]), + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); + + it("throws AppError when REGISTRY_SIGNING_KEYS has malformed x", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "not+base64url", + status: "active", + }, + ]), + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); + + it("throws AppError when REGISTRY_SIGNING_KEYS x length is not Ed25519", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBA", + status: "active", + }, + ]), + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); }); diff --git 
a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index bd8f556..8388664 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -1,18 +1,58 @@ +import { decodeBase64url } from "@clawdentity/protocol"; import { z } from "zod"; import { AppError } from "./exceptions.js"; const environmentSchema = z.enum(["development", "production", "test"]); const registrySigningKeyStatusSchema = z.enum(["active", "revoked"]); +const ED25519_PUBLIC_KEY_LENGTH = 32; -const registrySigningPublicKeySchema = z.object({ - kid: z.string().min(1), - alg: z.literal("EdDSA"), - crv: z.literal("Ed25519"), - x: z.string().min(1), - status: registrySigningKeyStatusSchema, -}); +const registrySigningPublicKeySchema = z + .object({ + kid: z.string().min(1), + alg: z.literal("EdDSA"), + crv: z.literal("Ed25519"), + x: z.string().min(1), + status: registrySigningKeyStatusSchema, + }) + .superRefine((value, ctx) => { + let decodedPublicKey: Uint8Array; + + try { + decodedPublicKey = decodeBase64url(value.x); + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["x"], + message: "x must be valid base64url", + }); + return; + } + + if (decodedPublicKey.length !== ED25519_PUBLIC_KEY_LENGTH) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["x"], + message: "x must decode to 32-byte Ed25519 public key", + }); + } + }); -const registrySigningKeysSchema = z.array(registrySigningPublicKeySchema); +const registrySigningKeysSchema = z + .array(registrySigningPublicKeySchema) + .superRefine((keys, ctx) => { + const seenKids = new Set(); + for (const [index, key] of keys.entries()) { + if (seenKids.has(key.kid)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [index, "kid"], + message: `Duplicate kid "${key.kid}" is not allowed`, + }); + } else { + seenKids.add(key.kid); + } + } + }); const registrySigningKeysEnvSchema = z .string() From b7c3b4ffb99968965a3b7ae9c1eafbe9e8cb6b6d Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 14:19:44 
+0530 Subject: [PATCH 025/190] feat(t14): add agent registration endpoint with centralized verbose-error policy --- apps/registry/AGENTS.md | 7 + apps/registry/src/AGENTS.md | 15 + apps/registry/src/agentRegistration.ts | 332 +++++++++++++ apps/registry/src/registrySigner.ts | 140 ++++++ apps/registry/src/server.test.ts | 496 +++++++++++++++++-- apps/registry/src/server.ts | 61 +++ packages/sdk/AGENTS.md | 3 + packages/sdk/src/config.ts | 3 +- packages/sdk/src/crypto/ed25519.test.ts | 14 + packages/sdk/src/crypto/ed25519.ts | 6 + packages/sdk/src/index.test.ts | 2 + packages/sdk/src/index.ts | 3 + packages/sdk/src/runtime-environment.test.ts | 24 + packages/sdk/src/runtime-environment.ts | 13 + 14 files changed, 1085 insertions(+), 34 deletions(-) create mode 100644 apps/registry/src/agentRegistration.ts create mode 100644 apps/registry/src/registrySigner.ts create mode 100644 packages/sdk/src/runtime-environment.test.ts create mode 100644 packages/sdk/src/runtime-environment.ts diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index dc0e2c9..0cf9d81 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -57,3 +57,10 @@ - `GET /.well-known/claw-keys.json` is the canonical public key discovery endpoint for offline AIT verification. - Source key material from validated runtime config (`REGISTRY_SIGNING_KEYS` JSON) and return entries with `kid`, `alg`, `crv`, `x`, and `status`. - Keep cache headers explicit (`max-age=300` + `stale-while-revalidate`) to reduce client fetch load while allowing key rotation to propagate predictably. + +## Agent Registration Testing +- POST `/v1/agents` coverage should stay offline/deterministic: reuse or extend the fake `D1Database` helper so Vitest can assert the exact SQL inserted into `agents` without touching a real D1 instance. 
+- Validate every registration payload against the `packages/protocol` `aitClaimsSchema` expectations (agent DID format, name char set/length, base64url public key, `exp > nbf`, etc.) and expect a structured `AppError` when any field fails so new tests exercise each validation branch. +- Reuse `createApiKeyAuth` tooling to prove `API_KEY_MISSING`, `API_KEY_INVALID`, `API_KEY_REVOKED`, and suspended-human failures before the handler even touches the DB; all auth tests should assert the matching error code/messages that will inform clients about misconfigured PATs. +- Assert that an accepted registration call writes exactly one `agents` row (status `active`, correct `owner_id`, `public_key`, and `current_jti`) and does not leave partial state on failure. Tests should also ensure `gateway_hint`, `expires_at`, and `framework` values propagate when provided so the schema stays in sync. +- When `REGISTRY_SIGNING_KEYS` exposes an active key, the handler must return a signed AIT whose `kid` matches the published key, and clients must be able to verify it via `verifyAIT`/`/.well-known/claw-keys.json`. Add a companion failure test that rejects registration when no valid signing key exists (missing `kid`, revoked status, or malformed `x`). diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index fd3b187..8b4b60d 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -20,3 +20,18 @@ - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. - When using fake D1 adapters in route tests, make select responses honor bound parameters so query-shape regressions are caught. + +## POST /v1/agents Contract +- Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. +- Validate request payload fields with explicit rules: + - `name`: protocol-compatible agent name validation. 
+ - `framework`: optional; default to `openclaw` when omitted. + - `publicKey`: base64url Ed25519 key that decodes to 32 bytes. + - `ttlDays`: optional; default `30`; allow only integer range `1..90`. +- Keep request parsing and validation in a reusable helper module (`agentRegistration.ts`) so future routes can share the same constraints without duplicating schema logic. +- Keep error detail exposure environment-aware via `shouldExposeVerboseErrors` (shared SDK helper path): return generic messages without internals in `production`, but include validation/config details in `development`/`test` for debugging. +- Persist `agents.current_jti` and `agents.expires_at` on insert; generated AIT claims (`jti`, `exp`) must stay in sync with those persisted values. +- Use shared SDK datetime helpers (`nowIso`, `addSeconds`) for issuance/expiry math instead of ad-hoc `Date.now()` arithmetic in route logic. +- Resolve signing material through a reusable signer helper (`registrySigner.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. +- Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.api.clawdentity.com`, `production` -> `https://api.clawdentity.com`) rather than request-origin inference. +- Response shape remains `{ agent, ait }`; the token must be verifiable with the public keyset returned by `/.well-known/claw-keys.json`. 
diff --git a/apps/registry/src/agentRegistration.ts b/apps/registry/src/agentRegistration.ts new file mode 100644 index 0000000..5ea069d --- /dev/null +++ b/apps/registry/src/agentRegistration.ts @@ -0,0 +1,332 @@ +import { + type AitClaims, + decodeBase64url, + generateUlid, + makeAgentDid, + validateAgentName, +} from "@clawdentity/protocol"; +import { + AppError, + addSeconds, + nowIso, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const DEFAULT_AGENT_FRAMEWORK = "openclaw"; +const DEFAULT_AGENT_TTL_DAYS = 30; +const MAX_FRAMEWORK_LENGTH = 32; +const MIN_AGENT_TTL_DAYS = 1; +const MAX_AGENT_TTL_DAYS = 90; +const DAY_IN_SECONDS = 24 * 60 * 60; +const ED25519_PUBLIC_KEY_LENGTH = 32; +const REGISTRY_ISSUER_BY_ENVIRONMENT: Record< + RegistryConfig["ENVIRONMENT"], + string +> = { + development: "https://dev.api.clawdentity.com", + production: "https://api.clawdentity.com", + test: "https://dev.api.clawdentity.com", +}; + +type AgentRegistrationBody = { + name: string; + framework?: string; + publicKey: string; + ttlDays?: number; +}; + +export type AgentRegistrationResult = { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: "active"; + expiresAt: string; + createdAt: string; + updatedAt: string; + }; + claims: AitClaims; +}; + +function invalidRegistration(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_REGISTRATION_INVALID", + message: exposeDetails + ? "Agent registration payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +function addFieldError( + fieldErrors: Record, + field: string, + message: string, +): void { + const errors = fieldErrors[field] ?? []; + errors.push(message); + fieldErrors[field] = errors; +} + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function parseName( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError(fieldErrors, "name", "name is required"); + return ""; + } + + const value = input.trim(); + if (!validateAgentName(value)) { + addFieldError( + fieldErrors, + "name", + "name contains invalid characters or length", + ); + } + + return value; +} + +function parseFramework( + input: unknown, + fieldErrors: Record, +): string | undefined { + if (input === undefined) { + return undefined; + } + + if (typeof input !== "string") { + addFieldError(fieldErrors, "framework", "framework must be a string"); + return undefined; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "framework", "framework is required"); + return undefined; + } + + if (value.length > MAX_FRAMEWORK_LENGTH) { + addFieldError( + fieldErrors, + "framework", + `framework must be at most ${MAX_FRAMEWORK_LENGTH} characters`, + ); + } + + if (hasControlChars(value)) { + addFieldError( + fieldErrors, + "framework", + "framework contains control characters", + ); + } + + return value; +} + +function parsePublicKey( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError(fieldErrors, "publicKey", "publicKey is required"); + return ""; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "publicKey", "publicKey is required"); + return ""; + } + + let decodedKey: Uint8Array; + try { + decodedKey = 
decodeBase64url(value); + } catch { + addFieldError( + fieldErrors, + "publicKey", + "publicKey must be a base64url-encoded 32-byte Ed25519 key", + ); + return value; + } + + if (decodedKey.length !== ED25519_PUBLIC_KEY_LENGTH) { + addFieldError( + fieldErrors, + "publicKey", + "publicKey must be a base64url-encoded 32-byte Ed25519 key", + ); + } + + return value; +} + +function parseTtlDays( + input: unknown, + fieldErrors: Record, +): number | undefined { + if (input === undefined) { + return undefined; + } + + if (typeof input !== "number" || !Number.isFinite(input)) { + addFieldError(fieldErrors, "ttlDays", "ttlDays must be a number"); + return undefined; + } + + if (!Number.isInteger(input)) { + addFieldError(fieldErrors, "ttlDays", "ttlDays must be an integer"); + return undefined; + } + + if (input < MIN_AGENT_TTL_DAYS || input > MAX_AGENT_TTL_DAYS) { + addFieldError( + fieldErrors, + "ttlDays", + `ttlDays must be between ${MIN_AGENT_TTL_DAYS} and ${MAX_AGENT_TTL_DAYS}`, + ); + return undefined; + } + + return input; +} + +export function parseAgentRegistrationBody( + payload: unknown, + environment: RegistryConfig["ENVIRONMENT"], +): AgentRegistrationBody { + const fieldErrors: Record = {}; + + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw invalidRegistration({ + environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, + formErrors: [], + }, + }); + } + + const objectPayload = payload as Record; + + const parsed: AgentRegistrationBody = { + name: parseName(objectPayload.name, fieldErrors), + framework: parseFramework(objectPayload.framework, fieldErrors), + publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), + ttlDays: parseTtlDays(objectPayload.ttlDays, fieldErrors), + }; + + if (Object.keys(fieldErrors).length > 0) { + throw invalidRegistration({ + environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return parsed; +} + +export function 
buildAgentRegistration(input: { + payload: unknown; + ownerDid: string; + issuer: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): AgentRegistrationResult { + const parsedBody = parseAgentRegistrationBody( + input.payload, + input.environment, + ); + + const issuedAt = nowIso(); + const issuedAtMs = Date.parse(issuedAt); + const issuedAtSeconds = Math.floor(issuedAtMs / 1000); + const ttlDays = parsedBody.ttlDays ?? DEFAULT_AGENT_TTL_DAYS; + const framework = parsedBody.framework ?? DEFAULT_AGENT_FRAMEWORK; + const ttlSeconds = ttlDays * DAY_IN_SECONDS; + const expiresAt = addSeconds(issuedAt, ttlSeconds); + + const agentId = generateUlid(issuedAtMs); + const agentDid = makeAgentDid(agentId); + const currentJti = generateUlid(issuedAtMs + 1); + const createdAt = issuedAt; + + return { + agent: { + id: agentId, + did: agentDid, + ownerDid: input.ownerDid, + name: parsedBody.name, + framework, + publicKey: parsedBody.publicKey, + currentJti, + ttlDays, + status: "active", + expiresAt, + createdAt, + updatedAt: createdAt, + }, + claims: { + iss: input.issuer, + sub: agentDid, + ownerDid: input.ownerDid, + name: parsedBody.name, + framework, + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: parsedBody.publicKey, + }, + }, + iat: issuedAtSeconds, + nbf: issuedAtSeconds, + exp: issuedAtSeconds + ttlSeconds, + jti: currentJti, + }, + }; +} + +export { + DEFAULT_AGENT_FRAMEWORK, + DEFAULT_AGENT_TTL_DAYS, + MAX_AGENT_TTL_DAYS, + MIN_AGENT_TTL_DAYS, +}; + +export function resolveRegistryIssuer( + environment: RegistryConfig["ENVIRONMENT"], +): string { + return REGISTRY_ISSUER_BY_ENVIRONMENT[environment]; +} diff --git a/apps/registry/src/registrySigner.ts b/apps/registry/src/registrySigner.ts new file mode 100644 index 0000000..5b64e75 --- /dev/null +++ b/apps/registry/src/registrySigner.ts @@ -0,0 +1,140 @@ +import { decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; +import { + AppError, + deriveEd25519PublicKey, + type Ed25519KeypairBytes, + 
type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const ED25519_SECRET_KEY_LENGTH = 32; + +type RegistrySigningKey = NonNullable< + RegistryConfig["REGISTRY_SIGNING_KEYS"] +>[number]; + +export type ResolvedRegistrySigner = { + signerKid: string; + signerKeypair: Ed25519KeypairBytes; +}; + +function invalidSigningConfig(options: { + environment: RegistryConfig["ENVIRONMENT"]; + field: "REGISTRY_SIGNING_KEY" | "REGISTRY_SIGNING_KEYS"; + message: string; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "CONFIG_VALIDATION_FAILED", + message: exposeDetails + ? "Registry configuration is invalid" + : "Request could not be processed", + status: 500, + expose: exposeDetails, + details: exposeDetails + ? { + fieldErrors: { + [options.field]: [options.message], + }, + formErrors: [], + } + : undefined, + }); +} + +function parseRegistrySecretKey( + environment: RegistryConfig["ENVIRONMENT"], + secretKeyBase64url: string | undefined, +): Uint8Array { + if (!secretKeyBase64url) { + throw invalidSigningConfig({ + environment, + field: "REGISTRY_SIGNING_KEY", + message: "REGISTRY_SIGNING_KEY is not configured", + }); + } + + let decodedSecretKey: Uint8Array; + try { + decodedSecretKey = decodeBase64url(secretKeyBase64url); + } catch { + throw invalidSigningConfig({ + environment, + field: "REGISTRY_SIGNING_KEY", + message: "REGISTRY_SIGNING_KEY must be valid base64url", + }); + } + + if (decodedSecretKey.length !== ED25519_SECRET_KEY_LENGTH) { + throw invalidSigningConfig({ + environment, + field: "REGISTRY_SIGNING_KEY", + message: "REGISTRY_SIGNING_KEY must decode to 32 bytes", + }); + } + + return decodedSecretKey; +} + +function findMatchingActiveKey(options: { + environment: RegistryConfig["ENVIRONMENT"]; + keys: RegistrySigningKey[]; + publicKeyBase64url: string; +}): RegistrySigningKey { + const activeKeys = options.keys.filter((key) => key.status === "active"); + if 
(activeKeys.length === 0) { + throw invalidSigningConfig({ + environment: options.environment, + field: "REGISTRY_SIGNING_KEYS", + message: "REGISTRY_SIGNING_KEYS must include an active key", + }); + } + + const matchingKey = activeKeys.find( + (key) => key.x === options.publicKeyBase64url, + ); + + if (!matchingKey) { + throw invalidSigningConfig({ + environment: options.environment, + field: "REGISTRY_SIGNING_KEYS", + message: + "REGISTRY_SIGNING_KEY does not match any active REGISTRY_SIGNING_KEYS entry", + }); + } + + return matchingKey; +} + +export async function resolveRegistrySigner( + config: RegistryConfig, +): Promise { + const publicKeys = config.REGISTRY_SIGNING_KEYS; + if (!publicKeys || publicKeys.length === 0) { + throw invalidSigningConfig({ + environment: config.ENVIRONMENT, + field: "REGISTRY_SIGNING_KEYS", + message: "REGISTRY_SIGNING_KEYS is not configured", + }); + } + + const secretKey = parseRegistrySecretKey( + config.ENVIRONMENT, + config.REGISTRY_SIGNING_KEY, + ); + const publicKey = await deriveEd25519PublicKey(secretKey); + const publicKeyBase64url = encodeBase64url(publicKey); + const signingKey = findMatchingActiveKey({ + environment: config.ENVIRONMENT, + keys: publicKeys, + publicKeyBase64url, + }); + + return { + signerKid: signingKey.kid, + signerKeypair: { + publicKey, + secretKey, + }, + }; +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 8ca448b..a43c9a5 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -12,6 +12,10 @@ import { verifyAIT, } from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; +import { + DEFAULT_AGENT_FRAMEWORK, + DEFAULT_AGENT_TTL_DAYS, +} from "./agentRegistration.js"; import { deriveApiKeyLookupPrefix, hashApiKeyToken, @@ -54,8 +58,21 @@ type FakeD1Row = { humanStatus: "active" | "suspended"; }; +type FakeAgentInsertRow = Record; + +function parseInsertColumns(query: string): string[] { + const match = 
query.match(/insert\s+into\s+"?agents"?\s*\(([^)]+)\)/i); + if (!match) { + return []; + } + + const columns = match[1]?.split(",") ?? []; + return columns.map((column) => column.replace(/["`\s]/g, "")); +} + function createFakeDb(rows: FakeD1Row[]) { const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; + const agentInserts: FakeAgentInsertRow[] = []; const database: D1Database = { prepare(query: string) { @@ -129,13 +146,46 @@ function createFakeDb(rows: FakeD1Row[]) { apiKeyId: String(params[1] ?? ""), }); } + if ( + normalizedQuery.includes('insert into "agents"') || + normalizedQuery.includes("insert into agents") + ) { + const columns = parseInsertColumns(query); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + agentInserts.push(row); + } return { success: true } as D1Result; }, } as D1PreparedStatement; }, } as D1Database; - return { database, updates }; + return { database, updates, agentInserts }; +} + +function makeValidPatContext(token = "clw_pat_valid-token-value") { + return hashApiKeyToken(token).then((tokenHash) => { + const authRow: FakeD1Row = { + apiKeyId: "key-1", + keyPrefix: deriveApiKeyLookupPrefix(token), + keyHash: tokenHash, + apiKeyStatus: "active", + apiKeyName: "ci", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + + return { token, authRow }; + }); } describe("GET /health", () => { @@ -343,22 +393,8 @@ describe("GET /v1/me", () => { }); it("returns 401 for invalid PAT", async () => { - const validToken = "clw_pat_valid-token-value"; - const validHash = await hashApiKeyToken(validToken); - const { database } = createFakeDb([ - { - apiKeyId: "key-1", - keyPrefix: deriveApiKeyLookupPrefix(validToken), - keyHash: validHash, - apiKeyStatus: "active", - apiKeyName: "ci", - humanId: "human-1", - humanDid: 
"did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }, - ]); + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); const res = await createRegistryApp().request( "/v1/me", @@ -392,22 +428,8 @@ describe("GET /v1/me", () => { }); it("authenticates valid PAT and injects ctx.human", async () => { - const validToken = "clw_pat_valid-token-value"; - const validHash = await hashApiKeyToken(validToken); - const { database, updates } = createFakeDb([ - { - apiKeyId: "key-1", - keyPrefix: deriveApiKeyLookupPrefix(validToken), - keyHash: validHash, - apiKeyStatus: "active", - apiKeyName: "ci", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }, - ]); + const { token: validToken, authRow } = await makeValidPatContext(); + const { database, updates } = createFakeDb([authRow]); const res = await createRegistryApp().request( "/v1/me", @@ -441,3 +463,411 @@ describe("GET /v1/me", () => { expect(updates[0]?.apiKeyId).toBe("key-1"); }); }); + +describe("POST /v1/agents", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + name: "agent-01", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 400 when request payload is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + 
"/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "!!!", + framework: "", + publicKey: "not-base64url", + ttlDays: 0, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Agent registration payload is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + name: expect.any(Array), + framework: expect.any(Array), + publicKey: expect.any(Array), + ttlDays: expect.any(Array), + }); + }); + + it("returns verbose malformed-json error in test", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: '{"name":"agent-01"', + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request body must be valid JSON"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns generic malformed-json error in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + 
const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: '{"name":"agent-01"', + }, + { + DB: database, + ENVIRONMENT: "production", + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns generic validation error details in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "!!!", + publicKey: "not-base64url", + }), + }, + { + DB: database, + ENVIRONMENT: "production", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); + + it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentInserts } = createFakeDb([authRow]); + const signer = 
await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-01", + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(201); + const body = (await res.json()) as { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: string; + expiresAt: string; + createdAt: string; + updatedAt: string; + }; + ait: string; + }; + + expect(body.agent.name).toBe("agent-01"); + expect(body.agent.framework).toBe(DEFAULT_AGENT_FRAMEWORK); + expect(body.agent.ttlDays).toBe(DEFAULT_AGENT_TTL_DAYS); + expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(body.agent.status).toBe("active"); + expect(body.ait).toEqual(expect.any(String)); + + expect(agentInserts).toHaveLength(1); + const inserted = agentInserts[0]; + expect(inserted?.owner_id).toBe("human-1"); + expect(inserted?.name).toBe("agent-01"); + expect(inserted?.framework).toBe(DEFAULT_AGENT_FRAMEWORK); + expect(inserted?.public_key).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(inserted?.current_jti).toBe(body.agent.currentJti); + expect(inserted?.expires_at).toBe(body.agent.expiresAt); + }); + + it("returns verifiable AIT using published keyset", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + 
const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + + const registerResponse = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-registry-verify", + framework: "openclaw", + ttlDays: 10, + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(registerResponse.status).toBe(201); + const registerBody = (await registerResponse.json()) as { + agent: { + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + }; + ait: string; + }; + + const keysResponse = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyAIT({ + token: registerBody.ait, + expectedIssuer: "https://dev.api.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(claims.iss).toBe("https://dev.api.clawdentity.com"); + expect(claims.sub).toBe(registerBody.agent.did); + expect(claims.ownerDid).toBe(registerBody.agent.ownerDid); + expect(claims.name).toBe(registerBody.agent.name); + 
expect(claims.framework).toBe(registerBody.agent.framework); + expect(claims.cnf.jwk.x).toBe(registerBody.agent.publicKey); + expect(claims.jti).toBe(registerBody.agent.currentJti); + }); + + it("returns 500 when signer secret does not match any active published key", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + const wrongPublishedKey = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-signer-mismatch", + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-2", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(wrongPublishedKey.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(500); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + REGISTRY_SIGNING_KEYS: expect.any(Array), + }); + }); +}); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 3328c33..91b95e3 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,20 +1,31 @@ import { + AppError, createHonoErrorHandler, createLogger, createRequestContextMiddleware, createRequestLoggingMiddleware, parseRegistryConfig, type RegistryConfig, + shouldExposeVerboseErrors, + signAIT, } from "@clawdentity/sdk"; import { 
Hono } from "hono"; +import { + buildAgentRegistration, + resolveRegistryIssuer, +} from "./agentRegistration.js"; import { type AuthenticatedHuman, createApiKeyAuth, } from "./auth/apiKeyAuth.js"; +import { createDb } from "./db/client.js"; +import { agents } from "./db/schema.js"; +import { resolveRegistrySigner } from "./registrySigner.js"; type Bindings = { DB: D1Database; ENVIRONMENT: string; + REGISTRY_SIGNING_KEY?: string; REGISTRY_SIGNING_KEYS?: string; }; const logger = createLogger({ service: "registry" }); @@ -68,6 +79,56 @@ function createRegistryApp() { return c.json({ human: c.get("human") }); }); + app.post("/v1/agents", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_REGISTRATION_INVALID", + message: exposeDetails + ? "Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const human = c.get("human"); + const registration = buildAgentRegistration({ + payload, + ownerDid: human.did, + issuer: resolveRegistryIssuer(config.ENVIRONMENT), + environment: config.ENVIRONMENT, + }); + const signer = await resolveRegistrySigner(config); + const ait = await signAIT({ + claims: registration.claims, + signerKid: signer.signerKid, + signerKeypair: signer.signerKeypair, + }); + + const db = createDb(c.env.DB); + await db.insert(agents).values({ + id: registration.agent.id, + did: registration.agent.did, + owner_id: human.id, + name: registration.agent.name, + framework: registration.agent.framework, + public_key: registration.agent.publicKey, + current_jti: registration.agent.currentJti, + status: registration.agent.status, + expires_at: registration.agent.expiresAt, + created_at: registration.agent.createdAt, + updated_at: registration.agent.updatedAt, + }); + + return c.json({ agent: 
registration.agent, ait }, 201); + }); + return app; } diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 82bcdff..585cc0b 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -24,6 +24,7 @@ - Avoid leaking secrets in logs and error payloads. - Keep all parse/validation errors explicit and deterministic. - Keep cryptography APIs byte-first (`Uint8Array`) and runtime-portable. +- Derive Ed25519 public keys via `deriveEd25519PublicKey` (instead of ad-hoc noble calls) so key derivation behavior and validation stay centralized. - Reuse protocol base64url helpers as the single source of truth; do not duplicate encoding logic in SDK. - Keep CRL claim schema authority in `@clawdentity/protocol` (`crl.ts`); SDK JWT helpers should avoid duplicating claim-validation rules. - Never log secret keys or raw signature material. @@ -39,6 +40,7 @@ - Nonce cache must validate the top-level input shape before reading fields so invalid JS callers receive structured `AppError`s instead of runtime `TypeError`s. - Registry config parsing must validate `REGISTRY_SIGNING_KEYS` as JSON before runtime use so keyset endpoints fail fast with `CONFIG_VALIDATION_FAILED` on malformed key documents. - Registry keyset validation must reject duplicate `kid` values and malformed `x` key material (non-base64url or non-32-byte Ed25519) so verifier behavior cannot become order-dependent. +- Use `RuntimeEnvironment` + `shouldExposeVerboseErrors` from `runtime-environment` for environment-based error-detail behavior; do not duplicate ad-hoc `NODE_ENV`/string checks. ## Testing Rules - Unit test each shared module. @@ -49,3 +51,4 @@ - HTTP signing tests must include sign/verify happy path and explicit failures when method, path, body, or timestamp are altered. - Nonce cache tests must include duplicate nonce rejection within TTL and acceptance after TTL expiry. 
- CRL cache tests must cover revoked lookup, refresh-on-stale, and stale-path behavior in both `fail-open` and `fail-closed` modes. +- When new registry routes emit signed AITs (e.g., POST `/v1/agents`), tests should consume those tokens with the published `REGISTRY_SIGNING_KEYS` set (as returned by `/.well-known/claw-keys.json`) and assert that `verifyAIT` succeeds/fails exactly the same way the local `claw verify` workflow will, keeping the offline verification contract fully covered. diff --git a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index 8388664..5249669 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -1,8 +1,9 @@ import { decodeBase64url } from "@clawdentity/protocol"; import { z } from "zod"; import { AppError } from "./exceptions.js"; +import { runtimeEnvironmentValues } from "./runtime-environment.js"; -const environmentSchema = z.enum(["development", "production", "test"]); +const environmentSchema = z.enum(runtimeEnvironmentValues); const registrySigningKeyStatusSchema = z.enum(["active", "revoked"]); const ED25519_PUBLIC_KEY_LENGTH = 32; diff --git a/packages/sdk/src/crypto/ed25519.test.ts b/packages/sdk/src/crypto/ed25519.test.ts index eb593d4..f5ade61 100644 --- a/packages/sdk/src/crypto/ed25519.test.ts +++ b/packages/sdk/src/crypto/ed25519.test.ts @@ -3,6 +3,7 @@ import { describe, expect, it } from "vitest"; import { decodeEd25519KeypairBase64url, decodeEd25519SignatureBase64url, + deriveEd25519PublicKey, encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, generateEd25519Keypair, @@ -32,6 +33,13 @@ describe("ed25519 crypto helpers", () => { expect(isValid).toBe(true); }); + it("derives the same public key from the matching secret key", async () => { + const keypair = await generateEd25519Keypair(); + const derivedPublicKey = await deriveEd25519PublicKey(keypair.secretKey); + + expect(Array.from(derivedPublicKey)).toEqual(Array.from(keypair.publicKey)); + }); + it("fails verification with the 
wrong message", async () => { const keypair = await generateEd25519Keypair(); const message = encoder.encode("t03-original-message"); @@ -107,4 +115,10 @@ describe("ed25519 crypto helpers", () => { expect((error as ProtocolParseError).code).toBe("INVALID_BASE64URL"); } }); + + it("rejects deriving a public key when the secret key length is invalid", async () => { + await expect( + deriveEd25519PublicKey(new Uint8Array(31)), + ).rejects.toThrowError(); + }); }); diff --git a/packages/sdk/src/crypto/ed25519.ts b/packages/sdk/src/crypto/ed25519.ts index d590ef3..d89f2fc 100644 --- a/packages/sdk/src/crypto/ed25519.ts +++ b/packages/sdk/src/crypto/ed25519.ts @@ -26,6 +26,12 @@ export async function signEd25519( return ed25519.signAsync(message, secretKey); } +export async function deriveEd25519PublicKey( + secretKey: Uint8Array, +): Promise { + return ed25519.getPublicKeyAsync(secretKey); +} + export async function verifyEd25519( signature: Uint8Array, message: Uint8Array, diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index d4d7e9f..1467adb 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -18,6 +18,7 @@ import { REQUEST_ID_HEADER, resolveRequestId, SDK_VERSION, + shouldExposeVerboseErrors, signAIT, signCRL, signEd25519, @@ -41,6 +42,7 @@ describe("sdk", () => { expect(parseRegistryConfig({ ENVIRONMENT: "test" }).ENVIRONMENT).toBe( "test", ); + expect(shouldExposeVerboseErrors("test")).toBe(true); expect(REQUEST_ID_HEADER).toBe("x-request-id"); expect(AppError).toBeTypeOf("function"); }); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 1b33291..20d5115 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -21,6 +21,7 @@ export type { export { decodeEd25519KeypairBase64url, decodeEd25519SignatureBase64url, + deriveEd25519PublicKey, encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, generateEd25519Keypair, @@ -63,6 +64,8 @@ export { 
REQUEST_ID_HEADER, resolveRequestId, } from "./request-context.js"; +export type { RuntimeEnvironment } from "./runtime-environment.js"; +export { shouldExposeVerboseErrors } from "./runtime-environment.js"; export type { NonceCache, NonceCacheInput, diff --git a/packages/sdk/src/runtime-environment.test.ts b/packages/sdk/src/runtime-environment.test.ts new file mode 100644 index 0000000..29b9648 --- /dev/null +++ b/packages/sdk/src/runtime-environment.test.ts @@ -0,0 +1,24 @@ +import { describe, expect, it } from "vitest"; +import { + runtimeEnvironmentValues, + shouldExposeVerboseErrors, +} from "./runtime-environment.js"; + +describe("runtime environment helpers", () => { + it("declares the supported runtime environments", () => { + expect(runtimeEnvironmentValues).toEqual([ + "development", + "production", + "test", + ]); + }); + + it("exposes verbose errors for non-production environments", () => { + expect(shouldExposeVerboseErrors("development")).toBe(true); + expect(shouldExposeVerboseErrors("test")).toBe(true); + }); + + it("hides verbose errors in production", () => { + expect(shouldExposeVerboseErrors("production")).toBe(false); + }); +}); diff --git a/packages/sdk/src/runtime-environment.ts b/packages/sdk/src/runtime-environment.ts new file mode 100644 index 0000000..4952a34 --- /dev/null +++ b/packages/sdk/src/runtime-environment.ts @@ -0,0 +1,13 @@ +export const runtimeEnvironmentValues = [ + "development", + "production", + "test", +] as const; + +export type RuntimeEnvironment = (typeof runtimeEnvironmentValues)[number]; + +export function shouldExposeVerboseErrors( + environment: RuntimeEnvironment, +): boolean { + return environment !== "production"; +} From 18b04303099bd09c9405e5ae1b5caf3263644024 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 16:20:50 +0530 Subject: [PATCH 026/190] feat(registry): implement T15 owned agent listing endpoint --- apps/registry/src/AGENTS.md | 12 + apps/registry/src/agentList.ts | 219 ++++++++++++++ 
apps/registry/src/server.test.ts | 471 ++++++++++++++++++++++++++++++- apps/registry/src/server.ts | 50 ++++ 4 files changed, 751 insertions(+), 1 deletion(-) create mode 100644 apps/registry/src/agentList.ts diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 8b4b60d..f9d2e54 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -21,6 +21,18 @@ - Run `pnpm -F @clawdentity/registry run typecheck` before commit. - When using fake D1 adapters in route tests, make select responses honor bound parameters so query-shape regressions are caught. +## GET /v1/agents Contract +- Require PAT auth via `createApiKeyAuth`; only caller-owned agents may be returned. +- Keep query parsing in `agentList.ts` to avoid duplicating validation rules in route handlers. +- Supported optional filters: + - `status`: `active | revoked` + - `framework`: trimmed non-empty string, max 32 chars, no control chars + - `limit`: integer `1..100`, default `20` + - `cursor`: ULID (opaque page token) +- Return minimal agent fields only: `{ id, did, name, status, expires }` plus pagination `{ limit, nextCursor }`. +- Keep ordering deterministic (`id` descending) and compute `nextCursor` from the last item in the returned page. +- Keep error detail exposure environment-aware via `shouldExposeVerboseErrors`: generic 400 message in `production`, detailed `fieldErrors` in `development`/`test`. + ## POST /v1/agents Contract - Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. 
- Validate request payload fields with explicit rules: diff --git a/apps/registry/src/agentList.ts b/apps/registry/src/agentList.ts new file mode 100644 index 0000000..5fdf03a --- /dev/null +++ b/apps/registry/src/agentList.ts @@ -0,0 +1,219 @@ +import { parseUlid } from "@clawdentity/protocol"; +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const DEFAULT_AGENT_LIST_LIMIT = 20; +const MAX_AGENT_LIST_LIMIT = 100; +const MAX_FRAMEWORK_LENGTH = 32; + +type AgentStatus = "active" | "revoked"; + +export type AgentListQuery = { + status?: AgentStatus; + framework?: string; + limit: number; + cursor?: string; +}; + +export type AgentListRow = { + id: string; + did: string; + name: string; + status: AgentStatus; + expires_at: string | null; +}; + +export type ListedAgent = { + id: string; + did: string; + name: string; + status: AgentStatus; + expires: string | null; +}; + +type QueryRecord = Record; + +function invalidListQuery(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + + return new AppError({ + code: "AGENT_LIST_INVALID_QUERY", + message: exposeDetails + ? "Agent list query is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +function addFieldError( + fieldErrors: Record, + field: string, + message: string, +): void { + const errors = fieldErrors[field] ?? 
[]; + errors.push(message); + fieldErrors[field] = errors; +} + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function parseStatus( + input: string | undefined, + fieldErrors: Record, +): AgentStatus | undefined { + if (input === undefined) { + return undefined; + } + + if (input === "active" || input === "revoked") { + return input; + } + + addFieldError( + fieldErrors, + "status", + "status must be either 'active' or 'revoked'", + ); + return undefined; +} + +function parseFramework( + input: string | undefined, + fieldErrors: Record, +): string | undefined { + if (input === undefined) { + return undefined; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "framework", "framework is required"); + return undefined; + } + + if (value.length > MAX_FRAMEWORK_LENGTH) { + addFieldError( + fieldErrors, + "framework", + `framework must be at most ${MAX_FRAMEWORK_LENGTH} characters`, + ); + } + + if (hasControlChars(value)) { + addFieldError( + fieldErrors, + "framework", + "framework contains control characters", + ); + } + + return value; +} + +function parseLimit( + input: string | undefined, + fieldErrors: Record, +): number { + if (input === undefined) { + return DEFAULT_AGENT_LIST_LIMIT; + } + + const value = Number(input); + if (!Number.isFinite(value) || !Number.isInteger(value)) { + addFieldError(fieldErrors, "limit", "limit must be an integer"); + return DEFAULT_AGENT_LIST_LIMIT; + } + + if (value < 1 || value > MAX_AGENT_LIST_LIMIT) { + addFieldError( + fieldErrors, + "limit", + `limit must be between 1 and ${MAX_AGENT_LIST_LIMIT}`, + ); + return DEFAULT_AGENT_LIST_LIMIT; + } + + return value; +} + +function parseCursor( + input: string | undefined, + fieldErrors: Record, +): string | undefined { + if (input === undefined) { + return 
undefined; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "cursor", "cursor is required"); + return undefined; + } + + try { + return parseUlid(value).value; + } catch { + addFieldError(fieldErrors, "cursor", "cursor must be a valid ULID"); + return undefined; + } +} + +export function parseAgentListQuery(input: { + query: QueryRecord; + environment: RegistryConfig["ENVIRONMENT"]; +}): AgentListQuery { + const fieldErrors: Record = {}; + const status = parseStatus(input.query.status, fieldErrors); + const framework = parseFramework(input.query.framework, fieldErrors); + const limit = parseLimit(input.query.limit, fieldErrors); + const cursor = parseCursor(input.query.cursor, fieldErrors); + + if (Object.keys(fieldErrors).length > 0) { + throw invalidListQuery({ + environment: input.environment, + details: { + fieldErrors, + formErrors: [], + }, + }); + } + + return { + status, + framework, + limit, + cursor, + }; +} + +export function mapAgentListRow(row: AgentListRow): ListedAgent { + return { + id: row.id, + did: row.did, + name: row.name, + status: row.status, + expires: row.expires_at, + }; +} + +export { DEFAULT_AGENT_LIST_LIMIT, MAX_AGENT_LIST_LIMIT }; diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index a43c9a5..d214f4b 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -12,6 +12,7 @@ import { verifyAIT, } from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; +import { DEFAULT_AGENT_LIST_LIMIT } from "./agentList.js"; import { DEFAULT_AGENT_FRAMEWORK, DEFAULT_AGENT_TTL_DAYS, @@ -59,6 +60,23 @@ type FakeD1Row = { }; type FakeAgentInsertRow = Record; +type FakeAgentRow = { + id: string; + did: string; + ownerId: string; + name: string; + framework: string | null; + status: "active" | "revoked"; + expiresAt: string | null; +}; + +type FakeAgentSelectRow = { + id: string; + did: string; + name: string; + status: "active" | 
"revoked"; + expires_at: string | null; +}; function parseInsertColumns(query: string): string[] { const match = query.match(/insert\s+into\s+"?agents"?\s*\(([^)]+)\)/i); @@ -70,7 +88,86 @@ function parseInsertColumns(query: string): string[] { return columns.map((column) => column.replace(/["`\s]/g, "")); } -function createFakeDb(rows: FakeD1Row[]) { +function extractWhereClause(query: string): string { + const normalized = query.toLowerCase(); + const whereIndex = normalized.indexOf(" where "); + if (whereIndex < 0) { + return ""; + } + + const orderByIndex = normalized.indexOf(" order by ", whereIndex + 7); + const limitIndex = normalized.indexOf(" limit ", whereIndex + 7); + const endIndex = + orderByIndex >= 0 + ? orderByIndex + : limitIndex >= 0 + ? limitIndex + : normalized.length; + + return normalized.slice(whereIndex, endIndex); +} + +function resolveAgentSelectRows(options: { + query: string; + params: unknown[]; + agentRows: FakeAgentRow[]; +}): FakeAgentSelectRow[] { + const whereClause = extractWhereClause(options.query); + const hasStatusFilter = + whereClause.includes("status") && whereClause.includes("= ?"); + const hasFrameworkFilter = + whereClause.includes("framework") && whereClause.includes("= ?"); + const hasCursorFilter = + whereClause.includes("id") && whereClause.includes("< ?"); + + let parameterIndex = 0; + const ownerId = String(options.params[parameterIndex] ?? ""); + parameterIndex += 1; + + const statusFilter = hasStatusFilter + ? String(options.params[parameterIndex] ?? "") + : undefined; + if (hasStatusFilter) { + parameterIndex += 1; + } + + const frameworkFilter = hasFrameworkFilter + ? String(options.params[parameterIndex] ?? "") + : undefined; + if (hasFrameworkFilter) { + parameterIndex += 1; + } + + const cursorFilter = hasCursorFilter + ? String(options.params[parameterIndex] ?? "") + : undefined; + + const maybeLimit = Number(options.params[options.params.length - 1]); + const limit = Number.isFinite(maybeLimit) + ? 
maybeLimit + : options.agentRows.length; + + const filteredRows = options.agentRows + .filter((row) => row.ownerId === ownerId) + .filter((row) => (statusFilter ? row.status === statusFilter : true)) + .filter((row) => + frameworkFilter ? row.framework === frameworkFilter : true, + ) + .filter((row) => (cursorFilter ? row.id < cursorFilter : true)) + .sort((left, right) => right.id.localeCompare(left.id)) + .slice(0, limit) + .map((row) => ({ + id: row.id, + did: row.did, + name: row.name, + status: row.status, + expires_at: row.expiresAt, + })); + + return filteredRows; +} + +function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; const agentInserts: FakeAgentInsertRow[] = []; @@ -109,6 +206,20 @@ function createFakeDb(rows: FakeD1Row[]) { })), }; } + if ( + (normalizedQuery.includes('from "agents"') || + normalizedQuery.includes("from agents")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + return { + results: resolveAgentSelectRows({ + query, + params, + agentRows, + }), + }; + } return { results: [] }; }, async raw() { @@ -134,6 +245,23 @@ function createFakeDb(rows: FakeD1Row[]) { row.humanStatus, ]); } + if ( + normalizedQuery.includes('from "agents"') || + normalizedQuery.includes("from agents") + ) { + const resultRows = resolveAgentSelectRows({ + query, + params, + agentRows, + }); + return resultRows.map((row) => [ + row.id, + row.did, + row.name, + row.status, + row.expires_at, + ]); + } return []; }, async run() { @@ -464,6 +592,347 @@ describe("GET /v1/me", () => { }); }); +describe("GET /v1/agents", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/agents", + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + 
expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns only caller-owned agents with minimal fields", async () => { + const { token, authRow } = await makeValidPatContext(); + const ownerAgentNewId = generateUlid(1700100010000); + const ownerAgentOldId = generateUlid(1700100005000); + const foreignAgentId = generateUlid(1700100015000); + const { database } = createFakeDb( + [authRow], + [ + { + id: ownerAgentNewId, + did: makeAgentDid(ownerAgentNewId), + ownerId: "human-1", + name: "owner-agent-new", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: ownerAgentOldId, + did: makeAgentDid(ownerAgentOldId), + ownerId: "human-1", + name: "owner-agent-old", + framework: "langchain", + status: "revoked", + expiresAt: "2026-02-20T00:00:00.000Z", + }, + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + "/v1/agents", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + pagination: { + limit: number; + nextCursor: string | null; + }; + }; + + expect(body.agents).toEqual([ + { + id: ownerAgentNewId, + did: makeAgentDid(ownerAgentNewId), + name: "owner-agent-new", + status: "active", + expires: "2026-03-01T00:00:00.000Z", + }, + { + id: ownerAgentOldId, + did: makeAgentDid(ownerAgentOldId), + name: "owner-agent-old", + status: "revoked", + expires: "2026-02-20T00:00:00.000Z", + }, + ]); + expect(body.pagination).toEqual({ + limit: DEFAULT_AGENT_LIST_LIMIT, + nextCursor: null, + }); + expect(body.agents[0]).not.toHaveProperty("framework"); + 
expect(body.agents[0]).not.toHaveProperty("ownerId"); + }); + + it("applies status and framework filters", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentIdOne = generateUlid(1700100010000); + const agentIdTwo = generateUlid(1700100011000); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentIdOne, + did: makeAgentDid(agentIdOne), + ownerId: "human-1", + name: "owner-openclaw-active", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: agentIdTwo, + did: makeAgentDid(agentIdTwo), + ownerId: "human-1", + name: "owner-langchain-revoked", + framework: "langchain", + status: "revoked", + expiresAt: "2026-03-05T00:00:00.000Z", + }, + ], + ); + + const statusRes = await createRegistryApp().request( + "/v1/agents?status=revoked", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(statusRes.status).toBe(200); + const statusBody = (await statusRes.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + }; + expect(statusBody.agents).toEqual([ + { + id: agentIdTwo, + did: makeAgentDid(agentIdTwo), + name: "owner-langchain-revoked", + status: "revoked", + expires: "2026-03-05T00:00:00.000Z", + }, + ]); + + const frameworkRes = await createRegistryApp().request( + "/v1/agents?framework=openclaw", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(frameworkRes.status).toBe(200); + const frameworkBody = (await frameworkRes.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + }; + expect(frameworkBody.agents).toEqual([ + { + id: agentIdOne, + did: makeAgentDid(agentIdOne), + name: "owner-openclaw-active", + status: "active", + expires: "2026-03-01T00:00:00.000Z", + }, + ]); + }); + + 
it("supports cursor pagination and returns nextCursor", async () => { + const { token, authRow } = await makeValidPatContext(); + const newestId = generateUlid(1700100012000); + const olderId = generateUlid(1700100011000); + const oldestId = generateUlid(1700100010000); + const { database } = createFakeDb( + [authRow], + [ + { + id: newestId, + did: makeAgentDid(newestId), + ownerId: "human-1", + name: "newest", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: olderId, + did: makeAgentDid(olderId), + ownerId: "human-1", + name: "older", + framework: "openclaw", + status: "active", + expiresAt: "2026-02-28T00:00:00.000Z", + }, + { + id: oldestId, + did: makeAgentDid(oldestId), + ownerId: "human-1", + name: "oldest", + framework: "openclaw", + status: "active", + expiresAt: "2026-02-27T00:00:00.000Z", + }, + ], + ); + + const firstPage = await createRegistryApp().request( + "/v1/agents?limit=1", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(firstPage.status).toBe(200); + const firstBody = (await firstPage.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + pagination: { limit: number; nextCursor: string | null }; + }; + expect(firstBody.agents).toEqual([ + { + id: newestId, + did: makeAgentDid(newestId), + name: "newest", + status: "active", + expires: "2026-03-01T00:00:00.000Z", + }, + ]); + expect(firstBody.pagination).toEqual({ + limit: 1, + nextCursor: newestId, + }); + + const secondPage = await createRegistryApp().request( + `/v1/agents?limit=1&cursor=${newestId}`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(secondPage.status).toBe(200); + const secondBody = (await secondPage.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + 
expires: string | null; + }>; + pagination: { limit: number; nextCursor: string | null }; + }; + expect(secondBody.agents).toEqual([ + { + id: olderId, + did: makeAgentDid(olderId), + name: "older", + status: "active", + expires: "2026-02-28T00:00:00.000Z", + }, + ]); + expect(secondBody.pagination).toEqual({ + limit: 1, + nextCursor: olderId, + }); + }); + + it("returns verbose query validation errors in non-production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents?status=invalid", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_LIST_INVALID_QUERY"); + expect(body.error.message).toBe("Agent list query is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + status: expect.any(Array), + }); + }); + + it("returns generic query validation errors in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents?cursor=not-a-ulid", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "production" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: Record; + }; + }; + expect(body.error.code).toBe("AGENT_LIST_INVALID_QUERY"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); +}); + describe("POST /v1/agents", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git 
a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 91b95e3..56d3617 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -9,7 +9,9 @@ import { shouldExposeVerboseErrors, signAIT, } from "@clawdentity/sdk"; +import { and, desc, eq, lt } from "drizzle-orm"; import { Hono } from "hono"; +import { mapAgentListRow, parseAgentListQuery } from "./agentList.js"; import { buildAgentRegistration, resolveRegistryIssuer, @@ -79,6 +81,54 @@ function createRegistryApp() { return c.json({ human: c.get("human") }); }); + app.get("/v1/agents", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const query = parseAgentListQuery({ + query: c.req.query(), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const filters = [eq(agents.owner_id, human.id)]; + if (query.status) { + filters.push(eq(agents.status, query.status)); + } + if (query.framework) { + filters.push(eq(agents.framework, query.framework)); + } + if (query.cursor) { + filters.push(lt(agents.id, query.cursor)); + } + + const rows = await db + .select({ + id: agents.id, + did: agents.did, + name: agents.name, + status: agents.status, + expires_at: agents.expires_at, + }) + .from(agents) + .where(and(...filters)) + .orderBy(desc(agents.id)) + .limit(query.limit + 1); + + const hasNextPage = rows.length > query.limit; + const pageRows = hasNextPage ? rows.slice(0, query.limit) : rows; + const nextCursor = hasNextPage + ? (pageRows[pageRows.length - 1]?.id ?? 
null) + : null; + + return c.json({ + agents: pageRows.map(mapAgentListRow), + pagination: { + limit: query.limit, + nextCursor, + }, + }); + }); + app.post("/v1/agents", createApiKeyAuth(), async (c) => { const config = getConfig(c.env); const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); From bafffdf5def4a8e7de0903d110728c67cf3ff71f Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 17:08:25 +0530 Subject: [PATCH 027/190] feat(registry): add agent revoke endpoint and standardize file naming --- apps/registry/src/AGENTS.md | 18 +- .../src/{agentList.ts => agent-list.ts} | 0 ...tRegistration.ts => agent-registration.ts} | 0 apps/registry/src/agent-revocation.ts | 83 ++++ .../auth/{apiKeyAuth.ts => api-key-auth.ts} | 0 .../{registrySigner.ts => registry-signer.ts} | 0 apps/registry/src/server.test.ts | 447 +++++++++++++++++- apps/registry/src/server.ts | 80 +++- 8 files changed, 596 insertions(+), 32 deletions(-) rename apps/registry/src/{agentList.ts => agent-list.ts} (100%) rename apps/registry/src/{agentRegistration.ts => agent-registration.ts} (100%) create mode 100644 apps/registry/src/agent-revocation.ts rename apps/registry/src/auth/{apiKeyAuth.ts => api-key-auth.ts} (100%) rename apps/registry/src/{registrySigner.ts => registry-signer.ts} (100%) diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index f9d2e54..aa50829 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -23,7 +23,7 @@ ## GET /v1/agents Contract - Require PAT auth via `createApiKeyAuth`; only caller-owned agents may be returned. -- Keep query parsing in `agentList.ts` to avoid duplicating validation rules in route handlers. +- Keep query parsing in `agent-list.ts` to avoid duplicating validation rules in route handlers. 
- Supported optional filters: - `status`: `active | revoked` - `framework`: trimmed non-empty string, max 32 chars, no control chars @@ -40,10 +40,22 @@ - `framework`: optional; default to `openclaw` when omitted. - `publicKey`: base64url Ed25519 key that decodes to 32 bytes. - `ttlDays`: optional; default `30`; allow only integer range `1..90`. -- Keep request parsing and validation in a reusable helper module (`agentRegistration.ts`) so future routes can share the same constraints without duplicating schema logic. +- Keep request parsing and validation in a reusable helper module (`agent-registration.ts`) so future routes can share the same constraints without duplicating schema logic. - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors` (shared SDK helper path): return generic messages without internals in `production`, but include validation/config details in `development`/`test` for debugging. - Persist `agents.current_jti` and `agents.expires_at` on insert; generated AIT claims (`jti`, `exp`) must stay in sync with those persisted values. - Use shared SDK datetime helpers (`nowIso`, `addSeconds`) for issuance/expiry math instead of ad-hoc `Date.now()` arithmetic in route logic. -- Resolve signing material through a reusable signer helper (`registrySigner.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. +- Resolve signing material through a reusable signer helper (`registry-signer.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. - Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.api.clawdentity.com`, `production` -> `https://api.clawdentity.com`) rather than request-origin inference. 
- Response shape remains `{ agent, ait }`; the token must be verifiable with the public keyset returned by `/.well-known/claw-keys.json`. + +## DELETE /v1/agents/:id Contract +- Require PAT auth via `createApiKeyAuth`; only the caller-owned agent may be revoked. +- Validate `:id` as ULID in `agent-revocation.ts`; path validation errors must be environment-aware via `shouldExposeVerboseErrors`. +- For unknown IDs or foreign-owned IDs, return `404 AGENT_NOT_FOUND` (single not-found behavior to avoid ownership leaks). +- Keep revocation idempotent: + - return `204` when agent is already `revoked` + - return `204` after first successful revoke +- If an owned active agent has no `current_jti`, fail with `409 AGENT_REVOKE_INVALID_STATE` rather than writing a partial revocation. +- Perform state changes in one DB transaction: + - update `agents.status` to `revoked` and `agents.updated_at` to `nowIso()` + - insert `revocations` row using the previous `current_jti` diff --git a/apps/registry/src/agentList.ts b/apps/registry/src/agent-list.ts similarity index 100% rename from apps/registry/src/agentList.ts rename to apps/registry/src/agent-list.ts diff --git a/apps/registry/src/agentRegistration.ts b/apps/registry/src/agent-registration.ts similarity index 100% rename from apps/registry/src/agentRegistration.ts rename to apps/registry/src/agent-registration.ts diff --git a/apps/registry/src/agent-revocation.ts b/apps/registry/src/agent-revocation.ts new file mode 100644 index 0000000..8295070 --- /dev/null +++ b/apps/registry/src/agent-revocation.ts @@ -0,0 +1,83 @@ +import { parseUlid } from "@clawdentity/protocol"; +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +function invalidRevokePath(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new 
AppError({ + code: "AGENT_REVOKE_INVALID_PATH", + message: exposeDetails + ? "Agent revoke path is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +export function parseAgentRevokePath(input: { + id: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): string { + const id = input.id.trim(); + if (id.length === 0) { + throw invalidRevokePath({ + environment: input.environment, + details: { + fieldErrors: { id: ["id is required"] }, + formErrors: [], + }, + }); + } + + try { + return parseUlid(id).value; + } catch { + throw invalidRevokePath({ + environment: input.environment, + details: { + fieldErrors: { id: ["id must be a valid ULID"] }, + formErrors: [], + }, + }); + } +} + +export function agentNotFoundError(): AppError { + return new AppError({ + code: "AGENT_NOT_FOUND", + message: "Agent not found", + status: 404, + expose: true, + }); +} + +export function invalidAgentRevokeStateError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + reason: string; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_REVOKE_INVALID_STATE", + message: exposeDetails + ? "Agent cannot be revoked" + : "Request could not be processed", + status: 409, + expose: exposeDetails, + details: exposeDetails + ? 
{ + fieldErrors: { currentJti: [options.reason] }, + formErrors: [], + } + : undefined, + }); +} diff --git a/apps/registry/src/auth/apiKeyAuth.ts b/apps/registry/src/auth/api-key-auth.ts similarity index 100% rename from apps/registry/src/auth/apiKeyAuth.ts rename to apps/registry/src/auth/api-key-auth.ts diff --git a/apps/registry/src/registrySigner.ts b/apps/registry/src/registry-signer.ts similarity index 100% rename from apps/registry/src/registrySigner.ts rename to apps/registry/src/registry-signer.ts diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index d214f4b..011eef6 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -12,15 +12,15 @@ import { verifyAIT, } from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; -import { DEFAULT_AGENT_LIST_LIMIT } from "./agentList.js"; +import { DEFAULT_AGENT_LIST_LIMIT } from "./agent-list.js"; import { DEFAULT_AGENT_FRAMEWORK, DEFAULT_AGENT_TTL_DAYS, -} from "./agentRegistration.js"; +} from "./agent-registration.js"; import { deriveApiKeyLookupPrefix, hashApiKeyToken, -} from "./auth/apiKeyAuth.js"; +} from "./auth/api-key-auth.js"; import app, { createRegistryApp } from "./server.js"; function makeAitClaims(publicKey: Uint8Array): AitClaims { @@ -60,6 +60,8 @@ type FakeD1Row = { }; type FakeAgentInsertRow = Record; +type FakeAgentUpdateRow = Record; +type FakeRevocationInsertRow = Record; type FakeAgentRow = { id: string; did: string; @@ -68,6 +70,8 @@ type FakeAgentRow = { framework: string | null; status: "active" | "revoked"; expiresAt: string | null; + currentJti?: string | null; + updatedAt?: string; }; type FakeAgentSelectRow = { @@ -76,10 +80,13 @@ type FakeAgentSelectRow = { name: string; status: "active" | "revoked"; expires_at: string | null; + current_jti: string | null; }; -function parseInsertColumns(query: string): string[] { - const match = query.match(/insert\s+into\s+"?agents"?\s*\(([^)]+)\)/i); +function 
parseInsertColumns(query: string, tableName: string): string[] { + const match = query.match( + new RegExp(`insert\\s+into\\s+"?${tableName}"?\\s*\\(([^)]+)\\)`, "i"), + ); if (!match) { return []; } @@ -88,6 +95,21 @@ function parseInsertColumns(query: string): string[] { return columns.map((column) => column.replace(/["`\s]/g, "")); } +function parseUpdateSetColumns(query: string, tableName: string): string[] { + const match = query.match( + new RegExp(`update\\s+"?${tableName}"?\\s+set\\s+(.+?)\\s+where`, "i"), + ); + if (!match) { + return []; + } + + const assignments = match[1]?.split(",") ?? []; + return assignments + .map((assignment) => assignment.split("=")[0] ?? "") + .map((column) => column.replace(/["`\s]/g, "")) + .filter((column) => column.length > 0); +} + function extractWhereClause(query: string): string { const normalized = query.toLowerCase(); const whereIndex = normalized.indexOf(" where "); @@ -107,22 +129,100 @@ function extractWhereClause(query: string): string { return normalized.slice(whereIndex, endIndex); } +function hasFilter( + whereClause: string, + column: string, + operator = "=", +): boolean { + const escapedColumn = column.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); + const escapedOperator = operator.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); + const quotedPattern = new RegExp( + `"${escapedColumn}"\\s*${escapedOperator}\\s*\\?`, + ); + const barePattern = new RegExp( + `\\b${escapedColumn}\\b\\s*${escapedOperator}\\s*\\?`, + ); + return quotedPattern.test(whereClause) || barePattern.test(whereClause); +} + +function parseSelectedColumns(query: string): string[] { + const normalized = query.toLowerCase(); + const selectIndex = normalized.indexOf("select "); + const fromIndex = normalized.indexOf(" from "); + if (selectIndex < 0 || fromIndex < 0 || fromIndex <= selectIndex) { + return []; + } + + const selectClause = query.slice(selectIndex + 7, fromIndex); + return selectClause + .split(",") + .map((column) => column.trim()) + 
.map((column) => { + const aliasMatch = column.match(/\s+as\s+"?([a-zA-Z0-9_]+)"?\s*$/i); + if (aliasMatch?.[1]) { + return aliasMatch[1].toLowerCase(); + } + + const quotedMatch = column.match(/"([a-zA-Z0-9_]+)"\s*$/); + if (quotedMatch?.[1]) { + return quotedMatch[1].toLowerCase(); + } + + const bare = + column + .split(".") + .pop() + ?.replace(/["`\s]/g, "") ?? ""; + return bare.toLowerCase(); + }) + .filter((column) => column.length > 0); +} + +function getAgentSelectColumnValue( + row: FakeAgentSelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "did") { + return row.did; + } + if (column === "name") { + return row.name; + } + if (column === "status") { + return row.status; + } + if (column === "expires_at") { + return row.expires_at; + } + if (column === "current_jti") { + return row.current_jti; + } + return undefined; +} + function resolveAgentSelectRows(options: { query: string; params: unknown[]; agentRows: FakeAgentRow[]; }): FakeAgentSelectRow[] { const whereClause = extractWhereClause(options.query); - const hasStatusFilter = - whereClause.includes("status") && whereClause.includes("= ?"); - const hasFrameworkFilter = - whereClause.includes("framework") && whereClause.includes("= ?"); - const hasCursorFilter = - whereClause.includes("id") && whereClause.includes("< ?"); + const hasOwnerFilter = hasFilter(whereClause, "owner_id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasFrameworkFilter = hasFilter(whereClause, "framework"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasCursorFilter = hasFilter(whereClause, "id", "<"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); let parameterIndex = 0; - const ownerId = String(options.params[parameterIndex] ?? ""); - parameterIndex += 1; + const ownerId = hasOwnerFilter + ? String(options.params[parameterIndex] ?? 
"") + : undefined; + if (hasOwnerFilter) { + parameterIndex += 1; + } const statusFilter = hasStatusFilter ? String(options.params[parameterIndex] ?? "") @@ -138,21 +238,34 @@ function resolveAgentSelectRows(options: { parameterIndex += 1; } + const idFilter = hasIdFilter + ? String(options.params[parameterIndex] ?? "") + : undefined; + if (hasIdFilter) { + parameterIndex += 1; + } + const cursorFilter = hasCursorFilter ? String(options.params[parameterIndex] ?? "") : undefined; + if (hasCursorFilter) { + parameterIndex += 1; + } - const maybeLimit = Number(options.params[options.params.length - 1]); + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; const limit = Number.isFinite(maybeLimit) ? maybeLimit : options.agentRows.length; const filteredRows = options.agentRows - .filter((row) => row.ownerId === ownerId) + .filter((row) => (ownerId ? row.ownerId === ownerId : true)) .filter((row) => (statusFilter ? row.status === statusFilter : true)) .filter((row) => frameworkFilter ? row.framework === frameworkFilter : true, ) + .filter((row) => (idFilter ? row.id === idFilter : true)) .filter((row) => (cursorFilter ? row.id < cursorFilter : true)) .sort((left, right) => right.id.localeCompare(left.id)) .slice(0, limit) @@ -162,6 +275,7 @@ function resolveAgentSelectRows(options: { name: row.name, status: row.status, expires_at: row.expiresAt, + current_jti: row.currentJti ?? 
null, })); return filteredRows; @@ -170,6 +284,8 @@ function resolveAgentSelectRows(options: { function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; const agentInserts: FakeAgentInsertRow[] = []; + const agentUpdates: FakeAgentUpdateRow[] = []; + const revocationInserts: FakeRevocationInsertRow[] = []; const database: D1Database = { prepare(query: string) { @@ -254,13 +370,12 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { params, agentRows, }); - return resultRows.map((row) => [ - row.id, - row.did, - row.name, - row.status, - row.expires_at, - ]); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getAgentSelectColumnValue(row, column), + ), + ); } return []; }, @@ -278,7 +393,7 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { normalizedQuery.includes('insert into "agents"') || normalizedQuery.includes("insert into agents") ) { - const columns = parseInsertColumns(query); + const columns = parseInsertColumns(query, "agents"); const row = columns.reduce( (acc, column, index) => { acc[column] = params[index]; @@ -288,13 +403,86 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { ); agentInserts.push(row); } + if ( + normalizedQuery.includes('update "agents"') || + normalizedQuery.includes("update agents") + ) { + const setColumns = parseUpdateSetColumns(query, "agents"); + const nextValues = setColumns.reduce>( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + const whereClause = extractWhereClause(query); + const whereParams = params.slice(setColumns.length); + let whereIndex = 0; + const ownerFilter = hasFilter(whereClause, "owner_id") + ? String(whereParams[whereIndex++] ?? "") + : undefined; + const idFilter = hasFilter(whereClause, "id") + ? String(whereParams[whereIndex++] ?? 
"") + : undefined; + + for (const row of agentRows) { + if (ownerFilter && row.ownerId !== ownerFilter) { + continue; + } + if (idFilter && row.id !== idFilter) { + continue; + } + + if ( + nextValues.status === "active" || + nextValues.status === "revoked" + ) { + row.status = nextValues.status; + } + if (typeof nextValues.updated_at === "string") { + row.updatedAt = nextValues.updated_at; + } + if ( + typeof nextValues.current_jti === "string" || + nextValues.current_jti === null + ) { + row.currentJti = nextValues.current_jti; + } + } + + agentUpdates.push({ + ...nextValues, + owner_id: ownerFilter, + id: idFilter, + }); + } + if ( + normalizedQuery.includes('insert into "revocations"') || + normalizedQuery.includes("insert into revocations") + ) { + const columns = parseInsertColumns(query, "revocations"); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + revocationInserts.push(row); + } return { success: true } as D1Result; }, } as D1PreparedStatement; }, } as D1Database; - return { database, updates, agentInserts }; + return { + database, + updates, + agentInserts, + agentUpdates, + revocationInserts, + }; } function makeValidPatContext(token = "clw_pat_valid-token-value") { @@ -933,6 +1121,217 @@ describe("GET /v1/agents", () => { }); }); +describe("DELETE /v1/agents/:id", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700200000000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 404 when agent does not exist", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentUpdates, revocationInserts } = createFakeDb([ + 
authRow, + ]); + const agentId = generateUlid(1700200000100); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string; message: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 404 when agent is owned by another human", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700200000200); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700200000201), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("revokes owned agent and inserts revocation record", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000300); + const agentJti = generateUlid(1700200000301); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: 
"2026-04-01T00:00:00.000Z", + currentJti: agentJti, + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(204); + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status: "revoked", + updated_at: expect.any(String), + }); + expect(revocationInserts).toHaveLength(1); + expect(revocationInserts[0]).toMatchObject({ + agent_id: agentId, + jti: agentJti, + reason: null, + revoked_at: expect.any(String), + }); + }); + + it("is idempotent for repeat revoke requests", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000400); + const agentJti = generateUlid(1700200000401); + const { database, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: agentJti, + }, + ], + ); + + const first = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + const second = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(first.status).toBe(204); + expect(second.status).toBe(204); + expect(revocationInserts).toHaveLength(1); + }); + + it("returns 409 when owned agent has missing current_jti", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000500); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: 
makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: null, + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REVOKE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + currentJti: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); +}); + describe("POST /v1/agents", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 56d3617..4954208 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,9 +1,11 @@ +import { generateUlid } from "@clawdentity/protocol"; import { AppError, createHonoErrorHandler, createLogger, createRequestContextMiddleware, createRequestLoggingMiddleware, + nowIso, parseRegistryConfig, type RegistryConfig, shouldExposeVerboseErrors, @@ -11,18 +13,23 @@ import { } from "@clawdentity/sdk"; import { and, desc, eq, lt } from "drizzle-orm"; import { Hono } from "hono"; -import { mapAgentListRow, parseAgentListQuery } from "./agentList.js"; +import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; import { buildAgentRegistration, resolveRegistryIssuer, -} from "./agentRegistration.js"; +} from "./agent-registration.js"; +import { + agentNotFoundError, + invalidAgentRevokeStateError, + parseAgentRevokePath, +} from "./agent-revocation.js"; import { type AuthenticatedHuman, createApiKeyAuth, -} from "./auth/apiKeyAuth.js"; +} from 
"./auth/api-key-auth.js"; import { createDb } from "./db/client.js"; -import { agents } from "./db/schema.js"; -import { resolveRegistrySigner } from "./registrySigner.js"; +import { agents, revocations } from "./db/schema.js"; +import { resolveRegistrySigner } from "./registry-signer.js"; type Bindings = { DB: D1Database; @@ -179,6 +186,69 @@ function createRegistryApp() { return c.json({ agent: registration.agent, ait }, 201); }); + app.delete("/v1/agents/:id", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const agentId = parseAgentRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const matches = await db + .select({ + id: agents.id, + status: agents.status, + current_jti: agents.current_jti, + }) + .from(agents) + .where(and(eq(agents.owner_id, human.id), eq(agents.id, agentId))) + .limit(1); + const existingAgent = matches[0]; + + if (!existingAgent) { + throw agentNotFoundError(); + } + + if (existingAgent.status === "revoked") { + return c.body(null, 204); + } + + const currentJti = existingAgent.current_jti; + if (typeof currentJti !== "string" || currentJti.length === 0) { + throw invalidAgentRevokeStateError({ + environment: config.ENVIRONMENT, + reason: "agent.current_jti is required for revocation", + }); + } + + const revokedAt = nowIso(); + await db.transaction(async (tx) => { + await tx + .update(agents) + .set({ + status: "revoked", + updated_at: revokedAt, + }) + .where(eq(agents.id, existingAgent.id)); + + await tx + .insert(revocations) + .values({ + id: generateUlid(Date.now()), + jti: currentJti, + agent_id: existingAgent.id, + reason: null, + revoked_at: revokedAt, + }) + .onConflictDoNothing({ + target: revocations.jti, + }); + }); + + return c.body(null, 204); + }); + return app; } From ec69ecef16d6e5437e68628ea04ba907e1e94b36 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 17:35:30 +0530 Subject: [PATCH 
028/190] chore(governance): harden T38 deployment gate checks --- .github/workflows/deploy-develop.yml | 18 +- README.md | 1 + apps/registry/AGENTS.md | 1 + apps/registry/src/server.test.ts | 18 +- apps/registry/src/server.ts | 3 +- issues/AGENTS.md | 2 + issues/EXECUTION_PLAN.md | 7 + issues/T00.md | 48 +++++ issues/T01.md | 50 +++++ issues/T02.md | 48 +++++ issues/T03.md | 48 +++++ issues/T04.md | 49 +++++ issues/T05.md | 50 +++++ issues/T06.md | 50 +++++ issues/T07.md | 52 +++++ issues/T08.md | 48 +++++ issues/T09.md | 49 +++++ issues/T10.md | 49 +++++ issues/T11.md | 50 +++++ issues/T12.md | 52 +++++ issues/T13.md | 49 +++++ issues/T14.md | 55 +++++ issues/T15.md | 50 +++++ issues/T16.md | 50 +++++ issues/T17.md | 51 +++++ issues/T18.md | 52 +++++ issues/T19.md | 51 +++++ issues/T20.md | 49 +++++ issues/T21.md | 52 +++++ issues/T22.md | 48 +++++ issues/T23.md | 48 +++++ issues/T24.md | 51 +++++ issues/T25.md | 48 +++++ issues/T26.md | 49 +++++ issues/T27.md | 53 +++++ issues/T28.md | 50 +++++ issues/T29.md | 49 +++++ issues/T30.md | 49 +++++ issues/T31.md | 50 +++++ issues/T32.md | 48 +++++ issues/T33.md | 48 +++++ issues/T34.md | 51 +++++ issues/T35.md | 50 +++++ issues/T36.md | 53 +++++ issues/T37.md | 48 +++++ issues/T38.md | 51 +++++ issues/scripts/AGENTS.md | 18 ++ issues/scripts/validate-ticket-set.mjs | 277 +++++++++++++++++++++++++ package.json | 1 + packages/sdk/src/config.test.ts | 25 +++ packages/sdk/src/config.ts | 1 + 51 files changed, 2313 insertions(+), 5 deletions(-) create mode 100644 issues/T00.md create mode 100644 issues/T01.md create mode 100644 issues/T02.md create mode 100644 issues/T03.md create mode 100644 issues/T04.md create mode 100644 issues/T05.md create mode 100644 issues/T06.md create mode 100644 issues/T07.md create mode 100644 issues/T08.md create mode 100644 issues/T09.md create mode 100644 issues/T10.md create mode 100644 issues/T11.md create mode 100644 issues/T12.md create mode 100644 issues/T13.md create mode 100644 
issues/T14.md create mode 100644 issues/T15.md create mode 100644 issues/T16.md create mode 100644 issues/T17.md create mode 100644 issues/T18.md create mode 100644 issues/T19.md create mode 100644 issues/T20.md create mode 100644 issues/T21.md create mode 100644 issues/T22.md create mode 100644 issues/T23.md create mode 100644 issues/T24.md create mode 100644 issues/T25.md create mode 100644 issues/T26.md create mode 100644 issues/T27.md create mode 100644 issues/T28.md create mode 100644 issues/T29.md create mode 100644 issues/T30.md create mode 100644 issues/T31.md create mode 100644 issues/T32.md create mode 100644 issues/T33.md create mode 100644 issues/T34.md create mode 100644 issues/T35.md create mode 100644 issues/T36.md create mode 100644 issues/T37.md create mode 100644 issues/T38.md create mode 100644 issues/scripts/AGENTS.md create mode 100644 issues/scripts/validate-ticket-set.mjs diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 35715f8..f4bd6d8 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -21,6 +21,7 @@ jobs: CF_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + APP_VERSION: ${{ github.sha }} steps: - name: Checkout uses: actions/checkout@v4 @@ -30,7 +31,7 @@ jobs: - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 20 + node-version: 22 cache: pnpm - name: Install pnpm @@ -69,13 +70,16 @@ jobs: wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql - name: Apply dev migrations and deploy - run: pnpm --filter @clawdentity/registry run deploy:dev + run: | + wrangler --cwd apps/registry d1 migrations apply clawdentity-db-dev --remote --env dev + wrangler --cwd apps/registry deploy --env dev --var APP_VERSION:${APP_VERSION} - name: Verify health endpoint run: | python3 
- <<'PY' - import json, sys, urllib.request, urllib.error + import json, os, sys, urllib.request, urllib.error url = "https://dev.api.clawdentity.com/health" + expected_version = os.environ.get("APP_VERSION", "") try: resp = urllib.request.urlopen(url, timeout=10) except urllib.error.HTTPError as exc: @@ -84,6 +88,14 @@ jobs: data = json.load(resp) if data.get("status") != "ok" or data.get("environment") != "development": raise SystemExit(f"unexpected health payload: {data}") + if not expected_version: + raise SystemExit("APP_VERSION was not set in workflow environment") + if data.get("version") == "0.0.0": + raise SystemExit(f"unexpected placeholder version after deploy: {data}") + if data.get("version") != expected_version: + raise SystemExit( + f"unexpected version: expected {expected_version}, got {data.get('version')}" + ) print("healthcheck passed", data) PY diff --git a/README.md b/README.md index fa1498f..5be336f 100644 --- a/README.md +++ b/README.md @@ -275,6 +275,7 @@ No one shares keys/files between agents. Identity is presented per request. - **PRD:** see [`PRD.md`](./PRD.md) (MVP product requirements + execution plan) - **Issue execution plan:** see [`issues/EXECUTION_PLAN.md`](./issues/EXECUTION_PLAN.md) (deployment-first ordering + waves) - **Issue authoring rules:** see [`issues/AGENTS.md`](./issues/AGENTS.md) (required issue schema + blockers policy) +- **Canonical ticket specs:** `issues/T00.md` through `issues/T38.md` are versioned in-repo and should be validated with `pnpm issues:validate` in local checks and CI. --- diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index af22a67..91a3b06 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -31,6 +31,7 @@ ## Health & Config Readiness - Treat `/health` as the release verification surface: return `status`, the build `version`, and the live `environment`. 
Prefer sourcing `version` from build metadata or an environment override rather than hard-coded `0.0.0` so deployments can be differentiated. +- Use `APP_VERSION` runtime config as the deployment-time version signal. CI deploy workflows should inject a non-placeholder value (for example `${GITHUB_SHA}`) and assert `/health.version` matches it. - Rely on `parseRegistryConfig` early and cache it once per worker—fail-fast with `CONFIG_VALIDATION_FAILED` errors when the schema rejects the runtime bindings. - Cover both happy and failure paths in Vitest (status/headers plus config validation) so downstream tickets can rely on this contract without reintroducing regressions. diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index d214f4b..aa2a996 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -317,7 +317,7 @@ function makeValidPatContext(token = "clw_pat_valid-token-value") { } describe("GET /health", () => { - it("returns status ok", async () => { + it("returns status ok with fallback version", async () => { const res = await app.request( "/health", {}, @@ -333,6 +333,22 @@ describe("GET /health", () => { expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); }); + it("returns APP_VERSION when provided by runtime bindings", async () => { + const res = await createRegistryApp().request( + "/health", + {}, + { DB: {}, ENVIRONMENT: "test", APP_VERSION: "sha-1234567890" }, + ); + + expect(res.status).toBe(200); + const body = await res.json(); + expect(body).toEqual({ + status: "ok", + version: "sha-1234567890", + environment: "test", + }); + }); + it("returns config validation error for invalid environment", async () => { const res = await createRegistryApp().request( "/health", diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 56d3617..299204b 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -27,6 +27,7 @@ import { resolveRegistrySigner } from 
"./registrySigner.js"; type Bindings = { DB: D1Database; ENVIRONMENT: string; + APP_VERSION?: string; REGISTRY_SIGNING_KEY?: string; REGISTRY_SIGNING_KEYS?: string; }; @@ -59,7 +60,7 @@ function createRegistryApp() { const config = getConfig(c.env); return c.json({ status: "ok", - version: "0.0.0", + version: config.APP_VERSION ?? "0.0.0", environment: config.ENVIRONMENT, }); }); diff --git a/issues/AGENTS.md b/issues/AGENTS.md index 4e0ccac..9127b27 100644 --- a/issues/AGENTS.md +++ b/issues/AGENTS.md @@ -22,6 +22,7 @@ Every `T*.md` file must include these sections in this order: - `Dependencies` must list only valid ticket IDs (`T00` format) that exist in this folder. - `Dependencies` must include a `Blockers` line. - Before marking an issue complete, validate that all blockers are resolved. +- Run `pnpm issues:validate` before closing deployment-gate tickets (`T37`, `T38`) or changing dependency/wave metadata. - Do not reorder dependency logic without updating `EXECUTION_PLAN.md`. ## Deployment-First Rule @@ -49,3 +50,4 @@ Every `T*.md` file must include these sections in this order: ## Audit Best Practices - Confirm each feature ticket (`T01`-`T36`) lists `T38` under `Dependencies` and in the `Blockers` line; document any gaps before capturing new wave assignments. - When sequencing or wave assignments evolve, update `EXECUTION_PLAN.md` in the same commit so the deployment-first narrative stays accurate and blockers remain visible to reviewers. +- Use `pnpm issues:validate` as the final audit step after editing any `issues/T*.md` file. diff --git a/issues/EXECUTION_PLAN.md b/issues/EXECUTION_PLAN.md index a654200..c09d934 100644 --- a/issues/EXECUTION_PLAN.md +++ b/issues/EXECUTION_PLAN.md @@ -36,6 +36,13 @@ - UI/docs (`T32`, `T33`): `frontend-design`, `web-design-guidelines`, `hld-generator` ## Validation Scenarios +0. 
Ticket-set validator (authoritative gate check): +- Command: +```bash +pnpm issues:validate +``` +- Expected output: `issues:validate passed (...)` + 1. Schema consistency: - Command: ```bash diff --git a/issues/T00.md b/issues/T00.md new file mode 100644 index 0000000..3f485f1 --- /dev/null +++ b/issues/T00.md @@ -0,0 +1,48 @@ +Source: `T00.md` + +## Goal +Create a monorepo workspace for registry, SDK, CLI, and proxy with consistent tooling. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. + +## Dependencies +- None +- Blockers: None + +## Execution Mode +- Sequential-ready + +## Parallel Wave +- Wave 0 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- pnpm workspace setup (`pnpm-workspace.yaml`, root `package.json` scripts: lint/test/build) +- Shared TS config (`tsconfig.base.json`) +- Folders: `packages/protocol`, `packages/sdk`, `apps/registry`, `apps/cli`, `apps/proxy` +- CI-friendly scripts: `pnpm -r build`, `pnpm -r test` (can be placeholders) + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- `pnpm -r build` runs without errors (even if builds are stubbed) +- Repo has consistent formatting/linting configuration (ESLint/Prettier or Biome) + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T00 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. 
+- Confirm blocker status (None) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T01.md b/issues/T01.md new file mode 100644 index 0000000..bdfe215 --- /dev/null +++ b/issues/T01.md @@ -0,0 +1,50 @@ +Source: `T01.md` + +## Goal +Implement shared encoding + identifier helpers used across SDK/registry/CLI. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. + +## Dependencies +- T00 +- T38 +- Blockers: T00, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 3 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/protocol/src/base64url.ts` (encode/decode) +- `packages/protocol/src/ulid.ts` (generate/parse wrapper) +- `packages/protocol/src/did.ts` (make/parse human + agent DIDs) +- Unit tests for roundtrips and parsing + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- base64url roundtrip tests pass +- DID parsing rejects invalid formats + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T01 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. 
diff --git a/issues/T02.md b/issues/T02.md new file mode 100644 index 0000000..417d6ca --- /dev/null +++ b/issues/T02.md @@ -0,0 +1,48 @@ +Source: `T02.md` + +## Goal +Define the canonical string format used for PoP request signing (stable and unambiguous). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. + +## Dependencies +- T01 +- T38 +- Blockers: T01, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 4 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/protocol/src/http-signing.ts` with `canonicalizeRequest()` +- Tests asserting canonical output is stable (snapshot) + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- Given same inputs, canonical string is identical across runs +- Canonical format includes method/path/ts/nonce/body-hash + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T02 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T01, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T03.md b/issues/T03.md new file mode 100644 index 0000000..609e400 --- /dev/null +++ b/issues/T03.md @@ -0,0 +1,48 @@ +Source: `T03.md` + +## Goal +Implement Ed25519 keypair generation and sign/verify utilities. 
+ +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. + +## Dependencies +- T01 +- T38 +- Blockers: T01, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 4 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/sdk/src/crypto/ed25519.ts` (generate/sign/verify, base64url helpers) +- Unit tests: sign/verify happy path + negative cases + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- Sign/verify works for known vectors or generated keys +- Wrong message/signature fails verification + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T03 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T01, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T04.md b/issues/T04.md new file mode 100644 index 0000000..339cdbe --- /dev/null +++ b/issues/T04.md @@ -0,0 +1,49 @@ +Source: `T04.md` + +## Goal +Define a strict schema for AIT claims and validate risky fields (name, description). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. 
+- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. + +## Dependencies +- T01 +- T38 +- Blockers: T01, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 4 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/protocol/src/ait.ts` (schema + types) +- `validateAgentName()` (strict regex + max length) +- Unit tests for valid/invalid names + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- Invalid names (control chars, too long) are rejected +- Schema covers required claims for MVP + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T04 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T01, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T05.md b/issues/T05.md new file mode 100644 index 0000000..6ef414e --- /dev/null +++ b/issues/T05.md @@ -0,0 +1,50 @@ +Source: `T05.md` + +## Goal +Implement JWT (JWS) encoding/decoding and EdDSA signing for AIT tokens. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T03 +- T04 +- T38 +- Blockers: T03, T04, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 5 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/sdk/src/jwt/ait-jwt.ts` (`signAIT`, `verifyAIT`) +- Support `kid` lookup from registry keys +- Unit tests: sign then verify; wrong issuer/kid fails + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- AIT created by `signAIT` verifies with published public key +- Verifier enforces `alg=EdDSA` and `typ=AIT` + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T05 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T03, T04, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T06.md b/issues/T06.md new file mode 100644 index 0000000..c8019cf --- /dev/null +++ b/issues/T06.md @@ -0,0 +1,50 @@ +Source: `T06.md` + +## Goal +Define signed CRL format and verification logic. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T01 +- T03 +- T38 +- Blockers: T01, T03, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 5 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/protocol/src/crl.ts` schema +- `packages/sdk/src/jwt/crl-jwt.ts` (`signCRL`, `verifyCRL`) +- Unit tests: tampering invalidates signature + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- CRL signature verification fails on tampered payload +- Schema enforces expected fields (iss, iat, revocations) + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T06 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T01, T03, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T07.md b/issues/T07.md new file mode 100644 index 0000000..7febd03 --- /dev/null +++ b/issues/T07.md @@ -0,0 +1,52 @@ +Source: `T07.md` + +## Goal +Sign outbound HTTP requests and verify inbound requests using PoP headers (replay-resistant). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T02 +- T03 +- T04 +- T38 +- Blockers: T02, T03, T04, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 5 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/sdk/src/http/sign.ts` (hash body, produce headers) +- `packages/sdk/src/http/verify.ts` (verify headers + proof) +- Bind signature to method/path/timestamp/nonce/body hash +- Unit tests for verification success/failure + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- Signed request verifies successfully +- Altering method/path/body/timestamp causes verification failure + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T07 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T02, T03, T04, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T08.md b/issues/T08.md new file mode 100644 index 0000000..a8f68a1 --- /dev/null +++ b/issues/T08.md @@ -0,0 +1,48 @@ +Source: `T08.md` + +## Goal +Prevent request replay by tracking seen nonces per agent DID within a TTL window. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T07 +- T38 +- Blockers: T07, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 6 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/sdk/src/security/nonce-cache.ts` (TTL store keyed by agent+nonce) +- Unit tests: duplicate nonce within TTL is rejected + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- Second request with same nonce is rejected +- Expired nonces are pruned (or treated as unseen) + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T08 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T07, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T09.md b/issues/T09.md new file mode 100644 index 0000000..5246027 --- /dev/null +++ b/issues/T09.md @@ -0,0 +1,49 @@ +Source: `T09.md` + +## Goal +Implement a CRL cache that refreshes periodically and reports staleness. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T06 +- T38 +- Blockers: T06, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 6 + +## Required Skills +- `code-quality` +- `testing-framework` +- `validation-schema` + +## Deliverables +- `packages/sdk/src/crl/cache.ts` (`refreshIfStale`, `isRevoked`) +- Config: refresh interval, max age, fail-open/fail-closed +- Unit tests for stale behavior and revoked lookup + +## Refactor Opportunities +- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. + +## Definition of Done +- Revoked `jti` is rejected +- Stale CRL triggers refresh attempt; warnings surfaced + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T09 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T06, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T10.md b/issues/T10.md new file mode 100644 index 0000000..73cea8e --- /dev/null +++ b/issues/T10.md @@ -0,0 +1,49 @@ +Source: `T10.md` + +## Goal +Create minimal database schema for humans, agents, revocations, and PAT API keys. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T00 +- T38 +- Blockers: T00, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 3 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- SQL migrations for: `humans`, `agents`, `revocations`, `api_keys` +- Indexes on agent owner/status and revocations jti + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Migration applies cleanly on a fresh DB +- Tables match the PRD MVP needs + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T10 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T11.md b/issues/T11.md new file mode 100644 index 0000000..0d3cc5e --- /dev/null +++ b/issues/T11.md @@ -0,0 +1,50 @@ +Source: `T11.md` + +## Goal +Bootstrap the registry API service with health endpoint and configuration. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T10 +- T38 +- Blockers: T10, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 4 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- `apps/registry/src/server.ts` server entry +- `GET /health` returns JSON + 200 +- Centralized config loading (env validation) + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Local dev server starts successfully +- Health endpoint returns expected JSON + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T11 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T12.md b/issues/T12.md new file mode 100644 index 0000000..e0556e6 --- /dev/null +++ b/issues/T12.md @@ -0,0 +1,52 @@ +Source: `T12.md` + +## Goal +Implement simple PAT authentication for MVP (Authorization: Bearer ). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T11 +- T10 +- T38 +- Blockers: T11, T10, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 5 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- `apps/registry/src/auth/apiKeyAuth.ts` +- Token hashing + constant-time compare +- Context injection (`ctx.human`) + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Valid PAT authenticates successfully +- Invalid/missing PAT returns 401 +- Tests cover both cases + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T12 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T11, T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T13.md b/issues/T13.md new file mode 100644 index 0000000..ebb01d6 --- /dev/null +++ b/issues/T13.md @@ -0,0 +1,49 @@ +Source: `T13.md` + +## Goal +Expose registry signing public keys for offline verification. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T11 +- T38 +- Blockers: T11, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 5 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- `GET /.well-known/claw-keys.json` endpoint +- Response includes keys: kid/alg/crv/x/status +- Cache headers (reasonable) + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- SDK can fetch keys and verify a signed AIT using them + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T13 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T11, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T14.md b/issues/T14.md new file mode 100644 index 0000000..470a400 --- /dev/null +++ b/issues/T14.md @@ -0,0 +1,55 @@ +Source: `T14.md` + +## Goal +Allow an authenticated human to register an agent public key and receive a registry-signed AIT. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T12 +- T13 +- T10 +- T05 +- T38 +- Blockers: T12, T13, T10, T05, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 6 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- Endpoint `POST /v1/agents` (validates name/framework/publicKey/ttlDays) +- Stores agent row with `current_jti` + expiry +- Signs AIT with registry issuer key +- Returns `{ agent, ait }` + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Creating an agent returns an AIT that SDK verifies locally +- Name validation rejects unsafe strings +- Only authenticated humans can create agents + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T14 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T12, T13, T10, T05, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T15.md b/issues/T15.md new file mode 100644 index 0000000..1b42c8d --- /dev/null +++ b/issues/T15.md @@ -0,0 +1,50 @@ +Source: `T15.md` + +## Goal +List agents owned by the authenticated human. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T12 +- T10 +- T38 +- Blockers: T12, T10, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 6 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- Endpoint `GET /v1/agents` with filters (status/framework) optional +- Returns minimal fields (id/did/name/status/expires) + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Only returns caller-owned agents +- Pagination pattern established (even if minimal) + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T15 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T12, T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T16.md b/issues/T16.md new file mode 100644 index 0000000..19af68e --- /dev/null +++ b/issues/T16.md @@ -0,0 +1,50 @@ +Source: `T16.md` + +## Goal +Revoke an agent (kill switch) and publish revocation via CRL. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T14 +- T38 +- Blockers: T14, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 7 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- Endpoint `DELETE /v1/agents/:id` +- Transaction: mark agent revoked + insert revocation for current_jti +- Idempotent behavior for repeat revoke + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Revoked agent’s `current_jti` appears in CRL +- SDK/proxy rejects revoked AIT + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T16 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T14, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T17.md b/issues/T17.md new file mode 100644 index 0000000..04d23b6 --- /dev/null +++ b/issues/T17.md @@ -0,0 +1,51 @@ +Source: `T17.md` + +## Goal +Reissue an AIT for the same agent and revoke the previous token to enforce 'one active token'. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T14 +- T16 +- T38 +- Blockers: T14, T16, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 8 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- Endpoint `POST /v1/agents/:id/reissue` +- Revokes previous `current_jti` +- Issues new AIT with new `jti` and updates agent row + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Old AIT becomes invalid due to CRL revocation +- New AIT verifies successfully + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T17 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T14, T16, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T18.md b/issues/T18.md new file mode 100644 index 0000000..542e7ec --- /dev/null +++ b/issues/T18.md @@ -0,0 +1,52 @@ +Source: `T18.md` + +## Goal +Serve a signed CRL snapshot (MVP: full list). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+

## Dependencies
- T16
- T06
- T13
- T38
- Blockers: T16, T06, T13, T38

## Execution Mode
- Sequential-ready
- Parallel-ready

## Parallel Wave
- Wave 8

## Required Skills
- `database`
- `api-standards`
- `identity-service`
- `error-handling`

## Deliverables
- Endpoint `GET /v1/crl` returning `{ crl: <signed CRL> }`
- CRL includes all revocations (MVP)
- Cache headers set appropriately

## Refactor Opportunities
- Centralize request validation and authorization checks into reusable middleware/utilities.

## Definition of Done
- SDK verifies CRL signature
- CRL contains expected revocations

## Validation Steps
- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces.
- Run `pnpm -r test`; expected: all tests pass, including tests covering T18 acceptance criteria.
- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces.
- Confirm blocker status (T16, T06, T13, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state.
diff --git a/issues/T19.md b/issues/T19.md
new file mode 100644
index 0000000..bedbafc
--- /dev/null
+++ b/issues/T19.md
@@ -0,0 +1,51 @@
+Source: `T19.md`

## Goal
Resolve an agent ID to a public profile (no PII).

## In Scope
- Implement the ticket objective described in `Goal`.
- Complete all items listed in `Deliverables`.
- Keep changes compatible with the declared dependency chain.

## Out of Scope
- Features not explicitly required by this ticket.
- Reordering dependencies without updating the issue dependency graph.
- Cross-ticket changes that are not required for this ticket to pass.
+ +## Dependencies +- T11 +- T10 +- T38 +- Blockers: T11, T10, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 5 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- Endpoint `GET /v1/resolve/:id` +- Returns `{ did, name, framework, status, ownerDid }` + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- No auth required +- Does not leak email or private user data +- Rate limiting enabled (basic) + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T19 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T11, T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T20.md b/issues/T20.md new file mode 100644 index 0000000..89efc9b --- /dev/null +++ b/issues/T20.md @@ -0,0 +1,49 @@ +Source: `T20.md` + +## Goal +Create CLI framework and local config storage. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T00 +- T38 +- Blockers: T00, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 3 + +## Required Skills +- `command-development` +- `code-quality` +- `testing-framework` + +## Deliverables +- `apps/cli/src/index.ts` with command router +- Config at `~/.clawdentity/config.json` (registryUrl, apiKey) +- `claw --help` and `claw config set` + +## Refactor Opportunities +- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. + +## Definition of Done +- CLI runs on macOS/Linux +- Config read/write works + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T20 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T21.md b/issues/T21.md new file mode 100644 index 0000000..c461906 --- /dev/null +++ b/issues/T21.md @@ -0,0 +1,52 @@ +Source: `T21.md` + +## Goal +Generate keypair locally, register public key, and save AIT + key to disk. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+

## Dependencies
- T20
- T14
- T03
- T38
- Blockers: T20, T14, T03, T38

## Execution Mode
- Sequential-ready
- Parallel-ready

## Parallel Wave
- Wave 7

## Required Skills
- `command-development`
- `code-quality`
- `testing-framework`

## Deliverables
- Command `claw agent create <name>`
- Generates Ed25519 keypair
- Calls registry `POST /v1/agents`
- Writes identity files under `~/.clawdentity/agents/<name>/`

## Refactor Opportunities
- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable.

## Definition of Done
- Command prints agent DID + expiry
- Files created with secure permissions (0600 where applicable)

## Validation Steps
- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces.
- Run `pnpm -r test`; expected: all tests pass, including tests covering T21 acceptance criteria.
- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces.
- Confirm blocker status (T20, T14, T03, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state.
diff --git a/issues/T22.md b/issues/T22.md
new file mode 100644
index 0000000..ea545af
--- /dev/null
+++ b/issues/T22.md
@@ -0,0 +1,48 @@
+Source: `T22.md`

## Goal
Print decoded AIT fields for an existing local identity.

## In Scope
- Implement the ticket objective described in `Goal`.
- Complete all items listed in `Deliverables`.
- Keep changes compatible with the declared dependency chain.

## Out of Scope
- Features not explicitly required by this ticket.
- Reordering dependencies without updating the issue dependency graph.
- Cross-ticket changes that are not required for this ticket to pass.
+

## Dependencies
- T20
- T05
- T38
- Blockers: T20, T05, T38

## Execution Mode
- Sequential-ready
- Parallel-ready

## Parallel Wave
- Wave 6

## Required Skills
- `command-development`
- `code-quality`
- `testing-framework`

## Deliverables
- Command `claw agent inspect <name>`
- Displays: did, owner, exp, kid, pubkey, framework

## Refactor Opportunities
- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable.

## Definition of Done
- Works offline (no registry call needed)

## Validation Steps
- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces.
- Run `pnpm -r test`; expected: all tests pass, including tests covering T22 acceptance criteria.
- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces.
- Confirm blocker status (T20, T05, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state.
diff --git a/issues/T23.md b/issues/T23.md
new file mode 100644
index 0000000..302b77c
--- /dev/null
+++ b/issues/T23.md
@@ -0,0 +1,48 @@
+Source: `T23.md`

## Goal
Revoke an agent via registry and print confirmation.

## In Scope
- Implement the ticket objective described in `Goal`.
- Complete all items listed in `Deliverables`.
- Keep changes compatible with the declared dependency chain.

## Out of Scope
- Features not explicitly required by this ticket.
- Reordering dependencies without updating the issue dependency graph.
- Cross-ticket changes that are not required for this ticket to pass.
+

## Dependencies
- T20
- T16
- T38
- Blockers: T20, T16, T38

## Execution Mode
- Sequential-ready
- Parallel-ready

## Parallel Wave
- Wave 8

## Required Skills
- `command-development`
- `code-quality`
- `testing-framework`

## Deliverables
- Command `claw agent revoke <name>`
- Calls `DELETE /v1/agents/:id`

## Refactor Opportunities
- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable.

## Definition of Done
- Revocation succeeds and is visible via CRL after refresh

## Validation Steps
- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces.
- Run `pnpm -r test`; expected: all tests pass, including tests covering T23 acceptance criteria.
- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces.
- Confirm blocker status (T20, T16, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state.
diff --git a/issues/T24.md b/issues/T24.md
new file mode 100644
index 0000000..647ddfa
--- /dev/null
+++ b/issues/T24.md
@@ -0,0 +1,51 @@
+Source: `T24.md`

## Goal
Verify an AIT locally (signature + expiry + CRL).

## In Scope
- Implement the ticket objective described in `Goal`.
- Complete all items listed in `Deliverables`.
- Keep changes compatible with the declared dependency chain.

## Out of Scope
- Features not explicitly required by this ticket.
- Reordering dependencies without updating the issue dependency graph.
- Cross-ticket changes that are not required for this ticket to pass.
+

## Dependencies
- T20
- T05
- T09
- T38
- Blockers: T20, T05, T09, T38

## Execution Mode
- Sequential-ready
- Parallel-ready

## Parallel Wave
- Wave 7

## Required Skills
- `command-development`
- `code-quality`
- `testing-framework`

## Deliverables
- Command `claw verify <token>`
- Fetches keys/CRL if needed; caches them
- Outputs ✅/❌ with reason

## Refactor Opportunities
- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable.

## Definition of Done
- Valid token verifies
- Revoked token fails with reason 'revoked'

## Validation Steps
- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces.
- Run `pnpm -r test`; expected: all tests pass, including tests covering T24 acceptance criteria.
- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces.
- Confirm blocker status (T20, T05, T09, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state.
diff --git a/issues/T25.md b/issues/T25.md
new file mode 100644
index 0000000..3cbbdd2
--- /dev/null
+++ b/issues/T25.md
@@ -0,0 +1,48 @@
+Source: `T25.md`

## Goal
Define proxy runtime configuration and validation.

## In Scope
- Implement the ticket objective described in `Goal`.
- Complete all items listed in `Deliverables`.
- Keep changes compatible with the declared dependency chain.

## Out of Scope
- Features not explicitly required by this ticket.
- Reordering dependencies without updating the issue dependency graph.
- Cross-ticket changes that are not required for this ticket to pass.
+ +## Dependencies +- T00 +- T38 +- Blockers: T00, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 3 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- `apps/proxy/src/config.ts` with schema and defaults +- Config: listenPort, openclawBaseUrl, openclawHookToken, registryUrl, allowList, crl refresh, stale policy + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- Proxy refuses to start with invalid config + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T25 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T26.md b/issues/T26.md new file mode 100644 index 0000000..89be86f --- /dev/null +++ b/issues/T26.md @@ -0,0 +1,49 @@ +Source: `T26.md` + +## Goal +Start proxy server with health endpoint and basic logging. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T25 +- T38 +- Blockers: T25, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 4 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- `apps/proxy/src/server.ts` +- `GET /health` +- Structured request logging + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- Proxy starts and responds to /health + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T26 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T25, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T27.md b/issues/T27.md new file mode 100644 index 0000000..58208be --- /dev/null +++ b/issues/T27.md @@ -0,0 +1,53 @@ +Source: `T27.md` + +## Goal +Verify Clawdentity auth headers on inbound requests (AIT + CRL + PoP + nonce). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T26 +- T07 +- T09 +- T13 +- T38 +- Blockers: T26, T07, T09, T13, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 7 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- Middleware integrating SDK verifier +- Returns 401 on invalid/expired/revoked/proof failures +- Maintains nonce replay cache + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- Invalid requests rejected with 401 +- Replay requests rejected (nonce reuse) + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T27 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T26, T07, T09, T13, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T28.md b/issues/T28.md new file mode 100644 index 0000000..ca6221a --- /dev/null +++ b/issues/T28.md @@ -0,0 +1,50 @@ +Source: `T28.md` + +## Goal +Enforce allowlist by agent DID and/or owner DID after verification. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T27 +- T38 +- Blockers: T27, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 8 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- Allowlist config: owners[], agents[] +- Optional `allowAllVerified` (default false) +- Return 403 when verified but not allowed + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- Verified-but-not-allowed returns 403 +- Allowed callers proceed to forwarding + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T28 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T27, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T29.md b/issues/T29.md new file mode 100644 index 0000000..1831ed4 --- /dev/null +++ b/issues/T29.md @@ -0,0 +1,49 @@ +Source: `T29.md` + +## Goal +Forward verified requests to OpenClaw webhooks using OpenClaw hook token internally. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+

## Dependencies
- T28
- T38
- Blockers: T28, T38

## Execution Mode
- Sequential-ready
- Parallel-ready

## Parallel Wave
- Wave 9

## Required Skills
- `api-client`
- `data-fetching`
- `logging`
- `error-handling`

## Deliverables
- Proxy route `POST /hooks/agent`
- Forward JSON body to `${openclawBaseUrl}/hooks/agent`
- Add header `x-openclaw-token: <hook token>`

## Refactor Opportunities
- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities.

## Definition of Done
- Valid call yields OpenClaw 202 for `/hooks/agent`

## Validation Steps
- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces.
- Run `pnpm -r test`; expected: all tests pass, including tests covering T29 acceptance criteria.
- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces.
- Confirm blocker status (T28, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state.
diff --git a/issues/T30.md b/issues/T30.md
new file mode 100644
index 0000000..d7c8c41
--- /dev/null
+++ b/issues/T30.md
@@ -0,0 +1,49 @@
+Source: `T30.md`

## Goal
Add basic rate limiting per verified agent DID to reduce abuse.

## In Scope
- Implement the ticket objective described in `Goal`.
- Complete all items listed in `Deliverables`.
- Keep changes compatible with the declared dependency chain.

## Out of Scope
- Features not explicitly required by this ticket.
- Reordering dependencies without updating the issue dependency graph.
- Cross-ticket changes that are not required for this ticket to pass.
+ +## Dependencies +- T27 +- T38 +- Blockers: T27, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 8 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- In-memory limiter (default 60 req/min per agent DID) +- Return 429 when exceeded + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- Excess requests return 429 +- Limit is configurable + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T30 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T27, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T31.md b/issues/T31.md new file mode 100644 index 0000000..330d7a7 --- /dev/null +++ b/issues/T31.md @@ -0,0 +1,50 @@ +Source: `T31.md` + +## Goal +Optionally prepend a sanitized identity block into webhook `message` (off by default). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T27 +- T38 +- Blockers: T27, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 8 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- Config `injectIdentityIntoMessage: boolean` (default false) +- Sanitize identity fields (no control chars, length limits) +- Document clearly as optional + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- When enabled, OpenClaw receives augmented message +- When disabled, payload unchanged + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T31 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T27, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T32.md b/issues/T32.md new file mode 100644 index 0000000..889e187 --- /dev/null +++ b/issues/T32.md @@ -0,0 +1,48 @@ +Source: `T32.md` + +## Goal +Provide a minimal web UI for mobile-friendly agent revocation (kill switch). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T15 +- T16 +- T38 +- Blockers: T15, T16, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 8 + +## Required Skills +- `frontend-design` +- `web-design-guidelines` +- `hld-generator` + +## Deliverables +- Minimal app (Next.js or simple UI) for login/auth and listing agents +- Revoke button triggers registry revoke + +## Refactor Opportunities +- Reuse common UI/documentation templates to reduce repeated structure and maintenance overhead. + +## Definition of Done +- Operator can revoke from mobile browser and see effect via CRL + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T32 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T15, T16, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T33.md b/issues/T33.md new file mode 100644 index 0000000..9e9c5ef --- /dev/null +++ b/issues/T33.md @@ -0,0 +1,48 @@ +Source: `T33.md` + +## Goal +Write a reproducible README walkthrough for the end-to-end MVP demo. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T29 +- T21 +- T24 +- T38 +- Blockers: T29, T21, T24, T38 + +## Execution Mode +- Sequential-ready + +## Parallel Wave +- Wave 10 + +## Required Skills +- `frontend-design` +- `web-design-guidelines` +- `hld-generator` + +## Deliverables +- Step-by-step: enable OpenClaw hooks, run proxy, create agent, send signed call, revoke and retry +- Provide curl examples and expected outputs + +## Refactor Opportunities +- Reuse common UI/documentation templates to reduce repeated structure and maintenance overhead. + +## Definition of Done +- A new machine can reproduce first verified call in < 10 minutes + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T33 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T29, T21, T24, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T34.md b/issues/T34.md new file mode 100644 index 0000000..1d38a14 --- /dev/null +++ b/issues/T34.md @@ -0,0 +1,51 @@ +Source: `T34.md` + +## Goal +Enable endpoint discovery by allowing agents to publish a callable endpoint (`gateway_hint`). + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T14 +- T19 +- T38 +- Blockers: T14, T19, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 7 + +## Required Skills +- `database` +- `api-standards` +- `identity-service` +- `error-handling` + +## Deliverables +- DB migration: add `gateway_hint` to agents table +- Endpoint `PATCH /v1/agents/:id` to set/unset gateway_hint with strict URL validation +- `GET /v1/resolve/:id` includes `gatewayHint` when present + +## Refactor Opportunities +- Centralize request validation and authorization checks into reusable middleware/utilities. + +## Definition of Done +- Valid gateway_hint is stored and returned via resolve +- Invalid URLs are rejected with 400 + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T34 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T14, T19, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T35.md b/issues/T35.md new file mode 100644 index 0000000..67a2609 --- /dev/null +++ b/issues/T35.md @@ -0,0 +1,50 @@ +Source: `T35.md` + +## Goal +Make sharing easy: print a copy/paste contact card (verify URL + endpoint) without sharing secrets. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T34 +- T20 +- T38 +- Blockers: T34, T20, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 8 + +## Required Skills +- `command-development` +- `code-quality` +- `testing-framework` + +## Deliverables +- Command `claw share [--json] [--qr]` +- Fetches gateway_hint from registry if not present locally +- Prints contact card with DID + verify URL + endpoint + +## Refactor Opportunities +- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. + +## Definition of Done +- Contact card prints even if endpoint missing (with guidance) +- `--json` outputs valid machine-readable JSON + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T35 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T34, T20, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T36.md b/issues/T36.md new file mode 100644 index 0000000..c4ceb58 --- /dev/null +++ b/issues/T36.md @@ -0,0 +1,53 @@ +Source: `T36.md` + +## Goal +Add pairing code flow so operators can approve first contact and auto-update allowlist. + +## In Scope +- Implement the ticket objective described in `Goal`. +- Complete all items listed in `Deliverables`. +- Keep changes compatible with the declared dependency chain. + +## Out of Scope +- Features not explicitly required by this ticket. +- Reordering dependencies without updating the issue dependency graph. +- Cross-ticket changes that are not required for this ticket to pass. 
+ +## Dependencies +- T27 +- T28 +- T34 +- T38 +- Blockers: T27, T28, T34, T38 + +## Execution Mode +- Sequential-ready +- Parallel-ready + +## Parallel Wave +- Wave 9 + +## Required Skills +- `api-client` +- `data-fetching` +- `logging` +- `error-handling` + +## Deliverables +- Persistent allowlist file store (`allowlist.json`) +- `POST /pair/start` protected by admin token; returns time-limited code +- `POST /pair/confirm` requires Claw auth; adds caller owner DID (or agent DID) to allowlist + +## Refactor Opportunities +- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. + +## Definition of Done +- Pairing code expires and cannot be reused +- Paired caller can send /hooks/agent successfully after pairing +- Allowlist survives proxy restarts + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T36 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T27, T28, T34, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T37.md b/issues/T37.md new file mode 100644 index 0000000..a6cf57f --- /dev/null +++ b/issues/T37.md @@ -0,0 +1,48 @@ +Source: `T37.md` + +## Goal +Define deployment scaffolding for the support plugin so deployments are repeatable before feature delivery. + +## In Scope +- Create deployment scaffolding artifacts (environment template, release/deploy workflow outline, and runbook placeholders). +- Define required secrets and configuration contract for staging and production. +- Establish health check and rollback expectations for the first deployment. + +## Out of Scope +- Implementing feature endpoints or business logic. +- Production traffic cutover without passing staging checks. 
+- Environment-specific tweaks not documented in the scaffolding contract. + +## Dependencies +- T00 +- Blockers: T00 + +## Execution Mode +- Sequential-ready + +## Parallel Wave +- Wave 1 + +## Required Skills +- `deployment` +- `configuration-management` +- `code-quality` + +## Deliverables +- Deployment scaffolding checklist document under `issues/` references. +- CI/CD deployment workflow skeleton with explicit staging gate. +- Environment variable contract listing required secrets and defaults. + +## Refactor Opportunities +- Consolidate deployment configuration into shared templates to avoid environment drift. + +## Definition of Done +- Deployment scaffolding is documented and versioned. +- CI can validate presence/shape of required deployment configuration. +- Team has a clear staging deploy path before feature work starts. + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including checks tied to T37 scaffolding changes. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Confirm blocker status (T00) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T38.md b/issues/T38.md new file mode 100644 index 0000000..cfef7b7 --- /dev/null +++ b/issues/T38.md @@ -0,0 +1,51 @@ +Source: `T38.md` + +## Goal +Deploy the scaffolded support plugin baseline to staging and verify it is healthy before feature implementation. + +## In Scope +- Execute staging deployment using the scaffolding from T37. +- Verify health checks, basic connectivity, and rollback path. +- Capture deployment verification evidence and operator checklist. + +## Out of Scope +- Shipping feature tickets prior to staging deploy validation. +- Performance tuning beyond baseline health checks. +- Non-essential infrastructure changes unrelated to baseline deployment. 
+ +## Dependencies +- T37 +- Blockers: T37 + +## Execution Mode +- Sequential-ready + +## Parallel Wave +- Wave 2 + +## Required Skills +- `deployment` +- `observability` +- `testing-framework` + +## Deliverables +- Successful staging deployment run using the agreed workflow. +- Health verification checklist with pass/fail evidence. +- Rollback procedure validated at least once in non-production. + +## Refactor Opportunities +- Automate repetitive deployment verification steps into reusable CI jobs/scripts. + +## Definition of Done +- Staging deployment is successful and repeatable. +- Health checks pass for all baseline services. +- Deployment sign-off recorded; feature tickets may proceed. + +## Validation Steps +- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. +- Run `pnpm -r test`; expected: all tests pass, including tests covering T38 acceptance criteria. +- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. +- Run deployment workflow in staging; expected: deployment completes without manual patching. +- Execute health checks; expected: all required endpoints return healthy status. +- Execute rollback drill; expected: previous stable version restores successfully. +- Confirm blocker status (T37) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/scripts/AGENTS.md b/issues/scripts/AGENTS.md new file mode 100644 index 0000000..35ec73a --- /dev/null +++ b/issues/scripts/AGENTS.md @@ -0,0 +1,18 @@ +# AGENTS.md (issues/scripts) + +## Purpose +- Keep issue-governance scripts deterministic and local-only. +- Ensure dependency/order checks remain stable as backlog metadata evolves. + +## Script Rules +- Scripts in this folder must run without network access. +- Prefer read-only checks that fail with actionable messages. +- Treat `issues/T00.md` through `issues/T38.md` as canonical ticket inputs. 
+ +## Validation Expectations +- `validate-ticket-set.mjs` must verify schema order, dependency integrity, deployment gate (`T38`) requirements, sequential order, and parallel-wave safety. +- Exit with non-zero status on any violation and print each violation on its own line. + +## Maintenance +- When `issues/EXECUTION_PLAN.md` wave/order format changes, update parser logic in the same commit. +- Keep checks strict enough to block drift, but avoid coupling to cosmetic markdown formatting. diff --git a/issues/scripts/validate-ticket-set.mjs b/issues/scripts/validate-ticket-set.mjs new file mode 100644 index 0000000..96f1ed1 --- /dev/null +++ b/issues/scripts/validate-ticket-set.mjs @@ -0,0 +1,277 @@ +import { readFileSync } from "node:fs"; +import { existsSync } from "node:fs"; +import { resolve } from "node:path"; + +const REQUIRED_HEADINGS = [ + "Goal", + "In Scope", + "Out of Scope", + "Dependencies", + "Execution Mode", + "Parallel Wave", + "Required Skills", + "Deliverables", + "Refactor Opportunities", + "Definition of Done", + "Validation Steps", +]; + +const TICKET_CODES = Array.from({ length: 39 }, (_, index) => + `T${String(index).padStart(2, "0")}`, +); + +const projectRoot = process.cwd(); +const issuesDir = resolve(projectRoot, "issues"); +const executionPlanPath = resolve(issuesDir, "EXECUTION_PLAN.md"); + +const errors = []; +const dependencyGraph = new Map(); +const blockerGraph = new Map(); + +function readUtf8(path) { + return readFileSync(path, "utf8"); +} + +function parseHeadings(markdown) { + return [...markdown.matchAll(/^##\s+(.+)$/gm)].map((match) => ({ + heading: match[1]?.trim() ?? "", + index: match.index ?? -1, + fullLength: match[0].length, + })); +} + +function extractSection(markdown, headings, sectionName) { + const sectionIndex = headings.findIndex( + (entry) => entry.heading === sectionName, + ); + if (sectionIndex < 0) { + return null; + } + + const sectionStart = + (headings[sectionIndex]?.index ?? 
0) + + (headings[sectionIndex]?.fullLength ?? 0); + const sectionEnd = + sectionIndex < headings.length - 1 + ? (headings[sectionIndex + 1]?.index ?? markdown.length) + : markdown.length; + + return markdown.slice(sectionStart, sectionEnd).trim(); +} + +function parseTicketReferences(text) { + return new Set(text.match(/\bT\d{2}\b/g) ?? []); +} + +for (const code of TICKET_CODES) { + const ticketPath = resolve(issuesDir, `${code}.md`); + if (!existsSync(ticketPath)) { + errors.push(`Missing ticket file: issues/${code}.md`); + continue; + } + + const markdown = readUtf8(ticketPath); + const headings = parseHeadings(markdown); + let previousRequiredIndex = -1; + + for (const heading of REQUIRED_HEADINGS) { + const currentIndex = headings.findIndex((entry) => entry.heading === heading); + if (currentIndex < 0) { + errors.push(`issues/${code}.md is missing section: "## ${heading}"`); + continue; + } + if (currentIndex < previousRequiredIndex) { + errors.push( + `issues/${code}.md has out-of-order section: "## ${heading}"`, + ); + } + previousRequiredIndex = currentIndex; + } + + const dependenciesSection = extractSection(markdown, headings, "Dependencies"); + if (dependenciesSection === null) { + dependencyGraph.set(code, new Set()); + blockerGraph.set(code, new Set()); + continue; + } + + const blockersMatch = dependenciesSection.match( + /^\s*-\s*Blockers:\s*(.+)$/im, + ); + if (!blockersMatch) { + errors.push(`issues/${code}.md is missing a "- Blockers:" line`); + } + + const dependencyIds = new Set(); + for (const line of dependenciesSection.split("\n")) { + const trimmed = line.trim(); + if (!trimmed.startsWith("-")) { + continue; + } + if (/^-\s*Blockers:/i.test(trimmed)) { + continue; + } + for (const ticketId of parseTicketReferences(trimmed)) { + dependencyIds.add(ticketId); + } + } + + const blockerIds = blockersMatch + ? parseTicketReferences(blockersMatch[1] ?? 
"") + : new Set(); + + dependencyGraph.set(code, dependencyIds); + blockerGraph.set(code, blockerIds); +} + +for (const [ticket, dependencies] of dependencyGraph.entries()) { + for (const dep of dependencies) { + if (!TICKET_CODES.includes(dep)) { + errors.push(`issues/${ticket}.md references unknown dependency: ${dep}`); + } + } +} + +for (const [ticket, blockers] of blockerGraph.entries()) { + for (const blocker of blockers) { + if (!TICKET_CODES.includes(blocker)) { + errors.push(`issues/${ticket}.md references unknown blocker: ${blocker}`); + } + } +} + +for (let ticketNumber = 1; ticketNumber <= 36; ticketNumber += 1) { + const ticket = `T${String(ticketNumber).padStart(2, "0")}`; + const dependencies = dependencyGraph.get(ticket) ?? new Set(); + const blockers = blockerGraph.get(ticket) ?? new Set(); + + if (!dependencies.has("T38")) { + errors.push(`issues/${ticket}.md must include T38 under Dependencies`); + } + if (!blockers.has("T38")) { + errors.push(`issues/${ticket}.md must include T38 in Blockers`); + } +} + +const dfsState = new Map(); +const recursionStack = []; + +function visit(ticket) { + const state = dfsState.get(ticket) ?? 0; + if (state === 1) { + const cycleStart = recursionStack.indexOf(ticket); + const cyclePath = [...recursionStack.slice(cycleStart), ticket].join(" -> "); + errors.push(`Dependency cycle detected: ${cyclePath}`); + return; + } + if (state === 2) { + return; + } + + dfsState.set(ticket, 1); + recursionStack.push(ticket); + for (const dep of dependencyGraph.get(ticket) ?? []) { + if (TICKET_CODES.includes(dep)) { + visit(dep); + } + } + recursionStack.pop(); + dfsState.set(ticket, 2); +} + +for (const ticket of TICKET_CODES) { + visit(ticket); +} + +const executionPlan = readUtf8(executionPlanPath); +const sequenceMatch = executionPlan.match(/`(T\d{2}\s*->[^`]+)`/); +if (!sequenceMatch) { + errors.push("issues/EXECUTION_PLAN.md is missing canonical sequential order"); +} + +const sequentialOrder = sequenceMatch + ? 
sequenceMatch[1] + .split("->") + .map((item) => item.trim()) + .filter((item) => item.length > 0) + : []; +const sequentialIndex = new Map(); +for (const [index, ticket] of sequentialOrder.entries()) { + if (sequentialIndex.has(ticket)) { + errors.push(`Sequential order duplicates ticket ${ticket}`); + } else { + sequentialIndex.set(ticket, index); + } +} + +for (const ticket of TICKET_CODES) { + if (!sequentialIndex.has(ticket)) { + errors.push(`Sequential order is missing ticket ${ticket}`); + } +} + +for (const [ticket, dependencies] of dependencyGraph.entries()) { + const ticketOrder = sequentialIndex.get(ticket); + if (ticketOrder === undefined) { + continue; + } + for (const dep of dependencies) { + const dependencyOrder = sequentialIndex.get(dep); + if (dependencyOrder === undefined) { + continue; + } + if (dependencyOrder >= ticketOrder) { + errors.push( + `Sequential order violation: ${ticket} appears before dependency ${dep}`, + ); + } + } +} + +const waveMatches = [...executionPlan.matchAll(/^- Wave \d+:\s*`([^`]+)`$/gm)]; +const waveByTicket = new Map(); +for (const [waveIndex, match] of waveMatches.entries()) { + const tickets = (match[1] ?? 
"") + .split(",") + .map((ticket) => ticket.trim()) + .filter((ticket) => ticket.length > 0); + for (const ticket of tickets) { + if (waveByTicket.has(ticket)) { + errors.push( + `Parallel waves duplicate ticket ${ticket} (wave ${waveByTicket.get(ticket)} and wave ${waveIndex})`, + ); + continue; + } + waveByTicket.set(ticket, waveIndex); + } +} + +for (const [ticket, dependencies] of dependencyGraph.entries()) { + const ticketWave = waveByTicket.get(ticket); + if (ticketWave === undefined) { + continue; + } + for (const dep of dependencies) { + const dependencyWave = waveByTicket.get(dep); + if (dependencyWave === undefined) { + continue; + } + if (dependencyWave === ticketWave) { + errors.push( + `Parallel wave conflict: ${ticket} and dependency ${dep} are both in wave ${ticketWave}`, + ); + } + } +} + +if (errors.length > 0) { + console.error("issues:validate failed"); + for (const error of errors) { + console.error(`- ${error}`); + } + process.exit(1); +} + +console.log( + `issues:validate passed (${TICKET_CODES.length} tickets, ${waveMatches.length} waves checked)`, +); diff --git a/package.json b/package.json index 30f4322..a8dc08a 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "affected:typecheck:local": "nx affected -t typecheck --base=origin/main --head=HEAD", "affected:test:local": "nx affected -t lint,format,typecheck,test --base=origin/main --head=HEAD", "affected:ci": "nx affected -t lint,format,typecheck,test,build --base=$NX_BASE --head=$NX_HEAD", + "issues:validate": "node issues/scripts/validate-ticket-set.mjs", "dev:registry:local": "pnpm -F @clawdentity/registry run dev:local" }, "devDependencies": { diff --git a/packages/sdk/src/config.test.ts b/packages/sdk/src/config.test.ts index 9633f6e..f1354b1 100644 --- a/packages/sdk/src/config.test.ts +++ b/packages/sdk/src/config.test.ts @@ -34,6 +34,31 @@ describe("config helpers", () => { ]); }); + it("parses APP_VERSION when provided", () => { + expect( + parseRegistryConfig({ + 
ENVIRONMENT: "development", + APP_VERSION: "sha-abcdef123456", + }), + ).toEqual({ + ENVIRONMENT: "development", + APP_VERSION: "sha-abcdef123456", + }); + }); + + it("throws AppError when APP_VERSION is empty", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + APP_VERSION: "", + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); + it("throws AppError on invalid registry config", () => { try { parseRegistryConfig({ ENVIRONMENT: "local" }); diff --git a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index 5249669..846489b 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -88,6 +88,7 @@ const registrySigningKeysEnvSchema = z export const registryConfigSchema = z.object({ ENVIRONMENT: environmentSchema, + APP_VERSION: z.string().min(1).optional(), BOOTSTRAP_SECRET: z.string().min(1).optional(), REGISTRY_SIGNING_KEY: z.string().min(1).optional(), REGISTRY_SIGNING_KEYS: registrySigningKeysEnvSchema.optional(), From 70faac825f77f7098af3f2a565f918d471950126 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 17:37:49 +0530 Subject: [PATCH 029/190] fix(ci): avoid pnpm cache before pnpm install --- .github/workflows/deploy-develop.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index f4bd6d8..4c41fd3 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -32,7 +32,6 @@ jobs: uses: actions/setup-node@v4 with: node-version: 22 - cache: pnpm - name: Install pnpm uses: pnpm/action-setup@v4 From 10e774a2d13064713d6c24e92876ed556644e869 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 17:44:38 +0530 Subject: [PATCH 030/190] docs: close implemented tickets even with external blockers --- AGENTS.md | 16 ++++++++++++++++ 1 file 
changed, 16 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index e1e26b4..0df9c20 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -18,6 +18,22 @@ - Ticket schema and quality rules are maintained in `issues/AGENTS.md`. - Any dependency/wave changes must update both affected `T*.md` files and `issues/EXECUTION_PLAN.md` in the same change. +## Ticket Lifecycle Workflow +- Operate in a self-serve loop for ticket delivery: pick an issue, execute, and keep GitHub status accurate without waiting for manual reminders. +- Standard sequence for every ticket: + - Select the target issue and confirm blockers from `issues/EXECUTION_PLAN.md` and `issues/T*.md`. + - Start from latest `develop`: `git checkout develop && git pull --ff-only`. + - Create a feature branch with `feature/` prefix scoped to the ticket. + - Implement the ticket with tests/docs updates required by the issue definition. + - Run required validations before pushing. + - Push branch and open a PR to `develop`. + - Update the issue with implementation summary, validation evidence, and PR link. + - Keep issue status aligned to reality: + - `OPEN` while implementation work is in progress. + - Close once implementation for the ticket is complete and evidence is posted, even if external operational follow-ups (for example missing CI secrets or environment access) remain. + - Track external blockers in a separate follow-up issue/comment and link it from the closed ticket. + - Inform the user after PR + issue update with links and any blockers needing action. + ## Documentation Sync - `README.md` must reflect current execution model and links to issue governance. - `PRD.md` must reflect current rollout order, deployment gating, and verification strategy. 
From c032221c1f3e5712e43eb4fc45b150b0cb90480b Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 18:47:34 +0530 Subject: [PATCH 031/190] feat(registry): add agent AIT reissue endpoint --- apps/registry/src/AGENTS.md | 13 + apps/registry/src/agent-registration.ts | 100 +++++++ apps/registry/src/agent-revocation.ts | 22 ++ apps/registry/src/server.test.ts | 336 ++++++++++++++++++++++++ apps/registry/src/server.ts | 199 ++++++++++++-- 5 files changed, 649 insertions(+), 21 deletions(-) diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index aa50829..ada2bc3 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -59,3 +59,16 @@ - Perform state changes in one DB transaction: - update `agents.status` to `revoked` and `agents.updated_at` to `nowIso()` - insert `revocations` row using the previous `current_jti` + +## POST /v1/agents/:id/reissue Contract +- Require PAT auth via `createApiKeyAuth`; only the caller-owned agent may be reissued. +- Reuse `parseAgentRevokePath` for ULID path validation and preserve environment-aware error exposure. +- Return `404 AGENT_NOT_FOUND` for unknown IDs or foreign-owned IDs (single not-found behavior to avoid ownership leaks). +- Reissue only active agents: + - if agent status is `revoked`, return `409 AGENT_REISSUE_INVALID_STATE` + - if owned active agent has no `current_jti`, return `409 AGENT_REISSUE_INVALID_STATE` +- Keep one active token invariant in one DB transaction: + - update `agents.current_jti`, `agents.expires_at`, `agents.updated_at` (and keep status `active`) + - insert revocation row for the previous `current_jti` +- Sign replacement AIT using existing registry signer/keyset flow and deterministic issuer mapping. +- Response shape is `{ agent, ait }`; `agent.currentJti` must match the returned AIT `jti`. 
diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index 5ea069d..9a9af8b 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -54,6 +54,23 @@ export type AgentRegistrationResult = { claims: AitClaims; }; +export type AgentReissueResult = { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: "active"; + expiresAt: string; + updatedAt: string; + }; + claims: AitClaims; +}; + function invalidRegistration(options: { environment: RegistryConfig["ENVIRONMENT"]; details?: { @@ -318,6 +335,89 @@ export function buildAgentRegistration(input: { }; } +function resolveReissueTtlDays(input: { + previousExpiresAt: string | null; + issuedAtMs: number; +}): number { + if (!input.previousExpiresAt) { + return DEFAULT_AGENT_TTL_DAYS; + } + + const previousExpiryMs = Date.parse(input.previousExpiresAt); + if ( + !Number.isFinite(previousExpiryMs) || + previousExpiryMs <= input.issuedAtMs + ) { + return DEFAULT_AGENT_TTL_DAYS; + } + + const remainingSeconds = Math.floor( + (previousExpiryMs - input.issuedAtMs) / 1000, + ); + const remainingDays = Math.ceil(remainingSeconds / DAY_IN_SECONDS); + return Math.min( + MAX_AGENT_TTL_DAYS, + Math.max(MIN_AGENT_TTL_DAYS, remainingDays), + ); +} + +export function buildAgentReissue(input: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string | null; + publicKey: string; + previousExpiresAt: string | null; + issuer: string; +}): AgentReissueResult { + const issuedAt = nowIso(); + const issuedAtMs = Date.parse(issuedAt); + const issuedAtSeconds = Math.floor(issuedAtMs / 1000); + const ttlDays = resolveReissueTtlDays({ + previousExpiresAt: input.previousExpiresAt, + issuedAtMs, + }); + const ttlSeconds = ttlDays * DAY_IN_SECONDS; + const currentJti = generateUlid(issuedAtMs + 1); + const expiresAt = 
addSeconds(issuedAt, ttlSeconds); + const framework = input.framework ?? DEFAULT_AGENT_FRAMEWORK; + + return { + agent: { + id: input.id, + did: input.did, + ownerDid: input.ownerDid, + name: input.name, + framework, + publicKey: input.publicKey, + currentJti, + ttlDays, + status: "active", + expiresAt, + updatedAt: issuedAt, + }, + claims: { + iss: input.issuer, + sub: input.did, + ownerDid: input.ownerDid, + name: input.name, + framework, + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: input.publicKey, + }, + }, + iat: issuedAtSeconds, + nbf: issuedAtSeconds, + exp: issuedAtSeconds + ttlSeconds, + jti: currentJti, + }, + }; +} + export { DEFAULT_AGENT_FRAMEWORK, DEFAULT_AGENT_TTL_DAYS, diff --git a/apps/registry/src/agent-revocation.ts b/apps/registry/src/agent-revocation.ts index 8295070..a0c04c3 100644 --- a/apps/registry/src/agent-revocation.ts +++ b/apps/registry/src/agent-revocation.ts @@ -81,3 +81,25 @@ export function invalidAgentRevokeStateError(options: { : undefined, }); } + +export function invalidAgentReissueStateError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + reason: string; + field?: "currentJti" | "status"; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_REISSUE_INVALID_STATE", + message: exposeDetails + ? "Agent cannot be reissued" + : "Request could not be processed", + status: 409, + expose: exposeDetails, + details: exposeDetails + ? { + fieldErrors: { [options.field ?? 
"currentJti"]: [options.reason] }, + formErrors: [], + } + : undefined, + }); +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index e02e189..4995ee3 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -68,19 +68,26 @@ type FakeAgentRow = { ownerId: string; name: string; framework: string | null; + publicKey?: string; status: "active" | "revoked"; expiresAt: string | null; currentJti?: string | null; + createdAt?: string; updatedAt?: string; }; type FakeAgentSelectRow = { id: string; did: string; + owner_id: string; name: string; + framework: string | null; + public_key: string; status: "active" | "revoked"; expires_at: string | null; current_jti: string | null; + created_at: string; + updated_at: string; }; function parseInsertColumns(query: string, tableName: string): string[] { @@ -188,9 +195,18 @@ function getAgentSelectColumnValue( if (column === "did") { return row.did; } + if (column === "owner_id") { + return row.owner_id; + } if (column === "name") { return row.name; } + if (column === "framework") { + return row.framework; + } + if (column === "public_key") { + return row.public_key; + } if (column === "status") { return row.status; } @@ -200,6 +216,12 @@ function getAgentSelectColumnValue( if (column === "current_jti") { return row.current_jti; } + if (column === "created_at") { + return row.created_at; + } + if (column === "updated_at") { + return row.updated_at; + } return undefined; } @@ -272,10 +294,16 @@ function resolveAgentSelectRows(options: { .map((row) => ({ id: row.id, did: row.did, + owner_id: row.ownerId, name: row.name, + framework: row.framework, + public_key: + row.publicKey ?? "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", status: row.status, expires_at: row.expiresAt, current_jti: row.currentJti ?? null, + created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", + updated_at: row.updatedAt ?? 
"2026-01-01T00:00:00.000Z", })); return filteredRows; @@ -448,6 +476,12 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { ) { row.currentJti = nextValues.current_jti; } + if ( + typeof nextValues.expires_at === "string" || + nextValues.expires_at === null + ) { + row.expiresAt = nextValues.expires_at; + } } agentUpdates.push({ @@ -1348,6 +1382,308 @@ describe("DELETE /v1/agents/:id", () => { }); }); +describe("POST /v1/agents/:id/reissue", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700300000000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 404 when agent does not exist", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentUpdates, revocationInserts } = createFakeDb([ + authRow, + ]); + const agentId = generateUlid(1700300000100); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 404 when agent is owned by another human", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700300000200); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", 
+ framework: "openclaw", + status: "active", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700300000201), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 409 when agent is revoked", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000300); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "revoked-agent", + framework: "openclaw", + status: "revoked", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700300000301), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + status: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 409 when owned agent has missing current_jti", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = 
generateUlid(1700300000400); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: null, + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + currentJti: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("reissues owned agent, revokes old jti, and returns verifiable AIT", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000500); + const previousJti = generateUlid(1700300000501); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + publicKey: encodeBase64url(agentKeypair.publicKey), + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: previousJti, + }, + ], + ); + const appInstance = createRegistryApp(); + + const res = await appInstance.request( + 
`/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + status: string; + expiresAt: string; + updatedAt: string; + }; + ait: string; + }; + expect(body.agent.id).toBe(agentId); + expect(body.agent.did).toBe(makeAgentDid(agentId)); + expect(body.agent.ownerDid).toBe(authRow.humanDid); + expect(body.agent.framework).toBe("openclaw"); + expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(body.agent.currentJti).not.toBe(previousJti); + expect(body.agent.status).toBe("active"); + expect(body.ait).toEqual(expect.any(String)); + + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status: "active", + current_jti: body.agent.currentJti, + expires_at: body.agent.expiresAt, + updated_at: body.agent.updatedAt, + }); + + expect(revocationInserts).toHaveLength(1); + expect(revocationInserts[0]).toMatchObject({ + agent_id: agentId, + jti: previousJti, + reason: "reissued", + revoked_at: expect.any(String), + }); + + const keysRes = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + const keysBody = (await keysRes.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyAIT({ + token: body.ait, + expectedIssuer: "https://dev.api.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === 
"active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + expect(claims.sub).toBe(body.agent.did); + expect(claims.ownerDid).toBe(body.agent.ownerDid); + expect(claims.name).toBe(body.agent.name); + expect(claims.framework).toBe(body.agent.framework); + expect(claims.cnf.jwk.x).toBe(body.agent.publicKey); + expect(claims.jti).toBe(body.agent.currentJti); + expect(claims.jti).not.toBe(previousJti); + }); +}); + describe("POST /v1/agents", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 80c8ea3..db5e3b5 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -16,10 +16,12 @@ import { Hono } from "hono"; import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; import { buildAgentRegistration, + buildAgentReissue, resolveRegistryIssuer, } from "./agent-registration.js"; import { agentNotFoundError, + invalidAgentReissueStateError, invalidAgentRevokeStateError, parseAgentRevokePath, } from "./agent-revocation.js"; @@ -42,6 +44,59 @@ const logger = createLogger({ service: "registry" }); const REGISTRY_KEY_CACHE_CONTROL = "public, max-age=300, s-maxage=300, stale-while-revalidate=60"; +type OwnedAgent = { + id: string; + did: string; + name: string; + framework: string | null; + public_key: string; + status: "active" | "revoked"; + expires_at: string | null; + current_jti: string | null; +}; + +async function findOwnedAgent(input: { + db: ReturnType; + ownerId: string; + agentId: string; +}): Promise { + const rows = await input.db + .select({ + id: agents.id, + did: agents.did, + name: agents.name, + framework: agents.framework, + public_key: agents.public_key, + status: agents.status, + expires_at: agents.expires_at, + current_jti: agents.current_jti, + }) + .from(agents) + .where( + and(eq(agents.owner_id, input.ownerId), 
eq(agents.id, input.agentId)), + ) + .limit(1); + + return rows[0]; +} + +function requireCurrentJti(input: { + currentJti: string | null; + onInvalid: (reason: string) => AppError; +}): string { + if (typeof input.currentJti !== "string" || input.currentJti.length === 0) { + throw input.onInvalid("agent.current_jti is required"); + } + + return input.currentJti; +} + +function isUnsupportedLocalTransactionError(error: unknown): boolean { + return ( + error instanceof Error && error.message.includes("Failed query: begin") + ); +} + function createRegistryApp() { let cachedConfig: RegistryConfig | undefined; @@ -196,16 +251,11 @@ function createRegistryApp() { const human = c.get("human"); const db = createDb(c.env.DB); - const matches = await db - .select({ - id: agents.id, - status: agents.status, - current_jti: agents.current_jti, - }) - .from(agents) - .where(and(eq(agents.owner_id, human.id), eq(agents.id, agentId))) - .limit(1); - const existingAgent = matches[0]; + const existingAgent = await findOwnedAgent({ + db, + ownerId: human.id, + agentId, + }); if (!existingAgent) { throw agentNotFoundError(); @@ -215,17 +265,18 @@ function createRegistryApp() { return c.body(null, 204); } - const currentJti = existingAgent.current_jti; - if (typeof currentJti !== "string" || currentJti.length === 0) { - throw invalidAgentRevokeStateError({ - environment: config.ENVIRONMENT, - reason: "agent.current_jti is required for revocation", - }); - } + const currentJti = requireCurrentJti({ + currentJti: existingAgent.current_jti, + onInvalid: (reason) => + invalidAgentRevokeStateError({ + environment: config.ENVIRONMENT, + reason: `${reason} for revocation`, + }), + }); const revokedAt = nowIso(); - await db.transaction(async (tx) => { - await tx + const applyRevokeMutation = async (executor: typeof db): Promise => { + await executor .update(agents) .set({ status: "revoked", @@ -233,7 +284,7 @@ function createRegistryApp() { }) .where(eq(agents.id, existingAgent.id)); - await 
tx + await executor .insert(revocations) .values({ id: generateUlid(Date.now()), @@ -245,11 +296,117 @@ function createRegistryApp() { .onConflictDoNothing({ target: revocations.jti, }); - }); + }; + + try { + await db.transaction(async (tx) => { + await applyRevokeMutation(tx as unknown as typeof db); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRevokeMutation(db); + } return c.body(null, 204); }); + app.post("/v1/agents/:id/reissue", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const agentId = parseAgentRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const existingAgent = await findOwnedAgent({ + db, + ownerId: human.id, + agentId, + }); + + if (!existingAgent) { + throw agentNotFoundError(); + } + + if (existingAgent.status === "revoked") { + throw invalidAgentReissueStateError({ + environment: config.ENVIRONMENT, + field: "status", + reason: "revoked agents cannot be reissued", + }); + } + + const currentJti = requireCurrentJti({ + currentJti: existingAgent.current_jti, + onInvalid: (reason) => + invalidAgentReissueStateError({ + environment: config.ENVIRONMENT, + reason: `${reason} for reissue`, + }), + }); + + const reissue = buildAgentReissue({ + id: existingAgent.id, + did: existingAgent.did, + ownerDid: human.did, + name: existingAgent.name, + framework: existingAgent.framework, + publicKey: existingAgent.public_key, + previousExpiresAt: existingAgent.expires_at, + issuer: resolveRegistryIssuer(config.ENVIRONMENT), + }); + const signer = await resolveRegistrySigner(config); + const ait = await signAIT({ + claims: reissue.claims, + signerKid: signer.signerKid, + signerKeypair: signer.signerKeypair, + }); + + const revokedAt = nowIso(); + const applyReissueMutation = async (executor: typeof db): Promise => { + await executor + .update(agents) + .set({ + status: 
"active", + current_jti: reissue.agent.currentJti, + expires_at: reissue.agent.expiresAt, + updated_at: reissue.agent.updatedAt, + }) + .where(eq(agents.id, existingAgent.id)); + + await executor + .insert(revocations) + .values({ + id: generateUlid(Date.now()), + jti: currentJti, + agent_id: existingAgent.id, + reason: "reissued", + revoked_at: revokedAt, + }) + .onConflictDoNothing({ + target: revocations.jti, + }); + }; + + try { + await db.transaction(async (tx) => { + await applyReissueMutation(tx as unknown as typeof db); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyReissueMutation(db); + } + + return c.json({ agent: reissue.agent, ait }); + }); + return app; } From 16e10605661fb56f240745d9d9482bd7d6bab0ba Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 19:01:39 +0530 Subject: [PATCH 032/190] fix(registry): harden reissue race and preserve expiry cap --- apps/registry/src/AGENTS.md | 5 + apps/registry/src/agent-registration.ts | 55 ++-- apps/registry/src/server.test.ts | 322 +++++++++++++++++++++--- apps/registry/src/server.ts | 43 +++- 4 files changed, 367 insertions(+), 58 deletions(-) diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index ada2bc3..fb2bae1 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -68,7 +68,12 @@ - if agent status is `revoked`, return `409 AGENT_REISSUE_INVALID_STATE` - if owned active agent has no `current_jti`, return `409 AGENT_REISSUE_INVALID_STATE` - Keep one active token invariant in one DB transaction: + - update must be optimistic and state-guarded (`id` + expected `status=active` + expected previous `current_jti`) so concurrent revoke/reissue cannot mint multiple valid AITs + - fail with `409 AGENT_REISSUE_INVALID_STATE` when the guarded update matches zero rows (state changed concurrently) - update `agents.current_jti`, `agents.expires_at`, `agents.updated_at` (and keep status `active`) - 
insert revocation row for the previous `current_jti` +- Reissue rotates token identity, not privileges: + - keep replacement AIT `exp` capped to prior `agents.expires_at` when that expiry is still in the future + - do not round near-expiry windows up to full days during rotation - Sign replacement AIT using existing registry signer/keyset flow and deterministic issuer mapping. - Response shape is `{ agent, ait }`; `agent.currentJti` must match the returned AIT `jti`. diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index 9a9af8b..b0ba517 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -335,12 +335,26 @@ export function buildAgentRegistration(input: { }; } -function resolveReissueTtlDays(input: { +function resolveReissueExpiry(input: { previousExpiresAt: string | null; + issuedAt: string; issuedAtMs: number; -}): number { + issuedAtSeconds: number; +}): { + expiresAt: string; + exp: number; + ttlDays: number; +} { + const defaultTtlSeconds = DEFAULT_AGENT_TTL_DAYS * DAY_IN_SECONDS; + const defaultExp = input.issuedAtSeconds + defaultTtlSeconds; + const defaultExpiry = addSeconds(input.issuedAt, defaultTtlSeconds); + if (!input.previousExpiresAt) { - return DEFAULT_AGENT_TTL_DAYS; + return { + expiresAt: defaultExpiry, + exp: defaultExp, + ttlDays: DEFAULT_AGENT_TTL_DAYS, + }; } const previousExpiryMs = Date.parse(input.previousExpiresAt); @@ -348,17 +362,28 @@ function resolveReissueTtlDays(input: { !Number.isFinite(previousExpiryMs) || previousExpiryMs <= input.issuedAtMs ) { - return DEFAULT_AGENT_TTL_DAYS; + return { + expiresAt: defaultExpiry, + exp: defaultExp, + ttlDays: DEFAULT_AGENT_TTL_DAYS, + }; } - const remainingSeconds = Math.floor( - (previousExpiryMs - input.issuedAtMs) / 1000, + const previousExpirySeconds = Math.floor(previousExpiryMs / 1000); + const remainingSeconds = Math.max( + 1, + previousExpirySeconds - input.issuedAtSeconds, ); - const 
remainingDays = Math.ceil(remainingSeconds / DAY_IN_SECONDS); - return Math.min( + const ttlDays = Math.min( MAX_AGENT_TTL_DAYS, - Math.max(MIN_AGENT_TTL_DAYS, remainingDays), + Math.max(MIN_AGENT_TTL_DAYS, Math.ceil(remainingSeconds / DAY_IN_SECONDS)), ); + + return { + expiresAt: new Date(previousExpiryMs).toISOString(), + exp: previousExpirySeconds, + ttlDays, + }; } export function buildAgentReissue(input: { @@ -374,13 +399,13 @@ export function buildAgentReissue(input: { const issuedAt = nowIso(); const issuedAtMs = Date.parse(issuedAt); const issuedAtSeconds = Math.floor(issuedAtMs / 1000); - const ttlDays = resolveReissueTtlDays({ + const expiry = resolveReissueExpiry({ previousExpiresAt: input.previousExpiresAt, + issuedAt, issuedAtMs, + issuedAtSeconds, }); - const ttlSeconds = ttlDays * DAY_IN_SECONDS; const currentJti = generateUlid(issuedAtMs + 1); - const expiresAt = addSeconds(issuedAt, ttlSeconds); const framework = input.framework ?? DEFAULT_AGENT_FRAMEWORK; return { @@ -392,9 +417,9 @@ export function buildAgentReissue(input: { framework, publicKey: input.publicKey, currentJti, - ttlDays, + ttlDays: expiry.ttlDays, status: "active", - expiresAt, + expiresAt: expiry.expiresAt, updatedAt: issuedAt, }, claims: { @@ -412,7 +437,7 @@ export function buildAgentReissue(input: { }, iat: issuedAtSeconds, nbf: issuedAtSeconds, - exp: issuedAtSeconds + ttlSeconds, + exp: expiry.exp, jti: currentJti, }, }; diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 4995ee3..4d2c111 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -90,6 +90,10 @@ type FakeAgentSelectRow = { updated_at: string; }; +type FakeDbOptions = { + beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; +}; + function parseInsertColumns(query: string, tableName: string): string[] { const match = query.match( new RegExp(`insert\\s+into\\s+"?${tableName}"?\\s*\\(([^)]+)\\)`, "i"), @@ -152,6 +156,32 @@ function 
hasFilter( return quotedPattern.test(whereClause) || barePattern.test(whereClause); } +function parseWhereEqualityParams(options: { + whereClause: string; + params: unknown[]; +}): { values: Record; consumedParams: number } { + const values: Record = {}; + const pattern = /"?([a-zA-Z0-9_]+)"?\s*=\s*\?/g; + let parameterIndex = 0; + + let match = pattern.exec(options.whereClause); + while (match !== null) { + const column = match[1]?.toLowerCase(); + if (!column) { + match = pattern.exec(options.whereClause); + continue; + } + + const entries = values[column] ?? []; + entries.push(options.params[parameterIndex]); + values[column] = entries; + parameterIndex += 1; + match = pattern.exec(options.whereClause); + } + + return { values, consumedParams: parameterIndex }; +} + function parseSelectedColumns(query: string): string[] { const normalized = query.toLowerCase(); const selectIndex = normalized.indexOf("select "); @@ -231,48 +261,41 @@ function resolveAgentSelectRows(options: { agentRows: FakeAgentRow[]; }): FakeAgentSelectRow[] { const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); const hasOwnerFilter = hasFilter(whereClause, "owner_id"); const hasStatusFilter = hasFilter(whereClause, "status"); const hasFrameworkFilter = hasFilter(whereClause, "framework"); const hasIdFilter = hasFilter(whereClause, "id"); + const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); const hasCursorFilter = hasFilter(whereClause, "id", "<"); const hasLimitClause = options.query.toLowerCase().includes(" limit "); - let parameterIndex = 0; - const ownerId = hasOwnerFilter - ? String(options.params[parameterIndex] ?? "") - : undefined; - if (hasOwnerFilter) { - parameterIndex += 1; - } - - const statusFilter = hasStatusFilter - ? String(options.params[parameterIndex] ?? 
"") - : undefined; - if (hasStatusFilter) { - parameterIndex += 1; - } - - const frameworkFilter = hasFrameworkFilter - ? String(options.params[parameterIndex] ?? "") - : undefined; - if (hasFrameworkFilter) { - parameterIndex += 1; - } - - const idFilter = hasIdFilter - ? String(options.params[parameterIndex] ?? "") + const ownerId = + hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id?.[0]) + : undefined; + const statusFilter = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status?.[0]) + : undefined; + const frameworkFilter = + hasFrameworkFilter && + typeof equalityParams.values.framework?.[0] === "string" + ? String(equalityParams.values.framework?.[0]) + : undefined; + const idFilter = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id?.[0]) + : undefined; + const currentJtiFilter = hasCurrentJtiFilter + ? (equalityParams.values.current_jti?.[0] as string | null | undefined) : undefined; - if (hasIdFilter) { - parameterIndex += 1; - } - const cursorFilter = hasCursorFilter - ? String(options.params[parameterIndex] ?? "") + ? String(options.params[equalityParams.consumedParams] ?? "") : undefined; - if (hasCursorFilter) { - parameterIndex += 1; - } const maybeLimit = hasLimitClause ? Number(options.params[options.params.length - 1]) @@ -288,6 +311,11 @@ function resolveAgentSelectRows(options: { frameworkFilter ? row.framework === frameworkFilter : true, ) .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => + currentJtiFilter !== undefined + ? (row.currentJti ?? null) === currentJtiFilter + : true, + ) .filter((row) => (cursorFilter ? 
row.id < cursorFilter : true)) .sort((left, right) => right.id.localeCompare(left.id)) .slice(0, limit) @@ -309,11 +337,16 @@ function resolveAgentSelectRows(options: { return filteredRows; } -function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { +function createFakeDb( + rows: FakeD1Row[], + agentRows: FakeAgentRow[] = [], + options: FakeDbOptions = {}, +) { const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; const agentInserts: FakeAgentInsertRow[] = []; const agentUpdates: FakeAgentUpdateRow[] = []; const revocationInserts: FakeRevocationInsertRow[] = []; + let beforeFirstAgentUpdateApplied = false; const database: D1Database = { prepare(query: string) { @@ -408,6 +441,8 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { return []; }, async run() { + let changes = 0; + if ( normalizedQuery.includes('update "api_keys"') || normalizedQuery.includes("update api_keys") @@ -416,6 +451,7 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { lastUsedAt: String(params[0] ?? ""), apiKeyId: String(params[1] ?? 
""), }); + changes = 1; } if ( normalizedQuery.includes('insert into "agents"') || @@ -430,11 +466,20 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { {}, ); agentInserts.push(row); + changes = 1; } if ( normalizedQuery.includes('update "agents"') || normalizedQuery.includes("update agents") ) { + if ( + !beforeFirstAgentUpdateApplied && + options.beforeFirstAgentUpdate + ) { + options.beforeFirstAgentUpdate(agentRows); + beforeFirstAgentUpdateApplied = true; + } + const setColumns = parseUpdateSetColumns(query, "agents"); const nextValues = setColumns.reduce>( (acc, column, index) => { @@ -445,13 +490,28 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { ); const whereClause = extractWhereClause(query); const whereParams = params.slice(setColumns.length); - let whereIndex = 0; - const ownerFilter = hasFilter(whereClause, "owner_id") - ? String(whereParams[whereIndex++] ?? "") - : undefined; - const idFilter = hasFilter(whereClause, "id") - ? String(whereParams[whereIndex++] ?? "") - : undefined; + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: whereParams, + }); + const ownerFilter = + typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id?.[0]) + : undefined; + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id?.[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? 
String(equalityParams.values.status?.[0]) + : undefined; + const currentJtiFilter = equalityParams.values.current_jti?.[0] as + | string + | null + | undefined; + + let matchedRows = 0; for (const row of agentRows) { if (ownerFilter && row.ownerId !== ownerFilter) { @@ -460,6 +520,20 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { if (idFilter && row.id !== idFilter) { continue; } + if ( + statusFilter && + row.status !== (statusFilter as "active" | "revoked") + ) { + continue; + } + if ( + currentJtiFilter !== undefined && + (row.currentJti ?? null) !== currentJtiFilter + ) { + continue; + } + + matchedRows += 1; if ( nextValues.status === "active" || @@ -488,7 +562,11 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { ...nextValues, owner_id: ownerFilter, id: idFilter, + status_where: statusFilter, + current_jti_where: currentJtiFilter, + matched_rows: matchedRows, }); + changes = matchedRows; } if ( normalizedQuery.includes('insert into "revocations"') || @@ -503,8 +581,9 @@ function createFakeDb(rows: FakeD1Row[], agentRows: FakeAgentRow[] = []) { {}, ); revocationInserts.push(row); + changes = 1; } - return { success: true } as D1Result; + return { success: true, meta: { changes } } as D1Result; }, } as D1PreparedStatement; }, @@ -1627,6 +1706,9 @@ describe("POST /v1/agents/:id/reissue", () => { expect(agentUpdates[0]).toMatchObject({ id: agentId, status: "active", + status_where: "active", + current_jti_where: previousJti, + matched_rows: 1, current_jti: body.agent.currentJti, expires_at: body.agent.expiresAt, updated_at: body.agent.updatedAt, @@ -1682,6 +1764,164 @@ describe("POST /v1/agents/:id/reissue", () => { expect(claims.jti).toBe(body.agent.currentJti); expect(claims.jti).not.toBe(previousJti); }); + + it("returns 409 when guarded reissue update matches zero rows", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000550); + const 
previousJti = generateUlid(1700300000551); + const racedJti = generateUlid(1700300000552); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + publicKey: encodeBase64url(agentKeypair.publicKey), + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: previousJti, + }, + ], + { + beforeFirstAgentUpdate: (rows) => { + if (rows[0]) { + rows[0].currentJti = racedJti; + } + }, + }, + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + currentJti: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status_where: "active", + current_jti_where: previousJti, + matched_rows: 0, + }); + expect(revocationInserts).toHaveLength(0); + }); + + it("does not extend expiry when reissuing a near-expiry token", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000560); + const previousJti = generateUlid(1700300000561); + const signer = await generateEd25519Keypair(); + const agentKeypair = 
await generateEd25519Keypair(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const previousExpiresAt = new Date( + Date.now() + 5 * 60 * 1000, + ).toISOString(); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + publicKey: encodeBase64url(agentKeypair.publicKey), + status: "active", + expiresAt: previousExpiresAt, + currentJti: previousJti, + }, + ], + ); + + const appInstance = createRegistryApp(); + const res = await appInstance.request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + agent: { + expiresAt: string; + }; + ait: string; + }; + expect(Date.parse(body.agent.expiresAt)).toBeLessThanOrEqual( + Date.parse(previousExpiresAt), + ); + + const claims = await verifyAIT({ + token: body.ait, + expectedIssuer: "https://dev.api.clawdentity.com", + registryKeys: [ + { + kid: "reg-key-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + }, + }, + ], + }); + expect(claims.exp).toBeLessThanOrEqual( + Math.floor(Date.parse(previousExpiresAt) / 1000), + ); + expect(claims.exp).toBe( + Math.floor(Date.parse(body.agent.expiresAt) / 1000), + ); + }); }); describe("POST /v1/agents", () => { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index db5e3b5..8b311f2 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -97,6 +97,30 @@ function isUnsupportedLocalTransactionError(error: unknown): boolean { ); } +function getMutationRowCount(result: unknown): number | 
undefined { + if (!result || typeof result !== "object") { + return undefined; + } + + const directChanges = (result as { changes?: unknown }).changes; + if (typeof directChanges === "number") { + return directChanges; + } + + const rowsAffected = (result as { rowsAffected?: unknown }).rowsAffected; + if (typeof rowsAffected === "number") { + return rowsAffected; + } + + const metaChanges = (result as { meta?: { changes?: unknown } }).meta + ?.changes; + if (typeof metaChanges === "number") { + return metaChanges; + } + + return undefined; +} + function createRegistryApp() { let cachedConfig: RegistryConfig | undefined; @@ -368,7 +392,7 @@ function createRegistryApp() { const revokedAt = nowIso(); const applyReissueMutation = async (executor: typeof db): Promise => { - await executor + const updateResult = await executor .update(agents) .set({ status: "active", @@ -376,7 +400,22 @@ function createRegistryApp() { expires_at: reissue.agent.expiresAt, updated_at: reissue.agent.updatedAt, }) - .where(eq(agents.id, existingAgent.id)); + .where( + and( + eq(agents.id, existingAgent.id), + eq(agents.status, "active"), + eq(agents.current_jti, currentJti), + ), + ); + + const updatedRows = getMutationRowCount(updateResult); + if (updatedRows === 0) { + throw invalidAgentReissueStateError({ + environment: config.ENVIRONMENT, + field: "currentJti", + reason: "agent state changed during reissue; retry request", + }); + } await executor .insert(revocations) From feb1497d0a345e70ed521a596b592b56b201644e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 20:32:52 +0530 Subject: [PATCH 033/190] feat(registry): add signed CRL snapshot endpoint --- apps/registry/src/AGENTS.md | 9 + apps/registry/src/server.test.ts | 321 +++++++++++++++++++++++++++++++ apps/registry/src/server.ts | 142 +++++++++++++- 3 files changed, 470 insertions(+), 2 deletions(-) diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index fb2bae1..3093283 100644 --- 
a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -16,6 +16,15 @@ - Return `keys[]` entries with `kid`, `alg`, `crv`, `x`, and `status` so SDK/offline verifiers can consume directly. - Keep cache headers explicit and short-lived (`max-age=300` + `stale-while-revalidate`) to balance key rotation with client efficiency. +## CRL Snapshot Contract +- `GET /v1/crl` is a public endpoint and must remain unauthenticated so SDK/proxy clients can refresh revocation state without PAT bootstrap dependencies. +- Success response shape must remain `{ crl: }` where `crl` is an EdDSA-signed token with `typ=CRL`. +- Build CRL claims from the full `revocations` table (MVP full snapshot), joining each row to `agents.did` for `revocations[].agentDid`. +- Keep CRL cache headers explicit and short-lived (`max-age=300` + `stale-while-revalidate`) for predictable revocation propagation. +- Ensure CRL JWT `exp` exceeds the full cache serve window (`max-age + stale-while-revalidate`) with a small safety buffer so strict verifiers never see cache-valid but token-expired snapshots. +- If no revocations exist yet, return `404 CRL_NOT_FOUND` instead of emitting an unsigned or schema-invalid empty snapshot. +- Route tests should verify the returned CRL token using SDK `verifyCRL` and the published active keys from `/.well-known/claw-keys.json`. + ## Validation - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. 
diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 4d2c111..4738169 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -10,6 +10,7 @@ import { REQUEST_ID_HEADER, signAIT, verifyAIT, + verifyCRL, } from "@clawdentity/sdk"; import { describe, expect, it } from "vitest"; import { DEFAULT_AGENT_LIST_LIMIT } from "./agent-list.js"; @@ -62,6 +63,13 @@ type FakeD1Row = { type FakeAgentInsertRow = Record; type FakeAgentUpdateRow = Record; type FakeRevocationInsertRow = Record; +type FakeRevocationRow = { + id: string; + jti: string; + agentId: string; + reason: string | null; + revokedAt: string; +}; type FakeAgentRow = { id: string; did: string; @@ -92,6 +100,16 @@ type FakeAgentSelectRow = { type FakeDbOptions = { beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; + revocationRows?: FakeRevocationRow[]; +}; + +type FakeCrlSelectRow = { + id: string; + jti: string; + reason: string | null; + revoked_at: string; + agent_did: string; + did: string; }; function parseInsertColumns(query: string, tableName: string): string[] { @@ -337,6 +355,66 @@ function resolveAgentSelectRows(options: { return filteredRows; } +function getCrlSelectColumnValue( + row: FakeCrlSelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "jti") { + return row.jti; + } + if (column === "reason") { + return row.reason; + } + if (column === "revoked_at") { + return row.revoked_at; + } + if (column === "revokedat") { + return row.revoked_at; + } + if (column === "agent_did") { + return row.agent_did; + } + if (column === "agentdid" || column === "did") { + return row.did; + } + return undefined; +} + +function resolveCrlSelectRows(options: { + agentRows: FakeAgentRow[]; + revocationRows: FakeRevocationRow[]; +}): FakeCrlSelectRow[] { + return options.revocationRows + .map((row) => { + const agent = options.agentRows.find( + (agentRow) => agentRow.id === row.agentId, + ); 
+ if (!agent) { + return null; + } + + return { + id: row.id, + jti: row.jti, + reason: row.reason, + revoked_at: row.revokedAt, + agent_did: agent.did, + did: agent.did, + }; + }) + .filter((row): row is FakeCrlSelectRow => row !== null) + .sort((left, right) => { + const timestampCompare = right.revoked_at.localeCompare(left.revoked_at); + if (timestampCompare !== 0) { + return timestampCompare; + } + return right.id.localeCompare(left.id); + }); +} + function createFakeDb( rows: FakeD1Row[], agentRows: FakeAgentRow[] = [], @@ -346,6 +424,7 @@ function createFakeDb( const agentInserts: FakeAgentInsertRow[] = []; const agentUpdates: FakeAgentUpdateRow[] = []; const revocationInserts: FakeRevocationInsertRow[] = []; + const revocationRows = [...(options.revocationRows ?? [])]; let beforeFirstAgentUpdateApplied = false; const database: D1Database = { @@ -397,6 +476,18 @@ function createFakeDb( }), }; } + if ( + (normalizedQuery.includes('from "revocations"') || + normalizedQuery.includes("from revocations")) && + normalizedQuery.includes("select") + ) { + return { + results: resolveCrlSelectRows({ + agentRows, + revocationRows, + }), + }; + } return { results: [] }; }, async raw() { @@ -438,6 +529,21 @@ function createFakeDb( ), ); } + if ( + normalizedQuery.includes('from "revocations"') || + normalizedQuery.includes("from revocations") + ) { + const resultRows = resolveCrlSelectRows({ + agentRows, + revocationRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getCrlSelectColumnValue(row, column), + ), + ); + } return []; }, async run() { @@ -581,6 +687,20 @@ function createFakeDb( {}, ); revocationInserts.push(row); + if ( + typeof row.id === "string" && + typeof row.jti === "string" && + typeof row.agent_id === "string" && + typeof row.revoked_at === "string" + ) { + revocationRows.push({ + id: row.id, + jti: row.jti, + agentId: row.agent_id, + reason: typeof row.reason === 
"string" ? row.reason : null, + revokedAt: row.revoked_at, + }); + } changes = 1; } return { success: true, meta: { changes } } as D1Result; @@ -822,6 +942,207 @@ describe("GET /.well-known/claw-keys.json", () => { }); }); +describe("GET /v1/crl", () => { + it("returns signed CRL snapshot with cache headers", async () => { + const signer = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const agentIdOne = generateUlid(1700400000000); + const agentIdTwo = generateUlid(1700400000100); + const revocationJtiOne = generateUlid(1700400000200); + const revocationJtiTwo = generateUlid(1700400000300); + const { database } = createFakeDb( + [], + [ + { + id: agentIdOne, + did: makeAgentDid(agentIdOne), + ownerId: "human-1", + name: "revoked-one", + framework: "openclaw", + status: "revoked", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: agentIdTwo, + did: makeAgentDid(agentIdTwo), + ownerId: "human-2", + name: "revoked-two", + framework: "langchain", + status: "revoked", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + { + revocationRows: [ + { + id: generateUlid(1700400000400), + jti: revocationJtiOne, + agentId: agentIdOne, + reason: null, + revokedAt: "2026-02-11T10:00:00.000Z", + }, + { + id: generateUlid(1700400000500), + jti: revocationJtiTwo, + agentId: agentIdTwo, + reason: "manual revoke", + revokedAt: "2026-02-11T11:00:00.000Z", + }, + ], + }, + ); + + const response = await appInstance.request( + "/v1/crl", + {}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(response.status).toBe(200); + expect(response.headers.get("Cache-Control")).toBe( + "public, max-age=300, s-maxage=300, stale-while-revalidate=60", + ); + const body = (await 
response.json()) as { crl: string }; + expect(body.crl).toEqual(expect.any(String)); + + const keysResponse = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyCRL({ + token: body.crl, + expectedIssuer: "https://dev.api.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(claims.revocations).toHaveLength(2); + expect(claims.revocations).toEqual( + expect.arrayContaining([ + { + jti: revocationJtiOne, + agentDid: makeAgentDid(agentIdOne), + revokedAt: Math.floor(Date.parse("2026-02-11T10:00:00.000Z") / 1000), + }, + { + jti: revocationJtiTwo, + agentDid: makeAgentDid(agentIdTwo), + reason: "manual revoke", + revokedAt: Math.floor(Date.parse("2026-02-11T11:00:00.000Z") / 1000), + }, + ]), + ); + expect(claims.exp).toBeGreaterThan(claims.iat); + expect(claims.exp - claims.iat).toBe(390); + }); + + it("returns 404 when no revocations are available", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/crl", + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(404); + const body = (await response.json()) as { + error: { + code: string; + message: string; + }; + }; + expect(body.error.code).toBe("CRL_NOT_FOUND"); + expect(body.error.message).toBe("CRL snapshot is not available"); + }); + + it("returns 500 when CRL signing configuration is missing", async () => { + const agentId = generateUlid(1700400000600); + const { database } = createFakeDb( + [], + 
[ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "revoked-agent", + framework: "openclaw", + status: "revoked", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + { + revocationRows: [ + { + id: generateUlid(1700400000700), + jti: generateUlid(1700400000800), + agentId, + reason: null, + revokedAt: "2026-02-11T12:00:00.000Z", + }, + ], + }, + ); + + const response = await createRegistryApp().request( + "/v1/crl", + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(500); + const body = (await response.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + REGISTRY_SIGNING_KEYS: expect.any(Array), + }); + }); +}); + describe("GET /v1/me", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 8b311f2..7035a40 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -10,6 +10,7 @@ import { type RegistryConfig, shouldExposeVerboseErrors, signAIT, + signCRL, } from "@clawdentity/sdk"; import { and, desc, eq, lt } from "drizzle-orm"; import { Hono } from "hono"; @@ -41,8 +42,15 @@ type Bindings = { REGISTRY_SIGNING_KEYS?: string; }; const logger = createLogger({ service: "registry" }); -const REGISTRY_KEY_CACHE_CONTROL = - "public, max-age=300, s-maxage=300, stale-while-revalidate=60"; +const REGISTRY_CACHE_MAX_AGE_SECONDS = 300; +const REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS = 60; +const REGISTRY_KEY_CACHE_CONTROL = `public, max-age=${REGISTRY_CACHE_MAX_AGE_SECONDS}, s-maxage=${REGISTRY_CACHE_MAX_AGE_SECONDS}, stale-while-revalidate=${REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS}`; +const 
REGISTRY_CRL_CACHE_CONTROL = `public, max-age=${REGISTRY_CACHE_MAX_AGE_SECONDS}, s-maxage=${REGISTRY_CACHE_MAX_AGE_SECONDS}, stale-while-revalidate=${REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS}`; +const CRL_EXPIRY_SAFETY_BUFFER_SECONDS = 30; +const CRL_TTL_SECONDS = + REGISTRY_CACHE_MAX_AGE_SECONDS + + REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS + + CRL_EXPIRY_SAFETY_BUFFER_SECONDS; type OwnedAgent = { id: string; @@ -55,6 +63,92 @@ type OwnedAgent = { current_jti: string | null; }; +type CrlSnapshotRow = { + id: string; + jti: string; + reason: string | null; + revoked_at: string; + agent_did: string; +}; + +function crlBuildError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + message: string; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "CRL_BUILD_FAILED", + message: exposeDetails + ? options.message + : "CRL snapshot could not be generated", + status: 500, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +function parseRevokedAtSeconds(options: { + environment: RegistryConfig["ENVIRONMENT"]; + revocationId: string; + revokedAtIso: string; +}): number { + const epochMillis = Date.parse(options.revokedAtIso); + if (!Number.isFinite(epochMillis)) { + throw crlBuildError({ + environment: options.environment, + message: "CRL revocation timestamp is invalid", + details: { + fieldErrors: { + revokedAt: [ + `revocation ${options.revocationId} has invalid revoked_at timestamp`, + ], + }, + formErrors: [], + }, + }); + } + + return Math.floor(epochMillis / 1000); +} + +function buildCrlClaims(input: { + rows: CrlSnapshotRow[]; + environment: RegistryConfig["ENVIRONMENT"]; + issuer: string; + nowSeconds: number; +}) { + return { + iss: input.issuer, + jti: generateUlid(Date.now()), + iat: input.nowSeconds, + exp: input.nowSeconds + CRL_TTL_SECONDS, + revocations: input.rows.map((row) => { + const base = { + jti: row.jti, + agentDid: row.agent_did, + revokedAt: parseRevokedAtSeconds({ + environment: input.environment, + revocationId: row.id, + revokedAtIso: row.revoked_at, + }), + }; + + if (typeof row.reason === "string" && row.reason.length > 0) { + return { + ...base, + reason: row.reason, + }; + } + + return base; + }), + }; +} + async function findOwnedAgent(input: { db: ReturnType; ownerId: string; @@ -164,6 +258,50 @@ function createRegistryApp() { ); }); + app.get("/v1/crl", async (c) => { + const config = getConfig(c.env); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: revocations.id, + jti: revocations.jti, + reason: revocations.reason, + revoked_at: revocations.revoked_at, + agent_did: agents.did, + }) + .from(revocations) + .innerJoin(agents, eq(revocations.agent_id, agents.id)) + .orderBy(desc(revocations.revoked_at), desc(revocations.id)); + + if (rows.length === 0) { + throw new AppError({ + code: "CRL_NOT_FOUND", + message: "CRL snapshot is not available", + status: 404, + expose: true, + }); 
+ } + + const signer = await resolveRegistrySigner(config); + const nowSeconds = Math.floor(Date.now() / 1000); + const claims = buildCrlClaims({ + rows, + environment: config.ENVIRONMENT, + issuer: resolveRegistryIssuer(config.ENVIRONMENT), + nowSeconds, + }); + const crl = await signCRL({ + claims, + signerKid: signer.signerKid, + signerKeypair: signer.signerKeypair, + }); + + return c.json({ crl }, 200, { + "Cache-Control": REGISTRY_CRL_CACHE_CONTROL, + }); + }); + app.get("/v1/me", createApiKeyAuth(), (c) => { return c.json({ human: c.get("human") }); }); From 07bc6fc4f81bfbd4670bc28ea2e1737f9b995764 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 13 Feb 2026 21:10:25 +0530 Subject: [PATCH 034/190] feat(registry): implement T19 public agent resolve endpoint --- apps/registry/src/AGENTS.md | 12 +- apps/registry/src/agent-resolve.ts | 99 +++++++++++ apps/registry/src/rate-limit.ts | 87 ++++++++++ apps/registry/src/server.test.ts | 258 ++++++++++++++++++++++++++--- apps/registry/src/server.ts | 46 ++++- 5 files changed, 481 insertions(+), 21 deletions(-) create mode 100644 apps/registry/src/agent-resolve.ts create mode 100644 apps/registry/src/rate-limit.ts diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 3093283..a490b83 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -25,10 +25,20 @@ - If no revocations exist yet, return `404 CRL_NOT_FOUND` instead of emitting an unsigned or schema-invalid empty snapshot. - Route tests should verify the returned CRL token using SDK `verifyCRL` and the published active keys from `/.well-known/claw-keys.json`. +## GET /v1/resolve/:id Contract +- Public endpoint: no PAT/auth required. +- Validate `:id` as ULID via dedicated parser and return `400 AGENT_RESOLVE_INVALID_PATH` on invalid path input. +- Rate-limit by client IP with a basic in-memory limiter and return `429 RATE_LIMIT_EXCEEDED` when over threshold. 
+- Return only public fields: `{ did, name, framework, status, ownerDid }`. +- Do not expose PII or internal fields (email, API key metadata, or private key material). +- For unknown IDs, return `404 AGENT_NOT_FOUND` with no ownership-leak variants. +- Keep framework output stable as a non-empty string for legacy rows missing `framework`. + ## Validation - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. -- When using fake D1 adapters in route tests, make select responses honor bound parameters so query-shape regressions are caught. +- When using fake D1 adapters in route tests, make select responses honor bound parameters, selected-column projection, and join semantics so query-shape regressions are caught. +- Fake D1 join emulation should drop rows when `innerJoin` targets are missing so tests catch missing/incorrect joins instead of masking them with stubbed values. ## GET /v1/agents Contract - Require PAT auth via `createApiKeyAuth`; only caller-owned agents may be returned. 
diff --git a/apps/registry/src/agent-resolve.ts b/apps/registry/src/agent-resolve.ts new file mode 100644 index 0000000..e02641e --- /dev/null +++ b/apps/registry/src/agent-resolve.ts @@ -0,0 +1,99 @@ +import { parseUlid } from "@clawdentity/protocol"; +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const DEFAULT_RESOLVED_FRAMEWORK = "openclaw"; + +type AgentStatus = "active" | "revoked"; + +type ResolvePathErrorDetails = { + fieldErrors: Record; + formErrors: string[]; +}; + +export type ResolvedAgentRow = { + did: string; + name: string; + framework: string | null; + status: AgentStatus; + owner_did: string; +}; + +export type ResolvedAgent = { + did: string; + name: string; + framework: string; + status: AgentStatus; + ownerDid: string; +}; + +function invalidResolvePathError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: ResolvePathErrorDetails; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_RESOLVE_INVALID_PATH", + message: exposeDetails + ? "Agent resolve path is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +export function parseAgentResolvePath(input: { + id: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): string { + const id = input.id.trim(); + if (id.length === 0) { + throw invalidResolvePathError({ + environment: input.environment, + details: { + fieldErrors: { id: ["id is required"] }, + formErrors: [], + }, + }); + } + + try { + return parseUlid(id).value; + } catch { + throw invalidResolvePathError({ + environment: input.environment, + details: { + fieldErrors: { id: ["id must be a valid ULID"] }, + formErrors: [], + }, + }); + } +} + +export function agentResolveNotFoundError(): AppError { + return new AppError({ + code: "AGENT_NOT_FOUND", + message: "Agent not found", + status: 404, + expose: true, + }); +} + +export function mapResolvedAgentRow(row: ResolvedAgentRow): ResolvedAgent { + const framework = + typeof row.framework === "string" && row.framework.length > 0 + ? row.framework + : DEFAULT_RESOLVED_FRAMEWORK; + + return { + did: row.did, + name: row.name, + framework, + status: row.status, + ownerDid: row.owner_did, + }; +} diff --git a/apps/registry/src/rate-limit.ts b/apps/registry/src/rate-limit.ts new file mode 100644 index 0000000..99bffc4 --- /dev/null +++ b/apps/registry/src/rate-limit.ts @@ -0,0 +1,87 @@ +import { AppError } from "@clawdentity/sdk"; +import type { MiddlewareHandler } from "hono"; + +export const RESOLVE_RATE_LIMIT_WINDOW_MS = 60_000; +export const RESOLVE_RATE_LIMIT_MAX_REQUESTS = 10; +export const RESOLVE_RATE_LIMIT_MAX_BUCKETS = 10_000; + +type InMemoryBucket = { + windowStartedAtMs: number; + count: number; +}; + +type RateLimitOptions = { + bucketKey: string; + maxRequests: number; + maxBuckets?: number; + windowMs: number; + nowMs?: () => number; +}; + +function resolveClientIp(request: Request): string { + const cfIp = request.headers.get("cf-connecting-ip"); + if (typeof cfIp === "string" && cfIp.trim().length > 0) { + return cfIp.trim(); + } + + return "unknown"; +} + 
+export function createInMemoryRateLimit( + options: RateLimitOptions, +): MiddlewareHandler { + const nowMs = options.nowMs ?? Date.now; + const maxBuckets = options.maxBuckets ?? RESOLVE_RATE_LIMIT_MAX_BUCKETS; + const buckets = new Map(); + + return async (c, next) => { + const now = nowMs(); + for (const [key, bucket] of buckets.entries()) { + if (now - bucket.windowStartedAtMs >= options.windowMs) { + buckets.delete(key); + } + } + + const clientIp = resolveClientIp(c.req.raw); + const key = `${options.bucketKey}:${clientIp}`; + const existing = buckets.get(key); + + if (!existing || now - existing.windowStartedAtMs >= options.windowMs) { + if (!existing && buckets.size >= maxBuckets) { + let oldestKey: string | undefined; + let oldestWindowStart = Number.POSITIVE_INFINITY; + + for (const [bucketKey, bucket] of buckets.entries()) { + if (bucket.windowStartedAtMs < oldestWindowStart) { + oldestWindowStart = bucket.windowStartedAtMs; + oldestKey = bucketKey; + } + } + + if (oldestKey) { + buckets.delete(oldestKey); + } + } + + buckets.set(key, { + windowStartedAtMs: now, + count: 1, + }); + await next(); + return; + } + + if (existing.count >= options.maxRequests) { + throw new AppError({ + code: "RATE_LIMIT_EXCEEDED", + message: "Too many requests", + status: 429, + expose: true, + }); + } + + existing.count += 1; + buckets.set(key, existing); + await next(); + }; +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 4738169..dedb6bb 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -22,6 +22,7 @@ import { deriveApiKeyLookupPrefix, hashApiKeyToken, } from "./auth/api-key-auth.js"; +import { RESOLVE_RATE_LIMIT_MAX_REQUESTS } from "./rate-limit.js"; import app, { createRegistryApp } from "./server.js"; function makeAitClaims(publicKey: Uint8Array): AitClaims { @@ -88,6 +89,7 @@ type FakeAgentSelectRow = { id: string; did: string; owner_id: string; + owner_did: string; name: string; 
framework: string | null; public_key: string; @@ -213,6 +215,21 @@ function parseSelectedColumns(query: string): string[] { .split(",") .map((column) => column.trim()) .map((column) => { + const normalizedColumn = column.toLowerCase(); + if ( + normalizedColumn.includes(`"humans"."did"`) || + normalizedColumn.includes("humans.did") + ) { + return "owner_did"; + } + + if ( + normalizedColumn.includes(`"agents"."did"`) || + normalizedColumn.includes("agents.did") + ) { + return "did"; + } + const aliasMatch = column.match(/\s+as\s+"?([a-zA-Z0-9_]+)"?\s*$/i); if (aliasMatch?.[1]) { return aliasMatch[1].toLowerCase(); @@ -233,6 +250,17 @@ function parseSelectedColumns(query: string): string[] { .filter((column) => column.length > 0); } +function createFakePublicKey(agentId: string): string { + const seed = agentId.length > 0 ? agentId : "agent"; + const bytes = new Uint8Array(32); + + for (let index = 0; index < bytes.length; index += 1) { + bytes[index] = seed.charCodeAt(index % seed.length) & 0xff; + } + + return encodeBase64url(bytes); +} + function getAgentSelectColumnValue( row: FakeAgentSelectRow, column: string, @@ -246,6 +274,9 @@ function getAgentSelectColumnValue( if (column === "owner_id") { return row.owner_id; } + if (column === "owner_did") { + return row.owner_did; + } if (column === "name") { return row.name; } @@ -276,8 +307,10 @@ function getAgentSelectColumnValue( function resolveAgentSelectRows(options: { query: string; params: unknown[]; + authRows: FakeD1Row[]; agentRows: FakeAgentRow[]; }): FakeAgentSelectRow[] { + const normalizedQuery = options.query.toLowerCase(); const whereClause = extractWhereClause(options.query); const equalityParams = parseWhereEqualityParams({ whereClause, @@ -290,6 +323,9 @@ function resolveAgentSelectRows(options: { const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); const hasCursorFilter = hasFilter(whereClause, "id", "<"); const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const 
requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + normalizedQuery.includes("join humans"); const ownerId = hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" @@ -336,21 +372,28 @@ function resolveAgentSelectRows(options: { ) .filter((row) => (cursorFilter ? row.id < cursorFilter : true)) .sort((left, right) => right.id.localeCompare(left.id)) - .slice(0, limit) - .map((row) => ({ - id: row.id, - did: row.did, - owner_id: row.ownerId, - name: row.name, - framework: row.framework, - public_key: - row.publicKey ?? "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: row.status, - expires_at: row.expiresAt, - current_jti: row.currentJti ?? null, - created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", - updated_at: row.updatedAt ?? "2026-01-01T00:00:00.000Z", - })); + .map((row) => { + const ownerDid = options.authRows.find( + (authRow) => authRow.humanId === row.ownerId, + )?.humanDid; + + return { + id: row.id, + did: row.did, + owner_id: row.ownerId, + owner_did: ownerDid ?? "", + name: row.name, + framework: row.framework, + public_key: row.publicKey ?? createFakePublicKey(row.id), + status: row.status, + expires_at: row.expiresAt, + current_jti: row.currentJti ?? null, + created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", + updated_at: row.updatedAt ?? "2026-01-01T00:00:00.000Z", + }; + }) + .filter((row) => (requiresHumanJoin ? 
row.owner_did.length > 0 : true)) + .slice(0, limit); return filteredRows; } @@ -468,11 +511,27 @@ function createFakeDb( (normalizedQuery.includes("select") || normalizedQuery.includes("returning")) ) { + const resultRows = resolveAgentSelectRows({ + query, + params, + authRows: rows, + agentRows, + }); + const selectedColumns = parseSelectedColumns(query); + return { - results: resolveAgentSelectRows({ - query, - params, - agentRows, + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getAgentSelectColumnValue(row, column); + return acc; + }, + {}, + ); }), }; } @@ -520,6 +579,7 @@ function createFakeDb( const resultRows = resolveAgentSelectRows({ query, params, + authRows: rows, agentRows, }); const selectedColumns = parseSelectedColumns(query); @@ -1143,6 +1203,166 @@ describe("GET /v1/crl", () => { }); }); +describe("GET /v1/resolve/:id", () => { + it("returns public profile fields without requiring auth", async () => { + const { authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700500000000); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "resolve-me", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/resolve/${agentId}`, + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + did: string; + name: string; + framework: string; + status: "active" | "revoked"; + ownerDid: string; + email?: string; + displayName?: string; + }; + expect(body).toEqual({ + did: makeAgentDid(agentId), + name: "resolve-me", + framework: "openclaw", + status: "active", + ownerDid: authRow.humanDid, + }); + expect(body).not.toHaveProperty("email"); + 
expect(body).not.toHaveProperty("displayName"); + }); + + it("falls back framework to openclaw when stored framework is null", async () => { + const { authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700500000100); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "legacy-framework-null", + framework: null, + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/resolve/${agentId}`, + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { framework: string }; + expect(body.framework).toBe("openclaw"); + }); + + it("returns 400 for invalid id path", async () => { + const res = await createRegistryApp().request( + "/v1/resolve/not-a-ulid", + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_RESOLVE_INVALID_PATH"); + expect(body.error.details?.fieldErrors?.id).toEqual([ + "id must be a valid ULID", + ]); + }); + + it("returns 404 when agent does not exist", async () => { + const { authRow } = await makeValidPatContext(); + const missingAgentId = generateUlid(1700500000200); + const { database } = createFakeDb([authRow], []); + + const res = await createRegistryApp().request( + `/v1/resolve/${missingAgentId}`, + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + }); + + it("returns 429 when rate limit is exceeded for the same client", async () => { + const { authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700500000300); + const { database } = createFakeDb( 
+ [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "rate-limited-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + const appInstance = createRegistryApp(); + + for (let index = 0; index < RESOLVE_RATE_LIMIT_MAX_REQUESTS; index += 1) { + const response = await appInstance.request( + `/v1/resolve/${agentId}`, + { + headers: { + "CF-Connecting-IP": "203.0.113.10", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(200); + } + + const rateLimited = await appInstance.request( + `/v1/resolve/${agentId}`, + { + headers: { + "CF-Connecting-IP": "203.0.113.10", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); +}); + describe("GET /v1/me", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 7035a40..9b7d6bc 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -20,6 +20,11 @@ import { buildAgentReissue, resolveRegistryIssuer, } from "./agent-registration.js"; +import { + agentResolveNotFoundError, + mapResolvedAgentRow, + parseAgentResolvePath, +} from "./agent-resolve.js"; import { agentNotFoundError, invalidAgentReissueStateError, @@ -31,7 +36,12 @@ import { createApiKeyAuth, } from "./auth/api-key-auth.js"; import { createDb } from "./db/client.js"; -import { agents, revocations } from "./db/schema.js"; +import { agents, humans, revocations } from "./db/schema.js"; +import { + createInMemoryRateLimit, + RESOLVE_RATE_LIMIT_MAX_REQUESTS, + RESOLVE_RATE_LIMIT_WINDOW_MS, +} from "./rate-limit.js"; import { resolveRegistrySigner } from "./registry-signer.js"; type Bindings = { @@ -231,6 +241,11 @@ 
function createRegistryApp() { Bindings: Bindings; Variables: { requestId: string; human: AuthenticatedHuman }; }>(); + const resolveRateLimit = createInMemoryRateLimit({ + bucketKey: "resolve", + maxRequests: RESOLVE_RATE_LIMIT_MAX_REQUESTS, + windowMs: RESOLVE_RATE_LIMIT_WINDOW_MS, + }); app.use("*", createRequestContextMiddleware()); app.use("*", createRequestLoggingMiddleware(logger)); @@ -302,6 +317,35 @@ function createRegistryApp() { }); }); + app.get("/v1/resolve/:id", resolveRateLimit, async (c) => { + const config = getConfig(c.env); + const id = parseAgentResolvePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + did: agents.did, + name: agents.name, + framework: agents.framework, + status: agents.status, + owner_did: humans.did, + }) + .from(agents) + .innerJoin(humans, eq(agents.owner_id, humans.id)) + .where(eq(agents.id, id)) + .limit(1); + + const row = rows[0]; + if (!row) { + throw agentResolveNotFoundError(); + } + + return c.json(mapResolvedAgentRow(row)); + }); + app.get("/v1/me", createApiKeyAuth(), (c) => { return c.json({ human: c.get("human") }); }); From 0622728e0ab0406b7cc9be15216201e880002594 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 14:15:20 +0530 Subject: [PATCH 035/190] feat(cli): implement T20 command framework and local config --- apps/cli/AGENTS.md | 35 +++++ apps/cli/package.json | 9 +- apps/cli/sample-config.json | 3 + apps/cli/src/bin.ts | 15 +++ apps/cli/src/commands/config.test.ts | 190 +++++++++++++++++++++++++++ apps/cli/src/commands/config.ts | 158 ++++++++++++++++++++++ apps/cli/src/config/manager.test.ts | 141 ++++++++++++++++++++ apps/cli/src/config/manager.ts | 114 ++++++++++++++++ apps/cli/src/index.test.ts | 34 ++++- apps/cli/src/index.ts | 10 ++ apps/cli/src/io.ts | 10 ++ apps/cli/tsconfig.json | 4 +- apps/cli/tsup.config.ts | 5 +- pnpm-lock.yaml | 36 ++++- 14 files changed, 752 insertions(+), 12 
deletions(-) create mode 100644 apps/cli/AGENTS.md create mode 100644 apps/cli/sample-config.json create mode 100644 apps/cli/src/bin.ts create mode 100644 apps/cli/src/commands/config.test.ts create mode 100644 apps/cli/src/commands/config.ts create mode 100644 apps/cli/src/config/manager.test.ts create mode 100644 apps/cli/src/config/manager.ts create mode 100644 apps/cli/src/io.ts diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md new file mode 100644 index 0000000..039aabb --- /dev/null +++ b/apps/cli/AGENTS.md @@ -0,0 +1,35 @@ +# AGENTS.md (apps/cli) + +## Purpose +- Define conventions for the `clawdentity` CLI package. +- Keep command behavior predictable, testable, and safe for local credential storage. + +## Command Architecture +- Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. +- Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). +- Implement command groups under `src/commands/*` and register them from `createProgram()`. +- Prefer shared helpers (for validation, output, and error handling) over repeating per-command logic. +- Use `process.exitCode` instead of `process.exit()`. +- Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. + +## Config and Secrets +- Local CLI config lives at `~/.clawdentity/config.json`. +- Resolve values with explicit precedence: environment variables > config file > built-in defaults. +- Keep API tokens masked in human-facing output (`show`, success logs, debug prints). +- Write config with restrictive permissions (`0600`) and never commit secrets or generated local config. + +## Testing Rules +- Use Vitest for all tests. +- Unit-test config I/O and precedence logic with mocked `node:fs/promises` and `node:os`. +- Command tests should assert both behavior and output, using `vi.spyOn(console, ...)` where needed. +- Cover invalid input and failure paths, not only happy paths. 
+ +## Validation Commands +- `pnpm -F @clawdentity/cli lint` +- `pnpm -F @clawdentity/cli typecheck` +- `pnpm -F @clawdentity/cli test` +- `pnpm -F @clawdentity/cli build` +- For cross-package changes, run root checks: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build`. + +## Refactor Trigger +- If command count grows, move to a typed command registry/builder so command wiring stays declarative and avoids duplicate validation/output code. diff --git a/apps/cli/package.json b/apps/cli/package.json index a4158a2..6728932 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -5,6 +5,9 @@ "type": "module", "main": "./dist/index.js", "types": "./dist/index.d.ts", + "bin": { + "clawdentity": "./dist/bin.js" + }, "exports": { ".": { "import": "./dist/index.js", @@ -20,6 +23,10 @@ }, "dependencies": { "@clawdentity/protocol": "workspace:*", - "@clawdentity/sdk": "workspace:*" + "@clawdentity/sdk": "workspace:*", + "commander": "^13.1.0" + }, + "devDependencies": { + "@types/node": "^22.18.11" } } diff --git a/apps/cli/sample-config.json b/apps/cli/sample-config.json new file mode 100644 index 0000000..3f58dde --- /dev/null +++ b/apps/cli/sample-config.json @@ -0,0 +1,3 @@ +{ + "registryUrl": "https://api.clawdentity.com" +} diff --git a/apps/cli/src/bin.ts b/apps/cli/src/bin.ts new file mode 100644 index 0000000..61b020d --- /dev/null +++ b/apps/cli/src/bin.ts @@ -0,0 +1,15 @@ +import { createLogger } from "@clawdentity/sdk"; +import { createProgram } from "./index.js"; +import { writeStderrLine } from "./io.js"; + +const logger = createLogger({ service: "cli", module: "bin" }); + +createProgram() + .parseAsync(process.argv) + .catch((error: unknown) => { + process.exitCode = 1; + const message = error instanceof Error ? 
error.message : String(error); + + logger.error("cli.execution_failed", { errorMessage: message }); + writeStderrLine(message); + }); diff --git a/apps/cli/src/commands/config.test.ts b/apps/cli/src/commands/config.test.ts new file mode 100644 index 0000000..8e99134 --- /dev/null +++ b/apps/cli/src/commands/config.test.ts @@ -0,0 +1,190 @@ +import { access } from "node:fs/promises"; +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +vi.mock("node:fs/promises", () => ({ + access: vi.fn(), +})); + +vi.mock("../config/manager.js", () => ({ + getConfigFilePath: vi.fn(() => "/mock-home/.clawdentity/config.json"), + getConfigValue: vi.fn(), + readConfig: vi.fn(), + resolveConfig: vi.fn(), + setConfigValue: vi.fn(), + writeConfig: vi.fn(), +})); + +vi.mock("@clawdentity/sdk", () => ({ + createLogger: vi.fn(() => ({ + child: vi.fn(), + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + })), +})); + +import { + getConfigValue, + readConfig, + resolveConfig, + setConfigValue, + writeConfig, +} from "../config/manager.js"; +import { createConfigCommand } from "./config.js"; + +const mockedAccess = vi.mocked(access); +const mockedReadConfig = vi.mocked(readConfig); +const mockedWriteConfig = vi.mocked(writeConfig); +const mockedSetConfigValue = vi.mocked(setConfigValue); +const mockedGetConfigValue = vi.mocked(getConfigValue); +const mockedResolveConfig = vi.mocked(resolveConfig); + +const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +const runConfigCommand = async (args: string[]) => { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + 
.spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createConfigCommand(); + + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "config", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +}; + +describe("config command", () => { + beforeEach(() => { + vi.clearAllMocks(); + + mockedReadConfig.mockResolvedValue({ + registryUrl: "https://api.clawdentity.com", + }); + mockedResolveConfig.mockResolvedValue({ + registryUrl: "https://api.clawdentity.com", + }); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("initializes config when missing", async () => { + mockedAccess.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + const result = await runConfigCommand(["init"]); + + expect(mockedReadConfig).toHaveBeenCalled(); + expect(mockedWriteConfig).toHaveBeenCalledWith({ + registryUrl: "https://api.clawdentity.com", + }); + expect(result.stdout).toContain( + "Initialized config at /mock-home/.clawdentity/config.json", + ); + expect(result.exitCode).toBeUndefined(); + }); + + it("skips init when config already exists", async () => { + mockedAccess.mockResolvedValueOnce(undefined); + + const result = await runConfigCommand(["init"]); + + expect(mockedWriteConfig).not.toHaveBeenCalled(); + expect(result.stdout).toContain( + "Config already exists at /mock-home/.clawdentity/config.json", + ); + }); + + it("sets registry url", async () => { + await runConfigCommand(["set", 
"registryUrl", "http://localhost:8787"]); + + expect(mockedSetConfigValue).toHaveBeenCalledWith( + "registryUrl", + "http://localhost:8787", + ); + }); + + it("masks apiKey output when setting", async () => { + const result = await runConfigCommand(["set", "apiKey", "super-secret"]); + + expect(mockedSetConfigValue).toHaveBeenCalledWith("apiKey", "super-secret"); + expect(result.stdout).toContain("Set apiKey=********"); + }); + + it("rejects invalid keys for set", async () => { + const result = await runConfigCommand(["set", "invalid", "value"]); + + expect(mockedSetConfigValue).not.toHaveBeenCalled(); + expect(result.stderr).toContain("Invalid config key"); + expect(result.exitCode).toBe(1); + }); + + it("returns config values", async () => { + mockedGetConfigValue.mockResolvedValueOnce("http://localhost:8787"); + + const result = await runConfigCommand(["get", "registryUrl"]); + + expect(result.stdout).toContain("http://localhost:8787"); + }); + + it("prints not set for missing value", async () => { + mockedGetConfigValue.mockResolvedValueOnce(undefined); + + const result = await runConfigCommand(["get", "apiKey"]); + + expect(result.stdout).toContain("(not set)"); + }); + + it("shows resolved config", async () => { + mockedResolveConfig.mockResolvedValueOnce({ + registryUrl: "http://localhost:8787", + apiKey: "super-secret", + }); + + const result = await runConfigCommand(["show"]); + + expect(result.stdout).toContain("http://localhost:8787"); + expect(result.stdout).toContain('"apiKey": "********"'); + }); +}); diff --git a/apps/cli/src/commands/config.ts b/apps/cli/src/commands/config.ts new file mode 100644 index 0000000..5fa0efb --- /dev/null +++ b/apps/cli/src/commands/config.ts @@ -0,0 +1,158 @@ +import { access } from "node:fs/promises"; +import { createLogger } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { + type CliConfig, + type CliConfigKey, + getConfigFilePath, + getConfigValue, + readConfig, + resolveConfig, + 
setConfigValue, + writeConfig, +} from "../config/manager.js"; +import { writeStderrLine, writeStdoutLine } from "../io.js"; + +const logger = createLogger({ service: "cli", module: "config" }); + +const VALID_KEYS = [ + "registryUrl", + "apiKey", +] as const satisfies readonly CliConfigKey[]; + +const withErrorHandling = ( + command: string, + handler: (...args: T) => Promise, +) => { + return async (...args: T) => { + try { + await handler(...args); + } catch (error) { + process.exitCode = 1; + const message = error instanceof Error ? error.message : String(error); + + logger.error("cli.command_failed", { + command, + errorMessage: message, + }); + writeStderrLine(message); + } + }; +}; + +const isValidConfigKey = (value: string): value is CliConfigKey => { + return VALID_KEYS.includes(value as CliConfigKey); +}; + +const maskApiKey = (config: CliConfig): CliConfig => { + if (!config.apiKey) { + return config; + } + + return { + ...config, + apiKey: "********", + }; +}; + +const isNotFoundError = (error: unknown): boolean => { + const nodeError = error as NodeJS.ErrnoException; + return nodeError.code === "ENOENT"; +}; + +const getValidatedKey = (key: string): CliConfigKey | undefined => { + if (isValidConfigKey(key)) { + return key; + } + + process.exitCode = 1; + writeStderrLine( + `Invalid config key "${key}". 
Valid keys: ${VALID_KEYS.join(", ")}`, + ); + logger.warn("cli.invalid_config_key", { key }); + + return undefined; +}; + +export const createConfigCommand = (): Command => { + const configCommand = new Command("config").description( + "Manage local CLI configuration", + ); + + configCommand + .command("init") + .description("Initialize local config file") + .action( + withErrorHandling("config init", async () => { + const configFilePath = getConfigFilePath(); + + try { + await access(configFilePath); + writeStdoutLine(`Config already exists at ${configFilePath}`); + return; + } catch (error) { + if (!isNotFoundError(error)) { + throw error; + } + } + + const config = await readConfig(); + await writeConfig(config); + + writeStdoutLine(`Initialized config at ${configFilePath}`); + writeStdoutLine(JSON.stringify(maskApiKey(config), null, 2)); + }), + ); + + configCommand + .command("set ") + .description("Set a config value") + .action( + withErrorHandling("config set", async (key: string, value: string) => { + const validatedKey = getValidatedKey(key); + + if (!validatedKey) { + return; + } + + await setConfigValue(validatedKey, value); + + const printedValue = validatedKey === "apiKey" ? 
"********" : value; + writeStdoutLine(`Set ${validatedKey}=${printedValue}`); + }), + ); + + configCommand + .command("get ") + .description("Get a resolved config value") + .action( + withErrorHandling("config get", async (key: string) => { + const validatedKey = getValidatedKey(key); + + if (!validatedKey) { + return; + } + + const value = await getConfigValue(validatedKey); + + if (value === undefined) { + writeStdoutLine("(not set)"); + return; + } + + writeStdoutLine(value); + }), + ); + + configCommand + .command("show") + .description("Show resolved config values") + .action( + withErrorHandling("config show", async () => { + const resolvedConfig = await resolveConfig(); + writeStdoutLine(JSON.stringify(maskApiKey(resolvedConfig), null, 2)); + }), + ); + + return configCommand; +}; diff --git a/apps/cli/src/config/manager.test.ts b/apps/cli/src/config/manager.test.ts new file mode 100644 index 0000000..c5fe579 --- /dev/null +++ b/apps/cli/src/config/manager.test.ts @@ -0,0 +1,141 @@ +import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +vi.mock("node:os", () => ({ + homedir: vi.fn(() => "/mock-home"), +})); + +vi.mock("node:fs/promises", () => ({ + chmod: vi.fn(), + mkdir: vi.fn(), + readFile: vi.fn(), + writeFile: vi.fn(), +})); + +import { + DEFAULT_REGISTRY_URL, + getConfigDir, + getConfigFilePath, + getConfigValue, + readConfig, + resolveConfig, + setConfigValue, + writeConfig, +} from "./manager.js"; + +const mockedReadFile = vi.mocked(readFile); +const mockedWriteFile = vi.mocked(writeFile); +const mockedMkdir = vi.mocked(mkdir); +const mockedChmod = vi.mocked(chmod); +const mockedHomedir = vi.mocked(homedir); + +const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +describe("config manager", () => { + const 
previousEnv = process.env; + + beforeEach(() => { + vi.clearAllMocks(); + mockedHomedir.mockReturnValue("/mock-home"); + process.env = { ...previousEnv }; + }); + + afterEach(() => { + process.env = previousEnv; + }); + + it("returns defaults when config does not exist", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + await expect(readConfig()).resolves.toEqual({ + registryUrl: DEFAULT_REGISTRY_URL, + }); + }); + + it("merges file contents with defaults", async () => { + mockedReadFile.mockResolvedValueOnce('{"apiKey":"secret"}'); + + await expect(readConfig()).resolves.toEqual({ + registryUrl: DEFAULT_REGISTRY_URL, + apiKey: "secret", + }); + }); + + it("rethrows non-ENOENT read failures", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("EACCES")); + + await expect(readConfig()).rejects.toMatchObject({ + code: "EACCES", + }); + }); + + it("writes config and secures file permissions", async () => { + await writeConfig({ + registryUrl: "http://localhost:8787", + apiKey: "token", + }); + + expect(mockedMkdir).toHaveBeenCalledWith("/mock-home/.clawdentity", { + recursive: true, + }); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/config.json", + '{\n "registryUrl": "http://localhost:8787",\n "apiKey": "token"\n}\n', + "utf-8", + ); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/config.json", + 0o600, + ); + }); + + it("applies env override over file config", async () => { + mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + process.env.CLAWDENTITY_REGISTRY_URL = "http://env:8787"; + + await expect(resolveConfig()).resolves.toEqual({ + registryUrl: "http://env:8787", + }); + }); + + it("prefers env apiKey over config file", async () => { + mockedReadFile.mockResolvedValueOnce('{"apiKey":"from-file"}'); + process.env.CLAWDENTITY_API_KEY = "from-env"; + + await expect(resolveConfig()).resolves.toEqual({ + registryUrl: 
DEFAULT_REGISTRY_URL, + apiKey: "from-env", + }); + }); + + it("returns a single resolved value", async () => { + mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + process.env.CLAWDENTITY_REGISTRY_URL = "http://env:8787"; + + await expect(getConfigValue("registryUrl")).resolves.toBe( + "http://env:8787", + ); + }); + + it("reads, merges, and writes when setting values", async () => { + mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + + await setConfigValue("apiKey", "new-token"); + + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/config.json", + '{\n "registryUrl": "http://file:8787",\n "apiKey": "new-token"\n}\n', + "utf-8", + ); + }); + + it("exposes config location helpers", () => { + expect(getConfigDir()).toBe("/mock-home/.clawdentity"); + expect(getConfigFilePath()).toBe("/mock-home/.clawdentity/config.json"); + }); +}); diff --git a/apps/cli/src/config/manager.ts b/apps/cli/src/config/manager.ts new file mode 100644 index 0000000..6106c95 --- /dev/null +++ b/apps/cli/src/config/manager.ts @@ -0,0 +1,114 @@ +import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { dirname, join } from "node:path"; + +export const DEFAULT_REGISTRY_URL = "https://api.clawdentity.com"; + +export interface CliConfig { + registryUrl: string; + apiKey?: string; +} + +export type CliConfigKey = keyof CliConfig; + +const CONFIG_DIR = ".clawdentity"; +const CONFIG_FILE = "config.json"; + +const ENV_KEY_MAP: Record = { + registryUrl: "CLAWDENTITY_REGISTRY_URL", + apiKey: "CLAWDENTITY_API_KEY", +}; + +const DEFAULT_CONFIG: CliConfig = { + registryUrl: DEFAULT_REGISTRY_URL, +}; + +const isConfigObject = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +const normalizeConfig = (raw: unknown): CliConfig => { + if (!isConfigObject(raw)) { + return { ...DEFAULT_CONFIG }; + } + + const config: CliConfig 
= { + ...DEFAULT_CONFIG, + }; + + if (typeof raw.registryUrl === "string" && raw.registryUrl.length > 0) { + config.registryUrl = raw.registryUrl; + } + + if (typeof raw.apiKey === "string" && raw.apiKey.length > 0) { + config.apiKey = raw.apiKey; + } + + return config; +}; + +export const getConfigDir = (): string => join(homedir(), CONFIG_DIR); + +export const getConfigFilePath = (): string => + join(getConfigDir(), CONFIG_FILE); + +export const readConfig = async (): Promise => { + try { + const configContents = await readFile(getConfigFilePath(), "utf-8"); + return normalizeConfig(JSON.parse(configContents)); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + + if (nodeError.code === "ENOENT") { + return { ...DEFAULT_CONFIG }; + } + + throw error; + } +}; + +export const resolveConfig = async (): Promise => { + const config = await readConfig(); + + for (const key of Object.keys(ENV_KEY_MAP) as CliConfigKey[]) { + const envVar = process.env[ENV_KEY_MAP[key]]; + + if (typeof envVar === "string" && envVar.length > 0) { + config[key] = envVar; + } + } + + return config; +}; + +export const writeConfig = async (config: CliConfig): Promise => { + const configFilePath = getConfigFilePath(); + const configDirectory = dirname(configFilePath); + + await mkdir(configDirectory, { recursive: true }); + await writeFile( + configFilePath, + `${JSON.stringify(config, null, 2)}\n`, + "utf-8", + ); + await chmod(configFilePath, 0o600); +}; + +export const getConfigValue = async ( + key: K, +): Promise => { + const config = await resolveConfig(); + return config[key]; +}; + +export const setConfigValue = async ( + key: K, + value: CliConfig[K], +): Promise => { + const currentConfig = await readConfig(); + + await writeConfig({ + ...currentConfig, + [key]: value, + }); +}; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 401e371..f1a2366 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -1,8 +1,40 @@ 
import { describe, expect, it } from "vitest"; -import { CLI_VERSION } from "./index.js"; +import { CLI_VERSION, createProgram } from "./index.js"; describe("cli", () => { it("exports CLI_VERSION", () => { expect(CLI_VERSION).toBe("0.0.0"); }); + + it("creates a program named clawdentity", () => { + expect(createProgram().name()).toBe("clawdentity"); + }); + + it("registers the config command", () => { + const hasConfigCommand = createProgram() + .commands.map((command) => command.name()) + .includes("config"); + + expect(hasConfigCommand).toBe(true); + }); + + it("prints version output", async () => { + const output: string[] = []; + const program = createProgram(); + + program.exitOverride(); + program.configureOutput({ + writeOut: (value) => output.push(value), + writeErr: (value) => output.push(value), + }); + + await expect( + program.parseAsync(["node", "clawdentity", "--version"]), + ).rejects.toMatchObject({ + code: "commander.version", + exitCode: 0, + }); + + expect(output.join("")).toContain("0.0.0"); + }); }); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 2cd4f9e..521e978 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1 +1,11 @@ +import { Command } from "commander"; +import { createConfigCommand } from "./commands/config.js"; + export const CLI_VERSION = "0.0.0"; + +export const createProgram = (): Command => { + return new Command("clawdentity") + .description("Clawdentity CLI - Agent identity management") + .version(CLI_VERSION) + .addCommand(createConfigCommand()); +}; diff --git a/apps/cli/src/io.ts b/apps/cli/src/io.ts new file mode 100644 index 0000000..367ce86 --- /dev/null +++ b/apps/cli/src/io.ts @@ -0,0 +1,10 @@ +const withTrailingNewline = (value: string): string => + value.endsWith("\n") ? 
value : `${value}\n`; + +export const writeStdoutLine = (value: string): void => { + process.stdout.write(withTrailingNewline(value)); +}; + +export const writeStderrLine = (value: string): void => { + process.stderr.write(withTrailingNewline(value)); +}; diff --git a/apps/cli/tsconfig.json b/apps/cli/tsconfig.json index 792172f..949b3a8 100644 --- a/apps/cli/tsconfig.json +++ b/apps/cli/tsconfig.json @@ -1,8 +1,8 @@ { "extends": "../../tsconfig.base.json", "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" + "types": ["node"], + "outDir": "./dist" }, "include": ["src"] } diff --git a/apps/cli/tsup.config.ts b/apps/cli/tsup.config.ts index 7a3d66a..0305890 100644 --- a/apps/cli/tsup.config.ts +++ b/apps/cli/tsup.config.ts @@ -1,8 +1,11 @@ import { defineConfig } from "tsup"; export default defineConfig({ - entry: ["src/index.ts"], + entry: ["src/index.ts", "src/bin.ts"], format: ["esm"], dts: true, clean: true, + banner: { + js: "#!/usr/bin/env node", + }, }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 12347f2..9195b8c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -28,7 +28,7 @@ importers: version: 5.8.3 vitest: specifier: ^4.0.18 - version: 4.0.18(yaml@2.8.2) + version: 4.0.18(@types/node@22.19.11)(yaml@2.8.2) wrangler: specifier: ^4.64.0 version: 4.64.0(@cloudflare/workers-types@4.20260210.0) @@ -41,6 +41,13 @@ importers: '@clawdentity/sdk': specifier: workspace:* version: link:../../packages/sdk + commander: + specifier: ^13.1.0 + version: 13.1.0 + devDependencies: + '@types/node': + specifier: ^22.18.11 + version: 22.19.11 apps/proxy: dependencies: @@ -1063,6 +1070,9 @@ packages: '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/node@22.19.11': + resolution: {integrity: sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==} + '@vitest/expect@4.0.18': resolution: {integrity: 
sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==} @@ -2134,6 +2144,9 @@ packages: resolution: {integrity: sha512-yu26mwteFYzBAot7KVMqFGCVpsF6g8wXfJzQUHvu1no3+rRRSFcSV2nKeYvNPLD2J4b08jYBDhHUjeH0ygIl9w==} hasBin: true + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici@7.18.2: resolution: {integrity: sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==} engines: {node: '>=20.18.1'} @@ -2885,6 +2898,10 @@ snapshots: '@types/estree@1.0.8': {} + '@types/node@22.19.11': + dependencies: + undici-types: 6.21.0 + '@vitest/expect@4.0.18': dependencies: '@standard-schema/spec': 1.1.0 @@ -2894,13 +2911,13 @@ snapshots: chai: 6.2.2 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.18(vite@7.3.1(yaml@2.8.2))': + '@vitest/mocker@4.0.18(vite@7.3.1(@types/node@22.19.11)(yaml@2.8.2))': dependencies: '@vitest/spy': 4.0.18 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.3.1(yaml@2.8.2) + vite: 7.3.1(@types/node@22.19.11)(yaml@2.8.2) '@vitest/pretty-format@4.0.18': dependencies: @@ -3938,6 +3955,8 @@ snapshots: ulid@3.0.2: {} + undici-types@6.21.0: {} + undici@7.18.2: {} unenv@2.0.0-rc.24: @@ -3946,7 +3965,7 @@ snapshots: util-deprecate@1.0.2: {} - vite@7.3.1(yaml@2.8.2): + vite@7.3.1(@types/node@22.19.11)(yaml@2.8.2): dependencies: esbuild: 0.27.3 fdir: 6.5.0(picomatch@4.0.3) @@ -3955,13 +3974,14 @@ snapshots: rollup: 4.57.1 tinyglobby: 0.2.15 optionalDependencies: + '@types/node': 22.19.11 fsevents: 2.3.3 yaml: 2.8.2 - vitest@4.0.18(yaml@2.8.2): + vitest@4.0.18(@types/node@22.19.11)(yaml@2.8.2): dependencies: '@vitest/expect': 4.0.18 - '@vitest/mocker': 4.0.18(vite@7.3.1(yaml@2.8.2)) + '@vitest/mocker': 4.0.18(vite@7.3.1(@types/node@22.19.11)(yaml@2.8.2)) '@vitest/pretty-format': 4.0.18 '@vitest/runner': 4.0.18 '@vitest/snapshot': 4.0.18 @@ -3978,8 +3998,10 @@ snapshots: 
tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 - vite: 7.3.1(yaml@2.8.2) + vite: 7.3.1(@types/node@22.19.11)(yaml@2.8.2) why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 22.19.11 transitivePeerDependencies: - jiti - less From 4ad153039bdd6b275760053157fcfe9219e92f43 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 14:40:58 +0530 Subject: [PATCH 036/190] feat(cli): add agent create command and shared command error handler --- apps/cli/AGENTS.md | 8 +- apps/cli/src/commands/agent.test.ts | 300 ++++++++++++++++++++ apps/cli/src/commands/agent.ts | 387 ++++++++++++++++++++++++++ apps/cli/src/commands/config.ts | 21 +- apps/cli/src/commands/helpers.test.ts | 61 ++++ apps/cli/src/commands/helpers.ts | 24 ++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + 8 files changed, 788 insertions(+), 23 deletions(-) create mode 100644 apps/cli/src/commands/agent.test.ts create mode 100644 apps/cli/src/commands/agent.ts create mode 100644 apps/cli/src/commands/helpers.test.ts create mode 100644 apps/cli/src/commands/helpers.ts diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 039aabb..9fbba28 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -8,20 +8,22 @@ - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Implement command groups under `src/commands/*` and register them from `createProgram()`. -- Prefer shared helpers (for validation, output, and error handling) over repeating per-command logic. +- Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. 
+- Keep user-facing command output on `writeStdoutLine` / `writeStderrLine`; reserve structured logger calls for diagnostic events. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. +- Agent identities live at `~/.clawdentity/agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. - Resolve values with explicit precedence: environment variables > config file > built-in defaults. - Keep API tokens masked in human-facing output (`show`, success logs, debug prints). -- Write config with restrictive permissions (`0600`) and never commit secrets or generated local config. +- Write config and identity artifacts with restrictive permissions (`0600`) and never commit secrets or generated local config. ## Testing Rules - Use Vitest for all tests. - Unit-test config I/O and precedence logic with mocked `node:fs/promises` and `node:os`. -- Command tests should assert both behavior and output, using `vi.spyOn(console, ...)` where needed. +- Command tests should assert both behavior and output by capturing `process.stdout.write` / `process.stderr.write`. - Cover invalid input and failure paths, not only happy paths. 
## Validation Commands diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts new file mode 100644 index 0000000..dda7882 --- /dev/null +++ b/apps/cli/src/commands/agent.test.ts @@ -0,0 +1,300 @@ +import { access, chmod, mkdir, writeFile } from "node:fs/promises"; +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +vi.mock("node:fs/promises", () => ({ + access: vi.fn(), + chmod: vi.fn(), + mkdir: vi.fn(), + writeFile: vi.fn(), +})); + +vi.mock("../config/manager.js", () => ({ + getConfigDir: vi.fn(() => "/mock-home/.clawdentity"), + resolveConfig: vi.fn(), +})); + +vi.mock("@clawdentity/sdk", () => ({ + createLogger: vi.fn(() => ({ + child: vi.fn(), + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + })), + encodeEd25519KeypairBase64url: vi.fn(), + generateEd25519Keypair: vi.fn(), +})); + +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, +} from "@clawdentity/sdk"; +import { resolveConfig } from "../config/manager.js"; +import { createAgentCommand } from "./agent.js"; + +const mockedAccess = vi.mocked(access); +const mockedChmod = vi.mocked(chmod); +const mockedMkdir = vi.mocked(mkdir); +const mockedWriteFile = vi.mocked(writeFile); +const mockedResolveConfig = vi.mocked(resolveConfig); +const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); +const mockedEncodeEd25519KeypairBase64url = vi.mocked( + encodeEd25519KeypairBase64url, +); + +const mockFetch = vi.fn(); + +const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +const createJsonResponse = (status: number, body: unknown): Response => { + return { + ok: status >= 200 && status < 300, + status, + json: vi.fn(async () => body), + } as unknown as Response; +}; + +const runAgentCommand = async (args: string[]) => { + const stdout: string[] = []; + const 
stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createAgentCommand(); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "agent", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +}; + +describe("agent create command", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.stubGlobal("fetch", mockFetch); + + mockedResolveConfig.mockResolvedValue({ + registryUrl: "https://api.clawdentity.com", + apiKey: "pat_123", + }); + + mockedAccess.mockRejectedValue(buildErrnoError("ENOENT")); + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + mockedChmod.mockResolvedValue(undefined); + + mockedGenerateEd25519Keypair.mockResolvedValue({ + publicKey: Uint8Array.from({ length: 32 }, (_, index) => index + 1), + secretKey: Uint8Array.from({ length: 32 }, (_, index) => 64 - index), + }); + + mockedEncodeEd25519KeypairBase64url.mockReturnValue({ + publicKey: "public-key-b64url", + secretKey: "secret-key-b64url", + }); + + mockFetch.mockResolvedValue( + createJsonResponse(201, { + agent: { + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + name: "agent-01", + framework: "openclaw", + expiresAt: 
"2030-01-01T00:00:00.000Z", + }, + ait: "ait.jwt.value", + }), + ); + }); + + afterEach(() => { + process.exitCode = undefined; + vi.unstubAllGlobals(); + }); + + it("creates an agent identity and writes all files", async () => { + const result = await runAgentCommand(["create", "agent-01"]); + + expect(mockedGenerateEd25519Keypair).toHaveBeenCalled(); + expect(mockFetch).toHaveBeenCalledWith( + "https://api.clawdentity.com/v1/agents", + expect.objectContaining({ + method: "POST", + headers: expect.objectContaining({ + authorization: "Bearer pat_123", + "content-type": "application/json", + }), + }), + ); + + expect(mockedWriteFile).toHaveBeenCalledTimes(4); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/secret.key", + "secret-key-b64url", + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/public.key", + "public-key-b64url", + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/identity.json", + expect.stringContaining( + '"did": "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"', + ), + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/ait.jwt", + "ait.jwt.value", + "utf-8", + ); + + expect(result.stdout).toContain( + "Agent DID: did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + ); + expect(result.stdout).toContain("Expires At: 2030-01-01T00:00:00.000Z"); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when API key is missing", async () => { + mockedResolveConfig.mockResolvedValueOnce({ + registryUrl: "https://api.clawdentity.com", + }); + + const result = await runAgentCommand(["create", "agent-01"]); + + expect(result.stderr).toContain("API key is not configured"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("handles registry 401 responses", async () => { + mockFetch.mockResolvedValueOnce( + 
createJsonResponse(401, { + error: { + message: "Invalid API key", + }, + }), + ); + + const result = await runAgentCommand(["create", "agent-01"]); + + expect(result.stderr).toContain("authentication failed"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry 400 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(400, { + error: { + message: "name contains invalid characters", + }, + }), + ); + + const result = await runAgentCommand(["create", "agent-01"]); + + expect(result.stderr).toContain("rejected the request"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry connection errors", async () => { + mockFetch.mockRejectedValueOnce(new Error("socket hang up")); + + const result = await runAgentCommand(["create", "agent-01"]); + + expect(result.stderr).toContain("Unable to connect to the registry"); + expect(result.exitCode).toBe(1); + }); + + it("fails when agent directory already exists", async () => { + mockedAccess.mockResolvedValueOnce(undefined); + + const result = await runAgentCommand(["create", "agent-01"]); + + expect(result.stderr).toContain("already exists"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("sets 0600 permissions on every identity file", async () => { + await runAgentCommand(["create", "agent-01"]); + + expect(mockedChmod).toHaveBeenCalledTimes(4); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/secret.key", + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/public.key", + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/identity.json", + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/ait.jwt", + 0o600, + ); + }); + + it("sends optional framework and ttl-days values", async () => { + await runAgentCommand([ + "create", + "agent-01", + 
"--framework", + "langgraph", + "--ttl-days", + "45", + ]); + + const request = mockFetch.mock.calls[0] as [string, RequestInit]; + const requestBody = JSON.parse(String(request[1]?.body)) as { + framework?: string; + ttlDays?: number; + }; + + expect(requestBody.framework).toBe("langgraph"); + expect(requestBody.ttlDays).toBe(45); + }); +}); diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts new file mode 100644 index 0000000..7e8c5d8 --- /dev/null +++ b/apps/cli/src/commands/agent.ts @@ -0,0 +1,387 @@ +import { access, chmod, mkdir, writeFile } from "node:fs/promises"; +import { join } from "node:path"; +import { validateAgentName } from "@clawdentity/protocol"; +import { + createLogger, + encodeEd25519KeypairBase64url, + generateEd25519Keypair, +} from "@clawdentity/sdk"; +import { Command } from "commander"; +import { getConfigDir, resolveConfig } from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "agent" }); + +const AGENTS_DIR_NAME = "agents"; +const FILE_MODE = 0o600; + +type AgentCreateOptions = { + framework?: string; + ttlDays?: string; +}; + +type AgentRegistrationResponse = { + agent: { + did: string; + name: string; + framework: string; + expiresAt: string; + }; + ait: string; +}; + +type RegistryErrorEnvelope = { + error?: { + message?: string; + }; +}; + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +const getAgentDirectory = (name: string): string => { + return join(getConfigDir(), AGENTS_DIR_NAME, name); +}; + +const assertValidAgentName = (name: string): string => { + const normalizedName = name.trim(); + + if (!validateAgentName(normalizedName)) { + throw new Error( + "Agent name contains invalid characters or length. 
Use 1-64 chars: a-z, A-Z, 0-9, ., _, -", + ); + } + + return normalizedName; +}; + +const resolveFramework = ( + framework: string | undefined, +): string | undefined => { + if (framework === undefined) { + return undefined; + } + + const normalizedFramework = framework.trim(); + if (normalizedFramework.length === 0) { + throw new Error("--framework must not be empty when provided"); + } + + return normalizedFramework; +}; + +const resolveTtlDays = (ttlDays: string | undefined): number | undefined => { + if (ttlDays === undefined) { + return undefined; + } + + const parsed = Number.parseInt(ttlDays, 10); + if (!Number.isInteger(parsed) || parsed <= 0) { + throw new Error("--ttl-days must be a positive integer"); + } + + return parsed; +}; + +const extractRegistryErrorMessage = (payload: unknown): string | undefined => { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const trimmed = envelope.error.message.trim(); + return trimmed.length > 0 ? trimmed : undefined; +}; + +const parseJsonResponse = async (response: Response): Promise => { + try { + return await response.json(); + } catch { + return undefined; + } +}; + +const toRegistryRequestUrl = (registryUrl: string): string => { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + + return new URL("v1/agents", normalizedBaseUrl).toString(); +}; + +const toHttpErrorMessage = (status: number, responseBody: unknown): string => { + const registryMessage = extractRegistryErrorMessage(responseBody); + + if (status === 401) { + return registryMessage + ? `Registry authentication failed (401): ${registryMessage}` + : "Registry authentication failed (401). Check your API key."; + } + + if (status === 400) { + return registryMessage + ? 
`Registry rejected the request (400): ${registryMessage}` + : "Registry rejected the request (400). Check name/framework/ttl-days."; + } + + if (status >= 500) { + return `Registry server error (${status}). Try again later.`; + } + + if (registryMessage) { + return `Registry request failed (${status}): ${registryMessage}`; + } + + return `Registry request failed (${status})`; +}; + +const parseAgentRegistrationResponse = ( + payload: unknown, +): AgentRegistrationResponse => { + if (!isRecord(payload)) { + throw new Error("Registry returned an invalid response payload"); + } + + const agentValue = payload.agent; + const aitValue = payload.ait; + + if (!isRecord(agentValue) || typeof aitValue !== "string") { + throw new Error("Registry returned an invalid response payload"); + } + + const did = agentValue.did; + const name = agentValue.name; + const framework = agentValue.framework; + const expiresAt = agentValue.expiresAt; + + if ( + typeof did !== "string" || + typeof name !== "string" || + typeof framework !== "string" || + typeof expiresAt !== "string" + ) { + throw new Error("Registry returned an invalid response payload"); + } + + return { + agent: { + did, + name, + framework, + expiresAt, + }, + ait: aitValue, + }; +}; + +const ensureAgentDirectoryAvailable = async ( + agentName: string, + agentDirectory: string, +): Promise => { + try { + await access(agentDirectory); + throw new Error(`Agent "${agentName}" already exists at ${agentDirectory}`); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + throw error; + } +}; + +const writeSecureFile = async ( + path: string, + content: string, +): Promise => { + await writeFile(path, content, "utf-8"); + await chmod(path, FILE_MODE); +}; + +const ensureAgentDirectory = async ( + agentName: string, + agentDirectory: string, +): Promise => { + await mkdir(join(getConfigDir(), AGENTS_DIR_NAME), { recursive: true }); + + try { + await 
mkdir(agentDirectory); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "EEXIST") { + throw new Error( + `Agent "${agentName}" already exists at ${agentDirectory}`, + ); + } + + throw error; + } +}; + +const writeAgentIdentity = async (input: { + agentDirectory: string; + did: string; + name: string; + framework: string; + expiresAt: string; + registryUrl: string; + publicKey: string; + secretKey: string; + ait: string; +}): Promise => { + await ensureAgentDirectory(input.name, input.agentDirectory); + + const identityJson = { + did: input.did, + name: input.name, + framework: input.framework, + expiresAt: input.expiresAt, + registryUrl: input.registryUrl, + }; + + await writeSecureFile( + join(input.agentDirectory, "secret.key"), + input.secretKey, + ); + await writeSecureFile( + join(input.agentDirectory, "public.key"), + input.publicKey, + ); + await writeSecureFile( + join(input.agentDirectory, "identity.json"), + `${JSON.stringify(identityJson, null, 2)}\n`, + ); + await writeSecureFile(join(input.agentDirectory, "ait.jwt"), input.ait); +}; + +const registerAgent = async (input: { + apiKey: string; + registryUrl: string; + name: string; + publicKey: string; + framework?: string; + ttlDays?: number; +}): Promise => { + const requestBody: { + name: string; + publicKey: string; + framework?: string; + ttlDays?: number; + } = { + name: input.name, + publicKey: input.publicKey, + }; + + if (input.framework) { + requestBody.framework = input.framework; + } + + if (input.ttlDays !== undefined) { + requestBody.ttlDays = input.ttlDays; + } + + let response: Response; + try { + response = await fetch(toRegistryRequestUrl(input.registryUrl), { + method: "POST", + headers: { + authorization: `Bearer ${input.apiKey}`, + "content-type": "application/json", + }, + body: JSON.stringify(requestBody), + }); + } catch { + throw new Error( + "Unable to connect to the registry. 
Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw new Error(toHttpErrorMessage(response.status, responseBody)); + } + + return parseAgentRegistrationResponse(responseBody); +}; + +export const createAgentCommand = (): Command => { + const agentCommand = new Command("agent").description( + "Manage local agent identities", + ); + + agentCommand + .command("create ") + .description("Generate and register a new agent identity") + .option( + "--framework ", + "Agent framework label (registry defaults to openclaw)", + ) + .option( + "--ttl-days ", + "Agent token TTL in days (registry default when omitted)", + ) + .action( + withErrorHandling( + "agent create", + async (name: string, options: AgentCreateOptions) => { + const config = await resolveConfig(); + if (!config.apiKey) { + throw new Error( + "API key is not configured. Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", + ); + } + + const agentName = assertValidAgentName(name); + const framework = resolveFramework(options.framework); + const ttlDays = resolveTtlDays(options.ttlDays); + const agentDirectory = getAgentDirectory(agentName); + + await ensureAgentDirectoryAvailable(agentName, agentDirectory); + + const keypair = await generateEd25519Keypair(); + const encoded = encodeEd25519KeypairBase64url(keypair); + const registration = await registerAgent({ + apiKey: config.apiKey, + registryUrl: config.registryUrl, + name: agentName, + publicKey: encoded.publicKey, + framework, + ttlDays, + }); + + await writeAgentIdentity({ + agentDirectory, + did: registration.agent.did, + name: registration.agent.name, + framework: registration.agent.framework, + expiresAt: registration.agent.expiresAt, + registryUrl: config.registryUrl, + publicKey: encoded.publicKey, + secretKey: encoded.secretKey, + ait: registration.ait, + }); + + logger.info("cli.agent_created", { + name: registration.agent.name, + did: 
registration.agent.did, + agentDirectory, + registryUrl: config.registryUrl, + expiresAt: registration.agent.expiresAt, + }); + + writeStdoutLine(`Agent DID: ${registration.agent.did}`); + writeStdoutLine(`Expires At: ${registration.agent.expiresAt}`); + }, + ), + ); + + return agentCommand; +}; diff --git a/apps/cli/src/commands/config.ts b/apps/cli/src/commands/config.ts index 5fa0efb..29abc27 100644 --- a/apps/cli/src/commands/config.ts +++ b/apps/cli/src/commands/config.ts @@ -12,6 +12,7 @@ import { writeConfig, } from "../config/manager.js"; import { writeStderrLine, writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; const logger = createLogger({ service: "cli", module: "config" }); @@ -20,26 +21,6 @@ const VALID_KEYS = [ "apiKey", ] as const satisfies readonly CliConfigKey[]; -const withErrorHandling = ( - command: string, - handler: (...args: T) => Promise, -) => { - return async (...args: T) => { - try { - await handler(...args); - } catch (error) { - process.exitCode = 1; - const message = error instanceof Error ? 
error.message : String(error); - - logger.error("cli.command_failed", { - command, - errorMessage: message, - }); - writeStderrLine(message); - } - }; -}; - const isValidConfigKey = (value: string): value is CliConfigKey => { return VALID_KEYS.includes(value as CliConfigKey); }; diff --git a/apps/cli/src/commands/helpers.test.ts b/apps/cli/src/commands/helpers.test.ts new file mode 100644 index 0000000..d91ae30 --- /dev/null +++ b/apps/cli/src/commands/helpers.test.ts @@ -0,0 +1,61 @@ +import { afterEach, describe, expect, it, vi } from "vitest"; + +const { mockLoggerError } = vi.hoisted(() => ({ + mockLoggerError: vi.fn(), +})); + +vi.mock("@clawdentity/sdk", () => ({ + createLogger: vi.fn(() => ({ + child: vi.fn(), + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: mockLoggerError, + })), +})); + +import { withErrorHandling } from "./helpers.js"; + +describe("withErrorHandling", () => { + afterEach(() => { + process.exitCode = undefined; + mockLoggerError.mockReset(); + vi.restoreAllMocks(); + }); + + it("catches command errors, sets exit code, and writes to stderr", async () => { + const stderr: string[] = []; + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + const wrapped = withErrorHandling("agent create", async () => { + throw new Error("command failed"); + }); + + await wrapped(); + + stderrSpy.mockRestore(); + + expect(process.exitCode).toBe(1); + expect(stderr.join("")).toContain("command failed"); + expect(mockLoggerError).toHaveBeenCalledWith("cli.command_failed", { + command: "agent create", + errorMessage: "command failed", + }); + }); + + it("passes through successful command execution", async () => { + const handler = vi.fn(async (name: string) => {}); + const wrapped = withErrorHandling("agent create", handler); + + await wrapped("agent-01"); + + expect(process.exitCode).toBeUndefined(); + 
expect(handler).toHaveBeenCalledWith("agent-01"); + expect(mockLoggerError).not.toHaveBeenCalled(); + }); +}); diff --git a/apps/cli/src/commands/helpers.ts b/apps/cli/src/commands/helpers.ts new file mode 100644 index 0000000..aee03b1 --- /dev/null +++ b/apps/cli/src/commands/helpers.ts @@ -0,0 +1,24 @@ +import { createLogger } from "@clawdentity/sdk"; +import { writeStderrLine } from "../io.js"; + +const logger = createLogger({ service: "cli", module: "commands" }); + +export const withErrorHandling = ( + command: string, + handler: (...args: T) => Promise, +) => { + return async (...args: T) => { + try { + await handler(...args); + } catch (error) { + process.exitCode = 1; + const message = error instanceof Error ? error.message : String(error); + + logger.error("cli.command_failed", { + command, + errorMessage: message, + }); + writeStderrLine(message); + } + }; +}; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index f1a2366..ec32b4a 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -18,6 +18,14 @@ describe("cli", () => { expect(hasConfigCommand).toBe(true); }); + it("registers the agent command", () => { + const hasAgentCommand = createProgram() + .commands.map((command) => command.name()) + .includes("agent"); + + expect(hasAgentCommand).toBe(true); + }); + it("prints version output", async () => { const output: string[] = []; const program = createProgram(); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 521e978..70df547 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,4 +1,5 @@ import { Command } from "commander"; +import { createAgentCommand } from "./commands/agent.js"; import { createConfigCommand } from "./commands/config.js"; export const CLI_VERSION = "0.0.0"; @@ -7,5 +8,6 @@ export const createProgram = (): Command => { return new Command("clawdentity") .description("Clawdentity CLI - Agent identity management") .version(CLI_VERSION) + 
.addCommand(createAgentCommand()) .addCommand(createConfigCommand()); }; From d7c9146d8dfcd996b600bd313264244b968111de Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 15:35:18 +0530 Subject: [PATCH 037/190] feat(cli): add offline agent inspect with safe path validation --- apps/cli/AGENTS.md | 8 ++ apps/cli/src/commands/agent.test.ts | 140 +++++++++++++++++++++++++- apps/cli/src/commands/agent.ts | 69 ++++++++++++- apps/cli/src/commands/helpers.test.ts | 2 +- issues/AGENTS.md | 1 + issues/T22.md | 2 +- packages/sdk/AGENTS.md | 2 +- packages/sdk/src/index.ts | 4 +- packages/sdk/src/jwt/ait-jwt.test.ts | 80 ++++++++++++++- packages/sdk/src/jwt/ait-jwt.ts | 56 ++++++++--- 10 files changed, 343 insertions(+), 21 deletions(-) diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 9fbba28..bfbc9d2 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -12,10 +12,13 @@ - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. - Keep user-facing command output on `writeStdoutLine` / `writeStderrLine`; reserve structured logger calls for diagnostic events. +- Prefer `@clawdentity/sdk` helpers (`decodeAIT`) when surfacing agent metadata instead of parsing JWTs manually. + - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. - Agent identities live at `~/.clawdentity/agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. +- Reject `.` and `..` as agent names before any filesystem operation to prevent directory traversal outside `~/.clawdentity/agents/`. - Resolve values with explicit precedence: environment variables > config file > built-in defaults. - Keep API tokens masked in human-facing output (`show`, success logs, debug prints). 
- Write config and identity artifacts with restrictive permissions (`0600`) and never commit secrets or generated local config. @@ -26,6 +29,11 @@ - Command tests should assert both behavior and output by capturing `process.stdout.write` / `process.stderr.write`. - Cover invalid input and failure paths, not only happy paths. +## Agent Inspection +- `agent inspect ` reads `~/.clawdentity/agents//ait.jwt`, decodes it with `decodeAIT`, and prints DID, Owner, Expires, Key ID, Public Key, and Framework so operators can audit metadata offline. +- Surface user-friendly errors when the JWT is missing or cannot be decoded, mentioning `ait.jwt` explicitly and defaulting to the normalized agent name when validating input. +- Tests for new inspection behavior must mock `node:fs/promises.readFile` and `@clawdentity/sdk.decodeAIT`, assert the visible output, and confirm missing-file handling covers `ENOENT`. + ## Validation Commands - `pnpm -F @clawdentity/cli lint` - `pnpm -F @clawdentity/cli typecheck` diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index dda7882..cc2f126 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -1,4 +1,4 @@ -import { access, chmod, mkdir, writeFile } from "node:fs/promises"; +import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { Command } from "commander"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; @@ -6,6 +6,7 @@ vi.mock("node:fs/promises", () => ({ access: vi.fn(), chmod: vi.fn(), mkdir: vi.fn(), + readFile: vi.fn(), writeFile: vi.fn(), })); @@ -22,11 +23,14 @@ vi.mock("@clawdentity/sdk", () => ({ warn: vi.fn(), error: vi.fn(), })), + decodeAIT: vi.fn(), encodeEd25519KeypairBase64url: vi.fn(), generateEd25519Keypair: vi.fn(), })); import { + type DecodedAit, + decodeAIT, encodeEd25519KeypairBase64url, generateEd25519Keypair, } from "@clawdentity/sdk"; @@ -36,12 +40,14 @@ import { createAgentCommand } 
from "./agent.js"; const mockedAccess = vi.mocked(access); const mockedChmod = vi.mocked(chmod); const mockedMkdir = vi.mocked(mkdir); +const mockedReadFile = vi.mocked(readFile); const mockedWriteFile = vi.mocked(writeFile); const mockedResolveConfig = vi.mocked(resolveConfig); const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); const mockedEncodeEd25519KeypairBase64url = vi.mocked( encodeEd25519KeypairBase64url, ); +const mockedDecodeAIT = vi.mocked(decodeAIT); const mockFetch = vi.fn(); @@ -297,4 +303,136 @@ describe("agent create command", () => { expect(requestBody.framework).toBe("langgraph"); expect(requestBody.ttlDays).toBe(45); }); + + it("rejects dot-segment agent names before hitting the filesystem", async () => { + const result = await runAgentCommand(["create", "."]); + + expect(result.stderr).toContain('Agent name must not be "." or "..".'); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + expect(mockedMkdir).not.toHaveBeenCalled(); + expect(mockedWriteFile).not.toHaveBeenCalled(); + }); +}); + +describe("agent inspect command", () => { + const decodedAit: DecodedAit = { + header: { + alg: "EdDSA", + typ: "AIT", + kid: "key-01", + }, + claims: { + iss: "https://registry.clawdentity.dev", + sub: "did:claw:agent:abc", + ownerDid: "did:claw:human:def", + name: "agent-01", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: "pub-key", + }, + }, + iat: 1672531100, + nbf: 1672531100, + exp: 1672531200, + jti: "01HF7YAT00W6W7CM7N3W5FDXT4", + }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + mockedReadFile.mockResolvedValue("mock-ait-token"); + mockedDecodeAIT.mockReturnValue(decodedAit); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("displays all six decoded AIT fields", async () => { + const result = await runAgentCommand(["inspect", "agent-01"]); + + expect(result.stdout).toContain("DID: did:claw:agent:abc"); + 
expect(result.stdout).toContain("Owner: did:claw:human:def"); + expect(result.stdout).toContain("Expires: 2023-01-01T00:00:00.000Z"); + expect(result.stdout).toContain("Key ID: key-01"); + expect(result.stdout).toContain("Public Key: pub-key"); + expect(result.stdout).toContain("Framework: openclaw"); + expect(result.exitCode).toBeUndefined(); + }); + + it("reads AIT from the expected local file path", async () => { + await runAgentCommand(["inspect", "agent-01"]); + + expect(mockedReadFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/ait.jwt", + "utf-8", + ); + expect(mockedDecodeAIT).toHaveBeenCalledWith("mock-ait-token"); + }); + + it("fails when the AIT file is missing", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + const result = await runAgentCommand(["inspect", "agent-01"]); + + expect(result.stderr).toContain("not found"); + expect(result.stderr).toContain("ait.jwt"); + expect(result.exitCode).toBe(1); + }); + + it("rejects dot-segment agent names before resolving the AIT path", async () => { + const result = await runAgentCommand(["inspect", ".."]); + + expect(result.stderr).toContain('Agent name must not be "." 
or "..".'); + expect(result.exitCode).toBe(1); + expect(mockedReadFile).not.toHaveBeenCalled(); + }); + + it("fails when the AIT file is empty", async () => { + mockedReadFile.mockResolvedValueOnce(" \n"); + + const result = await runAgentCommand(["inspect", "agent-01"]); + + expect(result.stderr).toContain("empty"); + expect(result.stderr).toContain("ait.jwt"); + expect(result.exitCode).toBe(1); + }); + + it("fails when AIT decoding fails", async () => { + mockedDecodeAIT.mockImplementationOnce(() => { + throw new Error("Invalid AIT payload"); + }); + + const result = await runAgentCommand(["inspect", "agent-01"]); + + expect(result.stderr).toContain("Invalid AIT payload"); + expect(result.exitCode).toBe(1); + }); + + it("fails on invalid agent names", async () => { + const result = await runAgentCommand(["inspect", "agent/../../etc"]); + + expect(result.stderr).toContain("invalid characters"); + expect(mockedReadFile).not.toHaveBeenCalled(); + expect(result.exitCode).toBe(1); + }); + + it("formats exp as ISO-8601", async () => { + mockedDecodeAIT.mockReturnValueOnce({ + ...decodedAit, + claims: { + ...decodedAit.claims, + exp: 1893456000, + }, + }); + + const result = await runAgentCommand(["inspect", "agent-01"]); + + expect(result.stdout).toContain("Expires: 2030-01-01T00:00:00.000Z"); + expect(result.exitCode).toBeUndefined(); + }); }); diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index 7e8c5d8..defefee 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -1,8 +1,10 @@ -import { access, chmod, mkdir, writeFile } from "node:fs/promises"; +import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { join } from "node:path"; import { validateAgentName } from "@clawdentity/protocol"; import { createLogger, + type DecodedAit, + decodeAIT, encodeEd25519KeypairBase64url, generateEd25519Keypair, } from "@clawdentity/sdk"; @@ -14,6 +16,8 @@ import { withErrorHandling } from 
"./helpers.js"; const logger = createLogger({ service: "cli", module: "agent" }); const AGENTS_DIR_NAME = "agents"; +const AIT_FILE_NAME = "ait.jwt"; +const RESERVED_AGENT_NAMES = new Set([".", ".."]); const FILE_MODE = 0o600; type AgentCreateOptions = { @@ -45,12 +49,47 @@ const getAgentDirectory = (name: string): string => { return join(getConfigDir(), AGENTS_DIR_NAME, name); }; +const getAgentAitPath = (name: string): string => { + return join(getAgentDirectory(name), AIT_FILE_NAME); +}; + +const readAgentAitToken = async (agentName: string): Promise => { + const aitPath = getAgentAitPath(agentName); + + let rawToken: string; + try { + rawToken = await readFile(aitPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error(`Agent "${agentName}" not found (${aitPath})`); + } + + throw error; + } + + const token = rawToken.trim(); + if (token.length === 0) { + throw new Error(`Agent "${agentName}" has an empty ${AIT_FILE_NAME}`); + } + + return token; +}; + +const formatExpiresAt = (expires: number): string => { + return new Date(expires * 1000).toISOString(); +}; + const assertValidAgentName = (name: string): string => { const normalizedName = name.trim(); + if (RESERVED_AGENT_NAMES.has(normalizedName)) { + throw new Error('Agent name must not be "." or "..".'); + } + if (!validateAgentName(normalizedName)) { throw new Error( - "Agent name contains invalid characters or length. Use 1-64 chars: a-z, A-Z, 0-9, ., _, -", + "Agent name contains invalid characters, reserved path segments, or length. 
Use 1-64 chars: a-z, A-Z, 0-9, ., _, -",
     );
   }
@@ -312,6 +351,23 @@ const registerAgent = async (input: {
   return parseAgentRegistrationResponse(responseBody);
 };
 
+const printAgentInspect = (decoded: DecodedAit): void => {
+  writeStdoutLine(`DID: ${decoded.claims.sub}`);
+  writeStdoutLine(`Owner: ${decoded.claims.ownerDid}`);
+  writeStdoutLine(`Expires: ${formatExpiresAt(decoded.claims.exp)}`);
+  writeStdoutLine(`Key ID: ${decoded.header.kid}`);
+  writeStdoutLine(`Public Key: ${decoded.claims.cnf.jwk.x}`);
+  writeStdoutLine(`Framework: ${decoded.claims.framework}`);
+};
+
+const printAgentInspectCommand = async (name: string): Promise<void> => {
+  const normalizedName = assertValidAgentName(name);
+  const aitToken = await readAgentAitToken(normalizedName);
+  const decoded = decodeAIT(aitToken);
+
+  printAgentInspect(decoded);
+};
+
 export const createAgentCommand = (): Command => {
   const agentCommand = new Command("agent").description(
     "Manage local agent identities",
@@ -383,5 +439,14 @@ export const createAgentCommand = (): Command => {
       ),
     );
 
+  agentCommand
+    .command("inspect <name>")
+    .description("Decode and show metadata from an agent's stored AIT")
+    .action(
+      withErrorHandling("agent inspect", async (name: string) => {
+        await printAgentInspectCommand(name);
+      }),
+    );
+
   return agentCommand;
 };
diff --git a/apps/cli/src/commands/helpers.test.ts b/apps/cli/src/commands/helpers.test.ts
index d91ae30..dfddc2e 100644
--- a/apps/cli/src/commands/helpers.test.ts
+++ b/apps/cli/src/commands/helpers.test.ts
@@ -49,7 +49,7 @@ describe("withErrorHandling", () => {
   });
 
   it("passes through successful command execution", async () => {
-    const handler = vi.fn(async (name: string) => {});
+    const handler = vi.fn(async (_name: string) => {});
     const wrapped = withErrorHandling("agent create", handler);
 
     await wrapped("agent-01");
diff --git a/issues/AGENTS.md b/issues/AGENTS.md
index 9127b27..de5367b 100644
--- a/issues/AGENTS.md
+++ b/issues/AGENTS.md
@@ -36,6 +36,7 @@ Every `T*.md`
file must include these sections in this order:
 - Add at least one refactor opportunity, or explicitly state `None`.
 - Add concrete validation commands with expected outcomes.
 - Keep scope narrow: one issue should represent one coherent unit of delivery.
+- For CLI deliverables, argument placeholders must match supported identifier semantics (for example `<name>` for local filesystem lookups) and avoid ambiguous `<id>` unless resolution rules are explicitly defined.
 
 ## Skill Rules
 - Every issue must declare required skills.
diff --git a/issues/T22.md b/issues/T22.md
index ea545af..c12002c 100644
--- a/issues/T22.md
+++ b/issues/T22.md
@@ -32,7 +32,7 @@ Print decoded AIT fields for an existing local identity.
 - `testing-framework`
 
 ## Deliverables
-- Command `claw agent inspect <id>`
+- Command `claw agent inspect <name>`
 - Displays: did, owner, exp, kid, pubkey, framework
 
 ## Refactor Opportunities
diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md
index 585cc0b..b5b17d5 100644
--- a/packages/sdk/AGENTS.md
+++ b/packages/sdk/AGENTS.md
@@ -11,7 +11,7 @@
 - `config`: schema-validated runtime config parsing.
 - `request-context`: request ID extraction/generation and propagation.
 - `crypto/ed25519`: byte-first keypair/sign/verify helpers for PoP and token workflows.
-- `jwt/ait-jwt`: AIT JWS signing and verification with strict header and issuer checks.
+- `jwt/ait-jwt`: AIT JWS signing, verification, and header-only inspection via `decodeAIT`; both helpers reuse the same protected-header guard so alg/typ/kid invariants stay aligned even when skipping signature validation.
 - `jwt/crl-jwt`: CRL JWT helpers with EdDSA signing, header consistency checks, and tamper-detection test coverage.
 - `crl/cache`: in-memory CRL cache with periodic refresh, staleness reporting, and configurable stale behavior.
 - `http/sign` + `http/verify`: PoP request signing and verification that binds method, path+query, timestamp, nonce, and body hash.
diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 20d5115..a170c74 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -44,11 +44,13 @@ export type { } from "./http/types.js"; export { verifyHttpRequest } from "./http/verify.js"; export type { + DecodedAit, + DecodedAitHeader, RegistryAitVerificationKey, SignAitInput, VerifyAitInput, } from "./jwt/ait-jwt.js"; -export { AitJwtError, signAIT, verifyAIT } from "./jwt/ait-jwt.js"; +export { AitJwtError, decodeAIT, signAIT, verifyAIT } from "./jwt/ait-jwt.js"; export type { CrlClaims, RegistryCrlVerificationKey, diff --git a/packages/sdk/src/jwt/ait-jwt.test.ts b/packages/sdk/src/jwt/ait-jwt.test.ts index 6caa3bf..08ce7b8 100644 --- a/packages/sdk/src/jwt/ait-jwt.test.ts +++ b/packages/sdk/src/jwt/ait-jwt.test.ts @@ -7,7 +7,7 @@ import { } from "@clawdentity/protocol"; import { describe, expect, it } from "vitest"; import { generateEd25519Keypair } from "../crypto/ed25519.js"; -import { signAIT, verifyAIT } from "./ait-jwt.js"; +import { decodeAIT, signAIT, verifyAIT } from "./ait-jwt.js"; function makeClaims(overrides: Partial = {}): AitClaims { const agentUlid = generateUlid(1700100000000); @@ -118,4 +118,82 @@ describe("AIT JWT helpers", () => { }), ).rejects.toThrow(/kid/i); }); + + function replaceProtectedHeader( + token: string, + header: Record, + ): string { + const [_, payload, signature] = token.split("."); + if (!payload || !signature) { + throw new Error("malformed token"); + } + const encodedHeader = encodeBase64url( + new TextEncoder().encode(JSON.stringify(header)), + ); + return `${encodedHeader}.${payload}.${signature}`; + } + + describe("decodeAIT", () => { + async function makeSignedToken(): Promise<{ + token: string; + claims: AitClaims; + }> { + const keypair = await generateEd25519Keypair(); + const claims = makeClaims(); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: keypair, + }); + return { token, claims }; + } 
+ + it("returns header + claims without verifying signature", async () => { + const { token, claims } = await makeSignedToken(); + const decoded = decodeAIT(token); + + expect(decoded.header).toEqual({ + alg: "EdDSA", + typ: "AIT", + kid: "reg-key-1", + }); + expect(decoded.claims).toEqual(claims); + }); + + it("rejects tokens with the wrong alg header", async () => { + const { token } = await makeSignedToken(); + const badToken = replaceProtectedHeader(token, { + alg: "HS256", + typ: "AIT", + kid: "reg-key-1", + }); + + expect(() => decodeAIT(badToken)).toThrow(/alg=EdDSA/); + }); + + it("rejects tokens with the wrong typ header", async () => { + const { token } = await makeSignedToken(); + const badToken = replaceProtectedHeader(token, { + alg: "EdDSA", + typ: "JWT", + kid: "reg-key-1", + }); + + expect(() => decodeAIT(badToken)).toThrow(/typ=AIT/); + }); + + it("requires a kid in the protected header", async () => { + const { token } = await makeSignedToken(); + const badToken = replaceProtectedHeader(token, { + alg: "EdDSA", + typ: "AIT", + }); + + expect(() => decodeAIT(badToken)).toThrow(/missing protected kid header/); + }); + + it("throws for malformed JWT strings", () => { + expect(() => decodeAIT("not-a-jwt")).toThrow(); + }); + }); }); diff --git a/packages/sdk/src/jwt/ait-jwt.ts b/packages/sdk/src/jwt/ait-jwt.ts index db48107..8dd50b8 100644 --- a/packages/sdk/src/jwt/ait-jwt.ts +++ b/packages/sdk/src/jwt/ait-jwt.ts @@ -1,7 +1,13 @@ import type { AitClaims, AitCnfJwk } from "@clawdentity/protocol"; import { parseAitClaims } from "@clawdentity/protocol"; import type { JWTVerifyOptions } from "jose"; -import { decodeProtectedHeader, importJWK, jwtVerify, SignJWT } from "jose"; +import { + decodeJwt, + decodeProtectedHeader, + importJWK, + jwtVerify, + SignJWT, +} from "jose"; import { type Ed25519KeypairBytes, encodeEd25519KeypairBase64url, @@ -28,6 +34,17 @@ export type VerifyAitInput = { expectedIssuer?: string; }; +export type DecodedAitHeader = { + 
alg: "EdDSA"; + typ: "AIT"; + kid: string; +}; + +export type DecodedAit = { + header: DecodedAitHeader; + claims: AitClaims; +}; + export class AitJwtError extends Error { readonly code: "INVALID_AIT_HEADER" | "UNKNOWN_AIT_KID"; @@ -46,6 +63,24 @@ function unknownAitKid(kid: string): AitJwtError { return new AitJwtError("UNKNOWN_AIT_KID", `Unknown AIT signing kid: ${kid}`); } +function ensureAitProtectedHeader( + header: ReturnType, +): DecodedAitHeader { + if (header.alg !== "EdDSA") { + throw invalidAitHeader("AIT token must use alg=EdDSA"); + } + + if (header.typ !== "AIT") { + throw invalidAitHeader("AIT token must use typ=AIT"); + } + + if (typeof header.kid !== "string" || header.kid.length === 0) { + throw invalidAitHeader("AIT token missing protected kid header"); + } + + return { alg: "EdDSA", typ: "AIT", kid: header.kid }; +} + export async function signAIT(input: SignAitInput): Promise { const claims = parseAitClaims(input.claims); const encodedKeypair = encodeEd25519KeypairBase64url(input.signerKeypair); @@ -67,18 +102,7 @@ export async function signAIT(input: SignAitInput): Promise { } export async function verifyAIT(input: VerifyAitInput): Promise { - const header = decodeProtectedHeader(input.token); - if (header.alg !== "EdDSA") { - throw invalidAitHeader("AIT token must use alg=EdDSA"); - } - - if (header.typ !== "AIT") { - throw invalidAitHeader("AIT token must use typ=AIT"); - } - - if (typeof header.kid !== "string" || header.kid.length === 0) { - throw invalidAitHeader("AIT token missing protected kid header"); - } + const header = ensureAitProtectedHeader(decodeProtectedHeader(input.token)); const key = input.registryKeys.find((item) => item.kid === header.kid); if (!key) { @@ -98,3 +122,9 @@ export async function verifyAIT(input: VerifyAitInput): Promise { const { payload } = await jwtVerify(input.token, publicKey, options); return parseAitClaims(payload); } + +export function decodeAIT(token: string): DecodedAit { + const header = 
ensureAitProtectedHeader(decodeProtectedHeader(token)); + const payload = decodeJwt(token); + return { header, claims: parseAitClaims(payload) }; +} From 41dd28139ef61ad24ade095a087130c73ba29ce9 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 15:53:17 +0530 Subject: [PATCH 038/190] feat(cli): add agent revoke command by name (T23) --- apps/cli/AGENTS.md | 9 +- apps/cli/src/commands/agent.test.ts | 181 ++++++++++++++++++++++++++++ apps/cli/src/commands/agent.ts | 179 ++++++++++++++++++++++++++- 3 files changed, 364 insertions(+), 5 deletions(-) diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index bfbc9d2..85242d2 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -13,7 +13,7 @@ - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. - Keep user-facing command output on `writeStdoutLine` / `writeStderrLine`; reserve structured logger calls for diagnostic events. - Prefer `@clawdentity/sdk` helpers (`decodeAIT`) when surfacing agent metadata instead of parsing JWTs manually. - - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. +- Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. @@ -34,6 +34,13 @@ - Surface user-friendly errors when the JWT is missing or cannot be decoded, mentioning `ait.jwt` explicitly and defaulting to the normalized agent name when validating input. - Tests for new inspection behavior must mock `node:fs/promises.readFile` and `@clawdentity/sdk.decodeAIT`, assert the visible output, and confirm missing-file handling covers `ENOENT`. 
+## Agent Revocation +- `agent revoke ` accepts local agent name only, then resolves `~/.clawdentity/agents//identity.json` to load the DID and derive the registry ULID path parameter. +- Keep revoke flow name-first and filesystem-backed; do not require operators to pass raw ULIDs for locally managed identities. +- Use registry `DELETE /v1/agents/:id` with PAT auth, and print human-readable confirmation that includes agent name + DID. +- Keep error messaging explicit for missing/malformed `identity.json`, invalid DID data, missing API key, and registry/network failures. +- Tests for revoke must cover success/idempotent `204`, auth/config failures, missing/invalid identity metadata, and HTTP error mapping for `401/404/409`. + ## Validation Commands - `pnpm -F @clawdentity/cli lint` - `pnpm -F @clawdentity/cli typecheck` diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index cc2f126..1f1112e 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -315,6 +315,187 @@ describe("agent create command", () => { }); }); +describe("agent revoke command", () => { + const agentDid = "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"; + const agentId = "01HF7YAT00W6W7CM7N3W5FDXT4"; + + beforeEach(() => { + vi.clearAllMocks(); + vi.stubGlobal("fetch", mockFetch); + + mockedResolveConfig.mockResolvedValue({ + registryUrl: "https://api.clawdentity.com", + apiKey: "pat_123", + }); + + mockedReadFile.mockResolvedValue( + JSON.stringify({ + did: agentDid, + }), + ); + + mockFetch.mockResolvedValue( + createJsonResponse(204, { + ok: true, + }), + ); + }); + + afterEach(() => { + process.exitCode = undefined; + vi.unstubAllGlobals(); + }); + + it("revokes agent by local name and prints confirmation", async () => { + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(mockedReadFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/identity.json", + "utf-8", + ); + 
expect(mockFetch).toHaveBeenCalledWith( + `https://api.clawdentity.com/v1/agents/${agentId}`, + expect.objectContaining({ + method: "DELETE", + headers: expect.objectContaining({ + authorization: "Bearer pat_123", + }), + }), + ); + + expect(result.stdout).toContain(`Agent revoked: agent-01 (${agentDid})`); + expect(result.stdout).toContain( + "CRL visibility depends on verifier refresh interval.", + ); + expect(result.exitCode).toBeUndefined(); + }); + + it("treats repeat revoke as success (idempotent 204)", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(204, { + ok: true, + }), + ); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stdout).toContain("Agent revoked: agent-01"); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when API key is missing", async () => { + mockedResolveConfig.mockResolvedValueOnce({ + registryUrl: "https://api.clawdentity.com", + }); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("API key is not configured"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails when local identity.json does not exist", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("not found"); + expect(result.stderr).toContain("identity.json"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails when identity.json is invalid JSON", async () => { + mockedReadFile.mockResolvedValueOnce("{ did:"); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("identity.json"); + expect(result.stderr).toContain("valid JSON"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails when identity did is invalid", async () => { + 
mockedReadFile.mockResolvedValueOnce( + JSON.stringify({ + did: "invalid-did", + }), + ); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("invalid did"); + expect(result.stderr).toContain("identity.json"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("handles registry 401 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(401, { + error: { + message: "Invalid API key", + }, + }), + ); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("authentication failed"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry 404 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(404, { + error: { + message: "Agent not found", + }, + }), + ); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("Agent not found"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry 409 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(409, { + error: { + message: "Agent cannot be revoked", + }, + }), + ); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("cannot be revoked"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry connection errors", async () => { + mockFetch.mockRejectedValueOnce(new Error("socket hang up")); + + const result = await runAgentCommand(["revoke", "agent-01"]); + + expect(result.stderr).toContain("Unable to connect to the registry"); + expect(result.exitCode).toBe(1); + }); + + it("rejects dot-segment agent names before resolving identity path", async () => { + const result = await runAgentCommand(["revoke", ".."]); + + expect(result.stderr).toContain('Agent name must not be "." 
or "..".'); + expect(result.exitCode).toBe(1); + expect(mockedReadFile).not.toHaveBeenCalled(); + expect(mockFetch).not.toHaveBeenCalled(); + }); +}); + describe("agent inspect command", () => { const decodedAit: DecodedAit = { header: { diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index defefee..f078311 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -1,6 +1,6 @@ import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { join } from "node:path"; -import { validateAgentName } from "@clawdentity/protocol"; +import { parseDid, validateAgentName } from "@clawdentity/protocol"; import { createLogger, type DecodedAit, @@ -17,6 +17,7 @@ const logger = createLogger({ service: "cli", module: "agent" }); const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; +const IDENTITY_FILE_NAME = "identity.json"; const RESERVED_AGENT_NAMES = new Set([".", ".."]); const FILE_MODE = 0o600; @@ -35,6 +36,10 @@ type AgentRegistrationResponse = { ait: string; }; +type LocalAgentIdentity = { + did: string; +}; + type RegistryErrorEnvelope = { error?: { message?: string; @@ -53,6 +58,10 @@ const getAgentAitPath = (name: string): string => { return join(getAgentDirectory(name), AIT_FILE_NAME); }; +const getAgentIdentityPath = (name: string): string => { + return join(getAgentDirectory(name), IDENTITY_FILE_NAME); +}; + const readAgentAitToken = async (agentName: string): Promise => { const aitPath = getAgentAitPath(agentName); @@ -76,6 +85,63 @@ const readAgentAitToken = async (agentName: string): Promise => { return token; }; +const readAgentIdentity = async ( + agentName: string, +): Promise => { + const identityPath = getAgentIdentityPath(agentName); + + let rawIdentity: string; + try { + rawIdentity = await readFile(identityPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error(`Agent 
"${agentName}" not found (${identityPath})`); + } + + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawIdentity); + } catch { + throw new Error( + `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (must be valid JSON)`, + ); + } + + if (!isRecord(parsed) || typeof parsed.did !== "string") { + throw new Error( + `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (missing did)`, + ); + } + + const did = parsed.did.trim(); + if (did.length === 0) { + throw new Error( + `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (missing did)`, + ); + } + + return { did }; +}; + +const parseAgentIdFromDid = (agentName: string, did: string): string => { + try { + const parsedDid = parseDid(did); + if (parsedDid.kind !== "agent") { + throw new Error("DID is not an agent DID"); + } + + return parsedDid.ulid; + } catch { + throw new Error( + `Agent "${agentName}" has invalid did in ${IDENTITY_FILE_NAME}: ${did}`, + ); + } +}; + const formatExpiresAt = (expires: number): string => { return new Date(expires * 1000).toISOString(); }; @@ -146,12 +212,19 @@ const parseJsonResponse = async (response: Response): Promise => { } }; -const toRegistryRequestUrl = (registryUrl: string): string => { +const toRegistryAgentsRequestUrl = ( + registryUrl: string, + agentId?: string, +): string => { const normalizedBaseUrl = registryUrl.endsWith("/") ? registryUrl : `${registryUrl}/`; - return new URL("v1/agents", normalizedBaseUrl).toString(); + const path = agentId + ? 
`v1/agents/${encodeURIComponent(agentId)}` + : "v1/agents"; + + return new URL(path, normalizedBaseUrl).toString(); }; const toHttpErrorMessage = (status: number, responseBody: unknown): string => { @@ -328,7 +401,7 @@ const registerAgent = async (input: { let response: Response; try { - response = await fetch(toRegistryRequestUrl(input.registryUrl), { + response = await fetch(toRegistryAgentsRequestUrl(input.registryUrl), { method: "POST", headers: { authorization: `Bearer ${input.apiKey}`, @@ -351,6 +424,70 @@ const registerAgent = async (input: { return parseAgentRegistrationResponse(responseBody); }; +const toRevokeHttpErrorMessage = ( + status: number, + responseBody: unknown, +): string => { + const registryMessage = extractRegistryErrorMessage(responseBody); + + if (status === 401) { + return registryMessage + ? `Registry authentication failed (401): ${registryMessage}` + : "Registry authentication failed (401). Check your API key."; + } + + if (status === 404) { + return registryMessage + ? `Agent not found (404): ${registryMessage}` + : "Agent not found in the registry (404)."; + } + + if (status === 409) { + return registryMessage + ? `Agent cannot be revoked (409): ${registryMessage}` + : "Agent cannot be revoked (409)."; + } + + if (status >= 500) { + return `Registry server error (${status}). Try again later.`; + } + + if (registryMessage) { + return `Registry request failed (${status}): ${registryMessage}`; + } + + return `Registry request failed (${status})`; +}; + +const revokeAgent = async (input: { + apiKey: string; + registryUrl: string; + agentId: string; +}): Promise => { + let response: Response; + try { + response = await fetch( + toRegistryAgentsRequestUrl(input.registryUrl, input.agentId), + { + method: "DELETE", + headers: { + authorization: `Bearer ${input.apiKey}`, + }, + }, + ); + } catch { + throw new Error( + "Unable to connect to the registry. 
Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw new Error(toRevokeHttpErrorMessage(response.status, responseBody)); + } +}; + const printAgentInspect = (decoded: DecodedAit): void => { writeStdoutLine(`DID: ${decoded.claims.sub}`); writeStdoutLine(`Owner: ${decoded.claims.ownerDid}`); @@ -448,5 +585,39 @@ export const createAgentCommand = (): Command => { }), ); + agentCommand + .command("revoke ") + .description("Revoke a local agent identity via the registry") + .action( + withErrorHandling("agent revoke", async (name: string) => { + const config = await resolveConfig(); + if (!config.apiKey) { + throw new Error( + "API key is not configured. Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", + ); + } + + const agentName = assertValidAgentName(name); + const identity = await readAgentIdentity(agentName); + const agentId = parseAgentIdFromDid(agentName, identity.did); + + await revokeAgent({ + apiKey: config.apiKey, + registryUrl: config.registryUrl, + agentId, + }); + + logger.info("cli.agent_revoked", { + name: agentName, + did: identity.did, + agentId, + registryUrl: config.registryUrl, + }); + + writeStdoutLine(`Agent revoked: ${agentName} (${identity.did})`); + writeStdoutLine("CRL visibility depends on verifier refresh interval."); + }), + ); + return agentCommand; }; From b729f808eb3eb4cc21ea1415be0f447492c1184a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 16:27:19 +0530 Subject: [PATCH 039/190] feat(cli): implement local AIT verify command with cached key/CRL checks (T24) --- apps/cli/AGENTS.md | 9 + apps/cli/src/AGENTS.md | 25 ++ apps/cli/src/commands/AGENTS.md | 21 ++ apps/cli/src/commands/verify.test.ts | 350 +++++++++++++++++++ apps/cli/src/commands/verify.ts | 496 +++++++++++++++++++++++++++ apps/cli/src/config/manager.test.ts | 39 +++ apps/cli/src/config/manager.ts | 46 ++- apps/cli/src/index.test.ts | 8 + 
apps/cli/src/index.ts | 4 +- 9 files changed, 989 insertions(+), 9 deletions(-) create mode 100644 apps/cli/src/AGENTS.md create mode 100644 apps/cli/src/commands/AGENTS.md create mode 100644 apps/cli/src/commands/verify.test.ts create mode 100644 apps/cli/src/commands/verify.ts diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 85242d2..26f95cb 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -8,6 +8,7 @@ - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Implement command groups under `src/commands/*` and register them from `createProgram()`. +- Keep top-level command contracts stable (`config`, `agent`, `verify`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. @@ -17,6 +18,7 @@ ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. +- CLI verification caches live under `~/.clawdentity/cache/` and must never include private keys or PATs. - Agent identities live at `~/.clawdentity/agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. - Reject `.` and `..` as agent names before any filesystem operation to prevent directory traversal outside `~/.clawdentity/agents/`. - Resolve values with explicit precedence: environment variables > config file > built-in defaults. @@ -41,6 +43,13 @@ - Keep error messaging explicit for missing/malformed `identity.json`, invalid DID data, missing API key, and registry/network failures. 
 - Tests for revoke must cover success/idempotent `204`, auth/config failures, missing/invalid identity metadata, and HTTP error mapping for `401/404/409`.
+
+## Token Verification
+- `verify <token>` accepts either a raw AIT token or a filesystem path to a file containing one token.
+- Verification is fail-closed for revocation checks: if CRL cannot be fetched/validated and no fresh cache is available, command must fail.
+- Verify flow must use SDK primitives (`verifyAIT`, `verifyCRL`) and registry endpoints (`/.well-known/claw-keys.json`, `/v1/crl`) instead of local JWT parsing.
+- Keep user output explicit and command-like: successful checks print `✅ ...`; failed checks print `❌ <reason>` and set non-zero exit code.
+- Cache files (`registry-keys.json`, `crl-claims.json`) should include source registry URL + fetch timestamp so stale or cross-environment cache reuse is avoided.
+
 ## Validation Commands
 - `pnpm -F @clawdentity/cli lint`
 - `pnpm -F @clawdentity/cli typecheck`
diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md
new file mode 100644
index 0000000..a6cefb7
--- /dev/null
+++ b/apps/cli/src/AGENTS.md
@@ -0,0 +1,25 @@
+# AGENTS.md (apps/cli/src)
+
+## Purpose
+- Keep CLI source modules small, composable, and safe for local operator workflows.
+
+## Command Modules
+- Keep each command implementation in `commands/<name>.ts` with one exported factory (`create<Name>Command`).
+- Reuse shared command wrappers (`withErrorHandling`) and IO helpers (`writeStdoutLine`, `writeStderrLine`) instead of inline process writes.
+- Prefer explicit error-to-reason mapping for operator-facing failures rather than generic stack traces.
+
+## Verification Flow Contract
+- `verify` must support both raw token input and file-path input without requiring extra flags.
+- Resolve registry material from configured `registryUrl` only (`/.well-known/claw-keys.json`, `/v1/crl`).
+- Use cached key/CRL artifacts only when fresh and scoped to the same registry URL.
+- Treat CRL refresh/validation failures as hard verification failures (fail-closed behavior).
+
+## Caching Rules
+- Cache reads must be tolerant of malformed JSON by ignoring bad cache and fetching fresh data.
+- Cache writes must use restrictive permissions through config-manager helpers.
+- Cache payloads must be JSON and include `fetchedAtMs` timestamps for TTL checks.
+
+## Testing Rules
+- Command tests must capture `stdout`/`stderr` and assert exit-code behavior.
+- Include success, revoked, invalid token, keyset failure, CRL failure, and cache-hit scenarios for `verify`.
+- Keep tests deterministic by mocking network and filesystem dependencies.
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md
new file mode 100644
index 0000000..1237da4
--- /dev/null
+++ b/apps/cli/src/commands/AGENTS.md
@@ -0,0 +1,21 @@
+# AGENTS.md (apps/cli/src/commands)
+
+## Purpose
+- Define implementation guardrails for individual CLI command modules.
+
+## Command Patterns
+- Export one command factory per file (`create<Name>Command`).
+- Keep command handlers focused on orchestration; move reusable logic into local helpers.
+- Use `withErrorHandling` for command actions unless a command has a documented reason not to.
+- Route all user-facing messages through `writeStdoutLine`/`writeStderrLine`.
+
+## Verification Command Rules
+- `verify` must preserve the `✅`/`❌` output contract with explicit reasons.
+- Token argument can be either a raw token or file path; missing file paths should fall back to raw token mode.
+- Signature and CRL validation must use SDK helpers (`verifyAIT`, `verifyCRL`), not local JWT cryptography code.
+- Cache usage must enforce TTL and registry URL matching before reuse.
+
+## Testing Rules
+- Mock network and filesystem dependencies in command tests.
+- Include success and failure scenarios for external calls, parsing, and cache behavior.
+- Assert exit code behavior in addition to stdout/stderr text.
diff --git a/apps/cli/src/commands/verify.test.ts b/apps/cli/src/commands/verify.test.ts new file mode 100644 index 0000000..97a8f5c --- /dev/null +++ b/apps/cli/src/commands/verify.test.ts @@ -0,0 +1,350 @@ +import { readFile } from "node:fs/promises"; +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), +})); + +vi.mock("../config/manager.js", () => ({ + readCacheFile: vi.fn(), + resolveConfig: vi.fn(), + writeCacheFile: vi.fn(), +})); + +vi.mock("@clawdentity/sdk", () => ({ + createLogger: vi.fn(() => ({ + child: vi.fn(), + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + })), + parseRegistryConfig: vi.fn(), + verifyAIT: vi.fn(), + verifyCRL: vi.fn(), +})); + +import { parseRegistryConfig, verifyAIT, verifyCRL } from "@clawdentity/sdk"; +import { + readCacheFile, + resolveConfig, + writeCacheFile, +} from "../config/manager.js"; +import { createVerifyCommand } from "./verify.js"; + +const mockedTokenReadFile = vi.mocked(readFile); +const mockedResolveConfig = vi.mocked(resolveConfig); +const mockedReadCacheFile = vi.mocked(readCacheFile); +const mockedWriteCacheFile = vi.mocked(writeCacheFile); +const mockedParseRegistryConfig = vi.mocked(parseRegistryConfig); +const mockedVerifyAit = vi.mocked(verifyAIT); +const mockedVerifyCrl = vi.mocked(verifyCRL); + +const mockFetch = vi.fn(); + +const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +const createJsonResponse = (status: number, body: unknown): Response => { + return { + ok: status >= 200 && status < 300, + status, + json: vi.fn(async () => body), + } as unknown as Response; +}; + +const runVerifyCommand = async (args: string[]) => { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy 
= vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createVerifyCommand(); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "verify", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +}; + +const activeSigningKey = { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", +} as const; + +const tokenClaims = { + iss: "https://api.clawdentity.com", + sub: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + ownerDid: "did:claw:human:01HF7YAT00W6W7CM7N3W5FDXT5", + name: "agent-01", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + }, + }, + iat: 1_700_000_000, + nbf: 1_700_000_000, + exp: 1_900_000_000, + jti: "01HF7YAT5QJ4K3YVQJ6Q2F9M1N", +} as const; + +const crlClaims = { + iss: "https://api.clawdentity.com", + jti: "01HF7YAT4TXP6AW5QNXA2Y9K43", + iat: 1_700_000_000, + exp: 1_900_000_000, + revocations: [ + { + jti: "01HF7YAT31JZHSMW1CG6Q6MHB7", + agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + revokedAt: 1_700_000_100, + }, + ], +}; + +describe("verify command", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.stubGlobal("fetch", mockFetch); + + 
mockedTokenReadFile.mockRejectedValue(buildErrnoError("ENOENT")); + mockedResolveConfig.mockResolvedValue({ + registryUrl: "https://api.clawdentity.com", + }); + mockedReadCacheFile.mockResolvedValue(undefined); + mockedWriteCacheFile.mockResolvedValue(undefined); + + mockedParseRegistryConfig.mockReturnValue({ + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: [activeSigningKey], + }); + + mockedVerifyAit.mockResolvedValue(tokenClaims); + mockedVerifyCrl.mockResolvedValue(crlClaims); + + mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { + keys: [activeSigningKey], + }), + ); + mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { + crl: "crl.jwt.value", + }), + ); + }); + + afterEach(() => { + process.exitCode = undefined; + vi.unstubAllGlobals(); + vi.useRealTimers(); + }); + + it("verifies a valid token", async () => { + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain("✅ token verified"); + expect(result.exitCode).toBeUndefined(); + expect(mockedVerifyAit).toHaveBeenCalledWith( + expect.objectContaining({ + token: "token.jwt", + }), + ); + }); + + it("fails when token is revoked", async () => { + mockedVerifyCrl.mockResolvedValueOnce({ + ...crlClaims, + revocations: [ + { + ...crlClaims.revocations[0], + jti: tokenClaims.jti, + }, + ], + }); + + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain("❌ revoked"); + expect(result.exitCode).toBe(1); + }); + + it("fails with reason when token signature is invalid", async () => { + mockedVerifyAit.mockRejectedValueOnce( + new Error("signature verification failed"), + ); + + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain("❌ invalid token"); + expect(result.exitCode).toBe(1); + }); + + it("fails when keyset cannot be fetched", async () => { + mockFetch.mockReset(); + mockFetch.mockRejectedValueOnce(new Error("network down")); + + const result = await runVerifyCommand(["token.jwt"]); + 
+ expect(result.stdout).toContain("❌ verification keys unavailable"); + expect(result.exitCode).toBe(1); + }); + + it("fails when CRL cannot be fetched", async () => { + mockFetch.mockReset(); + mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { + keys: [activeSigningKey], + }), + ); + mockFetch.mockRejectedValueOnce(new Error("network down")); + + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain("❌ revocation check unavailable"); + expect(result.exitCode).toBe(1); + }); + + it("fails when fetched CRL cannot be verified", async () => { + mockedVerifyCrl.mockRejectedValueOnce(new Error("invalid CRL token")); + + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain( + "❌ revocation check unavailable (invalid CRL)", + ); + expect(result.exitCode).toBe(1); + }); + + it("supports reading token from file path", async () => { + mockedTokenReadFile.mockResolvedValueOnce("file-token.jwt\n"); + + const result = await runVerifyCommand(["./ait.jwt"]); + + expect(result.stdout).toContain("✅ token verified"); + expect(mockedVerifyAit).toHaveBeenCalledWith( + expect.objectContaining({ token: "file-token.jwt" }), + ); + }); + + it("uses fresh disk caches and skips network fetch", async () => { + vi.useFakeTimers(); + vi.setSystemTime(new Date("2026-02-15T16:00:00.000Z")); + + mockedReadCacheFile.mockImplementation(async (fileName: string) => { + if (fileName === "registry-keys.json") { + return JSON.stringify({ + registryUrl: "https://api.clawdentity.com/", + fetchedAtMs: Date.now() - 1_000, + keys: [activeSigningKey], + }); + } + + if (fileName === "crl-claims.json") { + return JSON.stringify({ + registryUrl: "https://api.clawdentity.com/", + fetchedAtMs: Date.now() - 1_000, + claims: crlClaims, + }); + } + + return undefined; + }); + + mockFetch.mockReset(); + + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain("✅ token verified"); + 
expect(mockFetch).not.toHaveBeenCalled(); + expect(mockedVerifyCrl).not.toHaveBeenCalled(); + expect(mockedWriteCacheFile).not.toHaveBeenCalled(); + }); + + it("refreshes stale caches from the network", async () => { + vi.useFakeTimers(); + vi.setSystemTime(new Date("2026-02-15T16:00:00.000Z")); + + mockedReadCacheFile.mockImplementation(async (fileName: string) => { + if (fileName === "registry-keys.json") { + return JSON.stringify({ + registryUrl: "https://api.clawdentity.com/", + fetchedAtMs: Date.now() - 60 * 60 * 1000 - 1, + keys: [activeSigningKey], + }); + } + + if (fileName === "crl-claims.json") { + return JSON.stringify({ + registryUrl: "https://api.clawdentity.com/", + fetchedAtMs: Date.now() - 15 * 60 * 1000 - 1, + claims: crlClaims, + }); + } + + return undefined; + }); + + mockFetch.mockReset(); + mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { + keys: [activeSigningKey], + }), + ); + mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { + crl: "crl.jwt.value", + }), + ); + + const result = await runVerifyCommand(["token.jwt"]); + + expect(result.stdout).toContain("✅ token verified"); + expect(mockFetch).toHaveBeenCalledTimes(2); + expect(mockedWriteCacheFile).toHaveBeenCalledTimes(2); + }); +}); diff --git a/apps/cli/src/commands/verify.ts b/apps/cli/src/commands/verify.ts new file mode 100644 index 0000000..348a037 --- /dev/null +++ b/apps/cli/src/commands/verify.ts @@ -0,0 +1,496 @@ +import { readFile } from "node:fs/promises"; +import { parseCrlClaims } from "@clawdentity/protocol"; +import { + createLogger, + parseRegistryConfig, + type RegistryConfig, + verifyAIT, + verifyCRL, +} from "@clawdentity/sdk"; +import { Command } from "commander"; +import { + readCacheFile, + resolveConfig, + writeCacheFile, +} from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "verify" }); + +const 
REGISTRY_KEYS_CACHE_FILE = "registry-keys.json";
+const CRL_CLAIMS_CACHE_FILE = "crl-claims.json";
+const REGISTRY_KEYS_CACHE_TTL_MS = 60 * 60 * 1000;
+const CRL_CACHE_MAX_AGE_MS = 15 * 60 * 1000;
+
+type RegistrySigningKey = NonNullable<
+  RegistryConfig["REGISTRY_SIGNING_KEYS"]
+>[number];
+
+type VerificationKey = {
+  kid: string;
+  jwk: {
+    kty: "OKP";
+    crv: "Ed25519";
+    x: string;
+  };
+};
+
+type CrlVerificationClaims = Awaited<ReturnType<typeof verifyCRL>>;
+
+type RegistryKeysCacheEntry = {
+  registryUrl: string;
+  fetchedAtMs: number;
+  keys: RegistrySigningKey[];
+};
+
+type CrlClaimsCacheEntry = {
+  registryUrl: string;
+  fetchedAtMs: number;
+  claims: CrlVerificationClaims;
+};
+
+class VerifyCommandError extends Error {
+  constructor(message: string) {
+    super(message);
+    this.name = "VerifyCommandError";
+  }
+}
+
+const isRecord = (value: unknown): value is Record<string, unknown> => {
+  return typeof value === "object" && value !== null;
+};
+
+const normalizeRegistryUrl = (registryUrl: string): string => {
+  try {
+    return new URL(registryUrl).toString();
+  } catch {
+    throw new VerifyCommandError(
+      "verification keys unavailable (registryUrl is invalid)",
+    );
+  }
+};
+
+const toRegistryUrl = (registryUrl: string, path: string): string => {
+  const normalizedBaseUrl = registryUrl.endsWith("/")
+    ? 
registryUrl
+    : `${registryUrl}/`;
+
+  return new URL(path, normalizedBaseUrl).toString();
+};
+
+const toExpectedIssuer = (registryUrl: string): string | undefined => {
+  try {
+    const hostname = new URL(registryUrl).hostname;
+    if (hostname === "api.clawdentity.com") {
+      return "https://api.clawdentity.com";
+    }
+
+    if (hostname === "dev.api.clawdentity.com") {
+      return "https://dev.api.clawdentity.com";
+    }
+
+    return undefined;
+  } catch {
+    return undefined;
+  }
+};
+
+const resolveToken = async (tokenOrFile: string): Promise<string> => {
+  const input = tokenOrFile.trim();
+  if (input.length === 0) {
+    throw new VerifyCommandError("invalid token (value is empty)");
+  }
+
+  try {
+    const fileContents = await readFile(input, "utf-8");
+    const token = fileContents.trim();
+    if (token.length === 0) {
+      throw new VerifyCommandError(`invalid token (${input} is empty)`);
+    }
+
+    return token;
+  } catch (error) {
+    const nodeError = error as NodeJS.ErrnoException;
+    if (nodeError.code === "ENOENT") {
+      return input;
+    }
+
+    if (error instanceof VerifyCommandError) {
+      throw error;
+    }
+
+    throw new VerifyCommandError(`invalid token (unable to read ${input})`);
+  }
+};
+
+const parseJson = (raw: string): unknown => {
+  try {
+    return JSON.parse(raw);
+  } catch {
+    return undefined;
+  }
+};
+
+const parseResponseJson = async (response: Response): Promise<unknown> => {
+  try {
+    return await response.json();
+  } catch {
+    return undefined;
+  }
+};
+
+const parseSigningKeys = (payload: unknown): RegistrySigningKey[] => {
+  if (!isRecord(payload) || !Array.isArray(payload.keys)) {
+    throw new VerifyCommandError(
+      "verification keys unavailable (response payload is invalid)",
+    );
+  }
+
+  const parsedConfig = parseRegistryConfig({
+    ENVIRONMENT: "test",
+    REGISTRY_SIGNING_KEYS: JSON.stringify(payload.keys),
+  });
+
+  const keys = parsedConfig.REGISTRY_SIGNING_KEYS ?? 
[]; + if (keys.length === 0) { + throw new VerifyCommandError( + "verification keys unavailable (no signing keys were published)", + ); + } + + return keys; +}; + +const parseRegistryKeysCache = ( + rawCache: string, +): RegistryKeysCacheEntry | undefined => { + const parsed = parseJson(rawCache); + if (!isRecord(parsed)) { + return undefined; + } + + const { registryUrl, fetchedAtMs, keys } = parsed; + if (typeof registryUrl !== "string") { + return undefined; + } + + if (typeof fetchedAtMs !== "number") { + return undefined; + } + + if (!Number.isFinite(fetchedAtMs) || fetchedAtMs < 0) { + return undefined; + } + + try { + const parsedKeys = parseSigningKeys({ keys }); + return { + registryUrl, + fetchedAtMs, + keys: parsedKeys, + }; + } catch { + return undefined; + } +}; + +const parseCrlCache = (rawCache: string): CrlClaimsCacheEntry | undefined => { + const parsed = parseJson(rawCache); + if (!isRecord(parsed)) { + return undefined; + } + + const { registryUrl, fetchedAtMs, claims } = parsed; + if (typeof registryUrl !== "string") { + return undefined; + } + + if (typeof fetchedAtMs !== "number") { + return undefined; + } + + if (!Number.isFinite(fetchedAtMs) || fetchedAtMs < 0) { + return undefined; + } + + try { + return { + registryUrl, + fetchedAtMs, + claims: parseCrlClaims(claims), + }; + } catch { + return undefined; + } +}; + +const toVerificationKeys = (keys: RegistrySigningKey[]): VerificationKey[] => { + return keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP", + crv: "Ed25519", + x: key.x, + }, + })); +}; + +const isFreshCache = (input: { + cache: { fetchedAtMs: number; registryUrl: string } | undefined; + nowMs: number; + registryUrl: string; + ttlMs: number; +}) => { + return ( + input.cache !== undefined && + input.cache.registryUrl === input.registryUrl && + input.nowMs - input.cache.fetchedAtMs <= input.ttlMs + ); +}; + +const fetchRegistryKeys = async ( + registryUrl: string, +): 
Promise<RegistrySigningKey[]> => {
+  let response: Response;
+
+  try {
+    response = await fetch(
+      toRegistryUrl(registryUrl, "/.well-known/claw-keys.json"),
+    );
+  } catch {
+    throw new VerifyCommandError(
+      "verification keys unavailable (network error)",
+    );
+  }
+
+  if (!response.ok) {
+    throw new VerifyCommandError(
+      `verification keys unavailable (status ${response.status})`,
+    );
+  }
+
+  return parseSigningKeys(await parseResponseJson(response));
+};
+
+const loadRegistryKeys = async (
+  registryUrl: string,
+): Promise<RegistrySigningKey[]> => {
+  const now = Date.now();
+  const rawCache = await readCacheFile(REGISTRY_KEYS_CACHE_FILE);
+  const cache =
+    typeof rawCache === "string" ? parseRegistryKeysCache(rawCache) : undefined;
+
+  const isFresh = isFreshCache({
+    cache,
+    nowMs: now,
+    registryUrl,
+    ttlMs: REGISTRY_KEYS_CACHE_TTL_MS,
+  });
+
+  if (isFresh && cache) {
+    return cache.keys;
+  }
+
+  const keys = await fetchRegistryKeys(registryUrl);
+
+  await writeCacheFile(
+    REGISTRY_KEYS_CACHE_FILE,
+    `${JSON.stringify(
+      {
+        registryUrl,
+        fetchedAtMs: now,
+        keys,
+      } satisfies RegistryKeysCacheEntry,
+      null,
+      2,
+    )}\n`,
+  );
+
+  return keys;
+};
+
+const fetchCrlClaims = async (input: {
+  expectedIssuer?: string;
+  registryUrl: string;
+  verificationKeys: VerificationKey[];
+}): Promise<CrlVerificationClaims> => {
+  let response: Response;
+
+  try {
+    response = await fetch(toRegistryUrl(input.registryUrl, "/v1/crl"));
+  } catch {
+    throw new VerifyCommandError(
+      "revocation check unavailable (network error)",
+    );
+  }
+
+  if (!response.ok) {
+    throw new VerifyCommandError(
+      `revocation check unavailable (status ${response.status})`,
+    );
+  }
+
+  const payload = await parseResponseJson(response);
+  if (!isRecord(payload) || typeof payload.crl !== "string") {
+    throw new VerifyCommandError(
+      "revocation check unavailable (response payload is invalid)",
+    );
+  }
+
+  try {
+    return await verifyCRL({
+      token: payload.crl,
+      registryKeys: input.verificationKeys,
+      expectedIssuer: input.expectedIssuer,
+    });
+  } catch 
{
+    throw new VerifyCommandError("revocation check unavailable (invalid CRL)");
+  }
+};
+
+const loadCrlClaims = async (input: {
+  expectedIssuer?: string;
+  registryUrl: string;
+  verificationKeys: VerificationKey[];
+}): Promise<CrlVerificationClaims> => {
+  const now = Date.now();
+  const rawCache = await readCacheFile(CRL_CLAIMS_CACHE_FILE);
+  const cache =
+    typeof rawCache === "string" ? parseCrlCache(rawCache) : undefined;
+
+  const isFresh = isFreshCache({
+    cache,
+    nowMs: now,
+    registryUrl: input.registryUrl,
+    ttlMs: CRL_CACHE_MAX_AGE_MS,
+  });
+
+  if (isFresh && cache) {
+    return cache.claims;
+  }
+
+  const claims = await fetchCrlClaims(input);
+
+  await writeCacheFile(
+    CRL_CLAIMS_CACHE_FILE,
+    `${JSON.stringify(
+      {
+        registryUrl: input.registryUrl,
+        fetchedAtMs: now,
+        claims,
+      } satisfies CrlClaimsCacheEntry,
+      null,
+      2,
+    )}\n`,
+  );
+
+  return claims;
+};
+
+const toInvalidTokenReason = (error: unknown): string => {
+  if (isRecord(error) && typeof error.message === "string") {
+    return `invalid token (${error.message})`;
+  }
+
+  if (error instanceof Error && error.message.length > 0) {
+    return `invalid token (${error.message})`;
+  }
+
+  return "invalid token";
+};
+
+const printResult = (passed: boolean, reason: string): void => {
+  if (passed) {
+    writeStdoutLine(`✅ ${reason}`);
+    return;
+  }
+
+  process.exitCode = 1;
+  writeStdoutLine(`❌ ${reason}`);
+};
+
+const runVerify = async (tokenOrFile: string): Promise<void> => {
+  const config = await resolveConfig();
+  const registryUrl = normalizeRegistryUrl(config.registryUrl);
+  const expectedIssuer = toExpectedIssuer(registryUrl);
+  const token = await resolveToken(tokenOrFile);
+
+  let keys: RegistrySigningKey[];
+  try {
+    keys = await loadRegistryKeys(registryUrl);
+  } catch (error) {
+    if (error instanceof VerifyCommandError) {
+      printResult(false, error.message);
+      return;
+    }
+
+    throw error;
+  }
+
+  const verificationKeys = toVerificationKeys(keys);
+  if (verificationKeys.length === 0) {
+    printResult(false, 
"verification keys unavailable (no active keys)");
+    return;
+  }
+
+  let claims: Awaited<ReturnType<typeof verifyAIT>>;
+  try {
+    claims = await verifyAIT({
+      token,
+      registryKeys: verificationKeys,
+      expectedIssuer,
+    });
+  } catch (error) {
+    printResult(false, toInvalidTokenReason(error));
+    return;
+  }
+
+  let crlClaims: CrlVerificationClaims;
+  try {
+    crlClaims = await loadCrlClaims({
+      expectedIssuer,
+      registryUrl,
+      verificationKeys,
+    });
+  } catch (error) {
+    if (error instanceof VerifyCommandError) {
+      printResult(false, error.message);
+      return;
+    }
+
+    throw error;
+  }
+
+  const isRevoked = crlClaims.revocations.some(
+    (revocation) => revocation.jti === claims.jti,
+  );
+
+  if (isRevoked) {
+    printResult(false, "revoked");
+    return;
+  }
+
+  logger.info("cli.verify.success", {
+    did: claims.sub,
+    jti: claims.jti,
+    issuer: claims.iss,
+  });
+  printResult(true, `token verified (${claims.sub})`);
+};
+
+export const createVerifyCommand = (): Command => {
+  return new Command("verify")
+    .description("Verify an AIT using registry keys and CRL state")
+    .argument(
+      "<token-or-file>",
+      "Raw AIT token or file path containing the token",
+    )
+    .action(
+      withErrorHandling("verify", async (tokenOrFile: string) => {
+        await runVerify(tokenOrFile);
+      }),
+    );
+};
diff --git a/apps/cli/src/config/manager.test.ts b/apps/cli/src/config/manager.test.ts
index c5fe579..cabcbfe 100644
--- a/apps/cli/src/config/manager.test.ts
+++ b/apps/cli/src/config/manager.test.ts
@@ -15,12 +15,16 @@ vi.mock("node:fs/promises", () => ({
 import {
   DEFAULT_REGISTRY_URL,
+  getCacheDir,
+  getCacheFilePath,
   getConfigDir,
   getConfigFilePath,
   getConfigValue,
+  readCacheFile,
   readConfig,
   resolveConfig,
   setConfigValue,
+  writeCacheFile,
   writeConfig,
 } from "./manager.js";
 
@@ -137,5 +141,40 @@ describe("config manager", () => {
   it("exposes config location helpers", () => {
     expect(getConfigDir()).toBe("/mock-home/.clawdentity");
     expect(getConfigFilePath()).toBe("/mock-home/.clawdentity/config.json");
+    
expect(getCacheDir()).toBe("/mock-home/.clawdentity/cache"); + expect(getCacheFilePath("registry-keys.json")).toBe( + "/mock-home/.clawdentity/cache/registry-keys.json", + ); + }); + + it("returns undefined when cache file does not exist", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + await expect(readCacheFile("registry-keys.json")).resolves.toBeUndefined(); + }); + + it("reads cache file contents", async () => { + mockedReadFile.mockResolvedValueOnce("cached-value"); + + await expect(readCacheFile("crl-claims.json")).resolves.toBe( + "cached-value", + ); + }); + + it("writes cache file and secures file permissions", async () => { + await writeCacheFile("registry-keys.json", '{\n "ok": true\n}\n'); + + expect(mockedMkdir).toHaveBeenCalledWith("/mock-home/.clawdentity/cache", { + recursive: true, + }); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/cache/registry-keys.json", + '{\n "ok": true\n}\n', + "utf-8", + ); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/cache/registry-keys.json", + 0o600, + ); }); }); diff --git a/apps/cli/src/config/manager.ts b/apps/cli/src/config/manager.ts index 6106c95..c017e14 100644 --- a/apps/cli/src/config/manager.ts +++ b/apps/cli/src/config/manager.ts @@ -13,6 +13,8 @@ export type CliConfigKey = keyof CliConfig; const CONFIG_DIR = ".clawdentity"; const CONFIG_FILE = "config.json"; +const CACHE_DIR = "cache"; +const FILE_MODE = 0o600; const ENV_KEY_MAP: Record = { registryUrl: "CLAWDENTITY_REGISTRY_URL", @@ -52,6 +54,18 @@ export const getConfigDir = (): string => join(homedir(), CONFIG_DIR); export const getConfigFilePath = (): string => join(getConfigDir(), CONFIG_FILE); +export const getCacheDir = (): string => join(getConfigDir(), CACHE_DIR); + +export const getCacheFilePath = (fileName: string): string => + join(getCacheDir(), fileName); + +const writeSecureFile = async (filePath: string, value: string) => { + const targetDirectory = 
dirname(filePath);
+  await mkdir(targetDirectory, { recursive: true });
+  await writeFile(filePath, value, "utf-8");
+  await chmod(filePath, FILE_MODE);
+};
+
 export const readConfig = async (): Promise<CliConfig> => {
   try {
     const configContents = await readFile(getConfigFilePath(), "utf-8");
@@ -82,16 +96,10 @@ export const resolveConfig = async (): Promise<CliConfig> => {
 };
 
 export const writeConfig = async (config: CliConfig): Promise<void> => {
-  const configFilePath = getConfigFilePath();
-  const configDirectory = dirname(configFilePath);
-
-  await mkdir(configDirectory, { recursive: true });
-  await writeFile(
-    configFilePath,
+  await writeSecureFile(
+    getConfigFilePath(),
     `${JSON.stringify(config, null, 2)}\n`,
-    "utf-8",
   );
-  await chmod(configFilePath, 0o600);
 };
 
 export const getConfigValue = async (
@@ -112,3 +120,25 @@ export const setConfigValue = async (
     [key]: value,
   });
 };
+
+export const readCacheFile = async (
+  fileName: string,
+): Promise<string | undefined> => {
+  try {
+    return await readFile(getCacheFilePath(fileName), "utf-8");
+  } catch (error) {
+    const nodeError = error as NodeJS.ErrnoException;
+    if (nodeError.code === "ENOENT") {
+      return undefined;
+    }
+
+    throw error;
+  }
+};
+
+export const writeCacheFile = async (
+  fileName: string,
+  value: string,
+): Promise<void> => {
+  await writeSecureFile(getCacheFilePath(fileName), value);
+};
diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts
index ec32b4a..6103dea 100644
--- a/apps/cli/src/index.test.ts
+++ b/apps/cli/src/index.test.ts
@@ -26,6 +26,14 @@ describe("cli", () => {
     expect(hasAgentCommand).toBe(true);
   });
 
+  it("registers the verify command", () => {
+    const hasVerifyCommand = createProgram()
+      .commands.map((command) => command.name())
+      .includes("verify");
+
+    expect(hasVerifyCommand).toBe(true);
+  });
+
   it("prints version output", async () => {
     const output: string[] = [];
     const program = createProgram();
diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts
index 70df547..366bb1a 100644
--- 
a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,6 +1,7 @@ import { Command } from "commander"; import { createAgentCommand } from "./commands/agent.js"; import { createConfigCommand } from "./commands/config.js"; +import { createVerifyCommand } from "./commands/verify.js"; export const CLI_VERSION = "0.0.0"; @@ -9,5 +10,6 @@ export const createProgram = (): Command => { .description("Clawdentity CLI - Agent identity management") .version(CLI_VERSION) .addCommand(createAgentCommand()) - .addCommand(createConfigCommand()); + .addCommand(createConfigCommand()) + .addCommand(createVerifyCommand()); }; From 587cb737632de5757dc54dd21b3e5e6b3f1f0527 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 17:09:05 +0530 Subject: [PATCH 040/190] feat(proxy): implement runtime config loading and validation --- apps/proxy/AGENTS.md | 35 +++ apps/proxy/package.json | 5 +- apps/proxy/src/AGENTS.md | 14 + apps/proxy/src/config.test.ts | 223 ++++++++++++++ apps/proxy/src/config.ts | 537 ++++++++++++++++++++++++++++++++++ apps/proxy/src/index.test.ts | 19 +- apps/proxy/src/index.ts | 22 ++ pnpm-lock.yaml | 15 + 8 files changed, 868 insertions(+), 2 deletions(-) create mode 100644 apps/proxy/AGENTS.md create mode 100644 apps/proxy/src/AGENTS.md create mode 100644 apps/proxy/src/config.test.ts create mode 100644 apps/proxy/src/config.ts diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md new file mode 100644 index 0000000..acb194d --- /dev/null +++ b/apps/proxy/AGENTS.md @@ -0,0 +1,35 @@ +# AGENTS.md (apps/proxy) + +## Purpose +- Define conventions for the Clawdentity proxy app. +- Keep runtime config, auth boundaries, and forwarding behavior consistent across tickets. + +## Runtime Configuration +- Keep runtime config centralized in `src/config.ts`. +- Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. 
+- Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). +- Require hook token input via env (`OPENCLAW_HOOK_TOKEN` or OpenClaw-compatible alias `OPENCLAW_HOOKS_TOKEN`) and never log the token value. +- Load env files with OpenClaw precedence and no overrides: + - first `./.env` from the proxy working directory + - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) + - existing environment variables always win over `.env` values. +- If hook token env vars are missing, resolve fallback token from `hooks.token` in `openclaw.json` (`OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH`, default `$OPENCLAW_STATE_DIR/openclaw.json`). +- Keep env alias support stable for operator UX: + - `LISTEN_PORT` or `PORT` + - `OPENCLAW_HOOK_TOKEN` or `OPENCLAW_HOOKS_TOKEN` + - `REGISTRY_URL` or `CLAWDENTITY_REGISTRY_URL` + - state/config path aliases: `OPENCLAW_STATE_DIR`/`CLAWDBOT_STATE_DIR`, `OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH` + +## Allowlist and Access +- Keep allowlist shape as `{ owners: string[], agents: string[], allowAllVerified: boolean }`. +- Allow bootstrap from `ALLOW_LIST` JSON with optional explicit overrides (`ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`, `ALLOW_ALL_VERIFIED`). +- Keep allowlist parsing deterministic and reject malformed input with structured config errors. + +## CRL Policy +- Keep CRL timing defaults explicit in `src/config.ts` (`5m` refresh, `15m` max age) unless explicitly overridden. +- Keep stale policy explicit (`fail-open` or `fail-closed`) and configurable from env. + +## Testing Rules +- Cover both config happy paths and failure paths in `src/config.test.ts`. +- Keep startup tests in `src/index.test.ts` to verify runtime initialization fails when config is invalid. +- Keep tests offline and deterministic (no network, no filesystem dependency). 
diff --git a/apps/proxy/package.json b/apps/proxy/package.json index 7cf2c79..5f75505 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -20,6 +20,9 @@ }, "dependencies": { "@clawdentity/protocol": "workspace:*", - "@clawdentity/sdk": "workspace:*" + "@clawdentity/sdk": "workspace:*", + "dotenv": "^17.2.3", + "json5": "^2.2.3", + "zod": "^4.1.12" } } diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md new file mode 100644 index 0000000..5f2b731 --- /dev/null +++ b/apps/proxy/src/AGENTS.md @@ -0,0 +1,14 @@ +# AGENTS.md (apps/proxy/src) + +## Source Layout +- Keep `index.ts` as runtime bootstrap surface and version export. +- Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. +- Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. + +## Config Error Handling +- Convert parse failures to `ProxyConfigError` with code `CONFIG_VALIDATION_FAILED`. +- Keep error details field-focused (`fieldErrors` / `formErrors`) and avoid exposing secrets. + +## Maintainability +- Prefer schema-driven parsing with small pure helpers for coercion/overrides. +- Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. 
diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts new file mode 100644 index 0000000..37d5e38 --- /dev/null +++ b/apps/proxy/src/config.test.ts @@ -0,0 +1,223 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { + DEFAULT_CRL_MAX_AGE_MS, + DEFAULT_CRL_REFRESH_INTERVAL_MS, + DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_PROXY_LISTEN_PORT, + DEFAULT_REGISTRY_URL, + loadProxyConfig, + ProxyConfigError, + parseProxyConfig, +} from "./config.js"; + +const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; + +describe("proxy config", () => { + it("parses required settings and applies defaults", () => { + const config = parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "super-secret-hook-token", + }); + + expect(config).toEqual({ + listenPort: DEFAULT_PROXY_LISTEN_PORT, + openclawBaseUrl: DEFAULT_OPENCLAW_BASE_URL, + openclawHookToken: "super-secret-hook-token", + registryUrl: DEFAULT_REGISTRY_URL, + allowList: { + owners: [], + agents: [], + allowAllVerified: false, + }, + crlRefreshIntervalMs: DEFAULT_CRL_REFRESH_INTERVAL_MS, + crlMaxAgeMs: DEFAULT_CRL_MAX_AGE_MS, + crlStaleBehavior: "fail-open", + }); + }); + + it("supports OpenClaw-compatible env aliases", () => { + const config = parseProxyConfig({ + PORT: "4100", + OPENCLAW_HOOKS_TOKEN: "hooks-token", + CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", + CRL_STALE_BEHAVIOR: "fail-closed", + }); + + expect(config.listenPort).toBe(4100); + expect(config.openclawHookToken).toBe("hooks-token"); + expect(config.registryUrl).toBe("https://registry.example.com"); + expect(config.crlStaleBehavior).toBe("fail-closed"); + }); + + it("parses allow list object and override env flags", () => { + const config = parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + ALLOW_LIST: JSON.stringify({ + owners: ["did:claw:owner:1"], + agents: ["did:claw:agent:1"], + 
allowAllVerified: false, + }), + ALLOWLIST_OWNERS: "did:claw:owner:2,did:claw:owner:3", + ALLOW_ALL_VERIFIED: "true", + }); + + expect(config.allowList).toEqual({ + owners: ["did:claw:owner:2", "did:claw:owner:3"], + agents: ["did:claw:agent:1"], + allowAllVerified: true, + }); + }); + + it("throws on missing hook token", () => { + expect(() => parseProxyConfig({})).toThrow(ProxyConfigError); + }); + + it("throws on malformed allow list JSON", () => { + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + ALLOW_LIST: "{not-json", + }), + ).toThrow(ProxyConfigError); + }); + + it("throws on invalid boolean override", () => { + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + ALLOW_ALL_VERIFIED: "maybe", + }), + ).toThrow(ProxyConfigError); + }); +}); + +describe("proxy config loading", () => { + function createSandbox() { + const root = mkdtempSync(join(tmpdir(), "clawdentity-proxy-config-")); + const cwd = join(root, "workspace"); + const stateDir = join(root, ".openclaw"); + mkdirSync(cwd, { recursive: true }); + mkdirSync(stateDir, { recursive: true }); + + const cleanup = () => { + rmSync(root, { recursive: true, force: true }); + }; + + return { root, cwd, stateDir, cleanup }; + } + + it("loads cwd .env first, then state .env without overriding existing values", () => { + const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.cwd, ".env"), + [ + "OPENCLAW_BASE_URL=https://cwd.example.com", + "REGISTRY_URL=https://registry.cwd.example.com", + "OPENCLAW_HOOK_TOKEN=from-cwd-dotenv", + ].join("\n"), + ); + writeFileSync( + join(sandbox.stateDir, ".env"), + ["OPENCLAW_HOOK_TOKEN=from-state-dotenv", "LISTEN_PORT=4444"].join( + "\n", + ), + ); + + const config = loadProxyConfig( + { + OPENCLAW_BASE_URL: "https://env.example.com", + }, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawBaseUrl).toBe("https://env.example.com"); + 
expect(config.openclawHookToken).toBe("from-cwd-dotenv"); + expect(config.listenPort).toBe(4444); + expect(config.registryUrl).toBe("https://registry.cwd.example.com"); + } finally { + sandbox.cleanup(); + } + }); + + it("falls back to hooks.token from openclaw.json (JSON5) when env token is missing", () => { + const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.stateDir, OPENCLAW_CONFIG_FILENAME), + [ + "{", + " // JSON5 comment", + " hooks: {", + ' token: "token-from-openclaw-config",', + " },", + "}", + ].join("\n"), + ); + + const config = loadProxyConfig( + {}, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawHookToken).toBe("token-from-openclaw-config"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses legacy state directory when canonical .openclaw does not exist", () => { + const sandbox = createSandbox(); + try { + rmSync(sandbox.stateDir, { recursive: true, force: true }); + const legacyStateDir = join(sandbox.root, ".clawdbot"); + mkdirSync(legacyStateDir, { recursive: true }); + writeFileSync( + join(legacyStateDir, ".env"), + "OPENCLAW_HOOK_TOKEN=legacy-token", + ); + + const config = loadProxyConfig( + {}, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawHookToken).toBe("legacy-token"); + } finally { + sandbox.cleanup(); + } + }); + + it("throws when openclaw.json is invalid and token fallback is required", () => { + const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.stateDir, OPENCLAW_CONFIG_FILENAME), + "{bad-json", + ); + + expect(() => + loadProxyConfig( + {}, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ), + ).toThrow(ProxyConfigError); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts new file mode 100644 index 0000000..7931616 --- /dev/null +++ b/apps/proxy/src/config.ts @@ -0,0 +1,537 @@ +import { existsSync, readFileSync } from "node:fs"; +import 
{ homedir } from "node:os"; +import { isAbsolute, join, resolve } from "node:path"; +import dotenv from "dotenv"; +import JSON5 from "json5"; +import { z } from "zod"; + +export type ProxyCrlStaleBehavior = "fail-open" | "fail-closed"; + +export type ProxyConfigLoadOptions = { + cwd?: string; + homeDir?: string; +}; + +export const DEFAULT_PROXY_LISTEN_PORT = 4000; +export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; +export const DEFAULT_REGISTRY_URL = "https://api.clawdentity.com"; +export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; +export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; +export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; + +export class ProxyConfigError extends Error { + readonly code = "CONFIG_VALIDATION_FAILED"; + readonly status = 500; + readonly expose = true; + readonly details: Record; + + constructor(message: string, details: Record) { + super(message); + this.name = "ProxyConfigError"; + this.details = details; + } +} + +const BOOLEAN_TRUE_VALUES = new Set(["1", "true", "yes", "on"]); +const BOOLEAN_FALSE_VALUES = new Set(["0", "false", "no", "off"]); +const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; +const LEGACY_STATE_DIR_NAMES = [".clawdbot", ".moldbot", ".moltbot"] as const; + +const proxyRuntimeEnvSchema = z.object({ + LISTEN_PORT: z.coerce + .number() + .int() + .min(1) + .max(65535) + .default(DEFAULT_PROXY_LISTEN_PORT), + OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), + OPENCLAW_HOOK_TOKEN: z.string().trim().min(1), + REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), + ALLOW_LIST: z.string().optional(), + ALLOWLIST_OWNERS: z.string().optional(), + ALLOWLIST_AGENTS: z.string().optional(), + ALLOW_ALL_VERIFIED: z.string().optional(), + CRL_REFRESH_INTERVAL_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_CRL_REFRESH_INTERVAL_MS), + CRL_MAX_AGE_MS: z.coerce + .number() + .int() + .positive() + 
.default(DEFAULT_CRL_MAX_AGE_MS), + CRL_STALE_BEHAVIOR: z + .enum(["fail-open", "fail-closed"]) + .default(DEFAULT_CRL_STALE_BEHAVIOR), +}); + +const proxyAllowListSchema = z.object({ + owners: z.array(z.string().trim().min(1)).default([]), + agents: z.array(z.string().trim().min(1)).default([]), + allowAllVerified: z.boolean().default(false), +}); + +export const proxyConfigSchema = z.object({ + listenPort: z.number().int().min(1).max(65535), + openclawBaseUrl: z.string().url(), + openclawHookToken: z.string().min(1), + registryUrl: z.string().url(), + allowList: proxyAllowListSchema, + crlRefreshIntervalMs: z.number().int().positive(), + crlMaxAgeMs: z.number().int().positive(), + crlStaleBehavior: z.enum(["fail-open", "fail-closed"]), +}); + +export type ProxyConfig = z.infer; +export type ProxyAllowList = z.infer; + +type RuntimeEnvInput = { + LISTEN_PORT?: unknown; + PORT?: unknown; + OPENCLAW_BASE_URL?: unknown; + OPENCLAW_HOOK_TOKEN?: unknown; + OPENCLAW_HOOKS_TOKEN?: unknown; + REGISTRY_URL?: unknown; + CLAWDENTITY_REGISTRY_URL?: unknown; + ALLOW_LIST?: unknown; + ALLOWLIST_OWNERS?: unknown; + ALLOWLIST_AGENTS?: unknown; + ALLOW_ALL_VERIFIED?: unknown; + CRL_REFRESH_INTERVAL_MS?: unknown; + CRL_MAX_AGE_MS?: unknown; + CRL_STALE_BEHAVIOR?: unknown; + OPENCLAW_STATE_DIR?: unknown; + CLAWDBOT_STATE_DIR?: unknown; + OPENCLAW_CONFIG_PATH?: unknown; + CLAWDBOT_CONFIG_PATH?: unknown; + HOME?: unknown; + USERPROFILE?: unknown; +}; + +type MutableEnv = Record; + +function isRuntimeEnvInput(value: unknown): value is RuntimeEnvInput { + return typeof value === "object" && value !== null; +} + +function toConfigValidationError( + details: Record, +): ProxyConfigError { + return new ProxyConfigError("Proxy configuration is invalid", details); +} + +function firstNonEmpty( + env: RuntimeEnvInput, + keys: readonly (keyof RuntimeEnvInput)[], +): unknown { + for (const key of keys) { + const rawValue = env[key]; + if (rawValue === undefined || rawValue === null) { + 
continue; + } + + if (typeof rawValue === "string") { + const trimmed = rawValue.trim(); + if (trimmed.length === 0) { + continue; + } + + return trimmed; + } + + return rawValue; + } + + return undefined; +} + +function firstNonEmptyString( + env: RuntimeEnvInput, + keys: readonly (keyof RuntimeEnvInput)[], +): string | undefined { + const value = firstNonEmpty(env, keys); + return typeof value === "string" ? value : undefined; +} + +function resolveDefaultEnv(): unknown { + const nodeProcess = (globalThis as { process?: { env?: unknown } }).process; + return nodeProcess?.env ?? {}; +} + +function resolveDefaultCwd(): string { + const nodeProcess = ( + globalThis as { + process?: { + cwd?: () => string; + }; + } + ).process; + if (typeof nodeProcess?.cwd === "function") { + return nodeProcess.cwd(); + } + + return "."; +} + +function resolvePathWithHome( + inputPath: string, + cwd: string, + home: string, +): string { + const trimmed = inputPath.trim(); + if (trimmed === "~") { + return home; + } + + if (trimmed.startsWith("~/")) { + return resolve(home, trimmed.slice(2)); + } + + if (isAbsolute(trimmed)) { + return trimmed; + } + + return resolve(cwd, trimmed); +} + +function resolveHomeDir( + env: RuntimeEnvInput, + homeDirOverride?: string, +): string { + if (homeDirOverride !== undefined && homeDirOverride.trim().length > 0) { + return homeDirOverride.trim(); + } + + return firstNonEmptyString(env, ["HOME", "USERPROFILE"]) ?? homedir(); +} + +function resolveStateDir( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string { + const cwd = options.cwd ?? 
resolveDefaultCwd(); + const home = resolveHomeDir(env, options.homeDir); + const stateDirOverride = firstNonEmptyString(env, [ + "OPENCLAW_STATE_DIR", + "CLAWDBOT_STATE_DIR", + ]); + + if (stateDirOverride !== undefined) { + return resolvePathWithHome(stateDirOverride, cwd, home); + } + + const canonicalStateDir = join(home, ".openclaw"); + if (existsSync(canonicalStateDir)) { + return canonicalStateDir; + } + + for (const legacyDirName of LEGACY_STATE_DIR_NAMES) { + const legacyStateDir = join(home, legacyDirName); + if (existsSync(legacyStateDir)) { + return legacyStateDir; + } + } + + return canonicalStateDir; +} + +function resolveOpenClawConfigPath( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string { + const cwd = options.cwd ?? resolveDefaultCwd(); + const home = resolveHomeDir(env, options.homeDir); + const stateDir = resolveStateDir(env, options); + const configPathOverride = firstNonEmptyString(env, [ + "OPENCLAW_CONFIG_PATH", + "CLAWDBOT_CONFIG_PATH", + ]); + + if (configPathOverride !== undefined) { + return resolvePathWithHome(configPathOverride, cwd, home); + } + + return join(stateDir, OPENCLAW_CONFIG_FILENAME); +} + +function mergeMissingEnvValues( + target: MutableEnv, + values: Record, +): void { + for (const [key, value] of Object.entries(values)) { + if (target[key] !== undefined) { + continue; + } + + target[key] = value; + } +} + +function parseDotEnvFile(filePath: string): Record { + try { + const raw = readFileSync(filePath, "utf8"); + return dotenv.parse(raw); + } catch (error) { + throw toConfigValidationError({ + fieldErrors: { + DOTENV: [`Unable to parse dotenv file at ${filePath}`], + }, + formErrors: [ + error instanceof Error ? error.message : "Unknown dotenv parse error", + ], + }); + } +} + +function loadEnvWithDotEnvFallback( + env: unknown, + options: ProxyConfigLoadOptions, +): MutableEnv { + const mergedEnv: MutableEnv = isRuntimeEnvInput(env) ? { ...env } : {}; + const cwd = options.cwd ?? 
resolveDefaultCwd(); + const cwdDotEnvPath = join(cwd, ".env"); + if (existsSync(cwdDotEnvPath)) { + mergeMissingEnvValues(mergedEnv, parseDotEnvFile(cwdDotEnvPath)); + } + + const stateDir = resolveStateDir(mergedEnv as RuntimeEnvInput, options); + const stateDotEnvPath = join(stateDir, ".env"); + if (existsSync(stateDotEnvPath)) { + mergeMissingEnvValues(mergedEnv, parseDotEnvFile(stateDotEnvPath)); + } + + return mergedEnv; +} + +function resolveHookTokenFromOpenClawConfig( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string | undefined { + const configPath = resolveOpenClawConfigPath(env, options); + if (!existsSync(configPath)) { + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON5.parse(readFileSync(configPath, "utf8")); + } catch (error) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_CONFIG_PATH: [ + `Unable to parse OpenClaw config at ${configPath}`, + ], + }, + formErrors: [ + error instanceof Error + ? error.message + : "Unknown OpenClaw config parse error", + ], + }); + } + + if (typeof parsed !== "object" || parsed === null) { + return undefined; + } + + const hooksValue = (parsed as Record).hooks; + if (typeof hooksValue !== "object" || hooksValue === null) { + return undefined; + } + + const tokenValue = (hooksValue as Record).token; + if (tokenValue === undefined || tokenValue === null) { + return undefined; + } + + if (typeof tokenValue !== "string") { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_CONFIG_PATH: ["hooks.token must be a string when set"], + }, + formErrors: [], + }); + } + + const trimmedToken = tokenValue.trim(); + return trimmedToken.length > 0 ? trimmedToken : undefined; +} + +function normalizeRuntimeEnv(input: unknown): Record { + const env: RuntimeEnvInput = isRuntimeEnvInput(input) ? 
input : {}; + + return { + LISTEN_PORT: firstNonEmpty(env, ["LISTEN_PORT", "PORT"]), + OPENCLAW_BASE_URL: firstNonEmpty(env, ["OPENCLAW_BASE_URL"]), + OPENCLAW_HOOK_TOKEN: firstNonEmpty(env, [ + "OPENCLAW_HOOK_TOKEN", + "OPENCLAW_HOOKS_TOKEN", + ]), + REGISTRY_URL: firstNonEmpty(env, [ + "REGISTRY_URL", + "CLAWDENTITY_REGISTRY_URL", + ]), + ALLOW_LIST: firstNonEmpty(env, ["ALLOW_LIST"]), + ALLOWLIST_OWNERS: firstNonEmpty(env, ["ALLOWLIST_OWNERS"]), + ALLOWLIST_AGENTS: firstNonEmpty(env, ["ALLOWLIST_AGENTS"]), + ALLOW_ALL_VERIFIED: firstNonEmpty(env, ["ALLOW_ALL_VERIFIED"]), + CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), + CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), + CRL_STALE_BEHAVIOR: firstNonEmpty(env, ["CRL_STALE_BEHAVIOR"]), + }; +} + +function dedupe(values: readonly string[]): string[] { + return [...new Set(values)]; +} + +function parseDidList(input: string): string[] { + return dedupe( + input + .split(",") + .map((value) => value.trim()) + .filter((value) => value.length > 0), + ); +} + +function parseOptionalBoolean( + value: string | undefined, + field: string, +): boolean | undefined { + if (value === undefined) { + return undefined; + } + + const normalized = value.trim().toLowerCase(); + if (BOOLEAN_TRUE_VALUES.has(normalized)) { + return true; + } + + if (BOOLEAN_FALSE_VALUES.has(normalized)) { + return false; + } + + throw toConfigValidationError({ + fieldErrors: { + [field]: ["Expected one of true/false/1/0/yes/no/on/off"], + }, + formErrors: [], + }); +} + +function parseAllowList( + env: z.infer, +): ProxyAllowList { + let allowList: ProxyAllowList = { + owners: [], + agents: [], + allowAllVerified: false, + }; + + if (env.ALLOW_LIST !== undefined) { + let parsedAllowList: unknown; + try { + parsedAllowList = JSON.parse(env.ALLOW_LIST); + } catch { + throw toConfigValidationError({ + fieldErrors: { + ALLOW_LIST: ["Expected valid JSON object"], + }, + formErrors: [], + }); + } + + const parsed = 
proxyAllowListSchema.safeParse(parsedAllowList); + if (!parsed.success) { + throw toConfigValidationError({ + fieldErrors: parsed.error.flatten().fieldErrors, + formErrors: parsed.error.flatten().formErrors, + }); + } + + allowList = parsed.data; + } + + if (env.ALLOWLIST_OWNERS !== undefined) { + allowList = { ...allowList, owners: parseDidList(env.ALLOWLIST_OWNERS) }; + } + + if (env.ALLOWLIST_AGENTS !== undefined) { + allowList = { ...allowList, agents: parseDidList(env.ALLOWLIST_AGENTS) }; + } + + const allowAllVerified = parseOptionalBoolean( + env.ALLOW_ALL_VERIFIED, + "ALLOW_ALL_VERIFIED", + ); + if (allowAllVerified !== undefined) { + allowList = { ...allowList, allowAllVerified }; + } + + return allowList; +} + +function loadHookTokenFromFallback( + env: MutableEnv, + options: ProxyConfigLoadOptions, +): void { + if ( + firstNonEmpty(env as RuntimeEnvInput, [ + "OPENCLAW_HOOK_TOKEN", + "OPENCLAW_HOOKS_TOKEN", + ]) !== undefined + ) { + return; + } + + const token = resolveHookTokenFromOpenClawConfig( + env as RuntimeEnvInput, + options, + ); + if (token !== undefined) { + env.OPENCLAW_HOOK_TOKEN = token; + } +} + +export function parseProxyConfig(env: unknown): ProxyConfig { + const parsedRuntimeEnv = proxyRuntimeEnvSchema.safeParse( + normalizeRuntimeEnv(env), + ); + if (!parsedRuntimeEnv.success) { + throw toConfigValidationError({ + fieldErrors: parsedRuntimeEnv.error.flatten().fieldErrors, + formErrors: parsedRuntimeEnv.error.flatten().formErrors, + }); + } + + const candidateConfig = { + listenPort: parsedRuntimeEnv.data.LISTEN_PORT, + openclawBaseUrl: parsedRuntimeEnv.data.OPENCLAW_BASE_URL, + openclawHookToken: parsedRuntimeEnv.data.OPENCLAW_HOOK_TOKEN, + registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, + allowList: parseAllowList(parsedRuntimeEnv.data), + crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, + crlMaxAgeMs: parsedRuntimeEnv.data.CRL_MAX_AGE_MS, + crlStaleBehavior: parsedRuntimeEnv.data.CRL_STALE_BEHAVIOR, + }; + + 
const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); + if (parsedConfig.success) { + return parsedConfig.data; + } + + throw toConfigValidationError({ + fieldErrors: parsedConfig.error.flatten().fieldErrors, + formErrors: parsedConfig.error.flatten().formErrors, + }); +} + +export function loadProxyConfig( + env: unknown = resolveDefaultEnv(), + options: ProxyConfigLoadOptions = {}, +): ProxyConfig { + const mergedEnv = loadEnvWithDotEnvFallback(env, options); + loadHookTokenFromFallback(mergedEnv, options); + return parseProxyConfig(mergedEnv); +} diff --git a/apps/proxy/src/index.test.ts b/apps/proxy/src/index.test.ts index 91c2e23..18b0095 100644 --- a/apps/proxy/src/index.test.ts +++ b/apps/proxy/src/index.test.ts @@ -1,8 +1,25 @@ import { describe, expect, it } from "vitest"; -import { PROXY_VERSION } from "./index.js"; +import { ProxyConfigError } from "./config.js"; +import { initializeProxyRuntime, PROXY_VERSION } from "./index.js"; describe("proxy", () => { it("exports PROXY_VERSION", () => { expect(PROXY_VERSION).toBe("0.0.0"); }); + + it("initializes runtime with validated config", () => { + const runtime = initializeProxyRuntime({ + OPENCLAW_HOOK_TOKEN: "hook-token", + }); + + expect(runtime.version).toBe(PROXY_VERSION); + expect(runtime.config.openclawHookToken).toBe("hook-token"); + expect(runtime.config.listenPort).toBe(4000); + }); + + it("fails startup when config is invalid", () => { + expect(() => + initializeProxyRuntime({ OPENCLAW_BASE_URL: "bad-url" }), + ).toThrow(ProxyConfigError); + }); }); diff --git a/apps/proxy/src/index.ts b/apps/proxy/src/index.ts index 77280ed..da12fb8 100644 --- a/apps/proxy/src/index.ts +++ b/apps/proxy/src/index.ts @@ -1 +1,23 @@ +import type { ProxyConfig } from "./config.js"; +import { loadProxyConfig } from "./config.js"; + export const PROXY_VERSION = "0.0.0"; + +export type ProxyRuntime = { + version: string; + config: ProxyConfig; +}; + +function resolveDefaultEnv(): unknown { + const 
nodeProcess = (globalThis as { process?: { env?: unknown } }).process; + return nodeProcess?.env ?? {}; +} + +export function initializeProxyRuntime( + env: unknown = resolveDefaultEnv(), +): ProxyRuntime { + return { + version: PROXY_VERSION, + config: loadProxyConfig(env), + }; +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9195b8c..28b1b88 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -57,6 +57,15 @@ importers: '@clawdentity/sdk': specifier: workspace:* version: link:../../packages/sdk + dotenv: + specifier: ^17.2.3 + version: 17.3.1 + json5: + specifier: ^2.2.3 + version: 2.2.3 + zod: + specifier: ^4.1.12 + version: 4.3.6 apps/registry: dependencies: @@ -1316,6 +1325,10 @@ packages: resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} engines: {node: '>=12'} + dotenv@17.3.1: + resolution: {integrity: sha512-IO8C/dzEb6O3F9/twg6ZLXz164a2fhTnEWb95H23Dm4OuN+92NmEAlTrupP9VW6Jm3sO26tQlqyvyi4CsnY9GA==} + engines: {node: '>=12'} + drizzle-kit@0.31.9: resolution: {integrity: sha512-GViD3IgsXn7trFyBUUHyTFBpH/FsHTxYJ66qdbVggxef4UBPHRYxQaRzYLTuekYnk9i5FIEL9pbBIwMqX/Uwrg==} hasBin: true @@ -3117,6 +3130,8 @@ snapshots: dotenv@16.4.7: {} + dotenv@17.3.1: {} + drizzle-kit@0.31.9: dependencies: '@drizzle-team/brocli': 0.10.2 From 05b34b491881a8e87659e6a45f6fee9fecaa260a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 17:22:43 +0530 Subject: [PATCH 041/190] fix(proxy): allow dotenv fallback when env var is blank --- apps/proxy/src/config.test.ts | 24 ++++++++++++++++++++++++ apps/proxy/src/config.ts | 9 ++++++++- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 37d5e38..a9a8936 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -145,6 +145,30 @@ describe("proxy config loading", () => { } }); + it("treats empty env variables as missing and accepts dotenv fallback", () => { + 
const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.cwd, ".env"), + "OPENCLAW_HOOK_TOKEN=from-cwd-dotenv", + ); + + const config = loadProxyConfig( + { + OPENCLAW_HOOK_TOKEN: "", + }, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawHookToken).toBe("from-cwd-dotenv"); + } finally { + sandbox.cleanup(); + } + }); + it("falls back to hooks.token from openclaw.json (JSON5) when env token is missing", () => { const sandbox = createSandbox(); try { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 7931616..b5bb0a4 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -260,7 +260,14 @@ function mergeMissingEnvValues( values: Record, ): void { for (const [key, value] of Object.entries(values)) { - if (target[key] !== undefined) { + const existingValue = target[key]; + if (existingValue !== undefined && existingValue !== null) { + if (typeof existingValue !== "string" || existingValue.trim() !== "") { + continue; + } + } + + if (value.trim() === "") { continue; } From 08900163582b58156f34710660f64ec197e9aef4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 17:24:01 +0530 Subject: [PATCH 042/190] docs(proxy): document blank env fallback semantics --- apps/proxy/AGENTS.md | 3 +++ apps/proxy/src/AGENTS.md | 1 + 2 files changed, 4 insertions(+) diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index acb194d..7568d41 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -13,6 +13,9 @@ - first `./.env` from the proxy working directory - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) - existing environment variables always win over `.env` values. 
+- Treat blank env values as unset for fallback resolution: + - empty/whitespace values (and null-like values) in inherited env must not block `.env` or config-file fallbacks + - dotenv merge semantics must match parser semantics (non-empty value wins). - If hook token env vars are missing, resolve fallback token from `hooks.token` in `openclaw.json` (`OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH`, default `$OPENCLAW_STATE_DIR/openclaw.json`). - Keep env alias support stable for operator UX: - `LISTEN_PORT` or `PORT` diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 5f2b731..0f48e5e 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -4,6 +4,7 @@ - Keep `index.ts` as runtime bootstrap surface and version export. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. +- Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. ## Config Error Handling - Convert parse failures to `ProxyConfigError` with code `CONFIG_VALIDATION_FAILED`. 
From 084fa4a117e35b156a301783dd0f7737caaec8b5 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 17:58:52 +0530 Subject: [PATCH 043/190] feat(proxy): bootstrap server health endpoint and request logging --- apps/proxy/AGENTS.md | 8 ++++ apps/proxy/package.json | 7 +++ apps/proxy/src/AGENTS.md | 3 ++ apps/proxy/src/bin.ts | 3 ++ apps/proxy/src/config.test.ts | 13 +++++ apps/proxy/src/config.ts | 15 ++++++ apps/proxy/src/server.test.ts | 81 +++++++++++++++++++++++++++++++ apps/proxy/src/server.ts | 90 +++++++++++++++++++++++++++++++++++ apps/proxy/tsconfig.json | 3 +- apps/proxy/tsup.config.ts | 2 +- pnpm-lock.yaml | 16 +++++++ 11 files changed, 238 insertions(+), 3 deletions(-) create mode 100644 apps/proxy/src/bin.ts create mode 100644 apps/proxy/src/server.test.ts create mode 100644 apps/proxy/src/server.ts diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 7568d41..6ac73b3 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -8,6 +8,7 @@ - Keep runtime config centralized in `src/config.ts`. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). +- Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). - Require hook token input via env (`OPENCLAW_HOOK_TOKEN` or OpenClaw-compatible alias `OPENCLAW_HOOKS_TOKEN`) and never log the token value. - Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory @@ -35,4 +36,11 @@ ## Testing Rules - Cover both config happy paths and failure paths in `src/config.test.ts`. - Keep startup tests in `src/index.test.ts` to verify runtime initialization fails when config is invalid. 
+- Keep server route/middleware behavior in `src/server.test.ts` (`GET /health`, request id propagation, and structured request logging). - Keep tests offline and deterministic (no network, no filesystem dependency). + +## Server Runtime +- Keep `src/server.ts` as the HTTP app/runtime entry. +- Keep middleware order stable: request context -> request logging -> error handler. +- Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200. +- Log startup and request completion with structured JSON logs; never log secrets or tokens. diff --git a/apps/proxy/package.json b/apps/proxy/package.json index 5f75505..57216ed 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -9,19 +9,26 @@ ".": { "import": "./dist/index.js", "types": "./dist/index.d.ts" + }, + "./server": { + "import": "./dist/server.js", + "types": "./dist/server.d.ts" } }, "scripts": { "build": "tsup", "format": "biome format .", "lint": "biome lint .", + "start": "node ./dist/bin.js", "test": "vitest run", "typecheck": "tsc --noEmit" }, "dependencies": { + "@hono/node-server": "^1.19.6", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", "dotenv": "^17.2.3", + "hono": "^4.11.9", "json5": "^2.2.3", "zod": "^4.1.12" } diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 0f48e5e..b8d363e 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -3,8 +3,10 @@ ## Source Layout - Keep `index.ts` as runtime bootstrap surface and version export. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. +- Keep HTTP app/startup concerns in `server.ts`; use `bin.ts` as process entrypoint for Node runtime startup. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. 
- Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. +- Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. ## Config Error Handling - Convert parse failures to `ProxyConfigError` with code `CONFIG_VALIDATION_FAILED`. @@ -13,3 +15,4 @@ ## Maintainability - Prefer schema-driven parsing with small pure helpers for coercion/overrides. - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. +- Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. diff --git a/apps/proxy/src/bin.ts b/apps/proxy/src/bin.ts new file mode 100644 index 0000000..6b433af --- /dev/null +++ b/apps/proxy/src/bin.ts @@ -0,0 +1,3 @@ +import { startProxyServer } from "./server.js"; + +startProxyServer(); diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index a9a8936..3c1900e 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -6,6 +6,7 @@ import { DEFAULT_CRL_MAX_AGE_MS, DEFAULT_CRL_REFRESH_INTERVAL_MS, DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_PROXY_ENVIRONMENT, DEFAULT_PROXY_LISTEN_PORT, DEFAULT_REGISTRY_URL, loadProxyConfig, @@ -26,6 +27,7 @@ describe("proxy config", () => { openclawBaseUrl: DEFAULT_OPENCLAW_BASE_URL, openclawHookToken: "super-secret-hook-token", registryUrl: DEFAULT_REGISTRY_URL, + environment: DEFAULT_PROXY_ENVIRONMENT, allowList: { owners: [], agents: [], @@ -42,12 +44,14 @@ describe("proxy config", () => { PORT: "4100", OPENCLAW_HOOKS_TOKEN: "hooks-token", CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", + ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", }); expect(config.listenPort).toBe(4100); expect(config.openclawHookToken).toBe("hooks-token"); expect(config.registryUrl).toBe("https://registry.example.com"); + 
expect(config.environment).toBe("local"); expect(config.crlStaleBehavior).toBe("fail-closed"); }); @@ -91,6 +95,15 @@ describe("proxy config", () => { }), ).toThrow(ProxyConfigError); }); + + it("throws on unsupported environment value", () => { + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + ENVIRONMENT: "staging", + }), + ).toThrow(ProxyConfigError); + }); }); describe("proxy config loading", () => { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index b5bb0a4..c6a8e45 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -6,6 +6,13 @@ import JSON5 from "json5"; import { z } from "zod"; export type ProxyCrlStaleBehavior = "fail-open" | "fail-closed"; +export const proxyEnvironmentValues = [ + "local", + "development", + "production", + "test", +] as const; +export type ProxyEnvironment = (typeof proxyEnvironmentValues)[number]; export type ProxyConfigLoadOptions = { cwd?: string; @@ -15,6 +22,7 @@ export type ProxyConfigLoadOptions = { export const DEFAULT_PROXY_LISTEN_PORT = 4000; export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; export const DEFAULT_REGISTRY_URL = "https://api.clawdentity.com"; +export const DEFAULT_PROXY_ENVIRONMENT: ProxyEnvironment = "development"; export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; @@ -47,6 +55,9 @@ const proxyRuntimeEnvSchema = z.object({ OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), OPENCLAW_HOOK_TOKEN: z.string().trim().min(1), REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), + ENVIRONMENT: z + .enum(proxyEnvironmentValues) + .default(DEFAULT_PROXY_ENVIRONMENT), ALLOW_LIST: z.string().optional(), ALLOWLIST_OWNERS: z.string().optional(), ALLOWLIST_AGENTS: z.string().optional(), @@ -77,6 +88,7 @@ export const proxyConfigSchema = z.object({ openclawBaseUrl: 
z.string().url(), openclawHookToken: z.string().min(1), registryUrl: z.string().url(), + environment: z.enum(proxyEnvironmentValues), allowList: proxyAllowListSchema, crlRefreshIntervalMs: z.number().int().positive(), crlMaxAgeMs: z.number().int().positive(), @@ -94,6 +106,7 @@ type RuntimeEnvInput = { OPENCLAW_HOOKS_TOKEN?: unknown; REGISTRY_URL?: unknown; CLAWDENTITY_REGISTRY_URL?: unknown; + ENVIRONMENT?: unknown; ALLOW_LIST?: unknown; ALLOWLIST_OWNERS?: unknown; ALLOWLIST_AGENTS?: unknown; @@ -379,6 +392,7 @@ function normalizeRuntimeEnv(input: unknown): Record { "REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL", ]), + ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), ALLOW_LIST: firstNonEmpty(env, ["ALLOW_LIST"]), ALLOWLIST_OWNERS: firstNonEmpty(env, ["ALLOWLIST_OWNERS"]), ALLOWLIST_AGENTS: firstNonEmpty(env, ["ALLOWLIST_AGENTS"]), @@ -517,6 +531,7 @@ export function parseProxyConfig(env: unknown): ProxyConfig { openclawBaseUrl: parsedRuntimeEnv.data.OPENCLAW_BASE_URL, openclawHookToken: parsedRuntimeEnv.data.OPENCLAW_HOOK_TOKEN, registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, + environment: parsedRuntimeEnv.data.ENVIRONMENT, allowList: parseAllowList(parsedRuntimeEnv.data), crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, crlMaxAgeMs: parsedRuntimeEnv.data.CRL_MAX_AGE_MS, diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts new file mode 100644 index 0000000..4d47082 --- /dev/null +++ b/apps/proxy/src/server.test.ts @@ -0,0 +1,81 @@ +import { describe, expect, it, vi } from "vitest"; +import { + DEFAULT_PROXY_ENVIRONMENT, + ProxyConfigError, + parseProxyConfig, +} from "./config.js"; +import { PROXY_VERSION } from "./index.js"; +import { createProxyApp, startProxyServer } from "./server.js"; + +describe("proxy server", () => { + it("returns health response with status, version, and environment", async () => { + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + }), + }); + + const 
res = await app.request("/health"); + const body = (await res.json()) as { + status: string; + version: string; + environment: string; + }; + + expect(res.status).toBe(200); + expect(body).toEqual({ + status: "ok", + version: PROXY_VERSION, + environment: DEFAULT_PROXY_ENVIRONMENT, + }); + expect(res.headers.get("x-request-id")).toBeTruthy(); + }); + + it("uses ENVIRONMENT from config for health payload", async () => { + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + ENVIRONMENT: "local", + }), + }); + + const res = await app.request("/health"); + const body = (await res.json()) as { environment: string }; + + expect(res.status).toBe(200); + expect(body.environment).toBe("local"); + }); + + it("emits structured request completion log for /health", async () => { + const logSpy = vi.spyOn(console, "info").mockImplementation(() => {}); + try { + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + }), + }); + + const res = await app.request("/health"); + expect(res.status).toBe(200); + + const line = String(logSpy.mock.calls.at(-1)?.[0] ?? 
""); + const parsed = JSON.parse(line) as Record; + + expect(parsed.message).toBe("request.completed"); + expect(parsed.service).toBe("proxy"); + expect(parsed.path).toBe("/health"); + expect(parsed.status).toBe(200); + expect(typeof parsed.requestId).toBe("string"); + } finally { + logSpy.mockRestore(); + } + }); + + it("fails startup when required config is missing", () => { + expect(() => + startProxyServer({ + env: {}, + }), + ).toThrow(ProxyConfigError); + }); +}); diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts new file mode 100644 index 0000000..dcbbbe5 --- /dev/null +++ b/apps/proxy/src/server.ts @@ -0,0 +1,90 @@ +import { + createHonoErrorHandler, + createLogger, + createRequestContextMiddleware, + createRequestLoggingMiddleware, + type Logger, + type RequestContextVariables, +} from "@clawdentity/sdk"; +import { serve } from "@hono/node-server"; +import { Hono } from "hono"; +import type { ProxyConfig } from "./config.js"; +import { loadProxyConfig } from "./config.js"; +import { PROXY_VERSION } from "./index.js"; + +type CreateProxyAppOptions = { + config: ProxyConfig; + logger?: Logger; +}; + +type StartProxyServerOptions = { + env?: unknown; + config?: ProxyConfig; + logger?: Logger; + port?: number; +}; + +export type ProxyApp = Hono<{ + Variables: RequestContextVariables; +}>; + +export type StartedProxyServer = { + app: ProxyApp; + config: ProxyConfig; + port: number; + server: ReturnType; +}; + +function resolveLogger(logger?: Logger): Logger { + return logger ?? 
createLogger({ service: "proxy" }); +} + +export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { + const logger = resolveLogger(options.logger); + const app = new Hono<{ + Variables: RequestContextVariables; + }>(); + + app.use("*", createRequestContextMiddleware()); + app.use("*", createRequestLoggingMiddleware(logger)); + app.onError(createHonoErrorHandler(logger)); + + app.get("/health", (c) => + c.json({ + status: "ok", + version: PROXY_VERSION, + environment: options.config.environment, + }), + ); + + return app; +} + +export function startProxyServer( + options: StartProxyServerOptions = {}, +): StartedProxyServer { + const config = options.config ?? loadProxyConfig(options.env); + const logger = resolveLogger(options.logger); + const app = createProxyApp({ + config, + logger, + }); + const port = options.port ?? config.listenPort; + const server = serve({ + fetch: app.fetch, + port, + }); + + logger.info("proxy.server_started", { + port, + version: PROXY_VERSION, + environment: config.environment, + }); + + return { + app, + config, + port, + server, + }; +} diff --git a/apps/proxy/tsconfig.json b/apps/proxy/tsconfig.json index 792172f..b0fced2 100644 --- a/apps/proxy/tsconfig.json +++ b/apps/proxy/tsconfig.json @@ -1,8 +1,7 @@ { "extends": "../../tsconfig.base.json", "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" + "outDir": "./dist" }, "include": ["src"] } diff --git a/apps/proxy/tsup.config.ts b/apps/proxy/tsup.config.ts index 7a3d66a..88f8edd 100644 --- a/apps/proxy/tsup.config.ts +++ b/apps/proxy/tsup.config.ts @@ -1,7 +1,7 @@ import { defineConfig } from "tsup"; export default defineConfig({ - entry: ["src/index.ts"], + entry: ["src/index.ts", "src/server.ts", "src/bin.ts"], format: ["esm"], dts: true, clean: true, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 28b1b88..cc45563 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -57,9 +57,15 @@ importers: '@clawdentity/sdk': specifier: workspace:* version: 
link:../../packages/sdk + '@hono/node-server': + specifier: ^1.19.6 + version: 1.19.9(hono@4.11.9) dotenv: specifier: ^17.2.3 version: 17.3.1 + hono: + specifier: ^4.11.9 + version: 4.11.9 json5: specifier: ^2.2.3 version: 2.2.3 @@ -688,6 +694,12 @@ packages: cpu: [x64] os: [win32] + '@hono/node-server@1.19.9': + resolution: {integrity: sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==} + engines: {node: '>=18.14.1'} + peerDependencies: + hono: ^4 + '@img/colour@1.0.0': resolution: {integrity: sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==} engines: {node: '>=18'} @@ -2634,6 +2646,10 @@ snapshots: '@esbuild/win32-x64@0.27.3': optional: true + '@hono/node-server@1.19.9(hono@4.11.9)': + dependencies: + hono: 4.11.9 + '@img/colour@1.0.0': {} '@img/sharp-darwin-arm64@0.34.5': From bf92a60633e69b490710a17887aa387915e1c371 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 18:21:03 +0530 Subject: [PATCH 044/190] feat(proxy): verify inbound auth for protected routes (T27) --- apps/proxy/AGENTS.md | 8 + apps/proxy/src/AGENTS.md | 2 + apps/proxy/src/auth-middleware.test.ts | 420 ++++++++++++++++++++ apps/proxy/src/auth-middleware.ts | 526 +++++++++++++++++++++++++ apps/proxy/src/server.ts | 29 +- 5 files changed, 982 insertions(+), 3 deletions(-) create mode 100644 apps/proxy/src/auth-middleware.test.ts create mode 100644 apps/proxy/src/auth-middleware.ts diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 6ac73b3..59cdc23 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -29,6 +29,14 @@ - Allow bootstrap from `ALLOW_LIST` JSON with optional explicit overrides (`ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`, `ALLOW_ALL_VERIFIED`). - Keep allowlist parsing deterministic and reject malformed input with structured config errors. +## Auth Verification +- Protect all non-health routes with Clawdentity auth verification middleware. 
+- Keep `GET /health` unauthenticated for probes and deployment checks. +- Parse inbound identity token strictly as `Authorization: Claw `; do not accept Bearer or alternate token headers. +- Verify request pipeline order as: AIT -> timestamp skew -> PoP signature -> nonce replay -> CRL revocation. +- Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. +- Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. + ## CRL Policy - Keep CRL timing defaults explicit in `src/config.ts` (`5m` refresh, `15m` max age) unless explicitly overridden. - Keep stale policy explicit (`fail-open` or `fail-closed`) and configurable from env. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index b8d363e..9516450 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -4,6 +4,7 @@ - Keep `index.ts` as runtime bootstrap surface and version export. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. - Keep HTTP app/startup concerns in `server.ts`; use `bin.ts` as process entrypoint for Node runtime startup. +- Keep inbound auth verification in `auth-middleware.ts` with focused helpers for token parsing, registry material loading, CRL checks, and replay protection. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. @@ -16,3 +17,4 @@ - Prefer schema-driven parsing with small pure helpers for coercion/overrides. - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. 
- Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. +- Keep auth failure semantics stable: auth-invalid requests map to `401`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts new file mode 100644 index 0000000..1954f4f --- /dev/null +++ b/apps/proxy/src/auth-middleware.test.ts @@ -0,0 +1,420 @@ +import { + generateUlid, + makeAgentDid, + makeHumanDid, +} from "@clawdentity/protocol"; +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, + signAIT, + signCRL, + signHttpRequest, +} from "@clawdentity/sdk"; +import { describe, expect, it, vi } from "vitest"; +import { parseProxyConfig } from "./config.js"; +import { createProxyApp } from "./server.js"; + +const REGISTRY_KID = "registry-active-kid"; +const NOW_MS = Date.now(); +const NOW_SECONDS = Math.floor(NOW_MS / 1000); +const ISSUER = "https://api.clawdentity.com"; +const BODY_JSON = JSON.stringify({ message: "hello" }); + +type AuthHarnessOptions = { + expired?: boolean; + crlStaleBehavior?: "fail-open" | "fail-closed"; + fetchCrlFails?: boolean; + fetchKeysFails?: boolean; + revoked?: boolean; +}; + +type AuthHarness = { + app: ReturnType; + claims: Awaited>; + createSignedHeaders: (input?: { + body?: string; + nonce?: string; + pathWithQuery?: string; + timestampSeconds?: number; + }) => Promise>; +}; + +async function buildAitClaims(input: { agentPublicKeyX: string }): Promise<{ + iss: string; + sub: string; + ownerDid: string; + name: string; + framework: string; + description: string; + cnf: { + jwk: { + kty: "OKP"; + crv: "Ed25519"; + x: string; + }; + }; + iat: number; + nbf: number; + exp: number; + jti: string; +}> { + return { + iss: ISSUER, + sub: makeAgentDid(generateUlid(NOW_MS + 10)), + ownerDid: makeHumanDid(generateUlid(NOW_MS + 20)), + name: "Proxy Agent", + framework: 
"openclaw", + description: "test agent", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: input.agentPublicKeyX, + }, + }, + iat: NOW_SECONDS - 10, + nbf: NOW_SECONDS - 10, + exp: NOW_SECONDS + 600, + jti: generateUlid(NOW_MS + 30), + }; +} + +function createFetchMock(input: { + crlToken: string; + fetchCrlFails?: boolean; + fetchKeysFails?: boolean; + registryPublicKeyX: string; +}) { + return vi.fn(async (requestInput: unknown): Promise => { + const url = (() => { + if (typeof requestInput === "string") { + return requestInput; + } + if (requestInput instanceof URL) { + return requestInput.toString(); + } + if ( + typeof requestInput === "object" && + requestInput !== null && + "url" in requestInput && + typeof (requestInput as { url?: unknown }).url === "string" + ) { + return (requestInput as { url: string }).url; + } + + return ""; + })(); + + if (url.endsWith("/.well-known/claw-keys.json")) { + if (input.fetchKeysFails) { + throw new Error("keys unavailable"); + } + + return new Response( + JSON.stringify({ + keys: [ + { + kid: REGISTRY_KID, + alg: "EdDSA", + crv: "Ed25519", + x: input.registryPublicKeyX, + status: "active", + }, + ], + }), + { status: 200 }, + ); + } + + if (url.endsWith("/v1/crl")) { + if (input.fetchCrlFails) { + throw new Error("crl unavailable"); + } + + return new Response( + JSON.stringify({ + crl: input.crlToken, + }), + { status: 200 }, + ); + } + + return new Response("not found", { status: 404 }); + }); +} + +async function createAuthHarness( + options: AuthHarnessOptions = {}, +): Promise { + const registryKeypair = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const encodedRegistry = encodeEd25519KeypairBase64url(registryKeypair); + const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); + const claims = await buildAitClaims({ + agentPublicKeyX: encodedAgent.publicKey, + }); + if (options.expired) { + claims.exp = NOW_SECONDS - 1; + } + + const ait = await signAIT({ + 
claims, + signerKid: REGISTRY_KID, + signerKeypair: registryKeypair, + }); + + const revocationJti = options.revoked + ? claims.jti + : generateUlid(NOW_MS + 40); + const crl = await signCRL({ + claims: { + iss: ISSUER, + jti: generateUlid(NOW_MS + 50), + iat: NOW_SECONDS - 10, + exp: NOW_SECONDS + 600, + revocations: [ + { + jti: revocationJti, + agentDid: claims.sub, + revokedAt: NOW_SECONDS - 5, + reason: "manual revoke", + }, + ], + }, + signerKid: REGISTRY_KID, + signerKeypair: registryKeypair, + }); + + const fetchMock = createFetchMock({ + crlToken: crl, + fetchCrlFails: options.fetchCrlFails, + fetchKeysFails: options.fetchKeysFails, + registryPublicKeyX: encodedRegistry.publicKey, + }); + + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", + ...(options.crlStaleBehavior + ? { CRL_STALE_BEHAVIOR: options.crlStaleBehavior } + : {}), + }), + auth: { + fetchImpl: fetchMock as typeof fetch, + clock: () => NOW_MS, + }, + registerRoutes: (nextApp) => { + nextApp.post("/protected", (c) => { + const auth = c.get("auth"); + return c.json({ + ok: true, + auth, + }); + }); + }, + }); + + return { + app, + claims, + createSignedHeaders: async (input = {}) => { + const body = input.body ?? BODY_JSON; + const nonce = input.nonce ?? "nonce-1"; + const pathWithQuery = input.pathWithQuery ?? "/protected"; + const timestampSeconds = input.timestampSeconds ?? 
NOW_SECONDS; + + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery, + timestamp: String(timestampSeconds), + nonce, + body: new TextEncoder().encode(body), + secretKey: agentKeypair.secretKey, + }); + + return { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signed.headers, + }; + }, + }; +} + +describe("proxy auth middleware", () => { + it("keeps /health open without auth headers", async () => { + const harness = await createAuthHarness(); + const response = await harness.app.request("/health"); + + expect(response.status).toBe(200); + }); + + it("verifies inbound auth and exposes auth context to downstream handlers", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders(); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + ok: boolean; + auth: { + agentDid: string; + ownerDid: string; + aitJti: string; + }; + }; + expect(body.ok).toBe(true); + expect(body.auth.agentDid).toBe(harness.claims.sub); + expect(body.auth.ownerDid).toBe(harness.claims.ownerDid); + expect(body.auth.aitJti).toBe(harness.claims.jti); + }); + + it("rejects non-health route when Authorization scheme is not Claw", async () => { + const harness = await createAuthHarness(); + const response = await harness.app.request("/protected", { + method: "POST", + headers: { + authorization: "Bearer token", + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); + }); + + it("rejects replayed nonce for the same agent", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-replay-1", + }); + + const first = await 
harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + const second = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(first.status).toBe(200); + expect(second.status).toBe(401); + const body = (await second.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_REPLAY"); + }); + + it("rejects requests outside the timestamp skew window", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + timestampSeconds: NOW_SECONDS - 301, + nonce: "nonce-old", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_TIMESTAMP_SKEW"); + }); + + it("rejects proof mismatches when body is tampered", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + body: BODY_JSON, + nonce: "nonce-tampered", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "tampered" }), + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_PROOF"); + }); + + it("rejects revoked AITs", async () => { + const harness = await createAuthHarness({ + revoked: true, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-revoked", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_REVOKED"); + }); + + it("rejects expired AITs", async 
() => { + const harness = await createAuthHarness({ + expired: true, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-expired", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_AIT"); + }); + + it("returns 503 when registry signing keys are unavailable", async () => { + const harness = await createAuthHarness({ + fetchKeysFails: true, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-keys-fail", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_DEPENDENCY_UNAVAILABLE"); + }); + + it("returns 503 when CRL is unavailable in fail-closed mode", async () => { + const harness = await createAuthHarness({ + fetchCrlFails: true, + crlStaleBehavior: "fail-closed", + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-crl-fail-closed", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_DEPENDENCY_UNAVAILABLE"); + }); +}); diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts new file mode 100644 index 0000000..65b5ae0 --- /dev/null +++ b/apps/proxy/src/auth-middleware.ts @@ -0,0 +1,526 @@ +import { decodeBase64url } from "@clawdentity/protocol"; +import { + AppError, + type CrlCache, + createCrlCache, + createNonceCache, + type Logger, + type NonceCache, + parseRegistryConfig, + type 
RequestContextVariables, + type VerifyHttpRequestInput, + verifyAIT, + verifyCRL, + verifyHttpRequest, +} from "@clawdentity/sdk"; +import { createMiddleware } from "hono/factory"; +import type { ProxyConfig } from "./config.js"; + +export const DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS = 60 * 60 * 1000; +export const DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS = 300; + +type RegistrySigningKey = NonNullable< + ReturnType["REGISTRY_SIGNING_KEYS"] +>[number]; + +type VerificationKey = { + kid: string; + jwk: { + kty: "OKP"; + crv: "Ed25519"; + x: string; + }; +}; + +export type ProxyAuthContext = { + agentDid: string; + ownerDid: string; + aitJti: string; + issuer: string; + cnfPublicKey: string; +}; + +export type ProxyRequestVariables = RequestContextVariables & { + auth?: ProxyAuthContext; +}; + +export type ProxyAuthMiddlewareOptions = { + config: ProxyConfig; + logger: Logger; + fetchImpl?: typeof fetch; + clock?: () => number; + nonceCache?: NonceCache; + crlCache?: CrlCache; + maxTimestampSkewSeconds?: number; + registryKeysCacheTtlMs?: number; +}; + +type RegistryKeysCache = { + fetchedAtMs: number; + keys: VerificationKey[]; +}; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function toErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : "unknown"; +} + +function toPathWithQuery(url: string): string { + const parsed = new URL(url, "http://localhost"); + return `${parsed.pathname}${parsed.search}`; +} + +function normalizeRegistryUrl(registryUrl: string): string { + try { + return new URL(registryUrl).toString(); + } catch { + throw new AppError({ + code: "PROXY_AUTH_INVALID_REGISTRY_URL", + message: "Proxy registry URL is invalid", + status: 500, + expose: true, + }); + } +} + +function toRegistryUrl(registryUrl: string, path: string): string { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? 
registryUrl + : `${registryUrl}/`; + return new URL(path, normalizedBaseUrl).toString(); +} + +function unauthorizedError(options: { + code: string; + message: string; + details?: Record; +}): AppError { + return new AppError({ + code: options.code, + message: options.message, + status: 401, + details: options.details, + expose: true, + }); +} + +function dependencyUnavailableError(options: { + message: string; + details?: Record; +}): AppError { + return new AppError({ + code: "PROXY_AUTH_DEPENDENCY_UNAVAILABLE", + message: options.message, + status: 503, + details: options.details, + expose: true, + }); +} + +export function parseClawAuthorizationHeader(authorization?: string): string { + if (typeof authorization !== "string" || authorization.trim().length === 0) { + throw unauthorizedError({ + code: "PROXY_AUTH_MISSING_TOKEN", + message: "Authorization header is required", + }); + } + + const [scheme, token] = authorization.trim().split(/\s+/, 2); + if (scheme !== "Claw" || !token || token.trim().length === 0) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_SCHEME", + message: "Authorization must be in the format 'Claw '", + }); + } + + return token.trim(); +} + +export function resolveExpectedIssuer(registryUrl: string): string | undefined { + try { + const hostname = new URL(registryUrl).hostname; + if (hostname === "api.clawdentity.com") { + return "https://api.clawdentity.com"; + } + + if (hostname === "dev.api.clawdentity.com") { + return "https://dev.api.clawdentity.com"; + } + + return undefined; + } catch { + return undefined; + } +} + +function parseRegistrySigningKeys(payload: unknown): RegistrySigningKey[] { + if (!isRecord(payload) || !Array.isArray(payload.keys)) { + throw dependencyUnavailableError({ + message: "Registry signing keys payload is invalid", + }); + } + + const parsed = (() => { + try { + return parseRegistryConfig({ + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify(payload.keys), + }); + } catch (error) { + 
throw dependencyUnavailableError({ + message: "Registry signing keys are invalid", + details: { + reason: toErrorMessage(error), + }, + }); + } + })(); + + const keys = parsed.REGISTRY_SIGNING_KEYS ?? []; + if (keys.length === 0) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + }); + } + + return keys; +} + +function toVerificationKeys(keys: RegistrySigningKey[]): VerificationKey[] { + return keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP", + crv: "Ed25519", + x: key.x, + }, + })); +} + +function parseUnixTimestamp(headerValue: string): number { + const timestamp = Number.parseInt(headerValue, 10); + if (!Number.isInteger(timestamp) || timestamp < 0) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_TIMESTAMP", + message: "X-Claw-Timestamp must be a unix seconds integer", + }); + } + + return timestamp; +} + +function assertTimestampWithinSkew(options: { + clock: () => number; + maxSkewSeconds: number; + timestampSeconds: number; +}): void { + const nowSeconds = Math.floor(options.clock() / 1000); + const skew = Math.abs(nowSeconds - options.timestampSeconds); + if (skew > options.maxSkewSeconds) { + throw unauthorizedError({ + code: "PROXY_AUTH_TIMESTAMP_SKEW", + message: "X-Claw-Timestamp is outside the allowed skew window", + details: { + maxSkewSeconds: options.maxSkewSeconds, + }, + }); + } +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function toProofVerificationInput(input: { + method: string; + pathWithQuery: string; + headers: Headers; + body: Uint8Array; + publicKey: Uint8Array; +}): VerifyHttpRequestInput { + const headers = Object.fromEntries(input.headers.entries()); + return { + method: input.method, + pathWithQuery: input.pathWithQuery, + headers, + body: input.body, + publicKey: input.publicKey, + }; +} + +export function 
createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { + const fetchImpl = options.fetchImpl ?? fetch; + const clock = options.clock ?? Date.now; + const nonceCache = options.nonceCache ?? createNonceCache(); + const maxTimestampSkewSeconds = + options.maxTimestampSkewSeconds ?? DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS; + const registryKeysCacheTtlMs = + options.registryKeysCacheTtlMs ?? DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS; + const registryUrl = normalizeRegistryUrl(options.config.registryUrl); + const expectedIssuer = resolveExpectedIssuer(registryUrl); + + let registryKeysCache: RegistryKeysCache | undefined; + + async function getActiveRegistryKeys(): Promise { + if ( + registryKeysCache && + clock() - registryKeysCache.fetchedAtMs <= registryKeysCacheTtlMs + ) { + return registryKeysCache.keys; + } + + let response: Response; + try { + response = await fetchImpl( + toRegistryUrl(registryUrl, "/.well-known/claw-keys.json"), + ); + } catch (error) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (!response.ok) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + details: { + status: response.status, + }, + }); + } + + const parsedKeys = parseRegistrySigningKeys( + await parseJsonResponse(response), + ); + const verificationKeys = toVerificationKeys(parsedKeys); + if (verificationKeys.length === 0) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + }); + } + + registryKeysCache = { + fetchedAtMs: clock(), + keys: verificationKeys, + }; + + return verificationKeys; + } + + async function fetchLatestCrlClaims(): Promise { + const verificationKeys = await getActiveRegistryKeys(); + + let response: Response; + try { + response = await fetchImpl(toRegistryUrl(registryUrl, "/v1/crl")); + } catch (error) { + throw dependencyUnavailableError({ + message: "Registry CRL is 
unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (!response.ok) { + throw dependencyUnavailableError({ + message: "Registry CRL is unavailable", + details: { + status: response.status, + }, + }); + } + + const payload = await parseJsonResponse(response); + if (!isRecord(payload) || typeof payload.crl !== "string") { + throw dependencyUnavailableError({ + message: "Registry CRL payload is invalid", + }); + } + + try { + return await verifyCRL({ + token: payload.crl, + registryKeys: verificationKeys, + expectedIssuer, + }); + } catch (error) { + throw dependencyUnavailableError({ + message: "Registry CRL is invalid", + details: { + reason: toErrorMessage(error), + }, + }); + } + } + + const crlCache = + options.crlCache ?? + createCrlCache({ + fetchLatest: fetchLatestCrlClaims, + refreshIntervalMs: options.config.crlRefreshIntervalMs, + maxAgeMs: options.config.crlMaxAgeMs, + staleBehavior: options.config.crlStaleBehavior, + clock, + }); + + return createMiddleware<{ Variables: ProxyRequestVariables }>( + async (c, next) => { + if (c.req.path === "/health") { + await next(); + return; + } + + const token = parseClawAuthorizationHeader(c.req.header("authorization")); + const verificationKeys = await getActiveRegistryKeys(); + + const claims = await (async () => { + try { + return await verifyAIT({ + token, + registryKeys: verificationKeys, + expectedIssuer, + }); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_AIT", + message: "AIT verification failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + })(); + + const timestampHeader = c.req.header("x-claw-timestamp"); + if (typeof timestampHeader !== "string") { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_TIMESTAMP", + message: "X-Claw-Timestamp header is required", + }); + } + + assertTimestampWithinSkew({ + clock, + maxSkewSeconds: maxTimestampSkewSeconds, + timestampSeconds: parseUnixTimestamp(timestampHeader), + }); + + const 
bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer()); + const pathWithQuery = toPathWithQuery(c.req.url); + + let cnfPublicKey: Uint8Array; + try { + cnfPublicKey = decodeBase64url(claims.cnf.jwk.x); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_AIT", + message: "AIT public key is invalid", + details: { + reason: toErrorMessage(error), + }, + }); + } + + try { + await verifyHttpRequest( + toProofVerificationInput({ + method: c.req.method, + pathWithQuery, + headers: c.req.raw.headers, + body: bodyBytes, + publicKey: cnfPublicKey, + }), + ); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_PROOF", + message: "PoP verification failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + + const nonceHeader = c.req.header("x-claw-nonce"); + const nonce = typeof nonceHeader === "string" ? nonceHeader : ""; + const nonceResult = (() => { + try { + return nonceCache.tryAcceptNonce({ + agentDid: claims.sub, + nonce, + }); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_NONCE", + message: "Nonce validation failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + })(); + + if (!nonceResult.accepted) { + throw unauthorizedError({ + code: "PROXY_AUTH_REPLAY", + message: "Replay detected", + }); + } + + let isRevoked: boolean; + try { + isRevoked = await crlCache.isRevoked(claims.jti); + } catch (error) { + if ( + error instanceof AppError && + error.code === "PROXY_AUTH_DEPENDENCY_UNAVAILABLE" + ) { + throw error; + } + + throw dependencyUnavailableError({ + message: "Registry CRL is unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (isRevoked) { + throw unauthorizedError({ + code: "PROXY_AUTH_REVOKED", + message: "AIT has been revoked", + }); + } + + c.set("auth", { + agentDid: claims.sub, + ownerDid: claims.ownerDid, + aitJti: claims.jti, + issuer: claims.iss, + cnfPublicKey: claims.cnf.jwk.x, + }); + + 
options.logger.info("proxy.auth.verified", { + agentDid: claims.sub, + ownerDid: claims.ownerDid, + jti: claims.jti, + }); + + await next(); + }, + ); +} diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index dcbbbe5..86f725b 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -1,20 +1,34 @@ import { + type CrlCache, createHonoErrorHandler, createLogger, createRequestContextMiddleware, createRequestLoggingMiddleware, type Logger, - type RequestContextVariables, + type NonceCache, } from "@clawdentity/sdk"; import { serve } from "@hono/node-server"; import { Hono } from "hono"; +import { + createProxyAuthMiddleware, + type ProxyRequestVariables, +} from "./auth-middleware.js"; import type { ProxyConfig } from "./config.js"; import { loadProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; +type ProxyAuthRuntimeOptions = { + fetchImpl?: typeof fetch; + clock?: () => number; + nonceCache?: NonceCache; + crlCache?: CrlCache; +}; + type CreateProxyAppOptions = { config: ProxyConfig; logger?: Logger; + registerRoutes?: (app: ProxyApp) => void; + auth?: ProxyAuthRuntimeOptions; }; type StartProxyServerOptions = { @@ -25,7 +39,7 @@ type StartProxyServerOptions = { }; export type ProxyApp = Hono<{ - Variables: RequestContextVariables; + Variables: ProxyRequestVariables; }>; export type StartedProxyServer = { @@ -42,11 +56,19 @@ function resolveLogger(logger?: Logger): Logger { export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { const logger = resolveLogger(options.logger); const app = new Hono<{ - Variables: RequestContextVariables; + Variables: ProxyRequestVariables; }>(); app.use("*", createRequestContextMiddleware()); app.use("*", createRequestLoggingMiddleware(logger)); + app.use( + "*", + createProxyAuthMiddleware({ + config: options.config, + logger, + ...options.auth, + }), + ); app.onError(createHonoErrorHandler(logger)); app.get("/health", (c) => @@ -56,6 +78,7 @@ export function 
createProxyApp(options: CreateProxyAppOptions): ProxyApp { environment: options.config.environment, }), ); + options.registerRoutes?.(app); return app; } From 662c55bda8848645e0b020e96d2aa089fc869a91 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 18:32:38 +0530 Subject: [PATCH 045/190] fix(proxy): refresh keyset on unknown kid and enforce strict Claw auth header --- apps/proxy/AGENTS.md | 2 + apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/auth-middleware.test.ts | 174 ++++++++++++++++++++++--- apps/proxy/src/auth-middleware.ts | 70 ++++++---- 4 files changed, 207 insertions(+), 40 deletions(-) diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 59cdc23..bb9aa91 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -33,7 +33,9 @@ - Protect all non-health routes with Clawdentity auth verification middleware. - Keep `GET /health` unauthenticated for probes and deployment checks. - Parse inbound identity token strictly as `Authorization: Claw `; do not accept Bearer or alternate token headers. +- Reject malformed authorization values that contain extra segments beyond `Claw `. - Verify request pipeline order as: AIT -> timestamp skew -> PoP signature -> nonce replay -> CRL revocation. +- When AIT verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning `401`. - Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 9516450..2d800cc 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -18,3 +18,4 @@ - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. 
- Keep auth failure semantics stable: auth-invalid requests map to `401`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. +- Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index 1954f4f..aeb3ef5 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -79,6 +79,25 @@ async function buildAitClaims(input: { agentPublicKeyX: string }): Promise<{ }; } +function resolveRequestUrl(requestInput: unknown): string { + if (typeof requestInput === "string") { + return requestInput; + } + if (requestInput instanceof URL) { + return requestInput.toString(); + } + if ( + typeof requestInput === "object" && + requestInput !== null && + "url" in requestInput && + typeof (requestInput as { url?: unknown }).url === "string" + ) { + return (requestInput as { url: string }).url; + } + + return ""; +} + function createFetchMock(input: { crlToken: string; fetchCrlFails?: boolean; @@ -86,24 +105,7 @@ function createFetchMock(input: { registryPublicKeyX: string; }) { return vi.fn(async (requestInput: unknown): Promise => { - const url = (() => { - if (typeof requestInput === "string") { - return requestInput; - } - if (requestInput instanceof URL) { - return requestInput.toString(); - } - if ( - typeof requestInput === "object" && - requestInput !== null && - "url" in requestInput && - typeof (requestInput as { url?: unknown }).url === "string" - ) { - return (requestInput as { url: string }).url; - } - - return ""; - })(); + const url = resolveRequestUrl(requestInput); if (url.endsWith("/.well-known/claw-keys.json")) { if (input.fetchKeysFails) { @@ -273,6 +275,123 @@ describe("proxy auth middleware", () => { expect(body.auth.aitJti).toBe(harness.claims.jti); }); + it("refreshes keyset and accepts valid AIT after 
registry key rotation", async () => { + const oldKid = "registry-old-kid"; + const newKid = "registry-new-kid"; + const oldRegistryKeypair = await generateEd25519Keypair(); + const newRegistryKeypair = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const encodedOldRegistry = + encodeEd25519KeypairBase64url(oldRegistryKeypair); + const encodedNewRegistry = + encodeEd25519KeypairBase64url(newRegistryKeypair); + const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); + + const claims = await buildAitClaims({ + agentPublicKeyX: encodedAgent.publicKey, + }); + const ait = await signAIT({ + claims, + signerKid: newKid, + signerKeypair: newRegistryKeypair, + }); + const crl = await signCRL({ + claims: { + iss: ISSUER, + jti: generateUlid(NOW_MS + 70), + iat: NOW_SECONDS - 10, + exp: NOW_SECONDS + 600, + revocations: [ + { + jti: generateUlid(NOW_MS + 80), + agentDid: claims.sub, + revokedAt: NOW_SECONDS - 5, + reason: "manual revoke", + }, + ], + }, + signerKid: newKid, + signerKeypair: newRegistryKeypair, + }); + + let keyFetchCount = 0; + const fetchMock = vi.fn( + async (requestInput: unknown): Promise => { + const url = resolveRequestUrl(requestInput); + if (url.endsWith("/.well-known/claw-keys.json")) { + keyFetchCount += 1; + const key = + keyFetchCount === 1 + ? 
{ + kid: oldKid, + alg: "EdDSA", + crv: "Ed25519", + x: encodedOldRegistry.publicKey, + status: "active", + } + : { + kid: newKid, + alg: "EdDSA", + crv: "Ed25519", + x: encodedNewRegistry.publicKey, + status: "active", + }; + return new Response( + JSON.stringify({ + keys: [key], + }), + { status: 200 }, + ); + } + + if (url.endsWith("/v1/crl")) { + return new Response( + JSON.stringify({ + crl, + }), + { status: 200 }, + ); + } + + return new Response("not found", { status: 404 }); + }, + ); + + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", + }), + auth: { + fetchImpl: fetchMock as typeof fetch, + clock: () => NOW_MS, + }, + registerRoutes: (nextApp) => { + nextApp.post("/protected", (c) => c.json({ ok: true })); + }, + }); + + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: "/protected", + timestamp: String(NOW_SECONDS), + nonce: "nonce-rotation", + body: new TextEncoder().encode(BODY_JSON), + secretKey: agentKeypair.secretKey, + }); + const response = await app.request("/protected", { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signed.headers, + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(200); + expect(keyFetchCount).toBe(2); + }); + it("rejects non-health route when Authorization scheme is not Claw", async () => { const harness = await createAuthHarness(); const response = await harness.app.request("/protected", { @@ -288,6 +407,25 @@ describe("proxy auth middleware", () => { expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); }); + it("rejects Authorization headers with extra segments", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-auth-extra", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers: { + ...headers, + authorization: `${headers.authorization} extra`, + }, 
+ body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); + }); + it("rejects replayed nonce for the same agent", async () => { const harness = await createAuthHarness(); const headers = await harness.createSignedHeaders({ diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index 65b5ae0..d785960 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -1,5 +1,6 @@ import { decodeBase64url } from "@clawdentity/protocol"; import { + AitJwtError, AppError, type CrlCache, createCrlCache, @@ -128,15 +129,15 @@ export function parseClawAuthorizationHeader(authorization?: string): string { }); } - const [scheme, token] = authorization.trim().split(/\s+/, 2); - if (scheme !== "Claw" || !token || token.trim().length === 0) { + const parsed = authorization.trim().match(/^Claw\s+(\S+)$/); + if (!parsed || parsed[1].trim().length === 0) { throw unauthorizedError({ code: "PROXY_AUTH_INVALID_SCHEME", message: "Authorization must be in the format 'Claw '", }); } - return token.trim(); + return parsed[1].trim(); } export function resolveExpectedIssuer(registryUrl: string): string | undefined { @@ -270,8 +271,12 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { let registryKeysCache: RegistryKeysCache | undefined; - async function getActiveRegistryKeys(): Promise { + async function getActiveRegistryKeys(input?: { + forceRefresh?: boolean; + }): Promise { + const forceRefresh = input?.forceRefresh === true; if ( + !forceRefresh && registryKeysCache && clock() - registryKeysCache.fetchedAtMs <= registryKeysCacheTtlMs ) { @@ -376,33 +381,54 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { clock, }); - return createMiddleware<{ Variables: ProxyRequestVariables }>( - async (c, next) => { - if (c.req.path === "/health") 
{ - await next(); - return; - } - - const token = parseClawAuthorizationHeader(c.req.header("authorization")); - const verificationKeys = await getActiveRegistryKeys(); + async function verifyAitClaims(token: string) { + const verifyWithKeys = async (registryKeys: VerificationKey[]) => + verifyAIT({ + token, + registryKeys, + expectedIssuer, + }); - const claims = await (async () => { + const verificationKeys = await getActiveRegistryKeys(); + try { + return await verifyWithKeys(verificationKeys); + } catch (error) { + if (error instanceof AitJwtError && error.code === "UNKNOWN_AIT_KID") { + const refreshedKeys = await getActiveRegistryKeys({ + forceRefresh: true, + }); try { - return await verifyAIT({ - token, - registryKeys: verificationKeys, - expectedIssuer, - }); - } catch (error) { + return await verifyWithKeys(refreshedKeys); + } catch (refreshedError) { throw unauthorizedError({ code: "PROXY_AUTH_INVALID_AIT", message: "AIT verification failed", details: { - reason: toErrorMessage(error), + reason: toErrorMessage(refreshedError), }, }); } - })(); + } + + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_AIT", + message: "AIT verification failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + } + + return createMiddleware<{ Variables: ProxyRequestVariables }>( + async (c, next) => { + if (c.req.path === "/health") { + await next(); + return; + } + + const token = parseClawAuthorizationHeader(c.req.header("authorization")); + const claims = await verifyAitClaims(token); const timestampHeader = c.req.header("x-claw-timestamp"); if (typeof timestampHeader !== "string") { From 560bc23cffedb7d9a27449c783fd38d93285c933 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 18:43:57 +0530 Subject: [PATCH 046/190] fix(proxy): retry CRL verification after key refresh --- apps/proxy/AGENTS.md | 1 + apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/auth-middleware.test.ts | 117 +++++++++++++++++++++++++ apps/proxy/src/auth-middleware.ts | 32 
+++++-- 4 files changed, 145 insertions(+), 6 deletions(-) diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index bb9aa91..c8ea502 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -36,6 +36,7 @@ - Reject malformed authorization values that contain extra segments beyond `Claw `. - Verify request pipeline order as: AIT -> timestamp skew -> PoP signature -> nonce replay -> CRL revocation. - When AIT verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning `401`. +- When CRL verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning dependency failure. - Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 2d800cc..f50a2f3 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -19,3 +19,4 @@ - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. - Keep auth failure semantics stable: auth-invalid requests map to `401`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. +- Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. 
diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index aeb3ef5..89fb195 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -392,6 +392,123 @@ describe("proxy auth middleware", () => { expect(keyFetchCount).toBe(2); }); + it("refreshes keyset and verifies CRL after registry CRL key rotation", async () => { + const oldKid = "registry-old-kid"; + const newKid = "registry-new-kid"; + const oldRegistryKeypair = await generateEd25519Keypair(); + const newRegistryKeypair = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const encodedOldRegistry = + encodeEd25519KeypairBase64url(oldRegistryKeypair); + const encodedNewRegistry = + encodeEd25519KeypairBase64url(newRegistryKeypair); + const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); + + const claims = await buildAitClaims({ + agentPublicKeyX: encodedAgent.publicKey, + }); + const ait = await signAIT({ + claims, + signerKid: oldKid, + signerKeypair: oldRegistryKeypair, + }); + const crl = await signCRL({ + claims: { + iss: ISSUER, + jti: generateUlid(NOW_MS + 90), + iat: NOW_SECONDS - 10, + exp: NOW_SECONDS + 600, + revocations: [ + { + jti: generateUlid(NOW_MS + 100), + agentDid: claims.sub, + revokedAt: NOW_SECONDS - 5, + reason: "manual revoke", + }, + ], + }, + signerKid: newKid, + signerKeypair: newRegistryKeypair, + }); + + let keyFetchCount = 0; + const fetchMock = vi.fn( + async (requestInput: unknown): Promise => { + const url = resolveRequestUrl(requestInput); + if (url.endsWith("/.well-known/claw-keys.json")) { + keyFetchCount += 1; + const key = + keyFetchCount === 1 + ? 
{ + kid: oldKid, + alg: "EdDSA", + crv: "Ed25519", + x: encodedOldRegistry.publicKey, + status: "active", + } + : { + kid: newKid, + alg: "EdDSA", + crv: "Ed25519", + x: encodedNewRegistry.publicKey, + status: "active", + }; + return new Response( + JSON.stringify({ + keys: [key], + }), + { status: 200 }, + ); + } + + if (url.endsWith("/v1/crl")) { + return new Response( + JSON.stringify({ + crl, + }), + { status: 200 }, + ); + } + + return new Response("not found", { status: 404 }); + }, + ); + + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", + }), + auth: { + fetchImpl: fetchMock as typeof fetch, + clock: () => NOW_MS, + }, + registerRoutes: (nextApp) => { + nextApp.post("/protected", (c) => c.json({ ok: true })); + }, + }); + + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: "/protected", + timestamp: String(NOW_SECONDS), + nonce: "nonce-crl-rotation", + body: new TextEncoder().encode(BODY_JSON), + secretKey: agentKeypair.secretKey, + }); + const response = await app.request("/protected", { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signed.headers, + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(200); + expect(keyFetchCount).toBe(2); + }); + it("rejects non-health route when Authorization scheme is not Claw", async () => { const harness = await createAuthHarness(); const response = await harness.app.request("/protected", { diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index d785960..9d17f81 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -3,6 +3,7 @@ import { AitJwtError, AppError, type CrlCache, + CrlJwtError, createCrlCache, createNonceCache, type Logger, @@ -325,8 +326,6 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { } async function fetchLatestCrlClaims(): Promise { - const 
verificationKeys = await getActiveRegistryKeys(); - let response: Response; try { response = await fetchImpl(toRegistryUrl(registryUrl, "/v1/crl")); @@ -354,14 +353,35 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { message: "Registry CRL payload is invalid", }); } + const crlToken = payload.crl; - try { - return await verifyCRL({ - token: payload.crl, - registryKeys: verificationKeys, + const verifyWithKeys = async (registryKeys: VerificationKey[]) => + verifyCRL({ + token: crlToken, + registryKeys, expectedIssuer, }); + + try { + const verificationKeys = await getActiveRegistryKeys(); + return await verifyWithKeys(verificationKeys); } catch (error) { + if (error instanceof CrlJwtError && error.code === "UNKNOWN_CRL_KID") { + try { + const refreshedKeys = await getActiveRegistryKeys({ + forceRefresh: true, + }); + return await verifyWithKeys(refreshedKeys); + } catch (refreshedError) { + throw dependencyUnavailableError({ + message: "Registry CRL is invalid", + details: { + reason: toErrorMessage(refreshedError), + }, + }); + } + } + throw dependencyUnavailableError({ message: "Registry CRL is invalid", details: { From 3b5a5c348769da82da03db12d220849f137ae07e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 18:51:40 +0530 Subject: [PATCH 047/190] fix(proxy): enforce strict timestamp header format --- apps/proxy/AGENTS.md | 1 + apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/auth-middleware.test.ts | 26 ++++++++++++++++++++++++-- apps/proxy/src/auth-middleware.ts | 7 +++++++ 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index c8ea502..1afed7b 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -34,6 +34,7 @@ - Keep `GET /health` unauthenticated for probes and deployment checks. - Parse inbound identity token strictly as `Authorization: Claw `; do not accept Bearer or alternate token headers. 
- Reject malformed authorization values that contain extra segments beyond `Claw `. +- Reject malformed `X-Claw-Timestamp` values; accept only plain unix-seconds integer strings. - Verify request pipeline order as: AIT -> timestamp skew -> PoP signature -> nonce replay -> CRL revocation. - When AIT verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning `401`. - When CRL verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning dependency failure. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index f50a2f3..499e937 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -18,5 +18,6 @@ - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. - Keep auth failure semantics stable: auth-invalid requests map to `401`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. +- Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. 
diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index 89fb195..3195bc2 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -35,6 +35,7 @@ type AuthHarness = { body?: string; nonce?: string; pathWithQuery?: string; + timestamp?: string; timestampSeconds?: number; }) => Promise>; }; @@ -223,12 +224,13 @@ async function createAuthHarness( const body = input.body ?? BODY_JSON; const nonce = input.nonce ?? "nonce-1"; const pathWithQuery = input.pathWithQuery ?? "/protected"; - const timestampSeconds = input.timestampSeconds ?? NOW_SECONDS; + const timestamp = + input.timestamp ?? String(input.timestampSeconds ?? NOW_SECONDS); const signed = await signHttpRequest({ method: "POST", pathWithQuery, - timestamp: String(timestampSeconds), + timestamp, nonce, body: new TextEncoder().encode(body), secretKey: agentKeypair.secretKey, @@ -583,6 +585,26 @@ describe("proxy auth middleware", () => { expect(body.error.code).toBe("PROXY_AUTH_TIMESTAMP_SKEW"); }); + it.each([ + `${NOW_SECONDS}abc`, + `${NOW_SECONDS}.5`, + ])("rejects malformed X-Claw-Timestamp header: %s", async (malformedTimestamp) => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + timestamp: malformedTimestamp, + nonce: "nonce-invalid-timestamp", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_TIMESTAMP"); + }); + it("rejects proof mismatches when body is tampered", async () => { const harness = await createAuthHarness(); const headers = await harness.createSignedHeaders({ diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index 9d17f81..aabe2f5 100644 --- a/apps/proxy/src/auth-middleware.ts +++ 
b/apps/proxy/src/auth-middleware.ts @@ -205,6 +205,13 @@ function toVerificationKeys(keys: RegistrySigningKey[]): VerificationKey[] { } function parseUnixTimestamp(headerValue: string): number { + if (!/^\d+$/.test(headerValue)) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_TIMESTAMP", + message: "X-Claw-Timestamp must be a unix seconds integer", + }); + } + const timestamp = Number.parseInt(headerValue, 10); if (!Number.isInteger(timestamp) || timestamp < 0) { throw unauthorizedError({ From b646de8fada1d8d77cc974559c03c1ddc2e4175b Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 19:31:27 +0530 Subject: [PATCH 048/190] feat(proxy): enforce agent DID allowlist without global bypass (T28) --- PRD.md | 2 +- apps/proxy/AGENTS.md | 7 ++- apps/proxy/src/AGENTS.md | 4 +- apps/proxy/src/auth-middleware.test.ts | 51 ++++++++++++++++++ apps/proxy/src/auth-middleware.ts | 28 ++++++++++ apps/proxy/src/config.test.ts | 23 +++++--- apps/proxy/src/config.ts | 72 +++++++++++--------------- issues/T28.md | 4 +- 8 files changed, 135 insertions(+), 56 deletions(-) diff --git a/PRD.md b/PRD.md index 5ea3a1c..5b70f89 100644 --- a/PRD.md +++ b/PRD.md @@ -75,7 +75,7 @@ Because OpenClaw requires `hooks.token` and expects Bearer/token auth for `/hook - **Proxy** - Verify inbound Clawdentity headers - - Enforce allowlist rules (by owner DID and/or agent DID) + - Enforce allowlist rules (agent DID only in current phase; owner DID support deferred) - Rate-limit per verified agent DID - Forward to OpenClaw `/hooks/agent` with `x-openclaw-token` diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 1afed7b..63330b6 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -25,9 +25,10 @@ - state/config path aliases: `OPENCLAW_STATE_DIR`/`CLAWDBOT_STATE_DIR`, `OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH` ## Allowlist and Access -- Keep allowlist shape as `{ owners: string[], agents: string[], allowAllVerified: boolean }`. 
-- Allow bootstrap from `ALLOW_LIST` JSON with optional explicit overrides (`ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`, `ALLOW_ALL_VERIFIED`). +- Keep allowlist shape as `{ owners: string[], agents: string[] }`. +- Allow bootstrap from `ALLOW_LIST` JSON with optional explicit overrides (`ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`). - Keep allowlist parsing deterministic and reject malformed input with structured config errors. +- Reject deprecated `ALLOW_ALL_VERIFIED` at startup; never provide a global allow-all bypass for verified callers. ## Auth Verification - Protect all non-health routes with Clawdentity auth verification middleware. @@ -36,9 +37,11 @@ - Reject malformed authorization values that contain extra segments beyond `Claw `. - Reject malformed `X-Claw-Timestamp` values; accept only plain unix-seconds integer strings. - Verify request pipeline order as: AIT -> timestamp skew -> PoP signature -> nonce replay -> CRL revocation. +- Enforce proxy access by explicit agent DID allowlist after auth verification; owner DID-only entries do not grant access. - When AIT verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning `401`. - When CRL verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning dependency failure. - Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. +- Return `403` when requests are verified but agent DID is not allowlisted. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. ## CRL Policy diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 499e937..4d85e0a 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -16,8 +16,10 @@ ## Maintainability - Prefer schema-driven parsing with small pure helpers for coercion/overrides. 
- Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. +- Keep allowlist schema strict and agent-first: reject unknown allowlist keys and require explicit `allowList.agents` membership after verification. +- Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. -- Keep auth failure semantics stable: auth-invalid requests map to `401`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. +- Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index 3195bc2..bf79da8 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -25,6 +25,8 @@ type AuthHarnessOptions = { crlStaleBehavior?: "fail-open" | "fail-closed"; fetchCrlFails?: boolean; fetchKeysFails?: boolean; + allowCurrentAgent?: boolean; + allowCurrentOwner?: boolean; revoked?: boolean; }; @@ -195,9 +197,19 @@ async function createAuthHarness( registryPublicKeyX: encodedRegistry.publicKey, }); + const allowListAgents = + options.allowCurrentAgent === false ? [] : [claims.sub]; + const allowListOwners = options.allowCurrentOwner ? 
[claims.ownerDid] : []; + const app = createProxyApp({ config: parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", + ...(allowListAgents.length > 0 + ? { ALLOWLIST_AGENTS: allowListAgents.join(",") } + : {}), + ...(allowListOwners.length > 0 + ? { ALLOWLIST_OWNERS: allowListOwners.join(",") } + : {}), ...(options.crlStaleBehavior ? { CRL_STALE_BEHAVIOR: options.crlStaleBehavior } : {}), @@ -277,6 +289,43 @@ describe("proxy auth middleware", () => { expect(body.auth.aitJti).toBe(harness.claims.jti); }); + it("returns 403 when a verified caller is not allowlisted by agent DID", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-not-allowlisted", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); + }); + + it("returns 403 when only owner DID is allowlisted", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + allowCurrentOwner: true, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-owner-only-allowlisted", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); + }); + it("refreshes keyset and accepts valid AIT after registry key rotation", async () => { const oldKid = "registry-old-kid"; const newKid = "registry-new-kid"; @@ -362,6 +411,7 @@ describe("proxy auth middleware", () => { const app = createProxyApp({ config: parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", + ALLOWLIST_AGENTS: claims.sub, }), auth: { 
fetchImpl: fetchMock as typeof fetch, @@ -479,6 +529,7 @@ describe("proxy auth middleware", () => { const app = createProxyApp({ config: parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", + ALLOWLIST_AGENTS: claims.sub, }), auth: { fetchImpl: fetchMock as typeof fetch, diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index aabe2f5..5b94004 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -122,6 +122,24 @@ function dependencyUnavailableError(options: { }); } +function forbiddenError(options: { + code: string; + message: string; + details?: Record; +}): AppError { + return new AppError({ + code: options.code, + message: options.message, + status: 403, + details: options.details, + expose: true, + }); +} + +function isAgentDidAllowed(config: ProxyConfig, agentDid: string): boolean { + return config.allowList.agents.includes(agentDid); +} + export function parseClawAuthorizationHeader(authorization?: string): string { if (typeof authorization !== "string" || authorization.trim().length === 0) { throw unauthorizedError({ @@ -559,6 +577,16 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { }); } + if (!isAgentDidAllowed(options.config, claims.sub)) { + throw forbiddenError({ + code: "PROXY_AUTH_FORBIDDEN", + message: "Verified caller is not allowlisted", + details: { + agentDid: claims.sub, + }, + }); + } + c.set("auth", { agentDid: claims.sub, ownerDid: claims.ownerDid, diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 3c1900e..9590ec2 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -31,7 +31,6 @@ describe("proxy config", () => { allowList: { owners: [], agents: [], - allowAllVerified: false, }, crlRefreshIntervalMs: DEFAULT_CRL_REFRESH_INTERVAL_MS, crlMaxAgeMs: DEFAULT_CRL_MAX_AGE_MS, @@ -55,22 +54,19 @@ describe("proxy config", () => { 
expect(config.crlStaleBehavior).toBe("fail-closed"); }); - it("parses allow list object and override env flags", () => { + it("parses allow list object and override env lists", () => { const config = parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "token", ALLOW_LIST: JSON.stringify({ owners: ["did:claw:owner:1"], agents: ["did:claw:agent:1"], - allowAllVerified: false, }), ALLOWLIST_OWNERS: "did:claw:owner:2,did:claw:owner:3", - ALLOW_ALL_VERIFIED: "true", }); expect(config.allowList).toEqual({ owners: ["did:claw:owner:2", "did:claw:owner:3"], agents: ["did:claw:agent:1"], - allowAllVerified: true, }); }); @@ -87,11 +83,24 @@ describe("proxy config", () => { ).toThrow(ProxyConfigError); }); - it("throws on invalid boolean override", () => { + it("throws when deprecated ALLOW_ALL_VERIFIED is set", () => { expect(() => parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "token", - ALLOW_ALL_VERIFIED: "maybe", + ALLOW_ALL_VERIFIED: "true", + }), + ).toThrow(ProxyConfigError); + }); + + it("throws when ALLOW_LIST includes unknown keys", () => { + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + ALLOW_LIST: JSON.stringify({ + owners: [], + agents: [], + allowAllVerified: true, + }), }), ).toThrow(ProxyConfigError); }); diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index c6a8e45..798f6f1 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -40,8 +40,6 @@ export class ProxyConfigError extends Error { } } -const BOOLEAN_TRUE_VALUES = new Set(["1", "true", "yes", "on"]); -const BOOLEAN_FALSE_VALUES = new Set(["0", "false", "no", "off"]); const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; const LEGACY_STATE_DIR_NAMES = [".clawdbot", ".moldbot", ".moltbot"] as const; @@ -61,7 +59,6 @@ const proxyRuntimeEnvSchema = z.object({ ALLOW_LIST: z.string().optional(), ALLOWLIST_OWNERS: z.string().optional(), ALLOWLIST_AGENTS: z.string().optional(), - ALLOW_ALL_VERIFIED: z.string().optional(), CRL_REFRESH_INTERVAL_MS: z.coerce .number() .int() @@ 
-77,11 +74,12 @@ const proxyRuntimeEnvSchema = z.object({ .default(DEFAULT_CRL_STALE_BEHAVIOR), }); -const proxyAllowListSchema = z.object({ - owners: z.array(z.string().trim().min(1)).default([]), - agents: z.array(z.string().trim().min(1)).default([]), - allowAllVerified: z.boolean().default(false), -}); +const proxyAllowListSchema = z + .object({ + owners: z.array(z.string().trim().min(1)).default([]), + agents: z.array(z.string().trim().min(1)).default([]), + }) + .strict(); export const proxyConfigSchema = z.object({ listenPort: z.number().int().min(1).max(65535), @@ -396,7 +394,6 @@ function normalizeRuntimeEnv(input: unknown): Record { ALLOW_LIST: firstNonEmpty(env, ["ALLOW_LIST"]), ALLOWLIST_OWNERS: firstNonEmpty(env, ["ALLOWLIST_OWNERS"]), ALLOWLIST_AGENTS: firstNonEmpty(env, ["ALLOWLIST_AGENTS"]), - ALLOW_ALL_VERIFIED: firstNonEmpty(env, ["ALLOW_ALL_VERIFIED"]), CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), CRL_STALE_BEHAVIOR: firstNonEmpty(env, ["CRL_STALE_BEHAVIOR"]), @@ -416,38 +413,12 @@ function parseDidList(input: string): string[] { ); } -function parseOptionalBoolean( - value: string | undefined, - field: string, -): boolean | undefined { - if (value === undefined) { - return undefined; - } - - const normalized = value.trim().toLowerCase(); - if (BOOLEAN_TRUE_VALUES.has(normalized)) { - return true; - } - - if (BOOLEAN_FALSE_VALUES.has(normalized)) { - return false; - } - - throw toConfigValidationError({ - fieldErrors: { - [field]: ["Expected one of true/false/1/0/yes/no/on/off"], - }, - formErrors: [], - }); -} - function parseAllowList( env: z.infer, ): ProxyAllowList { let allowList: ProxyAllowList = { owners: [], agents: [], - allowAllVerified: false, }; if (env.ALLOW_LIST !== undefined) { @@ -482,15 +453,27 @@ function parseAllowList( allowList = { ...allowList, agents: parseDidList(env.ALLOWLIST_AGENTS) }; } - const allowAllVerified = parseOptionalBoolean( - 
env.ALLOW_ALL_VERIFIED, - "ALLOW_ALL_VERIFIED", - ); - if (allowAllVerified !== undefined) { - allowList = { ...allowList, allowAllVerified }; + return allowList; +} + +function assertNoDeprecatedAllowAllVerified(env: RuntimeEnvInput): void { + const value = env.ALLOW_ALL_VERIFIED; + if ( + value === undefined || + value === null || + (typeof value === "string" && value.trim().length === 0) + ) { + return; } - return allowList; + throw toConfigValidationError({ + fieldErrors: { + ALLOW_ALL_VERIFIED: [ + "ALLOW_ALL_VERIFIED is no longer supported. Use ALLOWLIST_AGENTS.", + ], + }, + formErrors: [], + }); } function loadHookTokenFromFallback( @@ -516,8 +499,11 @@ function loadHookTokenFromFallback( } export function parseProxyConfig(env: unknown): ProxyConfig { + const inputEnv: RuntimeEnvInput = isRuntimeEnvInput(env) ? env : {}; + assertNoDeprecatedAllowAllVerified(inputEnv); + const parsedRuntimeEnv = proxyRuntimeEnvSchema.safeParse( - normalizeRuntimeEnv(env), + normalizeRuntimeEnv(inputEnv), ); if (!parsedRuntimeEnv.success) { throw toConfigValidationError({ diff --git a/issues/T28.md b/issues/T28.md index ca6221a..3c64380 100644 --- a/issues/T28.md +++ b/issues/T28.md @@ -1,7 +1,7 @@ Source: `T28.md` ## Goal -Enforce allowlist by agent DID and/or owner DID after verification. +Enforce allowlist by agent DID after verification. ## In Scope - Implement the ticket objective described in `Goal`. @@ -33,7 +33,7 @@ Enforce allowlist by agent DID and/or owner DID after verification. 
## Deliverables - Allowlist config: owners[], agents[] -- Optional `allowAllVerified` (default false) +- Remove support for `allowAllVerified`/`ALLOW_ALL_VERIFIED` - Return 403 when verified but not allowed ## Refactor Opportunities From 1604efd7dc0df5ddab28d03ec1fc9284b703e27b Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 21:31:02 +0530 Subject: [PATCH 049/190] feat(proxy): implement OpenClaw hook forwarding and relay onboarding --- apps/cli/AGENTS.md | 8 + apps/cli/package.json | 4 +- apps/cli/src/AGENTS.md | 2 + apps/cli/src/commands/AGENTS.md | 6 + apps/cli/src/commands/agent-name.ts | 19 + apps/cli/src/commands/agent.ts | 20 +- apps/cli/src/commands/openclaw.test.ts | 232 ++++++ apps/cli/src/commands/openclaw.ts | 759 ++++++++++++++++++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/cli/tsup.config.ts | 5 + apps/openclaw-skill/AGENTS.md | 39 + apps/openclaw-skill/package.json | 20 + apps/openclaw-skill/skill/SKILL.md | 111 +++ .../skill/references/clawdentity-protocol.md | 113 +++ apps/openclaw-skill/src/AGENTS.md | 18 + apps/openclaw-skill/src/index.ts | 17 + .../src/transforms/peers-config.test.ts | 130 +++ .../src/transforms/peers-config.ts | 231 ++++++ .../src/transforms/relay-to-peer.test.ts | 224 ++++++ .../src/transforms/relay-to-peer.ts | 286 +++++++ apps/openclaw-skill/tsconfig.json | 9 + apps/openclaw-skill/tsup.config.ts | 16 + apps/openclaw-skill/vitest.config.ts | 20 + apps/proxy/src/AGENTS.md | 3 + apps/proxy/src/agent-hook-route.test.ts | 228 ++++++ apps/proxy/src/agent-hook-route.ts | 146 ++++ apps/proxy/src/server.ts | 14 + pnpm-lock.yaml | 13 + 29 files changed, 2683 insertions(+), 20 deletions(-) create mode 100644 apps/cli/src/commands/agent-name.ts create mode 100644 apps/cli/src/commands/openclaw.test.ts create mode 100644 apps/cli/src/commands/openclaw.ts create mode 100644 apps/openclaw-skill/AGENTS.md create mode 100644 apps/openclaw-skill/package.json create mode 100644 
apps/openclaw-skill/skill/SKILL.md create mode 100644 apps/openclaw-skill/skill/references/clawdentity-protocol.md create mode 100644 apps/openclaw-skill/src/AGENTS.md create mode 100644 apps/openclaw-skill/src/index.ts create mode 100644 apps/openclaw-skill/src/transforms/peers-config.test.ts create mode 100644 apps/openclaw-skill/src/transforms/peers-config.ts create mode 100644 apps/openclaw-skill/src/transforms/relay-to-peer.test.ts create mode 100644 apps/openclaw-skill/src/transforms/relay-to-peer.ts create mode 100644 apps/openclaw-skill/tsconfig.json create mode 100644 apps/openclaw-skill/tsup.config.ts create mode 100644 apps/openclaw-skill/vitest.config.ts create mode 100644 apps/proxy/src/agent-hook-route.test.ts create mode 100644 apps/proxy/src/agent-hook-route.ts diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 26f95cb..fd09032 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -15,6 +15,7 @@ - Keep user-facing command output on `writeStdoutLine` / `writeStderrLine`; reserve structured logger calls for diagnostic events. - Prefer `@clawdentity/sdk` helpers (`decodeAIT`) when surfacing agent metadata instead of parsing JWTs manually. - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. +- Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. @@ -50,6 +51,13 @@ - Keep user output explicit and command-like: successful checks print `✅ ...`; failed checks print `❌ ` and set non-zero exit code. - Cache files (`registry-keys.json`, `crl-claims.json`) should include source registry URL + fetch timestamp so stale or cross-environment cache reuse is avoided. 
+## Adding new commands +- Keep `src/index.ts` as the only place wiring command builders (`createAgentCommand`, `createConfigCommand`, etc.); register a future `createOpenClawCommand()` there so the CLI surface stays predictable for automation/docs. +- Implement invite/setup behavior inside `src/commands/openclaw.ts` and reuse `withErrorHandling` from `src/commands/helpers.ts` for every subcommand. Pull shared config/paths from `../config/manager.js` to preserve the existing precedence and secrets handling semantics. +- Any logic shared between invite and setup (validation, payload construction, output formatting) should live in a dedicated helper module such as `src/commands/openclaw/helpers.ts` or exported helpers inside `helpers.ts`, not duplicated in multiple `.ts` files. +- Mimic the test pattern in `src/commands/agent.test.ts`: mock `node:fs/promises`, `@clawdentity/sdk`, `resolveConfig()`, and `fetch`, register the command under a root `Command`, and capture `stdout`/`stderr` so you can assert visible output plus exit codes for success/failure paths. +- Favor exporting pure helper functions so invite/setup logic can be unit-tested without needing to run the full CLI parse flow if you need tighter coverage. 
+ ## Validation Commands - `pnpm -F @clawdentity/cli lint` - `pnpm -F @clawdentity/cli typecheck` diff --git a/apps/cli/package.json b/apps/cli/package.json index 6728932..51ac08f 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -22,11 +22,11 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "@clawdentity/protocol": "workspace:*", - "@clawdentity/sdk": "workspace:*", "commander": "^13.1.0" }, "devDependencies": { + "@clawdentity/protocol": "workspace:*", + "@clawdentity/sdk": "workspace:*", "@types/node": "^22.18.11" } } diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index a6cefb7..60d11cf 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -7,6 +7,7 @@ - Keep each command implementation in `commands/.ts` with one exported factory (`createCommand`). - Reuse shared command wrappers (`withErrorHandling`) and IO helpers (`writeStdoutLine`, `writeStderrLine`) instead of inline process writes. - Prefer explicit error-to-reason mapping for operator-facing failures rather than generic stack traces. +- Prefer SDK shared primitives (`AppError`, `nowIso`) for new command error/date logic instead of ad-hoc equivalents. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. @@ -22,4 +23,5 @@ ## Testing Rules - Command tests must capture `stdout`/`stderr` and assert exit-code behavior. - Include success, revoked, invalid token, keyset failure, CRL failure, and cache-hit scenarios for `verify`. +- For OpenClaw invite/setup flow, cover invite encode/decode, config patch idempotency, and missing-file validation. - Keep tests deterministic by mocking network and filesystem dependencies. 
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 1237da4..127b526 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -8,6 +8,7 @@ - Keep command handlers focused on orchestration; move reusable logic into local helpers. - Use `withErrorHandling` for command actions unless a command has a documented reason not to. - Route all user-facing messages through `writeStdoutLine`/`writeStderrLine`. +- For new command-domain errors, use SDK `AppError` with stable `code` values. ## Verification Command Rules - `verify` must preserve the `✅`/`❌` output contract with explicit reasons. @@ -15,6 +16,11 @@ - Signature and CRL validation must use SDK helpers (`verifyAIT`, `verifyCRL`), not local JWT cryptography code. - Cache usage must enforce TTL and registry URL matching before reuse. +## OpenClaw Command Rules +- `openclaw invite` must generate self-contained invite code from admin-provided DID + proxy URL. +- `openclaw setup` must be idempotent for relay mapping updates and peer map writes. +- Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. + ## Testing Rules - Mock network and filesystem dependencies in command tests. - Include success and failure scenarios for external calls, parsing, and cache behavior. diff --git a/apps/cli/src/commands/agent-name.ts b/apps/cli/src/commands/agent-name.ts new file mode 100644 index 0000000..283cc06 --- /dev/null +++ b/apps/cli/src/commands/agent-name.ts @@ -0,0 +1,19 @@ +import { validateAgentName } from "@clawdentity/protocol"; + +const RESERVED_AGENT_NAMES = new Set([".", ".."]); + +export const assertValidAgentName = (name: string): string => { + const normalizedName = name.trim(); + + if (RESERVED_AGENT_NAMES.has(normalizedName)) { + throw new Error('Agent name must not be "." 
or "..".'); + } + + if (!validateAgentName(normalizedName)) { + throw new Error( + "Agent name contains invalid characters, reserved path segments, or length. Use 1-64 chars: a-z, A-Z, 0-9, ., _, -", + ); + } + + return normalizedName; +}; diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index f078311..4d7336a 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -1,6 +1,6 @@ import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { join } from "node:path"; -import { parseDid, validateAgentName } from "@clawdentity/protocol"; +import { parseDid } from "@clawdentity/protocol"; import { createLogger, type DecodedAit, @@ -11,6 +11,7 @@ import { import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; import { writeStdoutLine } from "../io.js"; +import { assertValidAgentName } from "./agent-name.js"; import { withErrorHandling } from "./helpers.js"; const logger = createLogger({ service: "cli", module: "agent" }); @@ -18,7 +19,6 @@ const logger = createLogger({ service: "cli", module: "agent" }); const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; const IDENTITY_FILE_NAME = "identity.json"; -const RESERVED_AGENT_NAMES = new Set([".", ".."]); const FILE_MODE = 0o600; type AgentCreateOptions = { @@ -146,22 +146,6 @@ const formatExpiresAt = (expires: number): string => { return new Date(expires * 1000).toISOString(); }; -const assertValidAgentName = (name: string): string => { - const normalizedName = name.trim(); - - if (RESERVED_AGENT_NAMES.has(normalizedName)) { - throw new Error('Agent name must not be "." or "..".'); - } - - if (!validateAgentName(normalizedName)) { - throw new Error( - "Agent name contains invalid characters, reserved path segments, or length. 
Use 1-64 chars: a-z, A-Z, 0-9, ., _, -", - ); - } - - return normalizedName; -}; - const resolveFramework = ( framework: string | undefined, ): string | undefined => { diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts new file mode 100644 index 0000000..42a2ade --- /dev/null +++ b/apps/cli/src/commands/openclaw.test.ts @@ -0,0 +1,232 @@ +import { + mkdirSync, + mkdtempSync, + readFileSync, + rmSync, + writeFileSync, +} from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { + createOpenclawInviteCode, + decodeOpenclawInviteCode, + setupOpenclawRelayFromInvite, +} from "./openclaw.js"; + +type OpenclawSandbox = { + cleanup: () => void; + homeDir: string; + openclawDir: string; + transformSourcePath: string; +}; + +function createSandbox(): OpenclawSandbox { + const root = mkdtempSync(join(tmpdir(), "clawdentity-cli-openclaw-")); + const homeDir = join(root, "home"); + const openclawDir = join(root, "openclaw"); + const transformSourcePath = join(root, "relay-to-peer.mjs"); + + mkdirSync(homeDir, { recursive: true }); + mkdirSync(openclawDir, { recursive: true }); + + writeFileSync( + join(openclawDir, "openclaw.json"), + JSON.stringify( + { + hooks: { + enabled: false, + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + writeFileSync( + transformSourcePath, + "export default async function relay(ctx){ return ctx?.payload ?? 
null; }\n", + "utf8", + ); + + return { + cleanup: () => { + rmSync(root, { recursive: true, force: true }); + }, + homeDir, + openclawDir, + transformSourcePath, + }; +} + +function seedLocalAgentCredentials(homeDir: string, agentName: string): void { + const agentDir = join(homeDir, ".clawdentity", "agents", agentName); + mkdirSync(agentDir, { recursive: true }); + writeFileSync(join(agentDir, "secret.key"), "secret-key-value", "utf8"); + writeFileSync(join(agentDir, "ait.jwt"), "mock.ait.jwt", "utf8"); +} + +describe("openclaw command helpers", () => { + it("creates and decodes invite codes", () => { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + name: "Beta Agent", + }); + + expect(invite.code.startsWith("clawd1_")).toBe(true); + + const decoded = decodeOpenclawInviteCode(invite.code); + expect(decoded.v).toBe(1); + expect(decoded.did).toBe("did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"); + expect(decoded.proxyUrl).toBe("https://beta.example.com/hooks/agent"); + expect(decoded.alias).toBe("beta"); + expect(decoded.name).toBe("Beta Agent"); + expect(decoded.issuedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + + it("applies relay setup from invite and patches OpenClaw config", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "http://beta-proxy.local:4000/hooks/agent", + peerAlias: "beta", + name: "Beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.peerAlias).toBe("beta"); + expect(result.peerDid).toBe("did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7"); + + const copiedTransform = 
readFileSync(result.transformTargetPath, "utf8"); + expect(copiedTransform).toContain("relay(ctx)"); + + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + enabled?: boolean; + allowRequestSessionKey?: boolean; + allowedSessionKeyPrefixes?: string[]; + mappings?: Array>; + }; + }; + + expect(openclawConfig.hooks.enabled).toBe(true); + expect(openclawConfig.hooks.allowRequestSessionKey).toBe(true); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect( + openclawConfig.hooks.mappings?.some( + (mapping) => + mapping.id === "clawdentity-send-to-peer" && + (mapping.match as { path?: string })?.path === "send-to-peer" && + mapping.action === "agent" && + mapping.wakeMode === "now" && + (mapping.transform as { module?: string })?.module === + "relay-to-peer.mjs", + ), + ).toBe(true); + + const peers = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "peers.json"), + "utf8", + ), + ) as { + peers: Record; + }; + expect(peers.peers.beta).toEqual({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "http://beta-proxy.local:4000/hooks/agent", + name: "Beta", + }); + + const selectedAgent = readFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-agent-name"), + "utf8", + ).trim(); + expect(selectedAgent).toBe("alpha"); + } finally { + sandbox.cleanup(); + } + }); + + it("keeps send-to-peer mapping idempotent across repeated setup", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + await 
setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfig = JSON.parse( + readFileSync(join(sandbox.openclawDir, "openclaw.json"), "utf8"), + ) as { + hooks: { mappings?: Array> }; + }; + + const relayMappings = (openclawConfig.hooks.mappings ?? []).filter( + (mapping) => + mapping.id === "clawdentity-send-to-peer" || + (mapping.match as { path?: string })?.path === "send-to-peer", + ); + expect(relayMappings).toHaveLength(1); + } finally { + sandbox.cleanup(); + } + }); + + it("requires peer alias when invite code omits it", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }); + + await expect( + setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }), + ).rejects.toThrow( + "Peer alias is required. 
Include alias in invite code or pass --peer-alias.", + ); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts new file mode 100644 index 0000000..1c0e597 --- /dev/null +++ b/apps/cli/src/commands/openclaw.ts @@ -0,0 +1,759 @@ +import { chmod, copyFile, mkdir, readFile, writeFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { dirname, join } from "node:path"; +import { + decodeBase64url, + encodeBase64url, + parseDid, +} from "@clawdentity/protocol"; +import { AppError, createLogger, nowIso } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { writeStdoutLine } from "../io.js"; +import { assertValidAgentName } from "./agent-name.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "openclaw" }); + +const AGENTS_DIR_NAME = "agents"; +const AIT_FILE_NAME = "ait.jwt"; +const SECRET_KEY_FILE_NAME = "secret.key"; +const PEERS_FILE_NAME = "peers.json"; +const OPENCLAW_DIR_NAME = ".openclaw"; +const OPENCLAW_CONFIG_FILE_NAME = "openclaw.json"; +const OPENCLAW_AGENT_FILE_NAME = "openclaw-agent-name"; +const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; +const RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; +const HOOK_MAPPING_ID = "clawdentity-send-to-peer"; +const HOOK_PATH_SEND_TO_PEER = "send-to-peer"; +const INVITE_CODE_PREFIX = "clawd1_"; +const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; +const FILE_MODE = 0o600; +const textEncoder = new TextEncoder(); +const textDecoder = new TextDecoder(); + +type OpenclawInvitePayload = { + v: 1; + issuedAt: string; + did: string; + proxyUrl: string; + alias?: string; + name?: string; +}; + +type OpenclawInviteOptions = { + did: string; + proxyUrl: string; + peerAlias?: string; + name?: string; +}; + +type OpenclawSetupOptions = { + inviteCode: string; + peerAlias?: string; + openclawDir?: string; + transformSource?: string; + homeDir?: string; +}; + 
+type PeerEntry = { + did: string; + proxyUrl: string; + name?: string; +}; + +type PeersConfig = { + peers: Record; +}; + +export type OpenclawInviteResult = { + code: string; + did: string; + proxyUrl: string; + peerAlias?: string; + name?: string; +}; + +export type OpenclawSetupResult = { + peerAlias: string; + peerDid: string; + peerProxyUrl: string; + openclawConfigPath: string; + transformTargetPath: string; +}; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function createCliError( + code: string, + message: string, + details?: Record, +): AppError { + return new AppError({ + code, + message, + status: 400, + details, + }); +} + +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? error.code : undefined; +} + +function parseNonEmptyString(value: unknown, label: string): string { + if (typeof value !== "string") { + throw createCliError( + "CLI_OPENCLAW_INVALID_INPUT", + "Input must be a string", + { + label, + }, + ); + } + + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INPUT", + "Input must not be empty", + { label }, + ); + } + + return trimmed; +} + +function parseOptionalName(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + + return parseNonEmptyString(value, "name"); +} + +function parsePeerAlias(value: unknown): string { + const alias = parseNonEmptyString(value, "peer alias"); + if (alias.length > 128) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEER_ALIAS", + "peer alias must be at most 128 characters", + ); + } + + if (!PEER_ALIAS_PATTERN.test(alias)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEER_ALIAS", + "peer alias must use only letters, numbers, dot, underscore, or hyphen", + ); + } + + return alias; +} + +function parseProxyUrl(value: unknown): string { + 
const candidate = parseNonEmptyString(value, "proxy URL"); + + let parsedUrl: URL; + try { + parsedUrl = new URL(candidate); + } catch { + throw createCliError( + "CLI_OPENCLAW_INVALID_PROXY_URL", + "proxy URL must be a valid URL", + ); + } + + if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { + throw createCliError( + "CLI_OPENCLAW_INVALID_PROXY_URL", + "proxy URL must use http or https", + ); + } + + return parsedUrl.toString(); +} + +function parseAgentDid(value: unknown, label: string): string { + const did = parseNonEmptyString(value, label); + + try { + const parsed = parseDid(did); + if (parsed.kind !== "agent") { + throw createCliError( + "CLI_OPENCLAW_INVALID_DID", + "DID is not an agent DID", + ); + } + } catch { + throw createCliError("CLI_OPENCLAW_INVALID_DID", "Agent DID is invalid", { + label, + }); + } + + return did; +} + +function parseInvitePayload(value: unknown): OpenclawInvitePayload { + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite payload must be an object", + ); + } + + if (value.v !== 1) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite payload version is unsupported", + ); + } + + const issuedAt = parseNonEmptyString(value.issuedAt, "invite issuedAt"); + const did = parseAgentDid(value.did, "invite did"); + const proxyUrl = parseProxyUrl(value.proxyUrl); + const alias = + value.alias === undefined ? 
undefined : parsePeerAlias(value.alias); + const name = parseOptionalName(value.name); + + if (alias === undefined && name === undefined) { + return { + v: 1, + issuedAt, + did, + proxyUrl, + }; + } + + if (name === undefined) { + return { + v: 1, + issuedAt, + did, + proxyUrl, + alias, + }; + } + + return { + v: 1, + issuedAt, + did, + proxyUrl, + alias, + name, + }; +} + +function resolveHomeDir(homeDir?: string): string { + if (typeof homeDir === "string" && homeDir.trim().length > 0) { + return homeDir.trim(); + } + + return homedir(); +} + +function resolveOpenclawDir(openclawDir: string | undefined, homeDir: string) { + if (typeof openclawDir === "string" && openclawDir.trim().length > 0) { + return openclawDir.trim(); + } + + return join(homeDir, OPENCLAW_DIR_NAME); +} + +function resolveAgentDirectory(homeDir: string, agentName: string): string { + return join(homeDir, ".clawdentity", AGENTS_DIR_NAME, agentName); +} + +function resolvePeersPath(homeDir: string): string { + return join(homeDir, ".clawdentity", PEERS_FILE_NAME); +} + +function resolveOpenclawConfigPath(openclawDir: string): string { + return join(openclawDir, OPENCLAW_CONFIG_FILE_NAME); +} + +function resolveDefaultTransformSource(openclawDir: string): string { + return join( + openclawDir, + "workspace", + "skills", + SKILL_DIR_NAME, + RELAY_MODULE_FILE_NAME, + ); +} + +function resolveTransformTargetPath(openclawDir: string): string { + return join(openclawDir, "hooks", "transforms", RELAY_MODULE_FILE_NAME); +} + +function resolveOpenclawAgentNamePath(homeDir: string): string { + return join(homeDir, ".clawdentity", OPENCLAW_AGENT_FILE_NAME); +} + +async function readJsonFile(filePath: string): Promise { + const raw = await readFile(filePath, "utf8"); + + try { + return JSON.parse(raw); + } catch { + throw createCliError("CLI_OPENCLAW_INVALID_JSON", "JSON file is invalid", { + filePath, + }); + } +} + +async function ensureLocalAgentCredentials( + homeDir: string, + agentName: string, +): 
Promise { + const agentDir = resolveAgentDirectory(homeDir, agentName); + const requiredFiles = [ + join(agentDir, SECRET_KEY_FILE_NAME), + join(agentDir, AIT_FILE_NAME), + ]; + + for (const filePath of requiredFiles) { + let content: string; + try { + content = await readFile(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_MISSING_AGENT_CREDENTIALS", + "Local agent credentials are missing", + { agentName, filePath }, + ); + } + + throw error; + } + + if (content.trim().length === 0) { + throw createCliError( + "CLI_OPENCLAW_EMPTY_AGENT_CREDENTIALS", + "Agent credential file is empty", + { filePath }, + ); + } + } +} + +function encodeInvitePayload(payload: OpenclawInvitePayload): string { + const encoded = encodeBase64url(textEncoder.encode(JSON.stringify(payload))); + return `${INVITE_CODE_PREFIX}${encoded}`; +} + +function decodeInvitePayload(code: string): OpenclawInvitePayload { + const rawCode = parseNonEmptyString(code, "invite code"); + if (!rawCode.startsWith(INVITE_CODE_PREFIX)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "Invite code has invalid prefix", + ); + } + + const encoded = rawCode.slice(INVITE_CODE_PREFIX.length); + if (encoded.length === 0) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite code payload is empty", + ); + } + + let decodedJson: string; + try { + decodedJson = textDecoder.decode(decodeBase64url(encoded)); + } catch { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite code payload is not valid base64url", + ); + } + + let parsedPayload: unknown; + try { + parsedPayload = JSON.parse(decodedJson); + } catch { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite code payload is not valid JSON", + ); + } + + return parseInvitePayload(parsedPayload); +} + +async function writeSecureFile( + filePath: string, + content: string, +): Promise { + await mkdir(dirname(filePath), { recursive: true }); + 
await writeFile(filePath, content, "utf8"); + await chmod(filePath, FILE_MODE); +} + +async function loadPeersConfig(peersPath: string): Promise { + let parsed: unknown; + + try { + parsed = await readJsonFile(peersPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return { peers: {} }; + } + + throw error; + } + + if (!isRecord(parsed)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEERS_CONFIG", + "Peer config root must be a JSON object", + { peersPath }, + ); + } + + const peersValue = parsed.peers; + if (peersValue === undefined) { + return { peers: {} }; + } + + if (!isRecord(peersValue)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEERS_CONFIG", + "Peer config peers field must be an object", + { peersPath }, + ); + } + + const peers: Record = {}; + for (const [alias, value] of Object.entries(peersValue)) { + const normalizedAlias = parsePeerAlias(alias); + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEERS_CONFIG", + "Peer entry must be an object", + { alias: normalizedAlias }, + ); + } + + const did = parseAgentDid(value.did, `Peer ${normalizedAlias} did`); + const proxyUrl = parseProxyUrl(value.proxyUrl); + const name = parseOptionalName(value.name); + + if (name === undefined) { + peers[normalizedAlias] = { did, proxyUrl }; + continue; + } + + peers[normalizedAlias] = { did, proxyUrl, name }; + } + + return { peers }; +} + +async function savePeersConfig( + peersPath: string, + config: PeersConfig, +): Promise { + await writeSecureFile(peersPath, `${JSON.stringify(config, null, 2)}\n`); +} + +function normalizeStringArrayWithValue( + value: unknown, + requiredValue: string, +): string[] { + const normalized = new Set(); + + if (Array.isArray(value)) { + for (const item of value) { + if (typeof item !== "string") { + continue; + } + + const trimmed = item.trim(); + if (trimmed.length > 0) { + normalized.add(trimmed); + } + } + } + + normalized.add(requiredValue); + + return Array.from(normalized); +} 
+ +function upsertRelayHookMapping( + mappingsValue: unknown, +): Record[] { + const mappings = Array.isArray(mappingsValue) + ? mappingsValue.filter(isRecord).map((mapping) => ({ ...mapping })) + : []; + + const existingIndex = mappings.findIndex((mapping) => { + if (mapping.id === HOOK_MAPPING_ID) { + return true; + } + + if (!isRecord(mapping.match)) { + return false; + } + + return mapping.match.path === HOOK_PATH_SEND_TO_PEER; + }); + + const baseMapping = + existingIndex >= 0 && isRecord(mappings[existingIndex]) + ? mappings[existingIndex] + : {}; + + const nextMatch = isRecord(baseMapping.match) ? { ...baseMapping.match } : {}; + nextMatch.path = HOOK_PATH_SEND_TO_PEER; + + const nextTransform = isRecord(baseMapping.transform) + ? { ...baseMapping.transform } + : {}; + nextTransform.module = RELAY_MODULE_FILE_NAME; + + const relayMapping: Record = { + ...baseMapping, + id: HOOK_MAPPING_ID, + match: nextMatch, + action: "agent", + wakeMode: "now", + transform: nextTransform, + }; + + if (existingIndex >= 0) { + mappings[existingIndex] = relayMapping; + return mappings; + } + + mappings.push(relayMapping); + return mappings; +} + +async function patchOpenclawConfig(openclawConfigPath: string): Promise { + let config: unknown; + try { + config = await readJsonFile(openclawConfigPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_CONFIG_NOT_FOUND", + "OpenClaw config file was not found", + { openclawConfigPath }, + ); + } + + throw error; + } + + if (!isRecord(config)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_CONFIG", + "OpenClaw config root must be an object", + { openclawConfigPath }, + ); + } + + const hooks = isRecord(config.hooks) ? 
{ ...config.hooks } : {}; + + hooks.enabled = true; + hooks.allowRequestSessionKey = true; + hooks.allowedSessionKeyPrefixes = normalizeStringArrayWithValue( + hooks.allowedSessionKeyPrefixes, + "hook:", + ); + hooks.mappings = upsertRelayHookMapping(hooks.mappings); + + const nextConfig = { + ...config, + hooks, + }; + + await writeFile( + openclawConfigPath, + `${JSON.stringify(nextConfig, null, 2)}\n`, + "utf8", + ); +} + +export function createOpenclawInviteCode( + options: OpenclawInviteOptions, +): OpenclawInviteResult { + const did = parseAgentDid(options.did, "invite did"); + const proxyUrl = parseProxyUrl(options.proxyUrl); + const peerAlias = + options.peerAlias === undefined + ? undefined + : parsePeerAlias(options.peerAlias); + const name = parseOptionalName(options.name); + + const payload = parseInvitePayload({ + v: 1, + issuedAt: nowIso(), + did, + proxyUrl, + alias: peerAlias, + name, + }); + + const result: OpenclawInviteResult = { + code: encodeInvitePayload(payload), + did: payload.did, + proxyUrl: payload.proxyUrl, + peerAlias: payload.alias, + name: payload.name, + }; + + return result; +} + +export function decodeOpenclawInviteCode(code: string): OpenclawInvitePayload { + return decodeInvitePayload(code); +} + +export async function setupOpenclawRelayFromInvite( + agentName: string, + options: OpenclawSetupOptions, +): Promise { + const normalizedAgentName = assertValidAgentName(agentName); + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); + const openclawConfigPath = resolveOpenclawConfigPath(openclawDir); + const transformSource = + typeof options.transformSource === "string" && + options.transformSource.trim().length > 0 + ? 
options.transformSource.trim() + : resolveDefaultTransformSource(openclawDir); + const transformTargetPath = resolveTransformTargetPath(openclawDir); + const invite = decodeInvitePayload(options.inviteCode); + const peerAliasCandidate = options.peerAlias ?? invite.alias; + + if (!peerAliasCandidate) { + throw createCliError( + "CLI_OPENCLAW_PEER_ALIAS_REQUIRED", + "Peer alias is required. Include alias in invite code or pass --peer-alias.", + ); + } + + const peerAlias = parsePeerAlias(peerAliasCandidate); + + await ensureLocalAgentCredentials(homeDir, normalizedAgentName); + await mkdir(dirname(transformTargetPath), { recursive: true }); + try { + await copyFile(transformSource, transformTargetPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_TRANSFORM_NOT_FOUND", + "Relay transform source file was not found", + { transformSource }, + ); + } + + throw error; + } + + await patchOpenclawConfig(openclawConfigPath); + + const peersPath = resolvePeersPath(homeDir); + const peers = await loadPeersConfig(peersPath); + peers.peers[peerAlias] = + invite.name === undefined + ? 
{ did: invite.did, proxyUrl: invite.proxyUrl } + : { did: invite.did, proxyUrl: invite.proxyUrl, name: invite.name }; + await savePeersConfig(peersPath, peers); + + const agentNamePath = resolveOpenclawAgentNamePath(homeDir); + await writeSecureFile(agentNamePath, `${normalizedAgentName}\n`); + + logger.info("cli.openclaw_setup_completed", { + agentName: normalizedAgentName, + peerAlias, + peerDid: invite.did, + openclawConfigPath, + transformTargetPath, + }); + + return { + peerAlias, + peerDid: invite.did, + peerProxyUrl: invite.proxyUrl, + openclawConfigPath, + transformTargetPath, + }; +} + +export const createOpenclawCommand = (): Command => { + const openclawCommand = new Command("openclaw").description( + "Manage OpenClaw invite codes and relay setup", + ); + + openclawCommand + .command("invite") + .description("Create an invite code for peer relay onboarding") + .requiredOption("--did ", "Peer agent DID (did:claw:agent:...)") + .requiredOption( + "--proxy-url ", + "Public proxy URL ending in /hooks/agent", + ) + .option("--peer-alias ", "Suggested peer alias for the receiver") + .option("--name ", "Human-friendly peer display name") + .action( + withErrorHandling( + "openclaw invite", + async (options: OpenclawInviteOptions) => { + const invite = createOpenclawInviteCode(options); + + writeStdoutLine(`Invite code: ${invite.code}`); + writeStdoutLine(`Agent DID: ${invite.did}`); + writeStdoutLine(`Proxy URL: ${invite.proxyUrl}`); + if (invite.peerAlias) { + writeStdoutLine(`Suggested Alias: ${invite.peerAlias}`); + } + }, + ), + ); + + openclawCommand + .command("setup ") + .description("Apply OpenClaw relay setup using an invite code") + .requiredOption( + "--invite-code ", + "Invite code shared by peer operator", + ) + .option("--peer-alias ", "Override peer alias for local routing") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option( + "--transform-source ", + "Path to relay-to-peer.mjs (default 
/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs)", + ) + .action( + withErrorHandling( + "openclaw setup", + async (agentName: string, options: OpenclawSetupOptions) => { + const result = await setupOpenclawRelayFromInvite(agentName, options); + writeStdoutLine(`Peer alias configured: ${result.peerAlias}`); + writeStdoutLine(`Peer DID: ${result.peerDid}`); + writeStdoutLine(`Peer proxy URL: ${result.peerProxyUrl}`); + writeStdoutLine( + `Updated OpenClaw config: ${result.openclawConfigPath}`, + ); + writeStdoutLine(`Installed transform: ${result.transformTargetPath}`); + }, + ), + ); + + return openclawCommand; +}; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 6103dea..8810c62 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -34,6 +34,14 @@ describe("cli", () => { expect(hasVerifyCommand).toBe(true); }); + it("registers the openclaw command", () => { + const hasOpenclawCommand = createProgram() + .commands.map((command) => command.name()) + .includes("openclaw"); + + expect(hasOpenclawCommand).toBe(true); + }); + it("prints version output", async () => { const output: string[] = []; const program = createProgram(); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 366bb1a..97f6ce6 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,6 +1,7 @@ import { Command } from "commander"; import { createAgentCommand } from "./commands/agent.js"; import { createConfigCommand } from "./commands/config.js"; +import { createOpenclawCommand } from "./commands/openclaw.js"; import { createVerifyCommand } from "./commands/verify.js"; export const CLI_VERSION = "0.0.0"; @@ -11,5 +12,6 @@ export const createProgram = (): Command => { .version(CLI_VERSION) .addCommand(createAgentCommand()) .addCommand(createConfigCommand()) + .addCommand(createOpenclawCommand()) .addCommand(createVerifyCommand()); }; diff --git a/apps/cli/tsup.config.ts b/apps/cli/tsup.config.ts index 0305890..364351c 
100644 --- a/apps/cli/tsup.config.ts +++ b/apps/cli/tsup.config.ts @@ -3,6 +3,11 @@ import { defineConfig } from "tsup"; export default defineConfig({ entry: ["src/index.ts", "src/bin.ts"], format: ["esm"], + bundle: true, + splitting: false, + noExternal: ["@clawdentity/protocol", "@clawdentity/sdk"], + platform: "node", + target: "node22", dts: true, clean: true, banner: { diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md new file mode 100644 index 0000000..ec5f7bf --- /dev/null +++ b/apps/openclaw-skill/AGENTS.md @@ -0,0 +1,39 @@ +# AGENTS.md (apps/openclaw-skill) + +## Purpose +- Define conventions for the OpenClaw skill package that relays selected payloads to remote Clawdentity peers. +- Keep peer routing config, credential loading, and PoP signing deterministic and testable. + +## Filesystem Contracts +- Peer routing map lives at `~/.clawdentity/peers.json` by default. +- Local agent credentials are read from `~/.clawdentity/agents//secret.key` and `~/.clawdentity/agents//ait.jwt`. +- Agent selection order for relay runtime: + - explicit transform override (`agentName`) + - environment (`CLAWDENTITY_AGENT_NAME`) + - `~/.clawdentity/openclaw-agent-name` + - single local agent auto-detection +- Never commit local runtime files (`peers.json`, `secret.key`, `ait.jwt`) to the repository. + +## Transform Rules +- `src/transforms/peers-config.ts` is the only module that reads/writes peers config. +- Validate all peers config reads/writes with schema parsing before use. +- `src/transforms/relay-to-peer.ts` must: + - expose default export accepting OpenClaw transform context (`ctx.payload`) + - read `payload.peer` + - resolve peer proxy URL from peers config + - sign outbound POST with `signHttpRequest` + - send `Authorization: Claw ` and `X-Claw-*` PoP headers + - remove `peer` from forwarded JSON payload + - return `null` after successful relay so local handling is skipped +- If `payload.peer` is absent, return payload unchanged. 
+- Keep setup flow CLI-driven via `clawdentity openclaw setup`; do not add `configure-hooks.sh`. + +## Maintainability +- Keep filesystem path logic centralized; avoid hardcoding `~/.clawdentity` paths across multiple files. +- Keep relay behavior pure except for explicit dependencies (`fetch`, clock, random bytes, filesystem) so tests stay deterministic. +- Prefer schema-first runtime validation over ad-hoc guards. + +## Validation Commands +- `pnpm -F @clawdentity/openclaw-skill typecheck` +- `pnpm -F @clawdentity/openclaw-skill test` +- `pnpm -F @clawdentity/openclaw-skill build` diff --git a/apps/openclaw-skill/package.json b/apps/openclaw-skill/package.json new file mode 100644 index 0000000..e88b99e --- /dev/null +++ b/apps/openclaw-skill/package.json @@ -0,0 +1,20 @@ +{ + "name": "@clawdentity/openclaw-skill", + "version": "0.0.0", + "private": true, + "type": "module", + "scripts": { + "build": "tsup", + "format": "biome format .", + "lint": "biome lint .", + "test": "vitest run", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@clawdentity/protocol": "workspace:*", + "@clawdentity/sdk": "workspace:*" + }, + "devDependencies": { + "@types/node": "^22.18.11" + } +} diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md new file mode 100644 index 0000000..ed941da --- /dev/null +++ b/apps/openclaw-skill/skill/SKILL.md @@ -0,0 +1,111 @@ +--- +name: clawdentity_openclaw_relay +description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication with Clawdentity PoP verification. +version: 0.1.0 +--- + +# Clawdentity OpenClaw Relay Skill + +This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through Clawdentity identity (`Authorization: Claw ` + PoP headers) using a single invite code. 
+ +## Trigger Conditions + +Use this skill when any of the following are requested: +- Install relay support for OpenClaw peer communication. +- Complete first-time setup from an invite code. +- Repair broken relay setup after config drift. +- Verify invite-code onboarding and peer mapping. + +## Filesystem Truth (must be used exactly) + +### OpenClaw state files +- OpenClaw state root (default): `~/.openclaw` +- OpenClaw config: `~/.openclaw/openclaw.json` +- Transform target path: `~/.openclaw/hooks/transforms/relay-to-peer.mjs` +- Workspace skill location: `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` +- Default transform source expected by CLI setup: + `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` + +### Clawdentity identity files +- Clawdentity root: `~/.clawdentity` +- Agent config: `~/.clawdentity/config.json` +- Agent identity directory: `~/.clawdentity/agents//` +- Agent private key: `~/.clawdentity/agents//secret.key` +- Agent AIT token: `~/.clawdentity/agents//ait.jwt` +- Peer map: `~/.clawdentity/peers.json` +- Local selected agent marker: `~/.clawdentity/openclaw-agent-name` + +## Operator Split + +### Admin/operator side (only action required) +Create invite code: + +`clawdentity openclaw invite --did --proxy-url --peer-alias ` + +Share the invite code string with the human who owns the target agent. + +## Human + Agent Flow (strict user-style) + +Follow this order. Do not skip steps. + +1. Confirm prerequisites with the human. +- Confirm `clawdentity` CLI is installed and runnable. +- Confirm API key exists for this agent (if missing, ask the human for it). +- Confirm OpenClaw state directory path if non-default. + +2. Confirm skill artifact exists in workspace skills directory. +- Ensure `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` exists. +- If missing, install/update skill package contents before setup. + +3. 
Configure local Clawdentity identity for this OpenClaw agent. +- Run `clawdentity config init`. +- If needed, ask the human for API key and run `clawdentity config set apiKey `. +- Create identity: `clawdentity agent create --framework openclaw`. +- Verify identity: `clawdentity agent inspect `. + +4. Ask the human for invite code. +- Prompt exactly for one invite code string. +- Do not ask for DID/proxy URL when invite code is present. + +5. Run automated setup from invite code. +- Execute: + `clawdentity openclaw setup --invite-code ` +- Use `--openclaw-dir ` when state directory is non-default. +- Use `--peer-alias ` only when alias override is required. + +6. Verify setup outputs. +- Confirm setup reports: + - peer alias + - peer DID + - updated OpenClaw config path + - installed transform path +- Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. + +7. Validate with user-style relay test. +- Human asks Alpha to send a request with `peer: "beta"`. +- Agent relays with Claw + PoP headers. +- Peer proxy verifies and forwards to peer OpenClaw. +- Verify success logs on both sides. + +## Required question policy + +Ask the human only when required inputs are missing: +- Missing Clawdentity API key. +- Unclear OpenClaw state directory. +- Missing invite code. + +## Failure Handling + +If setup or relay fails: +- Report precise missing file/path/value. +- Fix only the failing config/input. +- Re-run the same user-style flow from step 5 onward. + +## Bundled Resources + +### References +| File | Purpose | +|------|---------| +| `references/clawdentity-protocol.md` | Header format, peer map schema, and relay verification details | + +Directive: read the reference file before troubleshooting protocol or signature failures. 
diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md new file mode 100644 index 0000000..7ef14b4 --- /dev/null +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -0,0 +1,113 @@ +# Clawdentity Relay Protocol Reference + +## Purpose + +Define the exact runtime contract used by `relay-to-peer.mjs`. + +## Filesystem Paths + +### OpenClaw files +- `~/.openclaw/openclaw.json` +- `~/.openclaw/hooks/transforms/relay-to-peer.mjs` +- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` + +### Clawdentity files +- `~/.clawdentity/config.json` +- `~/.clawdentity/agents//secret.key` +- `~/.clawdentity/agents//ait.jwt` +- `~/.clawdentity/peers.json` +- `~/.clawdentity/openclaw-agent-name` + +## Invite Code Contract + +Invite codes are prefixed with `clawd1_` and contain base64url JSON: + +```json +{ + "v": 1, + "issuedAt": "2026-02-15T20:00:00.000Z", + "did": "did:claw:agent:01H...", + "proxyUrl": "https://beta-proxy.example.com/hooks/agent", + "alias": "beta", + "name": "Beta Agent" +} +``` + +Rules: +- `v` must be `1`. +- `issuedAt` is ISO-8601 UTC timestamp. +- `did` must be an agent DID. +- `proxyUrl` must be absolute `http` or `https`. +- `alias` is optional but preferred for zero-question setup. + +## Peer Map Schema + +`~/.clawdentity/peers.json` must be valid JSON: + +```json +{ + "peers": { + "beta": { + "did": "did:claw:agent:01H...", + "proxyUrl": "https://beta-proxy.example.com/hooks/agent", + "name": "Beta Agent" + } + } +} +``` + +Rules: +- peer alias key uses `[a-zA-Z0-9._-]` +- `did` required and must begin with `did:` +- `proxyUrl` required and must be a valid absolute URL +- `name` optional + +## Relay Input Contract + +The OpenClaw transform reads `ctx.payload`. 
+ +- If `payload.peer` is absent: + - return payload unchanged + - do not relay +- If `payload.peer` exists: + - resolve peer from `peers.json` + - remove `peer` from forwarded body + - send JSON POST to `peer.proxyUrl` + - return `null` to skip local handling + +## Relay Agent Selection Contract + +Relay resolves local agent name in this order: +1. transform option `agentName` +2. `CLAWDENTITY_AGENT_NAME` +3. `~/.clawdentity/openclaw-agent-name` +4. single local agent fallback from `~/.clawdentity/agents/` + +## Outbound Auth Contract + +Headers sent to peer proxy: +- `Authorization: Claw ` +- `Content-Type: application/json` +- `X-Claw-Timestamp` +- `X-Claw-Nonce` +- `X-Claw-Body-SHA256` +- `X-Claw-Proof` + +Signing inputs: +- HTTP method: `POST` +- path+query from peer URL +- unix seconds timestamp +- random nonce +- outbound JSON body bytes +- agent secret key from `secret.key` + +## Error Conditions + +Relay fails when: +- no selected local agent can be resolved +- peer alias missing from config +- `secret.key` or `ait.jwt` missing/empty/invalid +- peer returns non-2xx +- peer network request fails + +Error messages should include file/path context but never print secret content. diff --git a/apps/openclaw-skill/src/AGENTS.md b/apps/openclaw-skill/src/AGENTS.md new file mode 100644 index 0000000..bf13b77 --- /dev/null +++ b/apps/openclaw-skill/src/AGENTS.md @@ -0,0 +1,18 @@ +# AGENTS.md (apps/openclaw-skill/src) + +## Source Layout +- Keep package exports in `index.ts` only. +- Keep peer config helpers in `transforms/peers-config.ts`. +- Keep network relay behavior in `transforms/relay-to-peer.ts`. + +## Safety Rules +- Validate external input (`payload`, peer config JSON, selected agent name) before use. +- Resolve selected agent in deterministic order: explicit option, env var, `~/.clawdentity/openclaw-agent-name`, then single-agent fallback. +- Do not log or persist secret material from `secret.key` or `ait.jwt`. 
+- Keep outbound peer requests as JSON POSTs with explicit auth + PoP headers. +- Keep peer schema strict (`did`, `proxyUrl`, optional `name`) and reject malformed values early. + +## Testing Rules +- Use temp directories for filesystem tests; no dependency on real user home state. +- Mock `fetch` in relay tests and assert emitted headers/body. +- Cover both happy path and failure paths (missing peer mapping, missing credentials, invalid config). diff --git a/apps/openclaw-skill/src/index.ts b/apps/openclaw-skill/src/index.ts new file mode 100644 index 0000000..84a7e78 --- /dev/null +++ b/apps/openclaw-skill/src/index.ts @@ -0,0 +1,17 @@ +export type { + PeerEntry, + PeersConfig, + PeersConfigPathOptions, +} from "./transforms/peers-config.js"; +export { + addPeer, + loadPeersConfig, + resolvePeersConfigPath, + savePeersConfig, +} from "./transforms/peers-config.js"; + +export type { + RelayToPeerOptions, + RelayTransformContext, +} from "./transforms/relay-to-peer.js"; +export { relayPayloadToPeer } from "./transforms/relay-to-peer.js"; diff --git a/apps/openclaw-skill/src/transforms/peers-config.test.ts b/apps/openclaw-skill/src/transforms/peers-config.test.ts new file mode 100644 index 0000000..709e832 --- /dev/null +++ b/apps/openclaw-skill/src/transforms/peers-config.test.ts @@ -0,0 +1,130 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { + addPeer, + loadPeersConfig, + resolvePeersConfigPath, + savePeersConfig, +} from "./peers-config.js"; + +function createSandbox(): { cleanup: () => void; homeDir: string } { + const root = mkdtempSync(join(tmpdir(), "clawdentity-openclaw-skill-")); + + return { + cleanup: () => { + rmSync(root, { recursive: true, force: true }); + }, + homeDir: root, + }; +} + +describe("peers config", () => { + it("returns an empty config when peers.json is missing", async () => { + 
const sandbox = createSandbox(); + + try { + const config = await loadPeersConfig({ homeDir: sandbox.homeDir }); + + expect(config).toEqual({ peers: {} }); + expect(resolvePeersConfigPath({ homeDir: sandbox.homeDir })).toBe( + join(sandbox.homeDir, ".clawdentity", "peers.json"), + ); + } finally { + sandbox.cleanup(); + } + }); + + it("saves and loads valid peer mappings", async () => { + const sandbox = createSandbox(); + + try { + await savePeersConfig( + { + peers: { + beta: { + did: "did:claw:agent:01TEST", + proxyUrl: "https://beta.example.com/hooks/agent", + name: "Beta Agent", + }, + }, + }, + { homeDir: sandbox.homeDir }, + ); + + const loaded = await loadPeersConfig({ homeDir: sandbox.homeDir }); + expect(loaded).toEqual({ + peers: { + beta: { + did: "did:claw:agent:01TEST", + proxyUrl: "https://beta.example.com/hooks/agent", + name: "Beta Agent", + }, + }, + }); + } finally { + sandbox.cleanup(); + } + }); + + it("adds or replaces peers through addPeer", async () => { + const sandbox = createSandbox(); + + try { + await addPeer( + "alpha", + { + did: "did:claw:agent:01ALPHA", + proxyUrl: "https://alpha.example.com/hooks/agent", + }, + { homeDir: sandbox.homeDir }, + ); + + const loaded = await loadPeersConfig({ homeDir: sandbox.homeDir }); + expect(loaded).toEqual({ + peers: { + alpha: { + did: "did:claw:agent:01ALPHA", + proxyUrl: "https://alpha.example.com/hooks/agent", + }, + }, + }); + } finally { + sandbox.cleanup(); + } + }); + + it("rejects malformed JSON or invalid schema", async () => { + const sandbox = createSandbox(); + + try { + const configPath = join(sandbox.homeDir, ".clawdentity", "peers.json"); + mkdirSync(join(sandbox.homeDir, ".clawdentity"), { recursive: true }); + + writeFileSync(configPath, "{not-json", "utf8"); + await expect( + loadPeersConfig({ homeDir: sandbox.homeDir }), + ).rejects.toThrow("not valid JSON"); + + writeFileSync( + configPath, + JSON.stringify({ + peers: { + invalid: { + did: "not-a-did", + proxyUrl: "not-a-url", 
+ }, + }, + }), + "utf8", + ); + + await expect( + loadPeersConfig({ homeDir: sandbox.homeDir }), + ).rejects.toThrow("Peer config validation failed"); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/apps/openclaw-skill/src/transforms/peers-config.ts b/apps/openclaw-skill/src/transforms/peers-config.ts new file mode 100644 index 0000000..1f9c10a --- /dev/null +++ b/apps/openclaw-skill/src/transforms/peers-config.ts @@ -0,0 +1,231 @@ +import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { dirname, join } from "node:path"; + +const CLAWDENTITY_DIR = ".clawdentity"; +const PEERS_FILENAME = "peers.json"; +const FILE_MODE = 0o600; +const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; + +export type PeerEntry = { + did: string; + proxyUrl: string; + name?: string; +}; + +export type PeersConfig = { + peers: Record; +}; + +export type PeersConfigPathOptions = { + configDir?: string; + configPath?: string; + homeDir?: string; +}; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? 
error.code : undefined; +} + +function parseNonEmptyString(value: unknown, label: string): string { + if (typeof value !== "string") { + throw new Error(`${label} must be a string`); + } + + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error(`${label} must not be empty`); + } + + return trimmed; +} + +function parsePeerAlias(value: unknown): string { + const alias = parseNonEmptyString(value, "peer alias"); + + if (alias.length > 128) { + throw new Error("peer alias must be at most 128 characters"); + } + + if (!PEER_ALIAS_PATTERN.test(alias)) { + throw new Error( + "peer alias must use only letters, numbers, dot, underscore, or hyphen", + ); + } + + return alias; +} + +function parseDid(value: unknown): string { + const did = parseNonEmptyString(value, "did"); + if (!did.startsWith("did:")) { + throw new Error("did must start with 'did:'"); + } + + return did; +} + +function parseProxyUrl(value: unknown): string { + const candidate = parseNonEmptyString(value, "proxyUrl"); + + try { + return new URL(candidate).toString(); + } catch { + throw new Error("proxyUrl must be a valid URL"); + } +} + +function parsePeerName(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + + return parseNonEmptyString(value, "name"); +} + +function parsePeerEntry(value: unknown): PeerEntry { + if (!isRecord(value)) { + throw new Error("peer entry must be an object"); + } + + const did = parseDid(value.did); + const proxyUrl = parseProxyUrl(value.proxyUrl); + const name = parsePeerName(value.name); + + if (name === undefined) { + return { did, proxyUrl }; + } + + return { did, proxyUrl, name }; +} + +function parsePeersConfig(value: unknown, source: string): PeersConfig { + if (!isRecord(value)) { + throw new Error( + `Peer config validation failed at ${source}: root must be an object`, + ); + } + + const peersRaw = value.peers; + if (peersRaw === undefined) { + return { peers: {} }; + } + + if (!isRecord(peersRaw)) { + 
throw new Error( + `Peer config validation failed at ${source}: peers must be an object`, + ); + } + + const peers: Record = {}; + for (const [alias, peerValue] of Object.entries(peersRaw)) { + const normalizedAlias = parsePeerAlias(alias); + + try { + peers[normalizedAlias] = parsePeerEntry(peerValue); + } catch (error) { + const reason = error instanceof Error ? error.message : String(error); + throw new Error( + `Peer config validation failed at ${source}: peers.${normalizedAlias}: ${reason}`, + ); + } + } + + return { peers }; +} + +export function resolvePeersConfigPath( + options: PeersConfigPathOptions = {}, +): string { + if ( + typeof options.configPath === "string" && + options.configPath.trim().length > 0 + ) { + return options.configPath.trim(); + } + + if ( + typeof options.configDir === "string" && + options.configDir.trim().length > 0 + ) { + return join(options.configDir.trim(), PEERS_FILENAME); + } + + const home = + typeof options.homeDir === "string" && options.homeDir.trim().length > 0 + ? 
options.homeDir.trim() + : homedir(); + + return join(home, CLAWDENTITY_DIR, PEERS_FILENAME); +} + +export async function loadPeersConfig( + options: PeersConfigPathOptions = {}, +): Promise { + const configPath = resolvePeersConfigPath(options); + + let rawJson: string; + try { + rawJson = await readFile(configPath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return { peers: {} }; + } + + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawJson); + } catch { + throw new Error(`Peer config at ${configPath} is not valid JSON`); + } + + return parsePeersConfig(parsed, configPath); +} + +export async function savePeersConfig( + config: PeersConfig, + options: PeersConfigPathOptions = {}, +): Promise { + const configPath = resolvePeersConfigPath(options); + const parsedConfig = parsePeersConfig(config, configPath); + + await mkdir(dirname(configPath), { recursive: true }); + await writeFile( + configPath, + `${JSON.stringify(parsedConfig, null, 2)}\n`, + "utf8", + ); + await chmod(configPath, FILE_MODE); +} + +export async function addPeer( + alias: string, + entry: PeerEntry, + options: PeersConfigPathOptions = {}, +): Promise { + const normalizedAlias = parsePeerAlias(alias); + const normalizedEntry = parsePeerEntry(entry); + const current = await loadPeersConfig(options); + + const next: PeersConfig = { + peers: { + ...current.peers, + [normalizedAlias]: normalizedEntry, + }, + }; + + await savePeersConfig(next, options); + + return next; +} diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts new file mode 100644 index 0000000..6834755 --- /dev/null +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts @@ -0,0 +1,224 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { encodeBase64url } from "@clawdentity/protocol"; +import { 
describe, expect, it, vi } from "vitest"; +import relayToPeer, { relayPayloadToPeer } from "./relay-to-peer.js"; + +type RelaySandbox = { + cleanup: () => void; + homeDir: string; +}; + +function createRelaySandbox(agentName: string): RelaySandbox { + const homeDir = mkdtempSync( + join(tmpdir(), "clawdentity-openclaw-skill-relay-"), + ); + const clawdentityDir = join(homeDir, ".clawdentity"); + const agentDirectory = join(clawdentityDir, "agents", agentName); + + mkdirSync(agentDirectory, { recursive: true }); + + writeFileSync( + join(clawdentityDir, "peers.json"), + JSON.stringify( + { + peers: { + beta: { + did: "did:claw:agent:01BETA", + proxyUrl: "https://peer.example.com/hooks/agent?source=skill", + name: "Beta", + }, + }, + }, + null, + 2, + ), + "utf8", + ); + + writeFileSync( + join(agentDirectory, "secret.key"), + encodeBase64url(Uint8Array.from({ length: 32 }, (_, index) => index + 1)), + "utf8", + ); + writeFileSync(join(agentDirectory, "ait.jwt"), "mock.ait.jwt", "utf8"); + + return { + cleanup: () => { + rmSync(homeDir, { recursive: true, force: true }); + }, + homeDir, + }; +} + +function writeAgentCredentials(homeDir: string, agentName: string): void { + const agentDirectory = join(homeDir, ".clawdentity", "agents", agentName); + mkdirSync(agentDirectory, { recursive: true }); + writeFileSync( + join(agentDirectory, "secret.key"), + encodeBase64url(Uint8Array.from({ length: 32 }, (_, index) => index + 1)), + "utf8", + ); + writeFileSync(join(agentDirectory, "ait.jwt"), "mock.ait.jwt", "utf8"); +} + +describe("relay-to-peer transform", () => { + it("relays peer payloads with Claw authorization and PoP headers", async () => { + const sandbox = createRelaySandbox("alpha-agent"); + const fetchMock = vi.fn(async () => new Response("", { status: 202 })); + + try { + const result = await relayPayloadToPeer( + { + peer: "beta", + message: "hello", + metadata: { + turn: 1, + }, + }, + { + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + fetchImpl: 
fetchMock as typeof fetch, + clock: () => 1_700_000_000_000, + randomBytesImpl: () => + Uint8Array.from([ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + ]), + }, + ); + + expect(result).toBeNull(); + expect(fetchMock).toHaveBeenCalledTimes(1); + + const [url, requestInit] = fetchMock.mock.calls[0] as [ + string, + RequestInit, + ]; + expect(url).toBe("https://peer.example.com/hooks/agent?source=skill"); + expect(requestInit.method).toBe("POST"); + expect(requestInit.body).toBe( + JSON.stringify({ + message: "hello", + metadata: { + turn: 1, + }, + }), + ); + + const headers = new Headers(requestInit.headers); + expect(headers.get("authorization")).toBe("Claw mock.ait.jwt"); + expect(headers.get("content-type")).toBe("application/json"); + expect(headers.get("x-claw-timestamp")).toBe("1700000000"); + expect(headers.get("x-claw-nonce")).toBe("AQIDBAUGBwgJCgsMDQ4PEA"); + expect(headers.get("x-claw-body-sha256")).toMatch(/^[A-Za-z0-9_-]+$/); + expect(headers.get("x-claw-proof")).toMatch(/^[A-Za-z0-9_-]+$/); + } finally { + sandbox.cleanup(); + } + }); + + it("returns payload unchanged when peer is not set", async () => { + const fetchMock = vi.fn(async () => new Response("", { status: 202 })); + + const payload = { + message: "local only", + }; + + const result = await relayPayloadToPeer(payload, { + fetchImpl: fetchMock as typeof fetch, + }); + + expect(result).toBe(payload); + expect(fetchMock).not.toHaveBeenCalled(); + }); + + it("throws when the peer alias is unknown", async () => { + const sandbox = createRelaySandbox("alpha-agent"); + + try { + await expect( + relayPayloadToPeer( + { + peer: "unknown", + message: "hello", + }, + { + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + fetchImpl: vi.fn( + async () => new Response("", { status: 200 }), + ) as typeof fetch, + }, + ), + ).rejects.toThrow("Peer alias is not configured"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses ~/.clawdentity/openclaw-agent-name when env is missing", 
async () => { + const sandbox = createRelaySandbox("alpha-agent"); + const previousAgentName = process.env.CLAWDENTITY_AGENT_NAME; + delete process.env.CLAWDENTITY_AGENT_NAME; + writeFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-agent-name"), + "alpha-agent\n", + "utf8", + ); + + try { + const result = await relayPayloadToPeer( + { + peer: "beta", + message: "hello", + }, + { + homeDir: sandbox.homeDir, + fetchImpl: vi.fn( + async () => new Response("", { status: 200 }), + ) as typeof fetch, + }, + ); + + expect(result).toBeNull(); + } finally { + process.env.CLAWDENTITY_AGENT_NAME = previousAgentName; + sandbox.cleanup(); + } + }); + + it("throws when multiple local agents exist without selection", async () => { + const sandbox = createRelaySandbox("alpha-agent"); + const previousAgentName = process.env.CLAWDENTITY_AGENT_NAME; + delete process.env.CLAWDENTITY_AGENT_NAME; + writeAgentCredentials(sandbox.homeDir, "gamma-agent"); + + try { + await expect( + relayPayloadToPeer( + { + peer: "beta", + message: "hello", + }, + { + homeDir: sandbox.homeDir, + fetchImpl: vi.fn( + async () => new Response("", { status: 200 }), + ) as typeof fetch, + }, + ), + ).rejects.toThrow("Multiple local agents found"); + } finally { + process.env.CLAWDENTITY_AGENT_NAME = previousAgentName; + sandbox.cleanup(); + } + }); + + it("uses default export with transform context payload", async () => { + const payload = { message: "context payload" }; + const result = await relayToPeer({ payload }); + expect(result).toBe(payload); + }); +}); diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.ts new file mode 100644 index 0000000..5589924 --- /dev/null +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.ts @@ -0,0 +1,286 @@ +import { randomBytes } from "node:crypto"; +import { readdir, readFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { join } from "node:path"; +import { 
decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; +import { signHttpRequest } from "@clawdentity/sdk"; +import { + loadPeersConfig, + type PeersConfigPathOptions, +} from "./peers-config.js"; + +const CLAWDENTITY_DIR = ".clawdentity"; +const AGENTS_DIR = "agents"; +const SECRET_KEY_FILENAME = "secret.key"; +const AIT_FILENAME = "ait.jwt"; +const AGENT_NAME_ENV = "CLAWDENTITY_AGENT_NAME"; +const OPENCLAW_AGENT_NAME_FILENAME = "openclaw-agent-name"; +const NONCE_SIZE = 16; + +const textEncoder = new TextEncoder(); + +export type RelayToPeerOptions = PeersConfigPathOptions & { + agentName?: string; + fetchImpl?: typeof fetch; + clock?: () => number; + randomBytesImpl?: (size: number) => Uint8Array; +}; + +export type RelayTransformContext = { + payload?: unknown; +}; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? error.code : undefined; +} + +function parseRequiredString(value: unknown): string { + if (typeof value !== "string") { + throw new Error("Input value must be a string"); + } + + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error("Input value must not be empty"); + } + + return trimmed; +} + +function resolvePathWithQuery(url: URL): string { + return `${url.pathname}${url.search}`; +} + +function resolveRelayFetch(fetchImpl?: typeof fetch): typeof fetch { + const resolved = fetchImpl ?? 
globalThis.fetch; + if (typeof resolved !== "function") { + throw new Error("fetch implementation is required"); + } + + return resolved; +} + +async function tryReadTrimmedFile( + filePath: string, + _label: string, +): Promise { + let raw: string; + + try { + raw = await readFile(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + + throw error; + } + + const trimmed = raw.trim(); + if (trimmed.length === 0) { + throw new Error("Required file content is empty"); + } + + return trimmed; +} + +async function readTrimmedFile( + filePath: string, + label: string, +): Promise { + const content = await tryReadTrimmedFile(filePath, label); + if (content === undefined) { + throw new Error("Required file is missing"); + } + + return content; +} + +async function resolveAgentName(input: { + overrideName?: string; + homeDir: string; +}): Promise { + const overridden = input.overrideName?.trim(); + if (overridden) { + return overridden; + } + + const envAgentName = process.env[AGENT_NAME_ENV]?.trim(); + if (envAgentName) { + return envAgentName; + } + + const selectedAgentPath = join( + input.homeDir, + CLAWDENTITY_DIR, + OPENCLAW_AGENT_NAME_FILENAME, + ); + const selectedAgentName = await tryReadTrimmedFile( + selectedAgentPath, + OPENCLAW_AGENT_NAME_FILENAME, + ); + if (selectedAgentName) { + return selectedAgentName; + } + + const agentsDirectory = join(input.homeDir, CLAWDENTITY_DIR, AGENTS_DIR); + let entries: Array<{ isDirectory: () => boolean; name: string }>; + try { + entries = (await readdir(agentsDirectory, { + withFileTypes: true, + })) as Array<{ isDirectory: () => boolean; name: string }>; + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw new Error("No local agents found. 
Select one before relay setup."); + } + + throw error; + } + + const agentNames = entries + .filter((entry) => entry.isDirectory()) + .map((entry) => entry.name) + .sort(); + + if (agentNames.length === 1) { + return agentNames[0]; + } + + if (agentNames.length === 0) { + throw new Error("No local agents found. Select one before relay setup."); + } + + throw new Error( + "Multiple local agents found. Configure a selected relay agent first.", + ); +} + +async function readAgentCredentials(input: { + agentName: string; + homeDir: string; +}): Promise<{ ait: string; secretKey: Uint8Array }> { + const agentDir = join( + input.homeDir, + CLAWDENTITY_DIR, + AGENTS_DIR, + input.agentName, + ); + const secretPath = join(agentDir, SECRET_KEY_FILENAME); + const aitPath = join(agentDir, AIT_FILENAME); + + const encodedSecret = await readTrimmedFile(secretPath, SECRET_KEY_FILENAME); + const ait = await readTrimmedFile(aitPath, AIT_FILENAME); + + let secretKey: Uint8Array; + try { + secretKey = decodeBase64url(encodedSecret); + } catch { + throw new Error("Agent secret key is invalid"); + } + + return { + ait, + secretKey, + }; +} + +function removePeerField( + payload: Record, +): Record { + const outbound: Record = {}; + + for (const [key, value] of Object.entries(payload)) { + if (key !== "peer") { + outbound[key] = value; + } + } + + return outbound; +} + +export async function relayPayloadToPeer( + payload: unknown, + options: RelayToPeerOptions = {}, +): Promise { + if (!isRecord(payload)) { + return payload; + } + + const peerAliasValue = payload.peer; + if (peerAliasValue === undefined) { + return payload; + } + + const peerAlias = parseRequiredString(peerAliasValue); + const peersConfig = await loadPeersConfig(options); + const peerEntry = peersConfig.peers[peerAlias]; + + if (!peerEntry) { + throw new Error("Peer alias is not configured"); + } + + const home = + typeof options.homeDir === "string" && options.homeDir.trim().length > 0 + ? 
options.homeDir.trim() + : homedir(); + const agentName = await resolveAgentName({ + overrideName: options.agentName, + homeDir: home, + }); + const { ait, secretKey } = await readAgentCredentials({ + agentName, + homeDir: home, + }); + + const outboundPayload = removePeerField(payload); + const body = JSON.stringify(outboundPayload); + + const peerUrl = new URL(peerEntry.proxyUrl); + const unixSeconds = Math.floor( + (options.clock ?? Date.now)() / 1000, + ).toString(); + const nonce = encodeBase64url( + (options.randomBytesImpl ?? randomBytes)(NONCE_SIZE), + ); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: resolvePathWithQuery(peerUrl), + timestamp: unixSeconds, + nonce, + body: textEncoder.encode(body), + secretKey, + }); + + const response = await resolveRelayFetch(options.fetchImpl)( + peerUrl.toString(), + { + method: "POST", + headers: { + Authorization: `Claw ${ait}`, + "Content-Type": "application/json", + ...signed.headers, + }, + body, + }, + ); + + if (!response.ok) { + throw new Error("Peer relay request failed"); + } + + return null; +} + +export default async function relayToPeer( + ctx: RelayTransformContext, +): Promise { + return relayPayloadToPeer(ctx?.payload); +} diff --git a/apps/openclaw-skill/tsconfig.json b/apps/openclaw-skill/tsconfig.json new file mode 100644 index 0000000..a73f62b --- /dev/null +++ b/apps/openclaw-skill/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "types": ["node"], + "outDir": "./dist" + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.test.ts"] +} diff --git a/apps/openclaw-skill/tsup.config.ts b/apps/openclaw-skill/tsup.config.ts new file mode 100644 index 0000000..00c435f --- /dev/null +++ b/apps/openclaw-skill/tsup.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: { + "relay-to-peer": "src/transforms/relay-to-peer.ts", + }, + format: ["esm"], + outDir: "dist", + 
outExtension: () => ({ js: ".mjs" }), + platform: "node", + target: "node22", + bundle: true, + noExternal: [/.*/], + dts: false, + clean: true, +}); diff --git a/apps/openclaw-skill/vitest.config.ts b/apps/openclaw-skill/vitest.config.ts new file mode 100644 index 0000000..e8e90b6 --- /dev/null +++ b/apps/openclaw-skill/vitest.config.ts @@ -0,0 +1,20 @@ +import { resolve } from "node:path"; +import { fileURLToPath } from "node:url"; +import { defineConfig } from "vitest/config"; + +const rootDir = fileURLToPath(new URL(".", import.meta.url)); + +export default defineConfig({ + resolve: { + alias: { + "@clawdentity/protocol": resolve( + rootDir, + "../../packages/protocol/src/index.ts", + ), + "@clawdentity/sdk": resolve(rootDir, "../../packages/sdk/src/index.ts"), + }, + }, + test: { + globals: true, + }, +}); diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 499e937..8a0d6ce 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -17,7 +17,10 @@ - Prefer schema-driven parsing with small pure helpers for coercion/overrides. - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. +- Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. - Keep auth failure semantics stable: auth-invalid requests map to `401`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. 
- Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. +- Keep `/hooks/agent` input contract strict: require `Content-Type: application/json` and reject malformed JSON with explicit client errors. +- Keep `/hooks/agent` upstream failure mapping explicit: timeout errors -> `504`, network errors -> `502`, and never log `openclawHookToken` or request payload. diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts new file mode 100644 index 0000000..f14fc7b --- /dev/null +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -0,0 +1,228 @@ +import { describe, expect, it, vi } from "vitest"; + +vi.mock("./auth-middleware.js", async () => { + const { createMiddleware } = await import("hono/factory"); + + return { + createProxyAuthMiddleware: () => + createMiddleware(async (_c, next) => { + await next(); + }), + }; +}); + +import { parseProxyConfig } from "./config.js"; +import { createProxyApp } from "./server.js"; + +function createHookRouteApp(input: { + fetchImpl: typeof fetch; + timeoutMs?: number; +}) { + return createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_BASE_URL: "http://openclaw.local", + OPENCLAW_HOOK_TOKEN: "openclaw-secret", + }), + hooks: { + fetchImpl: input.fetchImpl, + timeoutMs: input.timeoutMs, + }, + }); +} + +function resolveRequestUrl(input: unknown): string { + if (typeof input === "string") { + return input; + } + + if (input instanceof URL) { + return input.toString(); + } + + if ( + typeof input === "object" && + input !== null && + "url" in input && + typeof (input as { url?: unknown }).url === "string" + ) { + return (input as { url: string }).url; + } + + return ""; +} + +describe("POST /hooks/agent", () => { + it("forwards JSON payload and returns upstream status/body", async () => { + const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { + return new Response( + JSON.stringify({ 
+ accepted: true, + echoedBody: init?.body, + }), + { + status: 202, + headers: { + "content-type": "application/json", + }, + }, + ); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json; charset=utf-8", + }, + body: JSON.stringify({ + event: "agent.started", + }), + }); + + expect(fetchMock).toHaveBeenCalledTimes(1); + const [calledInput, calledInit] = fetchMock.mock.calls[0] as [ + unknown, + RequestInit | undefined, + ]; + const calledHeaders = (calledInit?.headers ?? {}) as Record; + + expect(resolveRequestUrl(calledInput)).toBe( + "http://openclaw.local/hooks/agent", + ); + expect(calledInit?.method).toBe("POST"); + expect(calledInit?.body).toBe(JSON.stringify({ event: "agent.started" })); + expect(calledHeaders["content-type"]).toBe("application/json"); + expect(calledHeaders["x-openclaw-token"]).toBe("openclaw-secret"); + expect(typeof calledHeaders["x-request-id"]).toBe("string"); + expect(calledHeaders["x-request-id"].length).toBeGreaterThan(0); + + expect(response.status).toBe(202); + expect(response.headers.get("content-type")).toContain("application/json"); + const body = (await response.json()) as { + accepted: boolean; + echoedBody: unknown; + }; + expect(body.accepted).toBe(true); + expect(body.echoedBody).toBe(JSON.stringify({ event: "agent.started" })); + }); + + it("rejects non-json content types", async () => { + const fetchMock = vi.fn(async () => new Response("{}", { status: 200 })); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "text/plain", + }, + body: "hello", + }); + + expect(fetchMock).not.toHaveBeenCalled(); + expect(response.status).toBe(415); + const body = (await response.json()) as { + error: { code: string; message: 
string; requestId: string }; + }; + expect(body.error.code).toBe("PROXY_HOOK_UNSUPPORTED_MEDIA_TYPE"); + expect(body.error.message).toBe("Content-Type must be application/json"); + expect(typeof body.error.requestId).toBe("string"); + }); + + it("rejects invalid JSON payloads", async () => { + const fetchMock = vi.fn(async () => new Response("{}", { status: 200 })); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: "{not valid json", + }); + + expect(fetchMock).not.toHaveBeenCalled(); + expect(response.status).toBe(400); + const body = (await response.json()) as { + error: { code: string; message: string; requestId: string }; + }; + expect(body.error.code).toBe("PROXY_HOOK_INVALID_JSON"); + expect(body.error.message).toBe("Request body must be valid JSON"); + expect(typeof body.error.requestId).toBe("string"); + }); + + it("maps upstream network errors to 502", async () => { + const fetchMock = vi.fn(async () => { + throw new TypeError("fetch failed"); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(response.status).toBe(502); + const body = (await response.json()) as { + error: { code: string; message: string; requestId: string }; + }; + expect(body.error.code).toBe("PROXY_HOOK_UPSTREAM_UNAVAILABLE"); + expect(body.error.message).toBe("OpenClaw hook upstream request failed"); + expect(typeof body.error.requestId).toBe("string"); + }); + + it("maps upstream timeout errors to 504", async () => { + const fetchMock = vi.fn( + (_input: unknown, init?: RequestInit): Promise => + new Promise((_resolve, 
reject) => { + const signal = init?.signal; + if (signal == null) { + reject(new Error("signal is required")); + return; + } + + signal.addEventListener( + "abort", + () => { + const timeoutError = new Error("request aborted"); + timeoutError.name = "AbortError"; + reject(timeoutError); + }, + { once: true }, + ); + }), + ); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + timeoutMs: 5, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(response.status).toBe(504); + const body = (await response.json()) as { + error: { code: string; message: string; requestId: string }; + }; + expect(body.error.code).toBe("PROXY_HOOK_UPSTREAM_TIMEOUT"); + expect(body.error.message).toBe("OpenClaw hook upstream request timed out"); + expect(typeof body.error.requestId).toBe("string"); + }); +}); diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts new file mode 100644 index 0000000..f69cbd9 --- /dev/null +++ b/apps/proxy/src/agent-hook-route.ts @@ -0,0 +1,146 @@ +import { AppError, type Logger } from "@clawdentity/sdk"; +import type { Context } from "hono"; +import type { ProxyRequestVariables } from "./auth-middleware.js"; + +const AGENT_HOOK_PATH = "/hooks/agent"; +export const DEFAULT_AGENT_HOOK_TIMEOUT_MS = 10_000; + +export type AgentHookRuntimeOptions = { + fetchImpl?: typeof fetch; + timeoutMs?: number; +}; + +type CreateAgentHookHandlerOptions = AgentHookRuntimeOptions & { + logger: Logger; + openclawBaseUrl: string; + openclawHookToken: string; +}; + +type ProxyContext = Context<{ + Variables: ProxyRequestVariables; +}>; + +function isJsonContentType(contentTypeHeader: string | undefined): boolean { + if (typeof contentTypeHeader !== "string") { + return false; + } + + const [mediaType] = 
contentTypeHeader.split(";"); + return mediaType.trim().toLowerCase() === "application/json"; +} + +function toOpenclawHookUrl(openclawBaseUrl: string): string { + const normalizedBase = openclawBaseUrl.endsWith("/") + ? openclawBaseUrl + : `${openclawBaseUrl}/`; + return new URL(AGENT_HOOK_PATH, normalizedBase).toString(); +} + +function toErrorName(error: unknown): string { + if (error instanceof Error && error.name.trim().length > 0) { + return error.name; + } + + return "unknown"; +} + +function isAbortError(error: unknown): boolean { + return toErrorName(error) === "AbortError"; +} + +export function createAgentHookHandler( + options: CreateAgentHookHandlerOptions, +): (c: ProxyContext) => Promise { + const fetchImpl = options.fetchImpl ?? fetch; + const timeoutMs = options.timeoutMs ?? DEFAULT_AGENT_HOOK_TIMEOUT_MS; + const hookUrl = toOpenclawHookUrl(options.openclawBaseUrl); + + return async (c) => { + if (!isJsonContentType(c.req.header("content-type"))) { + throw new AppError({ + code: "PROXY_HOOK_UNSUPPORTED_MEDIA_TYPE", + message: "Content-Type must be application/json", + status: 415, + expose: true, + }); + } + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "PROXY_HOOK_INVALID_JSON", + message: "Request body must be valid JSON", + status: 400, + expose: true, + }); + } + + const requestId = c.get("requestId"); + const startedAt = Date.now(); + const controller = new AbortController(); + let timedOut = false; + const timeoutHandle = setTimeout(() => { + timedOut = true; + controller.abort(); + }, timeoutMs); + + let upstreamResponse: Response; + try { + upstreamResponse = await fetchImpl(hookUrl, { + method: "POST", + headers: { + "content-type": "application/json", + "x-openclaw-token": options.openclawHookToken, + "x-request-id": requestId, + }, + body: JSON.stringify(payload), + signal: controller.signal, + }); + } catch (error) { + if (timedOut || isAbortError(error)) { + 
options.logger.warn("proxy.hooks.agent.timeout", { + requestId, + timeoutMs, + }); + throw new AppError({ + code: "PROXY_HOOK_UPSTREAM_TIMEOUT", + message: "OpenClaw hook upstream request timed out", + status: 504, + }); + } + + options.logger.warn("proxy.hooks.agent.network_error", { + requestId, + errorName: toErrorName(error), + }); + throw new AppError({ + code: "PROXY_HOOK_UPSTREAM_UNAVAILABLE", + message: "OpenClaw hook upstream request failed", + status: 502, + }); + } finally { + clearTimeout(timeoutHandle); + } + + options.logger.info("proxy.hooks.agent.forwarded", { + requestId, + upstreamStatus: upstreamResponse.status, + durationMs: Date.now() - startedAt, + }); + + const responseBody = await upstreamResponse.text(); + const responseHeaders: Record = {}; + const upstreamContentType = upstreamResponse.headers.get("content-type"); + if (typeof upstreamContentType === "string") { + responseHeaders["content-type"] = upstreamContentType; + } + + return c.body( + responseBody, + upstreamResponse.status as 200, + responseHeaders, + ); + }; +} diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index 86f725b..3298606 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -9,6 +9,10 @@ import { } from "@clawdentity/sdk"; import { serve } from "@hono/node-server"; import { Hono } from "hono"; +import { + type AgentHookRuntimeOptions, + createAgentHookHandler, +} from "./agent-hook-route.js"; import { createProxyAuthMiddleware, type ProxyRequestVariables, @@ -29,6 +33,7 @@ type CreateProxyAppOptions = { logger?: Logger; registerRoutes?: (app: ProxyApp) => void; auth?: ProxyAuthRuntimeOptions; + hooks?: AgentHookRuntimeOptions; }; type StartProxyServerOptions = { @@ -78,6 +83,15 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { environment: options.config.environment, }), ); + app.post( + "/hooks/agent", + createAgentHookHandler({ + logger, + openclawBaseUrl: options.config.openclawBaseUrl, + 
openclawHookToken: options.config.openclawHookToken, + ...options.hooks, + }), + ); options.registerRoutes?.(app); return app; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cc45563..f204ed7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -49,6 +49,19 @@ importers: specifier: ^22.18.11 version: 22.19.11 + apps/openclaw-skill: + dependencies: + '@clawdentity/protocol': + specifier: workspace:* + version: link:../../packages/protocol + '@clawdentity/sdk': + specifier: workspace:* + version: link:../../packages/sdk + devDependencies: + '@types/node': + specifier: ^22.18.11 + version: 22.19.11 + apps/proxy: dependencies: '@clawdentity/protocol': From 81cad5551d8b141c577b9581c7438194ab63a315 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 21:55:53 +0530 Subject: [PATCH 050/190] fix(proxy): preserve base path when forwarding hook endpoint --- apps/proxy/src/agent-hook-route.test.ts | 26 ++++++++++++++++++++++++- apps/proxy/src/agent-hook-route.ts | 2 +- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index f14fc7b..1744464 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -17,10 +17,11 @@ import { createProxyApp } from "./server.js"; function createHookRouteApp(input: { fetchImpl: typeof fetch; timeoutMs?: number; + openclawBaseUrl?: string; }) { return createProxyApp({ config: parseProxyConfig({ - OPENCLAW_BASE_URL: "http://openclaw.local", + OPENCLAW_BASE_URL: input.openclawBaseUrl ?? 
"http://openclaw.local", OPENCLAW_HOOK_TOKEN: "openclaw-secret", }), hooks: { @@ -108,6 +109,29 @@ describe("POST /hooks/agent", () => { expect(body.echoedBody).toBe(JSON.stringify({ event: "agent.started" })); }); + it("preserves OpenClaw base path prefixes when building hook URL", async () => { + let forwardedUrl = ""; + const fetchMock = vi.fn(async (input: unknown) => { + forwardedUrl = resolveRequestUrl(input); + return new Response("{}", { status: 202 }); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + openclawBaseUrl: "http://openclaw.local/api", + }); + + await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(forwardedUrl).toBe("http://openclaw.local/api/hooks/agent"); + }); + it("rejects non-json content types", async () => { const fetchMock = vi.fn(async () => new Response("{}", { status: 200 })); const app = createHookRouteApp({ diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index f69cbd9..a1e18b7 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -2,7 +2,7 @@ import { AppError, type Logger } from "@clawdentity/sdk"; import type { Context } from "hono"; import type { ProxyRequestVariables } from "./auth-middleware.js"; -const AGENT_HOOK_PATH = "/hooks/agent"; +const AGENT_HOOK_PATH = "hooks/agent"; export const DEFAULT_AGENT_HOOK_TIMEOUT_MS = 10_000; export type AgentHookRuntimeOptions = { From 234a0452f77dce200714637835dc5e90e86347cd Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 22:00:38 +0530 Subject: [PATCH 051/190] docs: add user-like e2e skill testing strategy --- AGENTS.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index 0df9c20..c743e19 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -89,6 +89,16 @@ - Hono 
apps are tested via `app.request()` (Hono's built-in test client) — no wrangler or miniflare needed for unit tests. - Pass mock bindings as the third argument: `app.request("/path", {}, { DB: {}, ENVIRONMENT: "test" })`. +## User-Like E2E Skill Testing +- Validate onboarding and relay flows as a real user path, not as manual local shortcuts. +- Start backend services locally with Wrangler (registry/proxy) using the expected environment before E2E checks. +- Run OpenClaw agents in Docker and test through agent skills only; do not pre-configure relay files by hand. +- Install via npm + skill entrypoint (`npm install clawdentity --skill`) and let the skill perform remaining setup. +- Use invite-code onboarding exactly as production intent: admin creates invite code, agent asks its human for the code, then agent completes setup. +- Verify resulting agent filesystem/config artifacts are created by the skill in the expected locations. +- Confirm end-to-end communication between at least two agents after setup (for example alpha <-> beta relay path). +- If a skill-run test fails because of partial/dirty skill-created state, clean/revert only skill-generated setup and rerun from a fresh user-like starting point. + ## T00 Scaffold Best Practices - Start T00 by confirming the deployment-first order (`T00 -> T37 -> T38`) and reviewing README/PRD/`issues/EXECUTION_PLAN.md` so documentation mirrors the execution model. - Define the workspace layout now: `apps/registry`, `apps/proxy`, `apps/cli`, `packages/sdk`, and `packages/protocol` (with shared tooling such as `pnpm-workspace.yaml`, `tsconfig.base.json`, and `biome.json`) so downstream tickets have a known structure. 
From 0a612a8bd67d3391c19ae83eb850bbc5c24cea08 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 22:03:17 +0530 Subject: [PATCH 052/190] docs: add alpha beta docker and skill-only e2e guidance --- AGENTS.md | 6 ++++++ apps/openclaw-skill/AGENTS.md | 8 ++++++++ 2 files changed, 14 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index c743e19..94725da 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -93,11 +93,17 @@ - Validate onboarding and relay flows as a real user path, not as manual local shortcuts. - Start backend services locally with Wrangler (registry/proxy) using the expected environment before E2E checks. - Run OpenClaw agents in Docker and test through agent skills only; do not pre-configure relay files by hand. +- Keep two dedicated containers for relay verification: Alpha (sender) and Beta (receiver). Use stable names and document them in the test run notes. +- Keep Alpha and Beta isolated with separate HOME volumes so `~/.clawdentity` and `~/.openclaw` state never leaks between agents. +- Ensure both containers run with required model API credentials; onboarding tests must not depend on host-level credentials. - Install via npm + skill entrypoint (`npm install clawdentity --skill`) and let the skill perform remaining setup. - Use invite-code onboarding exactly as production intent: admin creates invite code, agent asks its human for the code, then agent completes setup. - Verify resulting agent filesystem/config artifacts are created by the skill in the expected locations. - Confirm end-to-end communication between at least two agents after setup (for example alpha <-> beta relay path). - If a skill-run test fails because of partial/dirty skill-created state, clean/revert only skill-generated setup and rerun from a fresh user-like starting point. 
+- Reset for rerun must remove only skill-created artifacts first: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`, and `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/`. +- Use a full reset only when required for identity reprovisioning, and then also clear `~/.clawdentity/agents//` before re-onboarding. +- Skill-only policy: no direct `clawdentity openclaw setup` execution by humans during E2E validation; the agent must run the skill flow and prompt the human only for missing invite code or confirmations. ## T00 Scaffold Best Practices - Start T00 by confirming the deployment-first order (`T00 -> T37 -> T38`) and reviewing README/PRD/`issues/EXECUTION_PLAN.md` so documentation mirrors the execution model. diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index ec5f7bf..b7191d1 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -37,3 +37,11 @@ - `pnpm -F @clawdentity/openclaw-skill typecheck` - `pnpm -F @clawdentity/openclaw-skill test` - `pnpm -F @clawdentity/openclaw-skill build` + +## Docker E2E Workflow +- Run E2E with two OpenClaw containers: Alpha (sender) and Beta (receiver), each with isolated HOME storage. +- Install and execute onboarding through skill flow only (`npm install clawdentity --skill` plus agent-executed skill steps). +- Human role in E2E is limited to supplying invite code and confirmations requested by the agent. +- Do not edit relay hooks, peer config, or selected-agent files manually during validation. +- After skill setup, verify these artifacts exist and are agent-generated: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`. +- For reruns after failures, clear skill-generated artifacts first; only perform full identity reset (`~/.clawdentity/agents//`) when identity reprovisioning is needed. 
From a5cd7e384bedc2402565cfeb8de68a7c6871c405 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 22:22:10 +0530 Subject: [PATCH 053/190] feat(registry,cli): add production-grade admin bootstrap flow --- apps/cli/src/AGENTS.md | 1 + apps/cli/src/commands/AGENTS.md | 6 + apps/cli/src/commands/admin.test.ts | 133 ++++++ apps/cli/src/commands/admin.ts | 256 +++++++++++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/registry/src/AGENTS.md | 8 + apps/registry/src/admin-bootstrap.ts | 128 ++++++ apps/registry/src/auth/AGENTS.md | 1 + apps/registry/src/auth/api-key-auth.ts | 85 +--- apps/registry/src/auth/api-key-token.ts | 87 ++++ apps/registry/src/server.test.ts | 542 ++++++++++++++++++++++-- apps/registry/src/server.ts | 177 +++++++- 13 files changed, 1331 insertions(+), 103 deletions(-) create mode 100644 apps/cli/src/commands/admin.test.ts create mode 100644 apps/cli/src/commands/admin.ts create mode 100644 apps/registry/src/admin-bootstrap.ts create mode 100644 apps/registry/src/auth/api-key-token.ts diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 60d11cf..17db5de 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -8,6 +8,7 @@ - Reuse shared command wrappers (`withErrorHandling`) and IO helpers (`writeStdoutLine`, `writeStderrLine`) instead of inline process writes. - Prefer explicit error-to-reason mapping for operator-facing failures rather than generic stack traces. - Prefer SDK shared primitives (`AppError`, `nowIso`) for new command error/date logic instead of ad-hoc equivalents. +- Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. 
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 127b526..7dabcde 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -21,6 +21,12 @@ - `openclaw setup` must be idempotent for relay mapping updates and peer map writes. - Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. +## Admin Command Rules +- `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. +- Treat bootstrap API key token as write-once secret: print once, persist via config manager, and never log token contents. +- Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. +- Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. + ## Testing Rules - Mock network and filesystem dependencies in command tests. - Include success and failure scenarios for external calls, parsing, and cache behavior. 
diff --git a/apps/cli/src/commands/admin.test.ts b/apps/cli/src/commands/admin.test.ts new file mode 100644 index 0000000..4b81f2f --- /dev/null +++ b/apps/cli/src/commands/admin.test.ts @@ -0,0 +1,133 @@ +import { describe, expect, it, vi } from "vitest"; +import { bootstrapAdmin } from "./admin.js"; + +describe("admin bootstrap helper", () => { + it("bootstraps admin and persists registryUrl + apiKey", async () => { + const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { + const requestBody = JSON.parse(String(init?.body)) as { + displayName?: string; + apiKeyName?: string; + }; + expect(requestBody.displayName).toBe("Primary Admin"); + expect(requestBody.apiKeyName).toBe("prod-admin"); + + return new Response( + JSON.stringify({ + human: { + id: "00000000000000000000000000", + did: "did:claw:human:00000000000000000000000000", + displayName: "Primary Admin", + role: "admin", + status: "active", + }, + apiKey: { + id: "01KHH000000000000000000001", + name: "prod-admin", + token: "clw_pat_testtoken", + }, + }), + { status: 201, headers: { "content-type": "application/json" } }, + ); + }); + const setConfigValueMock = vi.fn(async () => {}); + + const result = await bootstrapAdmin( + { + bootstrapSecret: "bootstrap-secret", + displayName: "Primary Admin", + apiKeyName: "prod-admin", + }, + { + fetchImpl: fetchMock as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + }), + setConfigValueImpl: setConfigValueMock, + }, + ); + + expect(result.human.did).toBe("did:claw:human:00000000000000000000000000"); + expect(result.apiKey.token).toBe("clw_pat_testtoken"); + expect(fetchMock).toHaveBeenCalledTimes(1); + const [calledInput, calledInit] = fetchMock.mock.calls[0] as [ + URL, + RequestInit, + ]; + expect(calledInput.toString()).toBe( + "https://api.example.com/v1/admin/bootstrap", + ); + expect(calledInit.method).toBe("POST"); + expect( + (calledInit.headers as Record)["x-bootstrap-secret"], + 
).toBe("bootstrap-secret"); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 1, + "registryUrl", + "https://api.example.com/", + ); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 2, + "apiKey", + "clw_pat_testtoken", + ); + }); + + it("maps registry bootstrap conflict to stable CLI message", async () => { + const fetchMock = vi.fn(async () => { + return new Response( + JSON.stringify({ + error: { + code: "ADMIN_BOOTSTRAP_ALREADY_COMPLETED", + message: "Admin bootstrap has already completed", + }, + }), + { status: 409, headers: { "content-type": "application/json" } }, + ); + }); + + await expect( + bootstrapAdmin( + { + bootstrapSecret: "bootstrap-secret", + }, + { + fetchImpl: fetchMock as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + }), + setConfigValueImpl: vi.fn(async () => {}), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_ADMIN_BOOTSTRAP_FAILED", + message: "Admin bootstrap has already completed", + }); + }); + + it("returns stable error when bootstrap response is malformed", async () => { + const fetchMock = vi.fn(async () => { + return new Response("{}", { + status: 201, + headers: { "content-type": "application/json" }, + }); + }); + + await expect( + bootstrapAdmin( + { + bootstrapSecret: "bootstrap-secret", + }, + { + fetchImpl: fetchMock as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + }), + setConfigValueImpl: vi.fn(async () => {}), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", + message: "Bootstrap response is invalid", + }); + }); +}); diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts new file mode 100644 index 0000000..37ef611 --- /dev/null +++ b/apps/cli/src/commands/admin.ts @@ -0,0 +1,256 @@ +import { AppError, createLogger } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { + type CliConfig, + resolveConfig, + 
setConfigValue, +} from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "admin" }); +const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; + +type AdminBootstrapOptions = { + bootstrapSecret: string; + displayName?: string; + apiKeyName?: string; + registryUrl?: string; +}; + +type AdminBootstrapResponse = { + human: { + id: string; + did: string; + displayName: string; + role: "admin"; + status: "active"; + }; + apiKey: { + id: string; + name: string; + token: string; + }; +}; + +type BootstrapErrorBody = { + error?: { + code?: string; + message?: string; + }; +}; + +type AdminBootstrapDependencies = { + fetchImpl?: typeof fetch; + resolveConfigImpl?: () => Promise; + setConfigValueImpl?: typeof setConfigValue; +}; + +function createCliError(code: string, message: string): AppError { + return new AppError({ + code, + message, + status: 400, + }); +} + +function parseNonEmptyString(value: unknown): string { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +} + +function resolveBootstrapRegistryUrl(input: { + overrideRegistryUrl: string | undefined; + configRegistryUrl: string; +}): string { + const candidate = + parseNonEmptyString(input.overrideRegistryUrl) || input.configRegistryUrl; + try { + return new URL(candidate).toString(); + } catch { + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_INVALID_REGISTRY_URL", + "Registry URL is invalid", + ); + } +} + +function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { + if (typeof payload !== "object" || payload === null) { + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", + "Bootstrap response is invalid", + ); + } + + const body = payload as Record; + const human = body.human as Record | undefined; + const apiKey = body.apiKey as Record | undefined; + if (!human || !apiKey) { + throw createCliError( + 
"CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", + "Bootstrap response is invalid", + ); + } + + const humanId = parseNonEmptyString(human.id); + const humanDid = parseNonEmptyString(human.did); + const humanDisplayName = parseNonEmptyString(human.displayName); + const apiKeyId = parseNonEmptyString(apiKey.id); + const apiKeyName = parseNonEmptyString(apiKey.name); + const apiKeyToken = parseNonEmptyString(apiKey.token); + + if ( + humanId.length === 0 || + humanDid.length === 0 || + humanDisplayName.length === 0 || + apiKeyId.length === 0 || + apiKeyName.length === 0 || + apiKeyToken.length === 0 + ) { + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", + "Bootstrap response is invalid", + ); + } + + return { + human: { + id: humanId, + did: humanDid, + displayName: humanDisplayName, + role: "admin", + status: "active", + }, + apiKey: { + id: apiKeyId, + name: apiKeyName, + token: apiKeyToken, + }, + }; +} + +function mapBootstrapFailureMessage(payload: BootstrapErrorBody): string { + if (payload.error?.code === "ADMIN_BOOTSTRAP_DISABLED") { + return "Admin bootstrap is disabled on the registry"; + } + + if (payload.error?.code === "ADMIN_BOOTSTRAP_UNAUTHORIZED") { + return "Bootstrap secret is invalid"; + } + + if (payload.error?.code === "ADMIN_BOOTSTRAP_ALREADY_COMPLETED") { + return "Admin bootstrap has already completed"; + } + + if (payload.error?.code === "ADMIN_BOOTSTRAP_INVALID") { + return "Bootstrap request payload is invalid"; + } + + return "Admin bootstrap request failed"; +} + +export async function bootstrapAdmin( + options: AdminBootstrapOptions, + dependencies: AdminBootstrapDependencies = {}, +): Promise { + const bootstrapSecret = parseNonEmptyString(options.bootstrapSecret); + if (bootstrapSecret.length === 0) { + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_SECRET_REQUIRED", + "Bootstrap secret is required", + ); + } + + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? 
resolveConfig; + const setConfigValueImpl = dependencies.setConfigValueImpl ?? setConfigValue; + const config = await resolveConfigImpl(); + const registryUrl = resolveBootstrapRegistryUrl({ + overrideRegistryUrl: options.registryUrl, + configRegistryUrl: config.registryUrl, + }); + + let response: Response; + try { + response = await fetchImpl(new URL(ADMIN_BOOTSTRAP_PATH, registryUrl), { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": bootstrapSecret, + }, + body: JSON.stringify({ + displayName: parseNonEmptyString(options.displayName) || undefined, + apiKeyName: parseNonEmptyString(options.apiKeyName) || undefined, + }), + }); + } catch (error) { + logger.warn("cli.admin_bootstrap_request_failed", { + errorName: error instanceof Error ? error.name : "unknown", + }); + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_REQUEST_FAILED", + "Bootstrap request failed", + ); + } + + let payload: unknown; + try { + payload = await response.json(); + } catch { + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", + "Bootstrap response is invalid", + ); + } + + if (!response.ok) { + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_FAILED", + mapBootstrapFailureMessage(payload as BootstrapErrorBody), + ); + } + + const parsed = parseBootstrapResponse(payload); + + await setConfigValueImpl("registryUrl", registryUrl); + await setConfigValueImpl("apiKey", parsed.apiKey.token); + + return parsed; +} + +export const createAdminCommand = (): Command => { + const adminCommand = new Command("admin").description( + "Manage admin bootstrap operations", + ); + + adminCommand + .command("bootstrap") + .description("Bootstrap first admin and store PAT locally") + .requiredOption( + "--bootstrap-secret ", + "One-time bootstrap secret configured on registry", + ) + .option("--display-name ", "Admin display name") + .option("--api-key-name ", "Admin API key label") + .option("--registry-url ", "Override registry URL") + .action( + 
withErrorHandling( + "admin bootstrap", + async (options: AdminBootstrapOptions) => { + const result = await bootstrapAdmin(options); + writeStdoutLine("Admin bootstrap completed"); + writeStdoutLine(`Human DID: ${result.human.did}`); + writeStdoutLine(`API key name: ${result.apiKey.name}`); + writeStdoutLine("API key saved to local config"); + writeStdoutLine("API key token (shown once):"); + writeStdoutLine(result.apiKey.token); + }, + ), + ); + + return adminCommand; +}; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 8810c62..f31bff1 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -26,6 +26,14 @@ describe("cli", () => { expect(hasAgentCommand).toBe(true); }); + it("registers the admin command", () => { + const hasAdminCommand = createProgram() + .commands.map((command) => command.name()) + .includes("admin"); + + expect(hasAdminCommand).toBe(true); + }); + it("registers the verify command", () => { const hasVerifyCommand = createProgram() .commands.map((command) => command.name()) diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 97f6ce6..ab57772 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,4 +1,5 @@ import { Command } from "commander"; +import { createAdminCommand } from "./commands/admin.js"; import { createAgentCommand } from "./commands/agent.js"; import { createConfigCommand } from "./commands/config.js"; import { createOpenclawCommand } from "./commands/openclaw.js"; @@ -10,6 +11,7 @@ export const createProgram = (): Command => { return new Command("clawdentity") .description("Clawdentity CLI - Agent identity management") .version(CLI_VERSION) + .addCommand(createAdminCommand()) .addCommand(createAgentCommand()) .addCommand(createConfigCommand()) .addCommand(createOpenclawCommand()) diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index a490b83..fb3629a 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -11,6 
+11,14 @@ - `/health` must return HTTP 200 with `{ status, version, environment }` on valid config. - Invalid runtime config must fail through the shared error handler and return `CONFIG_VALIDATION_FAILED`. +## Admin Bootstrap Contract +- `POST /v1/admin/bootstrap` is a one-time bootstrap endpoint gated by `BOOTSTRAP_SECRET`. +- Require `x-bootstrap-secret` header and compare with constant-time semantics; invalid/missing secret must return `401 ADMIN_BOOTSTRAP_UNAUTHORIZED`. +- If `BOOTSTRAP_SECRET` is not configured, return `503 ADMIN_BOOTSTRAP_DISABLED`. +- If any admin human already exists, return `409 ADMIN_BOOTSTRAP_ALREADY_COMPLETED`. +- Success response must include `{ human, apiKey }` and return the PAT token only in bootstrap response. +- Persist admin bootstrap atomically where supported (transaction), with local fallback only for environments that cannot start transactions. + ## Registry Keyset Contract - `/.well-known/claw-keys.json` is a public endpoint and must remain unauthenticated. - Return `keys[]` entries with `kid`, `alg`, `crv`, `x`, and `status` so SDK/offline verifiers can consume directly. 
diff --git a/apps/registry/src/admin-bootstrap.ts b/apps/registry/src/admin-bootstrap.ts new file mode 100644 index 0000000..dc1e10e --- /dev/null +++ b/apps/registry/src/admin-bootstrap.ts @@ -0,0 +1,128 @@ +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const DEFAULT_ADMIN_DISPLAY_NAME = "Admin"; +const DEFAULT_API_KEY_NAME = "bootstrap-admin"; +const MAX_DISPLAY_NAME_LENGTH = 64; +const MAX_API_KEY_NAME_LENGTH = 64; + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function parseOptionalTrimmedString(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + if (typeof value !== "string") { + return undefined; + } + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +type BootstrapPayload = { + displayName: string; + apiKeyName: string; +}; + +export function parseAdminBootstrapPayload(input: { + payload: unknown; + environment: RegistryConfig["ENVIRONMENT"]; +}): BootstrapPayload { + const exposeDetails = shouldExposeVerboseErrors(input.environment); + if ( + typeof input.payload !== "object" || + input.payload === null || + Array.isArray(input.payload) + ) { + throw new AppError({ + code: "ADMIN_BOOTSTRAP_INVALID", + message: exposeDetails + ? "Bootstrap payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails + ? 
{ + fieldErrors: { body: ["body must be a JSON object"] }, + formErrors: [], + } + : undefined, + }); + } + + const payload = input.payload as Record; + const fieldErrors: Record = {}; + + if ( + payload.displayName !== undefined && + typeof payload.displayName !== "string" + ) { + fieldErrors.displayName = ["displayName must be a string"]; + } + if ( + payload.apiKeyName !== undefined && + typeof payload.apiKeyName !== "string" + ) { + fieldErrors.apiKeyName = ["apiKeyName must be a string"]; + } + + const displayNameInput = parseOptionalTrimmedString(payload.displayName); + const apiKeyNameInput = parseOptionalTrimmedString(payload.apiKeyName); + if ( + payload.displayName !== undefined && + displayNameInput === undefined && + !fieldErrors.displayName + ) { + fieldErrors.displayName = ["displayName must not be empty"]; + } + if ( + payload.apiKeyName !== undefined && + apiKeyNameInput === undefined && + !fieldErrors.apiKeyName + ) { + fieldErrors.apiKeyName = ["apiKeyName must not be empty"]; + } + + const displayName = displayNameInput ?? DEFAULT_ADMIN_DISPLAY_NAME; + const apiKeyName = apiKeyNameInput ?? DEFAULT_API_KEY_NAME; + if (displayName.length > MAX_DISPLAY_NAME_LENGTH) { + fieldErrors.displayName = ["displayName must be at most 64 characters"]; + } else if (hasControlChars(displayName)) { + fieldErrors.displayName = ["displayName contains control characters"]; + } + + if (apiKeyName.length > MAX_API_KEY_NAME_LENGTH) { + fieldErrors.apiKeyName = ["apiKeyName must be at most 64 characters"]; + } else if (hasControlChars(apiKeyName)) { + fieldErrors.apiKeyName = ["apiKeyName contains control characters"]; + } + + if (Object.keys(fieldErrors).length > 0) { + throw new AppError({ + code: "ADMIN_BOOTSTRAP_INVALID", + message: exposeDetails + ? "Bootstrap payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? 
{ fieldErrors, formErrors: [] } : undefined, + }); + } + + return { + displayName, + apiKeyName, + }; +} diff --git a/apps/registry/src/auth/AGENTS.md b/apps/registry/src/auth/AGENTS.md index 022e243..78609aa 100644 --- a/apps/registry/src/auth/AGENTS.md +++ b/apps/registry/src/auth/AGENTS.md @@ -8,6 +8,7 @@ - Reject marker-only PATs (for example, `clw_pat_` without entropy). - Hash incoming PAT values with SHA-256 before lookup; never persist raw PATs. - Derive `api_keys.key_prefix` lookup keys from the PAT marker plus token entropy (not the static marker alone), and keep derivation logic in one shared helper. +- Keep PAT token helpers (`parseBearerPat`, prefix derivation, hashing, constant-time compare, token generation) centralized in `api-key-token.ts` so bootstrap and middleware use identical security behavior. - Use constant-time comparison for hash matching. - Use Drizzle through `src/db/client.ts` for lookup/update queries so auth code stays schema-driven. - Only allow `api_keys.status = "active"` and `humans.status = "active"`. 
diff --git a/apps/registry/src/auth/api-key-auth.ts b/apps/registry/src/auth/api-key-auth.ts index af7e518..7738ad2 100644 --- a/apps/registry/src/auth/api-key-auth.ts +++ b/apps/registry/src/auth/api-key-auth.ts @@ -3,6 +3,12 @@ import { eq } from "drizzle-orm"; import { createMiddleware } from "hono/factory"; import { createDb } from "../db/client.js"; import { api_keys, humans } from "../db/schema.js"; +import { + constantTimeEqual, + deriveApiKeyLookupPrefix, + hashApiKeyToken, + parseBearerPat, +} from "./api-key-token.js"; type ApiKeyQueryRow = { api_key_id: string; @@ -16,9 +22,6 @@ type ApiKeyQueryRow = { human_status: "active" | "suspended"; }; -const PAT_TOKEN_MARKER = "clw_pat_"; -const PAT_LOOKUP_ENTROPY_LENGTH = 8; - export type AuthenticatedHuman = { id: string; did: string; @@ -30,80 +33,6 @@ export type AuthenticatedHuman = { }; }; -function parseBearerPat(authorization?: string): string { - if (!authorization) { - throw new AppError({ - code: "API_KEY_MISSING", - message: "Authorization header is required", - status: 401, - expose: true, - }); - } - - const [scheme, token] = authorization.trim().split(/\s+/, 2); - if (scheme !== "Bearer" || !token) { - throw new AppError({ - code: "API_KEY_INVALID", - message: "Authorization must be in the format 'Bearer '", - status: 401, - expose: true, - }); - } - - if (!token.startsWith(PAT_TOKEN_MARKER)) { - throw new AppError({ - code: "API_KEY_INVALID", - message: "Authorization must contain a PAT token", - status: 401, - expose: true, - }); - } - - if (token.length <= PAT_TOKEN_MARKER.length) { - throw new AppError({ - code: "API_KEY_INVALID", - message: "Authorization must contain a PAT token", - status: 401, - expose: true, - }); - } - - return token; -} - -export function deriveApiKeyLookupPrefix(token: string): string { - const entropyPrefix = token.slice( - PAT_TOKEN_MARKER.length, - PAT_TOKEN_MARKER.length + PAT_LOOKUP_ENTROPY_LENGTH, - ); - - return `${PAT_TOKEN_MARKER}${entropyPrefix}`; -} - 
-function constantTimeEqual(left: string, right: string): boolean { - const maxLength = Math.max(left.length, right.length); - let mismatch = left.length ^ right.length; - - for (let index = 0; index < maxLength; index += 1) { - const leftCode = index < left.length ? left.charCodeAt(index) : 0; - const rightCode = index < right.length ? right.charCodeAt(index) : 0; - mismatch |= leftCode ^ rightCode; - } - - return mismatch === 0; -} - -export async function hashApiKeyToken(token: string): Promise { - const digest = await crypto.subtle.digest( - "SHA-256", - new TextEncoder().encode(token), - ); - - return Array.from(new Uint8Array(digest)) - .map((value) => value.toString(16).padStart(2, "0")) - .join(""); -} - export function createApiKeyAuth() { return createMiddleware<{ Bindings: { DB: D1Database }; @@ -181,3 +110,5 @@ export function createApiKeyAuth() { await next(); }); } + +export { deriveApiKeyLookupPrefix, hashApiKeyToken }; diff --git a/apps/registry/src/auth/api-key-token.ts b/apps/registry/src/auth/api-key-token.ts new file mode 100644 index 0000000..44bbb4d --- /dev/null +++ b/apps/registry/src/auth/api-key-token.ts @@ -0,0 +1,87 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { AppError } from "@clawdentity/sdk"; + +export const PAT_TOKEN_MARKER = "clw_pat_"; +const PAT_LOOKUP_ENTROPY_LENGTH = 8; +const PAT_RANDOM_BYTES_LENGTH = 32; + +export function parseBearerPat(authorization?: string): string { + if (!authorization) { + throw new AppError({ + code: "API_KEY_MISSING", + message: "Authorization header is required", + status: 401, + expose: true, + }); + } + + const [scheme, token] = authorization.trim().split(/\s+/, 2); + if (scheme !== "Bearer" || !token) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "Authorization must be in the format 'Bearer '", + status: 401, + expose: true, + }); + } + + if (!token.startsWith(PAT_TOKEN_MARKER)) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "Authorization 
must contain a PAT token", + status: 401, + expose: true, + }); + } + + if (token.length <= PAT_TOKEN_MARKER.length) { + throw new AppError({ + code: "API_KEY_INVALID", + message: "Authorization must contain a PAT token", + status: 401, + expose: true, + }); + } + + return token; +} + +export function deriveApiKeyLookupPrefix(token: string): string { + const entropyPrefix = token.slice( + PAT_TOKEN_MARKER.length, + PAT_TOKEN_MARKER.length + PAT_LOOKUP_ENTROPY_LENGTH, + ); + + return `${PAT_TOKEN_MARKER}${entropyPrefix}`; +} + +export function constantTimeEqual(left: string, right: string): boolean { + const maxLength = Math.max(left.length, right.length); + let mismatch = left.length ^ right.length; + + for (let index = 0; index < maxLength; index += 1) { + const leftCode = index < left.length ? left.charCodeAt(index) : 0; + const rightCode = index < right.length ? right.charCodeAt(index) : 0; + mismatch |= leftCode ^ rightCode; + } + + return mismatch === 0; +} + +export async function hashApiKeyToken(token: string): Promise { + const digest = await crypto.subtle.digest( + "SHA-256", + new TextEncoder().encode(token), + ); + + return Array.from(new Uint8Array(digest)) + .map((value) => value.toString(16).padStart(2, "0")) + .join(""); +} + +export function generateApiKeyToken(): string { + const randomBytes = crypto.getRandomValues( + new Uint8Array(PAT_RANDOM_BYTES_LENGTH), + ); + return `${PAT_TOKEN_MARKER}${encodeBase64url(randomBytes)}`; +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index dedb6bb..fe7b61c 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -61,7 +61,30 @@ type FakeD1Row = { humanStatus: "active" | "suspended"; }; +type FakeHumanRow = { + id: string; + did: string; + displayName: string; + role: "admin" | "user"; + status: "active" | "suspended"; + createdAt: string; + updatedAt: string; +}; + +type FakeApiKeyRow = { + id: string; + humanId: string; + keyHash: string; + 
keyPrefix: string; + name: string; + status: "active" | "revoked"; + createdAt: string; + lastUsedAt: string | null; +}; + type FakeAgentInsertRow = Record; +type FakeHumanInsertRow = Record; +type FakeApiKeyInsertRow = Record; type FakeAgentUpdateRow = Record; type FakeRevocationInsertRow = Record; type FakeRevocationRow = { @@ -176,6 +199,10 @@ function hasFilter( return quotedPattern.test(whereClause) || barePattern.test(whereClause); } +function isDefined(value: T | undefined): value is T { + return value !== undefined; +} + function parseWhereEqualityParams(options: { whereClause: string; params: unknown[]; @@ -304,6 +331,75 @@ function getAgentSelectColumnValue( return undefined; } +function getHumanSelectColumnValue(row: FakeHumanRow, column: string): unknown { + if (column === "id") { + return row.id; + } + if (column === "did") { + return row.did; + } + if (column === "display_name") { + return row.displayName; + } + if (column === "role") { + return row.role; + } + if (column === "status") { + return row.status; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +function resolveHumanSelectRows(options: { + query: string; + params: unknown[]; + humanRows: FakeHumanRow[]; +}): FakeHumanRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + + const roleFilter = + typeof equalityParams.values.role?.[0] === "string" + ? String(equalityParams.values.role[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const didFilter = + typeof equalityParams.values.did?.[0] === "string" + ? 
String(equalityParams.values.did[0]) + : undefined; + + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.humanRows.length; + + return options.humanRows + .filter((row) => (roleFilter ? row.role === roleFilter : true)) + .filter((row) => (statusFilter ? row.status === statusFilter : true)) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => (didFilter ? row.did === didFilter : true)) + .slice(0, limit); +} + function resolveAgentSelectRows(options: { query: string; params: unknown[]; @@ -464,10 +560,38 @@ function createFakeDb( options: FakeDbOptions = {}, ) { const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; + const humanInserts: FakeHumanInsertRow[] = []; + const apiKeyInserts: FakeApiKeyInsertRow[] = []; const agentInserts: FakeAgentInsertRow[] = []; const agentUpdates: FakeAgentUpdateRow[] = []; const revocationInserts: FakeRevocationInsertRow[] = []; const revocationRows = [...(options.revocationRows ?? 
[])]; + const humanRows = rows.reduce((acc, row) => { + if (acc.some((item) => item.id === row.humanId)) { + return acc; + } + + acc.push({ + id: row.humanId, + did: row.humanDid, + displayName: row.humanDisplayName, + role: row.humanRole, + status: row.humanStatus, + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }); + return acc; + }, []); + const apiKeyRows: FakeApiKeyRow[] = rows.map((row) => ({ + id: row.apiKeyId, + humanId: row.humanId, + keyHash: row.keyHash, + keyPrefix: row.keyPrefix, + name: row.apiKeyName, + status: row.apiKeyStatus, + createdAt: "2026-01-01T00:00:00.000Z", + lastUsedAt: null, + })); let beforeFirstAgentUpdateApplied = false; const database: D1Database = { @@ -487,22 +611,61 @@ function createFakeDb( ) { const requestedKeyPrefix = typeof params[0] === "string" ? params[0] : ""; - const matchingRows = rows.filter( + const matchingRows = apiKeyRows.filter( (row) => row.keyPrefix === requestedKeyPrefix, ); return { - results: matchingRows.map((row) => ({ - api_key_id: row.apiKeyId, - key_hash: row.keyHash, - api_key_status: row.apiKeyStatus, - api_key_name: row.apiKeyName, - human_id: row.humanId, - human_did: row.humanDid, - human_display_name: row.humanDisplayName, - human_role: row.humanRole, - human_status: row.humanStatus, - })), + results: matchingRows + .map((row) => { + const human = humanRows.find( + (humanRow) => humanRow.id === row.humanId, + ); + if (!human) { + return undefined; + } + + return { + api_key_id: row.id, + key_hash: row.keyHash, + api_key_status: row.status, + api_key_name: row.name, + human_id: human.id, + human_did: human.did, + human_display_name: human.displayName, + human_role: human.role, + human_status: human.status, + }; + }) + .filter(isDefined), + }; + } + if ( + (normalizedQuery.includes('from "humans"') || + normalizedQuery.includes("from humans")) && + normalizedQuery.includes("select") + ) { + const resultRows = resolveHumanSelectRows({ + query, + params, + 
humanRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getHumanSelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), }; } if ( @@ -556,21 +719,48 @@ function createFakeDb( ) { const requestedKeyPrefix = typeof params[0] === "string" ? params[0] : ""; - const matchingRows = rows.filter( + const matchingRows = apiKeyRows.filter( (row) => row.keyPrefix === requestedKeyPrefix, ); - return matchingRows.map((row) => [ - row.apiKeyId, - row.keyHash, - row.apiKeyStatus, - row.apiKeyName, - row.humanId, - row.humanDid, - row.humanDisplayName, - row.humanRole, - row.humanStatus, - ]); + return matchingRows + .map((row) => { + const human = humanRows.find( + (humanRow) => humanRow.id === row.humanId, + ); + if (!human) { + return undefined; + } + + return [ + row.id, + row.keyHash, + row.status, + row.name, + human.id, + human.did, + human.displayName, + human.role, + human.status, + ]; + }) + .filter(isDefined); + } + if ( + normalizedQuery.includes('from "humans"') || + normalizedQuery.includes("from humans") + ) { + const resultRows = resolveHumanSelectRows({ + query, + params, + humanRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getHumanSelectColumnValue(row, column), + ), + ); } if ( normalizedQuery.includes('from "agents"') || @@ -617,6 +807,97 @@ function createFakeDb( lastUsedAt: String(params[0] ?? ""), apiKeyId: String(params[1] ?? ""), }); + const apiKey = apiKeyRows.find( + (row) => row.id === String(params[1]), + ); + if (apiKey) { + apiKey.lastUsedAt = String(params[0] ?? 
""); + } + changes = 1; + } + if ( + normalizedQuery.includes('insert into "humans"') || + normalizedQuery.includes("insert into humans") + ) { + const columns = parseInsertColumns(query, "humans"); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + humanInserts.push(row); + + const nextHumanId = typeof row.id === "string" ? row.id : ""; + const nextHumanDid = typeof row.did === "string" ? row.did : ""; + const conflict = humanRows.some( + (humanRow) => + humanRow.id === nextHumanId || humanRow.did === nextHumanDid, + ); + + if (!conflict) { + if ( + (row.role === "admin" || row.role === "user") && + (row.status === "active" || row.status === "suspended") && + typeof row.display_name === "string" && + typeof row.created_at === "string" && + typeof row.updated_at === "string" + ) { + humanRows.push({ + id: nextHumanId, + did: nextHumanDid, + displayName: row.display_name, + role: row.role, + status: row.status, + createdAt: row.created_at, + updatedAt: row.updated_at, + }); + } + + changes = 1; + } else { + changes = 0; + } + } + if ( + normalizedQuery.includes('insert into "api_keys"') || + normalizedQuery.includes("insert into api_keys") + ) { + const columns = parseInsertColumns(query, "api_keys"); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + apiKeyInserts.push(row); + + if ( + typeof row.id === "string" && + typeof row.human_id === "string" && + typeof row.key_hash === "string" && + typeof row.key_prefix === "string" && + typeof row.name === "string" && + (row.status === "active" || row.status === "revoked") && + typeof row.created_at === "string" + ) { + apiKeyRows.push({ + id: row.id, + humanId: row.human_id, + keyHash: row.key_hash, + keyPrefix: row.key_prefix, + name: row.name, + status: row.status, + createdAt: row.created_at, + lastUsedAt: + typeof row.last_used_at === "string" + ? 
row.last_used_at + : null, + }); + } + changes = 1; } if ( @@ -772,6 +1053,8 @@ function createFakeDb( return { database, updates, + humanInserts, + apiKeyInserts, agentInserts, agentUpdates, revocationInserts, @@ -846,6 +1129,217 @@ describe("GET /health", () => { }); }); +describe("POST /v1/admin/bootstrap", () => { + it("returns 503 when bootstrap secret is not configured", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(503); + const body = (await response.json()) as { + error: { + code: string; + message: string; + }; + }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_DISABLED"); + expect(body.error.message).toBe("Admin bootstrap is disabled"); + }); + + it("returns 401 when bootstrap secret header is missing", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_UNAUTHORIZED"); + }); + + it("returns 401 when bootstrap secret is invalid", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "wrong-secret", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + 
}, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_UNAUTHORIZED"); + }); + + it("returns 400 when payload is not valid JSON", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: "{not-valid-json", + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_INVALID"); + }); + + it("returns 400 when payload fields are invalid", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: 123, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_INVALID"); + }); + + it("returns 409 when an admin already exists", async () => { + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(409); + const body = (await response.json()) 
as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_ALREADY_COMPLETED"); + }); + + it("creates admin human and PAT token once", async () => { + const { database, humanInserts, apiKeyInserts } = createFakeDb([]); + + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(201); + + const body = (await response.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + status: string; + }; + apiKey: { + id: string; + name: string; + token: string; + }; + }; + + expect(body.human.id).toBe("00000000000000000000000000"); + expect(body.human.did).toBe("did:claw:human:00000000000000000000000000"); + expect(body.human.displayName).toBe("Primary Admin"); + expect(body.human.role).toBe("admin"); + expect(body.human.status).toBe("active"); + expect(body.apiKey.name).toBe("prod-admin-key"); + expect(body.apiKey.token.startsWith("clw_pat_")).toBe(true); + + expect(humanInserts).toHaveLength(1); + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.key_prefix).toBe( + deriveApiKeyLookupPrefix(body.apiKey.token), + ); + expect(apiKeyInserts[0]?.key_hash).toBe( + await hashApiKeyToken(body.apiKey.token), + ); + }); +}); + describe("GET /.well-known/claw-keys.json", () => { it("returns configured registry signing keys with cache headers", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 9b7d6bc..96cc540 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,4 +1,4 @@ -import { generateUlid } from "@clawdentity/protocol"; +import { 
generateUlid, makeHumanDid } from "@clawdentity/protocol"; import { AppError, createHonoErrorHandler, @@ -14,6 +14,7 @@ import { } from "@clawdentity/sdk"; import { and, desc, eq, lt } from "drizzle-orm"; import { Hono } from "hono"; +import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; import { buildAgentRegistration, @@ -35,8 +36,14 @@ import { type AuthenticatedHuman, createApiKeyAuth, } from "./auth/api-key-auth.js"; +import { + constantTimeEqual, + deriveApiKeyLookupPrefix, + generateApiKeyToken, + hashApiKeyToken, +} from "./auth/api-key-token.js"; import { createDb } from "./db/client.js"; -import { agents, humans, revocations } from "./db/schema.js"; +import { agents, api_keys, humans, revocations } from "./db/schema.js"; import { createInMemoryRateLimit, RESOLVE_RATE_LIMIT_MAX_REQUESTS, @@ -48,6 +55,7 @@ type Bindings = { DB: D1Database; ENVIRONMENT: string; APP_VERSION?: string; + BOOTSTRAP_SECRET?: string; REGISTRY_SIGNING_KEY?: string; REGISTRY_SIGNING_KEYS?: string; }; @@ -61,6 +69,8 @@ const CRL_TTL_SECONDS = REGISTRY_CACHE_MAX_AGE_SECONDS + REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS + CRL_EXPIRY_SAFETY_BUFFER_SECONDS; +// Deterministic bootstrap identity guarantees one-time admin creation under races. 
+const BOOTSTRAP_ADMIN_HUMAN_ID = "00000000000000000000000000"; type OwnedAgent = { id: string; @@ -225,6 +235,55 @@ function getMutationRowCount(result: unknown): number | undefined { return undefined; } +function requireBootstrapSecret(bootstrapSecret: string | undefined): string { + if (typeof bootstrapSecret === "string" && bootstrapSecret.length > 0) { + return bootstrapSecret; + } + + throw new AppError({ + code: "ADMIN_BOOTSTRAP_DISABLED", + message: "Admin bootstrap is disabled", + status: 503, + expose: true, + }); +} + +function parseBootstrapSecretHeader(headerValue: string | undefined): string { + if (typeof headerValue !== "string" || headerValue.trim().length === 0) { + throw new AppError({ + code: "ADMIN_BOOTSTRAP_UNAUTHORIZED", + message: "Bootstrap secret is required", + status: 401, + expose: true, + }); + } + + return headerValue.trim(); +} + +function assertBootstrapSecretAuthorized(input: { + provided: string; + expected: string; +}): void { + if (!constantTimeEqual(input.provided, input.expected)) { + throw new AppError({ + code: "ADMIN_BOOTSTRAP_UNAUTHORIZED", + message: "Bootstrap secret is invalid", + status: 401, + expose: true, + }); + } +} + +function adminBootstrapAlreadyCompletedError(): AppError { + return new AppError({ + code: "ADMIN_BOOTSTRAP_ALREADY_COMPLETED", + message: "Admin bootstrap has already completed", + status: 409, + expose: true, + }); +} + function createRegistryApp() { let cachedConfig: RegistryConfig | undefined; @@ -260,6 +319,120 @@ function createRegistryApp() { }); }); + app.post("/v1/admin/bootstrap", async (c) => { + const config = getConfig(c.env); + const expectedBootstrapSecret = requireBootstrapSecret( + config.BOOTSTRAP_SECRET, + ); + const providedBootstrapSecret = parseBootstrapSecretHeader( + c.req.header("x-bootstrap-secret"), + ); + assertBootstrapSecretAuthorized({ + provided: providedBootstrapSecret, + expected: expectedBootstrapSecret, + }); + + let payload: unknown; + try { + payload = await 
c.req.json(); + } catch { + throw new AppError({ + code: "ADMIN_BOOTSTRAP_INVALID", + message: "Request body must be valid JSON", + status: 400, + expose: true, + }); + } + + const bootstrapPayload = parseAdminBootstrapPayload({ + payload, + environment: config.ENVIRONMENT, + }); + + const db = createDb(c.env.DB); + const activeAdminRows = await db + .select({ id: humans.id }) + .from(humans) + .where(eq(humans.role, "admin")) + .limit(1); + if (activeAdminRows.length > 0) { + throw adminBootstrapAlreadyCompletedError(); + } + + const humanId = BOOTSTRAP_ADMIN_HUMAN_ID; + const humanDid = makeHumanDid(humanId); + const apiKeyToken = generateApiKeyToken(); + const apiKeyHash = await hashApiKeyToken(apiKeyToken); + const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); + const apiKeyId = generateUlid(Date.now() + 1); + const createdAt = nowIso(); + + const applyBootstrapMutation = async ( + executor: typeof db, + ): Promise => { + const insertAdminResult = await executor + .insert(humans) + .values({ + id: humanId, + did: humanDid, + display_name: bootstrapPayload.displayName, + role: "admin", + status: "active", + created_at: createdAt, + updated_at: createdAt, + }) + .onConflictDoNothing({ + target: humans.id, + }); + + const insertedRows = getMutationRowCount(insertAdminResult); + if (insertedRows === 0) { + throw adminBootstrapAlreadyCompletedError(); + } + + await executor.insert(api_keys).values({ + id: apiKeyId, + human_id: humanId, + key_hash: apiKeyHash, + key_prefix: apiKeyPrefix, + name: bootstrapPayload.apiKeyName, + status: "active", + created_at: createdAt, + last_used_at: null, + }); + }; + + try { + await db.transaction(async (tx) => { + await applyBootstrapMutation(tx as unknown as typeof db); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyBootstrapMutation(db); + } + + return c.json( + { + human: { + id: humanId, + did: humanDid, + displayName: bootstrapPayload.displayName, + 
role: "admin", + status: "active", + }, + apiKey: { + id: apiKeyId, + name: bootstrapPayload.apiKeyName, + token: apiKeyToken, + }, + }, + 201, + ); + }); + app.get("/.well-known/claw-keys.json", (c) => { const config = getConfig(c.env); return c.json( From b340dc1f1aa82b090ab4d021637c581040e86adb Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 22:39:12 +0530 Subject: [PATCH 054/190] fix(admin-bootstrap): preserve PAT visibility and fallback atomicity --- apps/cli/src/AGENTS.md | 1 + apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/admin.test.ts | 51 +++++++---- apps/cli/src/commands/admin.ts | 41 +++++++-- apps/registry/src/AGENTS.md | 3 +- apps/registry/src/server.test.ts | 128 ++++++++++++++++++++++++++++ apps/registry/src/server.ts | 44 +++++++--- 7 files changed, 234 insertions(+), 35 deletions(-) diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 17db5de..49f9a82 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -9,6 +9,7 @@ - Prefer explicit error-to-reason mapping for operator-facing failures rather than generic stack traces. - Prefer SDK shared primitives (`AppError`, `nowIso`) for new command error/date logic instead of ad-hoc equivalents. - Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. +- Admin bootstrap must print the one-time PAT before attempting to persist it and depend on `persistBootstrapConfig` so config write failures are surfaced via CLI errors while the operator still sees the PAT. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. 
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 7dabcde..efb4aad 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -26,6 +26,7 @@ - Treat bootstrap API key token as write-once secret: print once, persist via config manager, and never log token contents. - Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. +- Config persistence failures after successful bootstrap must not hide the returned PAT token; print token first, then surface recovery instructions. ## Testing Rules - Mock network and filesystem dependencies in command tests. diff --git a/apps/cli/src/commands/admin.test.ts b/apps/cli/src/commands/admin.test.ts index 4b81f2f..d565ab6 100644 --- a/apps/cli/src/commands/admin.test.ts +++ b/apps/cli/src/commands/admin.test.ts @@ -1,8 +1,8 @@ import { describe, expect, it, vi } from "vitest"; -import { bootstrapAdmin } from "./admin.js"; +import { bootstrapAdmin, persistBootstrapConfig } from "./admin.js"; describe("admin bootstrap helper", () => { - it("bootstraps admin and persists registryUrl + apiKey", async () => { + it("requests bootstrap and returns metadata", async () => { const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { const requestBody = JSON.parse(String(init?.body)) as { displayName?: string; @@ -29,7 +29,6 @@ describe("admin bootstrap helper", () => { { status: 201, headers: { "content-type": "application/json" } }, ); }); - const setConfigValueMock = vi.fn(async () => {}); const result = await bootstrapAdmin( { @@ -42,12 +41,12 @@ describe("admin bootstrap helper", () => { resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", }), - setConfigValueImpl: setConfigValueMock, }, ); expect(result.human.did).toBe("did:claw:human:00000000000000000000000000"); 
expect(result.apiKey.token).toBe("clw_pat_testtoken"); + expect(result.registryUrl).toBe("https://api.example.com/"); expect(fetchMock).toHaveBeenCalledTimes(1); const [calledInput, calledInit] = fetchMock.mock.calls[0] as [ URL, @@ -60,16 +59,6 @@ describe("admin bootstrap helper", () => { expect( (calledInit.headers as Record)["x-bootstrap-secret"], ).toBe("bootstrap-secret"); - expect(setConfigValueMock).toHaveBeenNthCalledWith( - 1, - "registryUrl", - "https://api.example.com/", - ); - expect(setConfigValueMock).toHaveBeenNthCalledWith( - 2, - "apiKey", - "clw_pat_testtoken", - ); }); it("maps registry bootstrap conflict to stable CLI message", async () => { @@ -95,7 +84,6 @@ describe("admin bootstrap helper", () => { resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", }), - setConfigValueImpl: vi.fn(async () => {}), }, ), ).rejects.toMatchObject({ @@ -122,7 +110,6 @@ describe("admin bootstrap helper", () => { resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", }), - setConfigValueImpl: vi.fn(async () => {}), }, ), ).rejects.toMatchObject({ @@ -131,3 +118,35 @@ describe("admin bootstrap helper", () => { }); }); }); + +describe("persist bootstrap config", () => { + it("saves registry url and api key sequentially", async () => { + const setConfigValueMock = vi.fn(async () => {}); + + await persistBootstrapConfig("https://api.example.com/", "token", { + setConfigValueImpl: setConfigValueMock, + }); + + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 1, + "registryUrl", + "https://api.example.com/", + ); + expect(setConfigValueMock).toHaveBeenNthCalledWith(2, "apiKey", "token"); + }); + + it("throws CLI error when persistence fails", async () => { + const setConfigValueMock = vi.fn(async () => { + throw new Error("disk-full"); + }); + + await expect( + persistBootstrapConfig("https://api.example.com/", "token", { + setConfigValueImpl: setConfigValueMock, + }), + ).rejects.toMatchObject({ + code: 
"CLI_ADMIN_BOOTSTRAP_CONFIG_PERSISTENCE_FAILED", + message: "Failed to save admin credentials locally", + }); + }); +}); diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index 37ef611..80a2966 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -33,6 +33,10 @@ type AdminBootstrapResponse = { }; }; +export type AdminBootstrapResult = AdminBootstrapResponse & { + registryUrl: string; +}; + type BootstrapErrorBody = { error?: { code?: string; @@ -43,6 +47,9 @@ type BootstrapErrorBody = { type AdminBootstrapDependencies = { fetchImpl?: typeof fetch; resolveConfigImpl?: () => Promise; +}; + +type AdminBootstrapPersistenceDependencies = { setConfigValueImpl?: typeof setConfigValue; }; @@ -156,7 +163,7 @@ function mapBootstrapFailureMessage(payload: BootstrapErrorBody): string { export async function bootstrapAdmin( options: AdminBootstrapOptions, dependencies: AdminBootstrapDependencies = {}, -): Promise { +): Promise { const bootstrapSecret = parseNonEmptyString(options.bootstrapSecret); if (bootstrapSecret.length === 0) { throw createCliError( @@ -167,7 +174,6 @@ export async function bootstrapAdmin( const fetchImpl = dependencies.fetchImpl ?? fetch; const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const setConfigValueImpl = dependencies.setConfigValueImpl ?? 
setConfigValue; const config = await resolveConfigImpl(); const registryUrl = resolveBootstrapRegistryUrl({ overrideRegistryUrl: options.registryUrl, @@ -216,10 +222,31 @@ export async function bootstrapAdmin( const parsed = parseBootstrapResponse(payload); - await setConfigValueImpl("registryUrl", registryUrl); - await setConfigValueImpl("apiKey", parsed.apiKey.token); + return { + ...parsed, + registryUrl, + }; +} + +export async function persistBootstrapConfig( + registryUrl: string, + apiKeyToken: string, + dependencies: AdminBootstrapPersistenceDependencies = {}, +): Promise { + const setConfigValueImpl = dependencies.setConfigValueImpl ?? setConfigValue; - return parsed; + try { + await setConfigValueImpl("registryUrl", registryUrl); + await setConfigValueImpl("apiKey", apiKeyToken); + } catch (error) { + logger.warn("cli.admin_bootstrap_config_persist_failed", { + errorName: error instanceof Error ? error.name : "unknown", + }); + throw createCliError( + "CLI_ADMIN_BOOTSTRAP_CONFIG_PERSISTENCE_FAILED", + "Failed to save admin credentials locally", + ); + } } export const createAdminCommand = (): Command => { @@ -245,9 +272,11 @@ export const createAdminCommand = (): Command => { writeStdoutLine("Admin bootstrap completed"); writeStdoutLine(`Human DID: ${result.human.did}`); writeStdoutLine(`API key name: ${result.apiKey.name}`); - writeStdoutLine("API key saved to local config"); writeStdoutLine("API key token (shown once):"); writeStdoutLine(result.apiKey.token); + + await persistBootstrapConfig(result.registryUrl, result.apiKey.token); + writeStdoutLine("API key saved to local config"); }, ), ); diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index fb3629a..11f9f74 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -17,7 +17,8 @@ - If `BOOTSTRAP_SECRET` is not configured, return `503 ADMIN_BOOTSTRAP_DISABLED`. - If any admin human already exists, return `409 ADMIN_BOOTSTRAP_ALREADY_COMPLETED`. 
- Success response must include `{ human, apiKey }` and return the PAT token only in bootstrap response. -- Persist admin bootstrap atomically where supported (transaction), with local fallback only for environments that cannot start transactions. +- Persist admin bootstrap atomically where supported (transaction). When falling back because transactions are unavailable, run the manual mutation with rollback-on-api-key-failure so that no admin human exists without the new API key even if part of the bootstrap fails. +- Fallback path must be compensation-safe: if API key insert fails after admin insert, delete the inserted admin row before returning failure so retry remains possible. ## Registry Keyset Contract - `/.well-known/claw-keys.json` is a public endpoint and must remain unauthenticated. diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index fe7b61c..f6a301d 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -125,6 +125,8 @@ type FakeAgentSelectRow = { type FakeDbOptions = { beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; + failApiKeyInsertCount?: number; + failBeginTransaction?: boolean; revocationRows?: FakeRevocationRow[]; }; @@ -593,6 +595,7 @@ function createFakeDb( lastUsedAt: null, })); let beforeFirstAgentUpdateApplied = false; + let remainingApiKeyInsertFailures = options.failApiKeyInsertCount ?? 
0; const database: D1Database = { prepare(query: string) { @@ -797,6 +800,13 @@ function createFakeDb( return []; }, async run() { + if ( + options.failBeginTransaction && + normalizedQuery.trim() === "begin" + ) { + throw new Error("Failed query: begin"); + } + let changes = 0; if ( @@ -864,6 +874,11 @@ function createFakeDb( normalizedQuery.includes('insert into "api_keys"') || normalizedQuery.includes("insert into api_keys") ) { + if (remainingApiKeyInsertFailures > 0) { + remainingApiKeyInsertFailures -= 1; + throw new Error("api key insert failed"); + } + const columns = parseInsertColumns(query, "api_keys"); const row = columns.reduce( (acc, column, index) => { @@ -900,6 +915,35 @@ function createFakeDb( changes = 1; } + if ( + normalizedQuery.includes('delete from "humans"') || + normalizedQuery.includes("delete from humans") + ) { + const whereClause = extractWhereClause(query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params, + }); + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? 
String(equalityParams.values.id[0]) + : ""; + + if (idFilter.length > 0) { + for (let index = humanRows.length - 1; index >= 0; index -= 1) { + if (humanRows[index]?.id === idFilter) { + humanRows.splice(index, 1); + changes += 1; + } + } + + for (let index = apiKeyRows.length - 1; index >= 0; index -= 1) { + if (apiKeyRows[index]?.humanId === idFilter) { + apiKeyRows.splice(index, 1); + } + } + } + } if ( normalizedQuery.includes('insert into "agents"') || normalizedQuery.includes("insert into agents") @@ -1053,6 +1097,7 @@ function createFakeDb( return { database, updates, + humanRows, humanInserts, apiKeyInserts, agentInserts, @@ -1338,6 +1383,89 @@ describe("POST /v1/admin/bootstrap", () => { await hashApiKeyToken(body.apiKey.token), ); }); + + it("falls back to manual mutation when transactions are unavailable", async () => { + const { database, humanInserts, apiKeyInserts } = createFakeDb([], [], { + failBeginTransaction: true, + }); + + const response = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(201); + expect(humanInserts).toHaveLength(1); + expect(apiKeyInserts).toHaveLength(1); + }); + + it("rolls back admin insert when fallback api key insert fails", async () => { + const { database, humanRows } = createFakeDb([], [], { + failBeginTransaction: true, + failApiKeyInsertCount: 1, + }); + + const firstResponse = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: 
database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(firstResponse.status).toBe(500); + expect(humanRows).toHaveLength(0); + + const secondResponse = await createRegistryApp().request( + "/v1/admin/bootstrap", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(secondResponse.status).toBe(201); + expect(humanRows).toHaveLength(1); + }); }); describe("GET /.well-known/claw-keys.json", () => { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 96cc540..167fe22 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -369,6 +369,7 @@ function createRegistryApp() { const applyBootstrapMutation = async ( executor: typeof db, + options: { rollbackOnApiKeyFailure: boolean }, ): Promise => { const insertAdminResult = await executor .insert(humans) @@ -390,28 +391,47 @@ function createRegistryApp() { throw adminBootstrapAlreadyCompletedError(); } - await executor.insert(api_keys).values({ - id: apiKeyId, - human_id: humanId, - key_hash: apiKeyHash, - key_prefix: apiKeyPrefix, - name: bootstrapPayload.apiKeyName, - status: "active", - created_at: createdAt, - last_used_at: null, - }); + try { + await executor.insert(api_keys).values({ + id: apiKeyId, + human_id: humanId, + key_hash: apiKeyHash, + key_prefix: apiKeyPrefix, + name: bootstrapPayload.apiKeyName, + status: "active", + created_at: createdAt, + last_used_at: null, + }); + } catch (error) { + if (options.rollbackOnApiKeyFailure) { + try { + await executor.delete(humans).where(eq(humans.id, humanId)); + } catch (rollbackError) { + logger.error("registry.admin_bootstrap_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? 
rollbackError.name : "unknown", + }); + } + } + + throw error; + } }; try { await db.transaction(async (tx) => { - await applyBootstrapMutation(tx as unknown as typeof db); + await applyBootstrapMutation(tx as unknown as typeof db, { + rollbackOnApiKeyFailure: false, + }); }); } catch (error) { if (!isUnsupportedLocalTransactionError(error)) { throw error; } - await applyBootstrapMutation(db); + await applyBootstrapMutation(db, { + rollbackOnApiKeyFailure: true, + }); } return c.json( From 25fd0b9399ac4a0e89f0a571f27a4beaeaafcc8c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 22:49:46 +0530 Subject: [PATCH 055/190] fix(ci): sync pnpm lockfile for cli dependency metadata --- pnpm-lock.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f204ed7..5c23fd9 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -35,16 +35,16 @@ importers: apps/cli: dependencies: + commander: + specifier: ^13.1.0 + version: 13.1.0 + devDependencies: '@clawdentity/protocol': specifier: workspace:* version: link:../../packages/protocol '@clawdentity/sdk': specifier: workspace:* version: link:../../packages/sdk - commander: - specifier: ^13.1.0 - version: 13.1.0 - devDependencies: '@types/node': specifier: ^22.18.11 version: 22.19.11 From a4799fd9065736e68e1b0a2a75103f1811a1e74d Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sun, 15 Feb 2026 23:06:25 +0530 Subject: [PATCH 056/190] feat(proxy): add per-agent DID rate limiting (T30) --- apps/proxy/AGENTS.md | 4 +- apps/proxy/src/AGENTS.md | 3 + .../src/agent-rate-limit-middleware.test.ts | 205 ++++++++++++++++++ apps/proxy/src/agent-rate-limit-middleware.ts | 82 +++++++ apps/proxy/src/config.test.ts | 24 ++ apps/proxy/src/config.ts | 25 +++ apps/proxy/src/server.ts | 14 ++ 7 files changed, 356 insertions(+), 1 deletion(-) create mode 100644 apps/proxy/src/agent-rate-limit-middleware.test.ts create mode 100644 apps/proxy/src/agent-rate-limit-middleware.ts diff --git 
a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 63330b6..5fa774f 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -8,6 +8,7 @@ - Keep runtime config centralized in `src/config.ts`. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). +- Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). - Require hook token input via env (`OPENCLAW_HOOK_TOKEN` or OpenClaw-compatible alias `OPENCLAW_HOOKS_TOKEN`) and never log the token value. - Load env files with OpenClaw precedence and no overrides: @@ -42,6 +43,7 @@ - When CRL verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning dependency failure. - Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. - Return `403` when requests are verified but agent DID is not allowlisted. +- Return `429` with `PROXY_RATE_LIMIT_EXCEEDED` when an allowlisted verified agent DID exceeds its request budget within the configured window. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. ## CRL Policy @@ -56,6 +58,6 @@ ## Server Runtime - Keep `src/server.ts` as the HTTP app/runtime entry. -- Keep middleware order stable: request context -> request logging -> error handler. +- Keep middleware order stable: request context -> request logging -> auth verification -> agent DID rate limit -> error handler. - Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200. 
- Log startup and request completion with structured JSON logs; never log secrets or tokens. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index d69bcb2..e98d256 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -3,8 +3,10 @@ ## Source Layout - Keep `index.ts` as runtime bootstrap surface and version export. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. +- Keep agent DID rate-limit env parsing in `config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE`, `AGENT_RATE_LIMIT_WINDOW_MS`) and validate as positive integers. - Keep HTTP app/startup concerns in `server.ts`; use `bin.ts` as process entrypoint for Node runtime startup. - Keep inbound auth verification in `auth-middleware.ts` with focused helpers for token parsing, registry material loading, CRL checks, and replay protection. +- Keep per-agent DID throttling in `agent-rate-limit-middleware.ts`; do not blend rate-limit state or counters into `auth-middleware.ts`. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. @@ -21,6 +23,7 @@ - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. - Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. 
+- Keep rate-limit failure semantics stable: verified requests over budget map to `429` with code `PROXY_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.rate_limit.exceeded`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. diff --git a/apps/proxy/src/agent-rate-limit-middleware.test.ts b/apps/proxy/src/agent-rate-limit-middleware.test.ts new file mode 100644 index 0000000..03b7a17 --- /dev/null +++ b/apps/proxy/src/agent-rate-limit-middleware.test.ts @@ -0,0 +1,205 @@ +import { + createHonoErrorHandler, + createRequestContextMiddleware, + type Logger, +} from "@clawdentity/sdk"; +import { Hono } from "hono"; +import { describe, expect, it, vi } from "vitest"; +import { createAgentRateLimitMiddleware } from "./agent-rate-limit-middleware.js"; +import type { ProxyRequestVariables } from "./auth-middleware.js"; + +type MockLogger = Logger & { + warnSpy: ReturnType; +}; + +function createMockLogger(): MockLogger { + const warnSpy = vi.fn(); + const logger: MockLogger = { + debug: vi.fn(), + info: vi.fn(), + warn: warnSpy, + error: vi.fn(), + child: () => logger, + warnSpy, + }; + return logger; +} + +function createRateLimitTestApp(input: { + maxRequests: number; + windowMs: number; + nowMs: () => number; + logger: Logger; +}) { + const app = new Hono<{ Variables: ProxyRequestVariables }>(); + app.use("*", createRequestContextMiddleware()); + app.use("*", async (c, next) => { + const testAgentDid = c.req.header("x-test-agent-did"); + if (typeof testAgentDid === "string" && testAgentDid.trim().length > 0) { + c.set("auth", { + agentDid: testAgentDid, + ownerDid: "did:claw:human:test-owner", + aitJti: 
"test-jti", + issuer: "https://api.clawdentity.com", + cnfPublicKey: "test-key", + }); + } + + await next(); + }); + app.use( + "*", + createAgentRateLimitMiddleware({ + config: { + agentRateLimitRequestsPerMinute: input.maxRequests, + agentRateLimitWindowMs: input.windowMs, + }, + logger: input.logger, + nowMs: input.nowMs, + }), + ); + app.onError(createHonoErrorHandler(input.logger)); + app.get("/health", (c) => c.json({ status: "ok" })); + app.post("/protected", (c) => + c.json({ ok: true, agentDid: c.get("auth")?.agentDid }), + ); + + return app; +} + +describe("proxy agent DID rate limit middleware", () => { + it("returns 429 with PROXY_RATE_LIMIT_EXCEEDED when requests exceed limit", async () => { + const now = 1_000; + const logger = createMockLogger(); + const app = createRateLimitTestApp({ + maxRequests: 2, + windowMs: 60_000, + nowMs: () => now, + logger, + }); + const headers = { + "content-type": "application/json", + "x-test-agent-did": "did:claw:agent:alpha", + }; + + const first = await app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "1" }), + }); + const second = await app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "2" }), + }); + const third = await app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "3" }), + }); + + expect(first.status).toBe(200); + expect(second.status).toBe(200); + expect(third.status).toBe(429); + const body = (await third.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RATE_LIMIT_EXCEEDED"); + expect(logger.warnSpy).toHaveBeenCalledWith("proxy.rate_limit.exceeded", { + agentDid: "did:claw:agent:alpha", + windowMs: 60_000, + maxRequests: 2, + }); + }); + + it("tracks counters per agent DID independently", async () => { + const logger = createMockLogger(); + const app = createRateLimitTestApp({ + maxRequests: 1, + windowMs: 60_000, + nowMs: () => 2_000, + logger, + }); + + 
const alphaHeaders = { + "content-type": "application/json", + "x-test-agent-did": "did:claw:agent:alpha", + }; + const betaHeaders = { + "content-type": "application/json", + "x-test-agent-did": "did:claw:agent:beta", + }; + + const alphaFirst = await app.request("/protected", { + method: "POST", + headers: alphaHeaders, + body: JSON.stringify({ message: "alpha-1" }), + }); + const betaFirst = await app.request("/protected", { + method: "POST", + headers: betaHeaders, + body: JSON.stringify({ message: "beta-1" }), + }); + const alphaSecond = await app.request("/protected", { + method: "POST", + headers: alphaHeaders, + body: JSON.stringify({ message: "alpha-2" }), + }); + + expect(alphaFirst.status).toBe(200); + expect(betaFirst.status).toBe(200); + expect(alphaSecond.status).toBe(429); + }); + + it("resets counters after window expiry", async () => { + let now = 10_000; + const logger = createMockLogger(); + const app = createRateLimitTestApp({ + maxRequests: 1, + windowMs: 1_000, + nowMs: () => now, + logger, + }); + const headers = { + "content-type": "application/json", + "x-test-agent-did": "did:claw:agent:alpha", + }; + + const first = await app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "1" }), + }); + const second = await app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "2" }), + }); + now += 1_001; + const third = await app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "3" }), + }); + + expect(first.status).toBe(200); + expect(second.status).toBe(429); + expect(third.status).toBe(200); + }); + + it("keeps /health unthrottled", async () => { + const logger = createMockLogger(); + const app = createRateLimitTestApp({ + maxRequests: 1, + windowMs: 60_000, + nowMs: () => 10_000, + logger, + }); + + const first = await app.request("/health"); + const second = await app.request("/health"); + const third = await app.request("/health"); + 
+ expect(first.status).toBe(200); + expect(second.status).toBe(200); + expect(third.status).toBe(200); + }); +}); diff --git a/apps/proxy/src/agent-rate-limit-middleware.ts b/apps/proxy/src/agent-rate-limit-middleware.ts new file mode 100644 index 0000000..81f9360 --- /dev/null +++ b/apps/proxy/src/agent-rate-limit-middleware.ts @@ -0,0 +1,82 @@ +import { AppError, type Logger } from "@clawdentity/sdk"; +import { createMiddleware } from "hono/factory"; +import type { ProxyRequestVariables } from "./auth-middleware.js"; +import type { ProxyConfig } from "./config.js"; + +type InMemoryBucket = { + windowStartedAtMs: number; + count: number; +}; + +export type AgentRateLimitMiddlewareOptions = { + config: Pick< + ProxyConfig, + "agentRateLimitRequestsPerMinute" | "agentRateLimitWindowMs" + >; + logger: Logger; + nowMs?: () => number; +}; + +export function createAgentRateLimitMiddleware( + options: AgentRateLimitMiddlewareOptions, +) { + const nowMs = options.nowMs ?? Date.now; + const buckets = new Map(); + + return createMiddleware<{ Variables: ProxyRequestVariables }>( + async (c, next) => { + if (c.req.path === "/health") { + await next(); + return; + } + + const auth = c.get("auth"); + if (!auth) { + await next(); + return; + } + + const now = nowMs(); + for (const [agentDid, bucket] of buckets.entries()) { + if ( + now - bucket.windowStartedAtMs >= + options.config.agentRateLimitWindowMs + ) { + buckets.delete(agentDid); + } + } + + const existing = buckets.get(auth.agentDid); + if ( + !existing || + now - existing.windowStartedAtMs >= + options.config.agentRateLimitWindowMs + ) { + buckets.set(auth.agentDid, { + windowStartedAtMs: now, + count: 1, + }); + await next(); + return; + } + + if (existing.count >= options.config.agentRateLimitRequestsPerMinute) { + options.logger.warn("proxy.rate_limit.exceeded", { + agentDid: auth.agentDid, + windowMs: options.config.agentRateLimitWindowMs, + maxRequests: options.config.agentRateLimitRequestsPerMinute, + }); + throw 
new AppError({ + code: "PROXY_RATE_LIMIT_EXCEEDED", + message: "Too many requests", + status: 429, + expose: true, + }); + } + + existing.count += 1; + buckets.set(auth.agentDid, existing); + await next(); + }, + ); +} diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 9590ec2..a488be8 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -3,6 +3,8 @@ import { tmpdir } from "node:os"; import { join } from "node:path"; import { describe, expect, it } from "vitest"; import { + DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, DEFAULT_CRL_MAX_AGE_MS, DEFAULT_CRL_REFRESH_INTERVAL_MS, DEFAULT_OPENCLAW_BASE_URL, @@ -35,6 +37,9 @@ describe("proxy config", () => { crlRefreshIntervalMs: DEFAULT_CRL_REFRESH_INTERVAL_MS, crlMaxAgeMs: DEFAULT_CRL_MAX_AGE_MS, crlStaleBehavior: "fail-open", + agentRateLimitRequestsPerMinute: + DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + agentRateLimitWindowMs: DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, }); }); @@ -45,6 +50,8 @@ describe("proxy config", () => { CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: "75", + AGENT_RATE_LIMIT_WINDOW_MS: "90000", }); expect(config.listenPort).toBe(4100); @@ -52,6 +59,8 @@ describe("proxy config", () => { expect(config.registryUrl).toBe("https://registry.example.com"); expect(config.environment).toBe("local"); expect(config.crlStaleBehavior).toBe("fail-closed"); + expect(config.agentRateLimitRequestsPerMinute).toBe(75); + expect(config.agentRateLimitWindowMs).toBe(90000); }); it("parses allow list object and override env lists", () => { @@ -113,6 +122,21 @@ describe("proxy config", () => { }), ).toThrow(ProxyConfigError); }); + + it("throws on invalid agent DID rate-limit values", () => { + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: "0", + }), + 
).toThrow(ProxyConfigError); + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + AGENT_RATE_LIMIT_WINDOW_MS: "-1", + }), + ).toThrow(ProxyConfigError); + }); }); describe("proxy config loading", () => { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 798f6f1..30b9f85 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -26,6 +26,8 @@ export const DEFAULT_PROXY_ENVIRONMENT: ProxyEnvironment = "development"; export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; +export const DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE = 60; +export const DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS = 60 * 1000; export class ProxyConfigError extends Error { readonly code = "CONFIG_VALIDATION_FAILED"; @@ -72,6 +74,16 @@ const proxyRuntimeEnvSchema = z.object({ CRL_STALE_BEHAVIOR: z .enum(["fail-open", "fail-closed"]) .default(DEFAULT_CRL_STALE_BEHAVIOR), + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE), + AGENT_RATE_LIMIT_WINDOW_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS), }); const proxyAllowListSchema = z @@ -91,6 +103,8 @@ export const proxyConfigSchema = z.object({ crlRefreshIntervalMs: z.number().int().positive(), crlMaxAgeMs: z.number().int().positive(), crlStaleBehavior: z.enum(["fail-open", "fail-closed"]), + agentRateLimitRequestsPerMinute: z.number().int().positive(), + agentRateLimitWindowMs: z.number().int().positive(), }); export type ProxyConfig = z.infer; @@ -112,6 +126,8 @@ type RuntimeEnvInput = { CRL_REFRESH_INTERVAL_MS?: unknown; CRL_MAX_AGE_MS?: unknown; CRL_STALE_BEHAVIOR?: unknown; + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: unknown; + AGENT_RATE_LIMIT_WINDOW_MS?: unknown; OPENCLAW_STATE_DIR?: unknown; CLAWDBOT_STATE_DIR?: unknown; 
OPENCLAW_CONFIG_PATH?: unknown; @@ -397,6 +413,12 @@ function normalizeRuntimeEnv(input: unknown): Record { CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), CRL_STALE_BEHAVIOR: firstNonEmpty(env, ["CRL_STALE_BEHAVIOR"]), + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: firstNonEmpty(env, [ + "AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE", + ]), + AGENT_RATE_LIMIT_WINDOW_MS: firstNonEmpty(env, [ + "AGENT_RATE_LIMIT_WINDOW_MS", + ]), }; } @@ -522,6 +544,9 @@ export function parseProxyConfig(env: unknown): ProxyConfig { crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, crlMaxAgeMs: parsedRuntimeEnv.data.CRL_MAX_AGE_MS, crlStaleBehavior: parsedRuntimeEnv.data.CRL_STALE_BEHAVIOR, + agentRateLimitRequestsPerMinute: + parsedRuntimeEnv.data.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + agentRateLimitWindowMs: parsedRuntimeEnv.data.AGENT_RATE_LIMIT_WINDOW_MS, }; const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index 3298606..d3f320c 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -13,6 +13,7 @@ import { type AgentHookRuntimeOptions, createAgentHookHandler, } from "./agent-hook-route.js"; +import { createAgentRateLimitMiddleware } from "./agent-rate-limit-middleware.js"; import { createProxyAuthMiddleware, type ProxyRequestVariables, @@ -28,11 +29,16 @@ type ProxyAuthRuntimeOptions = { crlCache?: CrlCache; }; +type ProxyRateLimitRuntimeOptions = { + nowMs?: () => number; +}; + type CreateProxyAppOptions = { config: ProxyConfig; logger?: Logger; registerRoutes?: (app: ProxyApp) => void; auth?: ProxyAuthRuntimeOptions; + rateLimit?: ProxyRateLimitRuntimeOptions; hooks?: AgentHookRuntimeOptions; }; @@ -74,6 +80,14 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { ...options.auth, }), ); + app.use( + "*", + createAgentRateLimitMiddleware({ + config: options.config, + 
logger, + ...options.rateLimit, + }), + ); app.onError(createHonoErrorHandler(logger)); app.get("/health", (c) => From 3a9820de5be6b3f197c1433f6bfa37ebba551b58 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 09:26:04 +0530 Subject: [PATCH 057/190] feat(proxy): add cloudflare worker runtime and relay config plumbing --- README.md | 15 +- apps/cli/AGENTS.md | 1 + apps/cli/src/commands/AGENTS.md | 2 + apps/cli/src/commands/openclaw.test.ts | 87 ++++++++ apps/cli/src/commands/openclaw.ts | 165 +++++++++++++- apps/openclaw-skill/AGENTS.md | 3 +- apps/openclaw-skill/skill/SKILL.md | 6 + .../skill/references/clawdentity-protocol.md | 17 ++ apps/proxy/.env.example | 26 +++ apps/proxy/AGENTS.md | 9 + apps/proxy/package.json | 17 ++ apps/proxy/src/AGENTS.md | 10 +- apps/proxy/src/agent-hook-route.test.ts | 206 +++++++++++++++++- apps/proxy/src/agent-hook-route.ts | 68 ++++++ apps/proxy/src/bin.ts | 2 +- apps/proxy/src/config.test.ts | 105 ++++++++- apps/proxy/src/config.ts | 147 +++++++++++++ apps/proxy/src/node-server.ts | 53 +++++ apps/proxy/src/server.test.ts | 3 +- apps/proxy/src/server.ts | 46 +--- apps/proxy/src/worker.test.ts | 97 +++++++++ apps/proxy/src/worker.ts | 184 ++++++++++++++++ apps/proxy/tsconfig.json | 2 + apps/proxy/tsup.config.ts | 8 +- apps/proxy/wrangler.jsonc | 34 +++ apps/registry/.env.example | 20 +- apps/registry/AGENTS.md | 1 + package.json | 5 +- pnpm-lock.yaml | 7 + 29 files changed, 1273 insertions(+), 73 deletions(-) create mode 100644 apps/proxy/.env.example create mode 100644 apps/proxy/src/node-server.ts create mode 100644 apps/proxy/src/worker.test.ts create mode 100644 apps/proxy/src/worker.ts create mode 100644 apps/proxy/wrangler.jsonc diff --git a/README.md b/README.md index 5be336f..7b33cc2 100644 --- a/README.md +++ b/README.md @@ -150,7 +150,7 @@ OpenClaw Gateway (normal /hooks/agent handling) This repo is a monorepo: - `apps/registry` — issues AITs, serves CRL + public keys (Worker config: 
`apps/registry/wrangler.jsonc`) -- `apps/proxy` — verifies Clawdentity headers then forwards to OpenClaw hooks +- `apps/proxy` — verifies Clawdentity headers then forwards to OpenClaw hooks (Worker config: `apps/proxy/wrangler.jsonc`) - `apps/cli` — operator workflow (`claw create`, `claw revoke`, `claw share`) - `packages/sdk` — TS SDK (sign + verify + CRL cache) - `packages/protocol` — shared types + canonical signing rules @@ -180,10 +180,21 @@ This repo is a monorepo: ### 3) Proxy enforcement before OpenClaw - Handled by: `apps/proxy` -- Sidecar proxy verifies AIT + CRL + PoP before forwarding to OpenClaw. +- Proxy Worker verifies AIT + CRL + PoP before forwarding to OpenClaw. - Enforces caller allowlist policy by DID. - Applies per-agent rate limiting. - Keeps `hooks.token` private and only injects it internally during forward. +- Optional: set `INJECT_IDENTITY_INTO_MESSAGE=true` to prepend a sanitized identity block + (`agentDid`, `ownerDid`, `issuer`, `aitJti`) into `/hooks/agent` payload `message`. + Default is `false`, which keeps payloads unchanged. + +### Proxy Worker local runs + +- Local env (`ENVIRONMENT=local`): `pnpm dev:proxy` +- Development env (`ENVIRONMENT=development`): `pnpm dev:proxy:development` +- Fresh deploy-like env: `pnpm dev:proxy:fresh` +- Production deploy command: `pnpm -F @clawdentity/proxy run deploy:production` +- Environment intent: `local` is local Wrangler development only; `development` and `production` are cloud deployment environments. ### 4) Operator lifecycle tooling (CLI) diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index fd09032..b0e2fac 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -21,6 +21,7 @@ - Local CLI config lives at `~/.clawdentity/config.json`. - CLI verification caches live under `~/.clawdentity/cache/` and must never include private keys or PATs. - Agent identities live at `~/.clawdentity/agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. 
+- OpenClaw setup runtime hint lives at `~/.clawdentity/openclaw-relay.json` and stores `openclawBaseUrl` for proxy fallback. - Reject `.` and `..` as agent names before any filesystem operation to prevent directory traversal outside `~/.clawdentity/agents/`. - Resolve values with explicit precedence: environment variables > config file > built-in defaults. - Keep API tokens masked in human-facing output (`show`, success logs, debug prints). diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index efb4aad..6328c2e 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -19,6 +19,8 @@ ## OpenClaw Command Rules - `openclaw invite` must generate self-contained invite code from admin-provided DID + proxy URL. - `openclaw setup` must be idempotent for relay mapping updates and peer map writes. +- `openclaw setup` must persist/update `~/.clawdentity/openclaw-relay.json` with the resolved `openclawBaseUrl` so downstream proxy runtime can boot without manual env edits. +- `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. - Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. 
## Admin Command Rules diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 42a2ade..e586687 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -158,7 +158,94 @@ describe("openclaw command helpers", () => { "utf8", ).trim(); expect(selectedAgent).toBe("alpha"); + + expect(result.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawBaseUrl: string; + updatedAt: string; + }; + expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + expect(relayRuntimeConfig.updatedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + } finally { + sandbox.cleanup(); + } + }); + + it("stores explicit OpenClaw base URL in relay runtime config", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + openclawBaseUrl: "http://127.0.0.1:19001", + }); + + expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19001"); + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawBaseUrl: string; + }; + expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:19001"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses OPENCLAW_BASE_URL env when setup option is omitted", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousBaseUrl = 
process.env.OPENCLAW_BASE_URL; + process.env.OPENCLAW_BASE_URL = "http://127.0.0.1:19555"; + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19555"); + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawBaseUrl: string; + }; + expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:19555"); } finally { + if (previousBaseUrl === undefined) { + delete process.env.OPENCLAW_BASE_URL; + } else { + process.env.OPENCLAW_BASE_URL = previousBaseUrl; + } sandbox.cleanup(); } }); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 1c0e597..d91c985 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -14,6 +14,7 @@ import { withErrorHandling } from "./helpers.js"; const logger = createLogger({ service: "cli", module: "openclaw" }); +const CLAWDENTITY_DIR_NAME = ".clawdentity"; const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; const SECRET_KEY_FILE_NAME = "secret.key"; @@ -21,10 +22,12 @@ const PEERS_FILE_NAME = "peers.json"; const OPENCLAW_DIR_NAME = ".openclaw"; const OPENCLAW_CONFIG_FILE_NAME = "openclaw.json"; const OPENCLAW_AGENT_FILE_NAME = "openclaw-agent-name"; +const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; const RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; const HOOK_MAPPING_ID = "clawdentity-send-to-peer"; const HOOK_PATH_SEND_TO_PEER = "send-to-peer"; +const DEFAULT_OPENCLAW_BASE_URL = 
"http://127.0.0.1:18789"; const INVITE_CODE_PREFIX = "clawd1_"; const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; const FILE_MODE = 0o600; @@ -52,6 +55,7 @@ type OpenclawSetupOptions = { peerAlias?: string; openclawDir?: string; transformSource?: string; + openclawBaseUrl?: string; homeDir?: string; }; @@ -79,6 +83,13 @@ export type OpenclawSetupResult = { peerProxyUrl: string; openclawConfigPath: string; transformTargetPath: string; + openclawBaseUrl: string; + relayRuntimeConfigPath: string; +}; + +type OpenclawRelayRuntimeConfig = { + openclawBaseUrl: string; + updatedAt?: string; }; function isRecord(value: unknown): value is Record { @@ -157,28 +168,52 @@ function parsePeerAlias(value: unknown): string { } function parseProxyUrl(value: unknown): string { - const candidate = parseNonEmptyString(value, "proxy URL"); + return parseHttpUrl(value, { + label: "proxy URL", + code: "CLI_OPENCLAW_INVALID_PROXY_URL", + message: "proxy URL must be a valid URL", + }); +} +function parseHttpUrl( + value: unknown, + input: { + label: string; + code: string; + message: string; + }, +): string { + const candidate = parseNonEmptyString(value, input.label); let parsedUrl: URL; try { parsedUrl = new URL(candidate); } catch { - throw createCliError( - "CLI_OPENCLAW_INVALID_PROXY_URL", - "proxy URL must be a valid URL", - ); + throw createCliError(input.code, input.message); } if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { - throw createCliError( - "CLI_OPENCLAW_INVALID_PROXY_URL", - "proxy URL must use http or https", - ); + throw createCliError(input.code, `${input.label} must use http or https`); + } + + if ( + parsedUrl.pathname === "/" && + parsedUrl.search.length === 0 && + parsedUrl.hash.length === 0 + ) { + return parsedUrl.origin; } return parsedUrl.toString(); } +function parseOpenclawBaseUrl(value: unknown): string { + return parseHttpUrl(value, { + label: "OpenClaw base URL", + code: "CLI_OPENCLAW_INVALID_OPENCLAW_BASE_URL", + message: "OpenClaw 
base URL must be a valid URL", + }); +} + function parseAgentDid(value: unknown, label: string): string { const did = parseNonEmptyString(value, label); @@ -267,11 +302,11 @@ function resolveOpenclawDir(openclawDir: string | undefined, homeDir: string) { } function resolveAgentDirectory(homeDir: string, agentName: string): string { - return join(homeDir, ".clawdentity", AGENTS_DIR_NAME, agentName); + return join(homeDir, CLAWDENTITY_DIR_NAME, AGENTS_DIR_NAME, agentName); } function resolvePeersPath(homeDir: string): string { - return join(homeDir, ".clawdentity", PEERS_FILE_NAME); + return join(homeDir, CLAWDENTITY_DIR_NAME, PEERS_FILE_NAME); } function resolveOpenclawConfigPath(openclawDir: string): string { @@ -293,7 +328,11 @@ function resolveTransformTargetPath(openclawDir: string): string { } function resolveOpenclawAgentNamePath(homeDir: string): string { - return join(homeDir, ".clawdentity", OPENCLAW_AGENT_FILE_NAME); + return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_AGENT_FILE_NAME); +} + +function resolveRelayRuntimeConfigPath(homeDir: string): string { + return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_RELAY_RUNTIME_FILE_NAME); } async function readJsonFile(filePath: string): Promise { @@ -465,6 +504,90 @@ async function savePeersConfig( await writeSecureFile(peersPath, `${JSON.stringify(config, null, 2)}\n`); } +function parseRelayRuntimeConfig( + value: unknown, + relayRuntimeConfigPath: string, +): OpenclawRelayRuntimeConfig { + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_RELAY_RUNTIME_CONFIG", + "Relay runtime config must be an object", + { relayRuntimeConfigPath }, + ); + } + + const updatedAt = + typeof value.updatedAt === "string" && value.updatedAt.trim().length > 0 + ? 
value.updatedAt.trim() + : undefined; + + return { + openclawBaseUrl: parseOpenclawBaseUrl(value.openclawBaseUrl), + updatedAt, + }; +} + +async function loadRelayRuntimeConfig( + relayRuntimeConfigPath: string, +): Promise { + let parsed: unknown; + try { + parsed = await readJsonFile(relayRuntimeConfigPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + + throw error; + } + + return parseRelayRuntimeConfig(parsed, relayRuntimeConfigPath); +} + +async function saveRelayRuntimeConfig( + relayRuntimeConfigPath: string, + openclawBaseUrl: string, +): Promise { + const config: OpenclawRelayRuntimeConfig = { + openclawBaseUrl, + updatedAt: nowIso(), + }; + + await writeSecureFile( + relayRuntimeConfigPath, + `${JSON.stringify(config, null, 2)}\n`, + ); +} + +async function resolveOpenclawBaseUrl(input: { + optionValue?: string; + relayRuntimeConfigPath: string; +}): Promise { + if ( + typeof input.optionValue === "string" && + input.optionValue.trim().length > 0 + ) { + return parseOpenclawBaseUrl(input.optionValue); + } + + const envOpenclawBaseUrl = process.env.OPENCLAW_BASE_URL; + if ( + typeof envOpenclawBaseUrl === "string" && + envOpenclawBaseUrl.trim().length > 0 + ) { + return parseOpenclawBaseUrl(envOpenclawBaseUrl); + } + + const existingConfig = await loadRelayRuntimeConfig( + input.relayRuntimeConfigPath, + ); + if (existingConfig !== undefined) { + return existingConfig.openclawBaseUrl; + } + + return DEFAULT_OPENCLAW_BASE_URL; +} + function normalizeStringArrayWithValue( value: unknown, requiredValue: string, @@ -634,6 +757,11 @@ export async function setupOpenclawRelayFromInvite( ? 
options.transformSource.trim() : resolveDefaultTransformSource(openclawDir); const transformTargetPath = resolveTransformTargetPath(openclawDir); + const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); + const openclawBaseUrl = await resolveOpenclawBaseUrl({ + optionValue: options.openclawBaseUrl, + relayRuntimeConfigPath, + }); const invite = decodeInvitePayload(options.inviteCode); const peerAliasCandidate = options.peerAlias ?? invite.alias; @@ -674,6 +802,7 @@ export async function setupOpenclawRelayFromInvite( const agentNamePath = resolveOpenclawAgentNamePath(homeDir); await writeSecureFile(agentNamePath, `${normalizedAgentName}\n`); + await saveRelayRuntimeConfig(relayRuntimeConfigPath, openclawBaseUrl); logger.info("cli.openclaw_setup_completed", { agentName: normalizedAgentName, @@ -681,6 +810,8 @@ export async function setupOpenclawRelayFromInvite( peerDid: invite.did, openclawConfigPath, transformTargetPath, + openclawBaseUrl, + relayRuntimeConfigPath, }); return { @@ -689,6 +820,8 @@ export async function setupOpenclawRelayFromInvite( peerProxyUrl: invite.proxyUrl, openclawConfigPath, transformTargetPath, + openclawBaseUrl, + relayRuntimeConfigPath, }; } @@ -739,6 +872,10 @@ export const createOpenclawCommand = (): Command => { "--transform-source ", "Path to relay-to-peer.mjs (default /workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs)", ) + .option( + "--openclaw-base-url ", + "Base URL for local OpenClaw hook API (default http://127.0.0.1:18789)", + ) .action( withErrorHandling( "openclaw setup", @@ -751,6 +888,10 @@ export const createOpenclawCommand = (): Command => { `Updated OpenClaw config: ${result.openclawConfigPath}`, ); writeStdoutLine(`Installed transform: ${result.transformTargetPath}`); + writeStdoutLine(`OpenClaw base URL: ${result.openclawBaseUrl}`); + writeStdoutLine( + `Relay runtime config: ${result.relayRuntimeConfigPath}`, + ); }, ), ); diff --git a/apps/openclaw-skill/AGENTS.md 
b/apps/openclaw-skill/AGENTS.md index b7191d1..59d8833 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -12,6 +12,7 @@ - environment (`CLAWDENTITY_AGENT_NAME`) - `~/.clawdentity/openclaw-agent-name` - single local agent auto-detection +- Relay setup should preserve local OpenClaw upstream URL in `~/.clawdentity/openclaw-relay.json` for proxy runtime fallback. - Never commit local runtime files (`peers.json`, `secret.key`, `ait.jwt`) to the repository. ## Transform Rules @@ -43,5 +44,5 @@ - Install and execute onboarding through skill flow only (`npm install clawdentity --skill` plus agent-executed skill steps). - Human role in E2E is limited to supplying invite code and confirmations requested by the agent. - Do not edit relay hooks, peer config, or selected-agent files manually during validation. -- After skill setup, verify these artifacts exist and are agent-generated: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`. +- After skill setup, verify these artifacts exist and are agent-generated: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.clawdentity/openclaw-relay.json`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`. - For reruns after failures, clear skill-generated artifacts first; only perform full identity reset (`~/.clawdentity/agents//`) when identity reprovisioning is needed. diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index ed941da..90ffc24 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -34,6 +34,7 @@ Use this skill when any of the following are requested: - Agent AIT token: `~/.clawdentity/agents//ait.jwt` - Peer map: `~/.clawdentity/peers.json` - Local selected agent marker: `~/.clawdentity/openclaw-agent-name` +- Relay runtime config: `~/.clawdentity/openclaw-relay.json` ## Operator Split @@ -52,6 +53,7 @@ Follow this order. 
Do not skip steps. - Confirm `clawdentity` CLI is installed and runnable. - Confirm API key exists for this agent (if missing, ask the human for it). - Confirm OpenClaw state directory path if non-default. +- Confirm OpenClaw base URL if local endpoint is non-default. 2. Confirm skill artifact exists in workspace skills directory. - Ensure `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` exists. @@ -71,6 +73,7 @@ Follow this order. Do not skip steps. - Execute: `clawdentity openclaw setup --invite-code ` - Use `--openclaw-dir ` when state directory is non-default. +- Use `--openclaw-base-url ` when local OpenClaw HTTP endpoint is non-default. - Use `--peer-alias ` only when alias override is required. 6. Verify setup outputs. @@ -79,6 +82,8 @@ Follow this order. Do not skip steps. - peer DID - updated OpenClaw config path - installed transform path + - OpenClaw base URL + - relay runtime config path - Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. 7. Validate with user-style relay test. @@ -92,6 +97,7 @@ Follow this order. Do not skip steps. Ask the human only when required inputs are missing: - Missing Clawdentity API key. - Unclear OpenClaw state directory. +- Non-default OpenClaw base URL. - Missing invite code. ## Failure Handling diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index 7ef14b4..a2df2e1 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -17,6 +17,7 @@ Define the exact runtime contract used by `relay-to-peer.mjs`. - `~/.clawdentity/agents//ait.jwt` - `~/.clawdentity/peers.json` - `~/.clawdentity/openclaw-agent-name` +- `~/.clawdentity/openclaw-relay.json` ## Invite Code Contract @@ -83,6 +84,22 @@ Relay resolves local agent name in this order: 3. `~/.clawdentity/openclaw-agent-name` 4. 
single local agent fallback from `~/.clawdentity/agents/` +## Local OpenClaw Base URL Contract + +`~/.clawdentity/openclaw-relay.json` stores the OpenClaw upstream base URL used by local proxy runtime fallback: + +```json +{ + "openclawBaseUrl": "http://127.0.0.1:18789", + "updatedAt": "2026-02-15T20:00:00.000Z" +} +``` + +Rules: +- `openclawBaseUrl` must be absolute `http` or `https`. +- `updatedAt` is ISO-8601 UTC timestamp. +- Proxy runtime precedence is: `OPENCLAW_BASE_URL` env first, then `openclaw-relay.json`, then built-in default. + ## Outbound Auth Contract Headers sent to peer proxy: diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example new file mode 100644 index 0000000..784368b --- /dev/null +++ b/apps/proxy/.env.example @@ -0,0 +1,26 @@ +# Proxy local/development template +# For local Wrangler development, copy values into .dev.vars. +# For cloud deploys, set OPENCLAW_HOOK_TOKEN as a Wrangler secret: +# wrangler secret put OPENCLAW_HOOK_TOKEN --env +# For cloud deploy scripts, export a reachable upstream first: +# export OPENCLAW_BASE_URL=https://openclaw-.example.com + +# Required +OPENCLAW_HOOK_TOKEN=replace-with-openclaw-hook-token + +# Runtime vars +ENVIRONMENT=local +REGISTRY_URL=https://dev.api.clawdentity.com +# Optional when ~/.clawdentity/openclaw-relay.json exists from `clawdentity openclaw setup` +OPENCLAW_BASE_URL=http://127.0.0.1:18789 +INJECT_IDENTITY_INTO_MESSAGE=false + +# Optional policy/runtime overrides +# ALLOW_LIST={"owners":[],"agents":[]} +# ALLOWLIST_OWNERS=did:claw:human:example +# ALLOWLIST_AGENTS=did:claw:agent:example +# CRL_REFRESH_INTERVAL_MS=300000 +# CRL_MAX_AGE_MS=900000 +# CRL_STALE_BEHAVIOR=fail-open +# AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60 +# AGENT_RATE_LIMIT_WINDOW_MS=60000 diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 5fa774f..846b663 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -6,15 +6,22 @@ ## Runtime Configuration - Keep runtime config centralized in 
`src/config.ts`. +- Keep Cloudflare Worker deployment config in `wrangler.jsonc` with explicit `local`, `development`, and `production` environments. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). +- Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. +- For remote Worker deployments (`development`/`production`), require `OPENCLAW_BASE_URL` to be an externally reachable non-loopback URL; never rely on local loopback defaults. +- Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-off (`false`); only enable when operators need webhook `message` augmentation with verified identity context. - Require hook token input via env (`OPENCLAW_HOOK_TOKEN` or OpenClaw-compatible alias `OPENCLAW_HOOKS_TOKEN`) and never log the token value. +- For Worker deploys, set hook tokens via Wrangler secrets for remote environments (`wrangler secret put ... --env `); use CLI `--var` overrides only for local dev runs. +- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (required token, registry URL, base URL, and optional policy/rate-limit vars). - Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) - existing environment variables always win over `.env` values. 
+- If `OPENCLAW_BASE_URL` is still missing after env loading, fallback to `~/.clawdentity/openclaw-relay.json` (`openclawBaseUrl`) before applying the built-in default. - Treat blank env values as unset for fallback resolution: - empty/whitespace values (and null-like values) in inherited env must not block `.env` or config-file fallbacks - dotenv merge semantics must match parser semantics (non-empty value wins). @@ -58,6 +65,8 @@ ## Server Runtime - Keep `src/server.ts` as the HTTP app/runtime entry. +- Keep `src/worker.ts` as the Cloudflare Worker fetch entry and `src/node-server.ts` as the Node compatibility entry. - Keep middleware order stable: request context -> request logging -> auth verification -> agent DID rate limit -> error handler. - Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200. - Log startup and request completion with structured JSON logs; never log secrets or tokens. +- If identity injection is enabled, mutate only `payload.message` when it is a string; preserve all other payload fields unchanged. 
diff --git a/apps/proxy/package.json b/apps/proxy/package.json index 57216ed..d1ea7c9 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -13,10 +13,23 @@ "./server": { "import": "./dist/server.js", "types": "./dist/server.d.ts" + }, + "./node-server": { + "import": "./dist/node-server.js", + "types": "./dist/node-server.d.ts" + }, + "./worker": { + "import": "./dist/worker.js", + "types": "./dist/worker.d.ts" } }, "scripts": { "build": "tsup", + "deploy:dev": "wrangler deploy --env development --var OPENCLAW_BASE_URL:${OPENCLAW_BASE_URL:?set OPENCLAW_BASE_URL}", + "deploy:production": "wrangler deploy --env production --var OPENCLAW_BASE_URL:${OPENCLAW_BASE_URL:?set OPENCLAW_BASE_URL}", + "dev": "wrangler dev --env local --var OPENCLAW_HOOK_TOKEN:dev-proxy-hook-token", + "dev:development": "wrangler dev --env development --var OPENCLAW_HOOK_TOKEN:dev-proxy-hook-token", + "dev:fresh": "wrangler dev --env local --name clawdentity-proxy-local-fresh --port 8789 --persist-to .wrangler/state-fresh --var OPENCLAW_HOOK_TOKEN:fresh-proxy-hook-token", "format": "biome format .", "lint": "biome lint .", "start": "node ./dist/bin.js", @@ -31,5 +44,9 @@ "hono": "^4.11.9", "json5": "^2.2.3", "zod": "^4.1.12" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20260210.0", + "@types/node": "^22.17.2" } } diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index e98d256..79bb3e2 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -4,10 +4,14 @@ - Keep `index.ts` as runtime bootstrap surface and version export. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. - Keep agent DID rate-limit env parsing in `config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE`, `AGENT_RATE_LIMIT_WINDOW_MS`) and validate as positive integers. -- Keep HTTP app/startup concerns in `server.ts`; use `bin.ts` as process entrypoint for Node runtime startup. 
+- Keep HTTP app composition in `server.ts`. +- Keep Cloudflare Worker fetch startup in `worker.ts`. +- Keep Node runtime startup in `node-server.ts`; use `bin.ts` as Node process entrypoint. - Keep inbound auth verification in `auth-middleware.ts` with focused helpers for token parsing, registry material loading, CRL checks, and replay protection. - Keep per-agent DID throttling in `agent-rate-limit-middleware.ts`; do not blend rate-limit state or counters into `auth-middleware.ts`. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. +- Keep OpenClaw base URL fallback logic in `config.ts`: `OPENCLAW_BASE_URL` env -> `~/.clawdentity/openclaw-relay.json` -> default. +- Keep Worker runtime guardrails in `worker.ts`: block loopback/default OpenClaw upstream URLs for `development`/`production` so cloud deploys fail fast with config errors. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. @@ -22,6 +26,7 @@ - Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. - Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. +- Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. 
- Keep rate-limit failure semantics stable: verified requests over budget map to `429` with code `PROXY_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.rate_limit.exceeded`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. @@ -29,3 +34,6 @@ - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. - Keep `/hooks/agent` input contract strict: require `Content-Type: application/json` and reject malformed JSON with explicit client errors. - Keep `/hooks/agent` upstream failure mapping explicit: timeout errors -> `504`, network errors -> `502`, and never log `openclawHookToken` or request payload. +- Keep identity message injection optional and default-off (`INJECT_IDENTITY_INTO_MESSAGE=false`) so forwarding behavior is unchanged unless explicitly enabled. +- Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. +- When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index 1744464..9d80148 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -5,7 +5,24 @@ vi.mock("./auth-middleware.js", async () => { return { createProxyAuthMiddleware: () => - createMiddleware(async (_c, next) => { + createMiddleware(async (c, next) => { + const missingAuth = c.req.header("x-test-missing-auth") === "1"; + if (!missingAuth) { + const dirtyAuth = c.req.header("x-test-dirty-auth") === "1"; + c.set("auth", { + agentDid: dirtyAuth + ? 
`\u0000 did:claw:agent:${"a".repeat(200)} \n` + : "did:claw:agent:alpha", + ownerDid: dirtyAuth + ? " \t did:claw:owner:alpha\u0007" + : "did:claw:owner:alpha", + issuer: dirtyAuth + ? ` https://registry.example.com/${"b".repeat(260)} ` + : "https://registry.example.com", + aitJti: dirtyAuth ? `\u0001${"j".repeat(100)}` : "ait-jti-alpha", + cnfPublicKey: "test-public-key", + }); + } await next(); }), }; @@ -14,15 +31,31 @@ vi.mock("./auth-middleware.js", async () => { import { parseProxyConfig } from "./config.js"; import { createProxyApp } from "./server.js"; +function hasDisallowedControlCharacter(value: string): boolean { + for (const char of value) { + const code = char.charCodeAt(0); + if ((code >= 0 && code <= 8) || code === 11 || code === 12) { + return true; + } + if ((code >= 14 && code <= 31) || code === 127) { + return true; + } + } + + return false; +} + function createHookRouteApp(input: { fetchImpl: typeof fetch; timeoutMs?: number; openclawBaseUrl?: string; + injectIdentityIntoMessage?: boolean; }) { return createProxyApp({ config: parseProxyConfig({ OPENCLAW_BASE_URL: input.openclawBaseUrl ?? 
"http://openclaw.local", OPENCLAW_HOOK_TOKEN: "openclaw-secret", + INJECT_IDENTITY_INTO_MESSAGE: input.injectIdentityIntoMessage, }), hooks: { fetchImpl: input.fetchImpl, @@ -132,6 +165,177 @@ describe("POST /hooks/agent", () => { expect(forwardedUrl).toBe("http://openclaw.local/api/hooks/agent"); }); + it("prepends sanitized identity block when message injection is enabled", async () => { + const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { + return new Response( + JSON.stringify({ + echoedBody: init?.body, + }), + { + status: 202, + headers: { + "content-type": "application/json", + }, + }, + ); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + injectIdentityIntoMessage: true, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + message: "Summarize this payload", + }), + }); + + expect(response.status).toBe(202); + expect(fetchMock).toHaveBeenCalledTimes(1); + + const [, calledInit] = fetchMock.mock.calls[0] as [ + unknown, + RequestInit | undefined, + ]; + const forwardedPayload = JSON.parse(String(calledInit?.body)) as { + message: string; + }; + expect(forwardedPayload.message).toBe( + [ + "[Clawdentity Identity]", + "agentDid: did:claw:agent:alpha", + "ownerDid: did:claw:owner:alpha", + "issuer: https://registry.example.com", + "aitJti: ait-jti-alpha", + "", + "Summarize this payload", + ].join("\n"), + ); + }); + + it("keeps payload unchanged when message injection is enabled but auth is missing", async () => { + const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { + return new Response(String(init?.body), { status: 202 }); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + injectIdentityIntoMessage: true, + }); + const rawPayload = { + message: "No auth context here", + event: "agent.started", + }; + + const response = 
await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-missing-auth": "1", + }, + body: JSON.stringify(rawPayload), + }); + + expect(response.status).toBe(202); + const [, calledInit] = fetchMock.mock.calls[0] as [ + unknown, + RequestInit | undefined, + ]; + expect(String(calledInit?.body)).toBe(JSON.stringify(rawPayload)); + }); + + it("keeps payload unchanged when message is missing or non-string", async () => { + const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { + return new Response(String(init?.body), { status: 202 }); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + injectIdentityIntoMessage: true, + }); + + await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + event: "agent.started", + }), + }); + + await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + message: { nested: true }, + }), + }); + + const [, firstInit] = fetchMock.mock.calls[0] as [unknown, RequestInit]; + const [, secondInit] = fetchMock.mock.calls[1] as [unknown, RequestInit]; + expect(String(firstInit.body)).toBe( + JSON.stringify({ event: "agent.started" }), + ); + expect(String(secondInit.body)).toBe( + JSON.stringify({ message: { nested: true } }), + ); + }); + + it("sanitizes identity fields and enforces length limits", async () => { + const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { + return new Response( + JSON.stringify({ + echoedBody: init?.body, + }), + { + status: 202, + headers: { + "content-type": "application/json", + }, + }, + ); + }); + const app = createHookRouteApp({ + fetchImpl: fetchMock as unknown as typeof fetch, + injectIdentityIntoMessage: true, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": 
"application/json", + "x-test-dirty-auth": "1", + }, + body: JSON.stringify({ + message: "Hello world", + }), + }); + + expect(response.status).toBe(202); + const [, calledInit] = fetchMock.mock.calls[0] as [ + unknown, + RequestInit | undefined, + ]; + const forwardedPayload = JSON.parse(String(calledInit?.body)) as { + message: string; + }; + expect(forwardedPayload.message).toContain("[Clawdentity Identity]"); + + const identityBlock = forwardedPayload.message.split("\n\n")[0]; + expect(hasDisallowedControlCharacter(identityBlock)).toBe(false); + + const identityLines = identityBlock.split("\n"); + expect(identityLines[1].length).toBeLessThanOrEqual(171); + expect(identityLines[2].length).toBeLessThanOrEqual(171); + expect(identityLines[3].length).toBeLessThanOrEqual(208); + expect(identityLines[4].length).toBeLessThanOrEqual(72); + }); + it("rejects non-json content types", async () => { const fetchMock = vi.fn(async () => new Response("{}", { status: 200 })); const app = createHookRouteApp({ diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index a1e18b7..92a18f3 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -4,10 +4,15 @@ import type { ProxyRequestVariables } from "./auth-middleware.js"; const AGENT_HOOK_PATH = "hooks/agent"; export const DEFAULT_AGENT_HOOK_TIMEOUT_MS = 10_000; +const MAX_AGENT_DID_LENGTH = 160; +const MAX_OWNER_DID_LENGTH = 160; +const MAX_ISSUER_LENGTH = 200; +const MAX_AIT_JTI_LENGTH = 64; export type AgentHookRuntimeOptions = { fetchImpl?: typeof fetch; timeoutMs?: number; + injectIdentityIntoMessage?: boolean; }; type CreateAgentHookHandlerOptions = AgentHookRuntimeOptions & { @@ -48,11 +53,70 @@ function isAbortError(error: unknown): boolean { return toErrorName(error) === "AbortError"; } +function stripControlChars(value: string): string { + let result = ""; + for (const char of value) { + const code = char.charCodeAt(0); + if ((code >= 0 && code <= 31) || 
code === 127) { + continue; + } + result += char; + } + + return result; +} + +function sanitizeIdentityField(value: string, maxLength: number): string { + const sanitized = stripControlChars(value).replaceAll(/\s+/g, " ").trim(); + + if (sanitized.length === 0) { + return "unknown"; + } + + return sanitized.slice(0, maxLength); +} + +function buildIdentityBlock( + auth: NonNullable, +): string { + return [ + "[Clawdentity Identity]", + `agentDid: ${sanitizeIdentityField(auth.agentDid, MAX_AGENT_DID_LENGTH)}`, + `ownerDid: ${sanitizeIdentityField(auth.ownerDid, MAX_OWNER_DID_LENGTH)}`, + `issuer: ${sanitizeIdentityField(auth.issuer, MAX_ISSUER_LENGTH)}`, + `aitJti: ${sanitizeIdentityField(auth.aitJti, MAX_AIT_JTI_LENGTH)}`, + ].join("\n"); +} + +function injectIdentityBlockIntoPayload( + payload: unknown, + auth: ProxyRequestVariables["auth"], +): unknown { + if (auth === undefined || typeof payload !== "object" || payload === null) { + return payload; + } + + if (!("message" in payload)) { + return payload; + } + + const message = (payload as { message?: unknown }).message; + if (typeof message !== "string") { + return payload; + } + + return { + ...(payload as Record), + message: `${buildIdentityBlock(auth)}\n\n${message}`, + }; +} + export function createAgentHookHandler( options: CreateAgentHookHandlerOptions, ): (c: ProxyContext) => Promise { const fetchImpl = options.fetchImpl ?? fetch; const timeoutMs = options.timeoutMs ?? DEFAULT_AGENT_HOOK_TIMEOUT_MS; + const injectIdentityIntoMessage = options.injectIdentityIntoMessage ?? 
false; const hookUrl = toOpenclawHookUrl(options.openclawBaseUrl); return async (c) => { @@ -77,6 +141,10 @@ export function createAgentHookHandler( }); } + if (injectIdentityIntoMessage) { + payload = injectIdentityBlockIntoPayload(payload, c.get("auth")); + } + const requestId = c.get("requestId"); const startedAt = Date.now(); const controller = new AbortController(); diff --git a/apps/proxy/src/bin.ts b/apps/proxy/src/bin.ts index 6b433af..1b8c95b 100644 --- a/apps/proxy/src/bin.ts +++ b/apps/proxy/src/bin.ts @@ -1,3 +1,3 @@ -import { startProxyServer } from "./server.js"; +import { startProxyServer } from "./node-server.js"; startProxyServer(); diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index a488be8..8cb9aae 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -7,6 +7,7 @@ import { DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, DEFAULT_CRL_MAX_AGE_MS, DEFAULT_CRL_REFRESH_INTERVAL_MS, + DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, DEFAULT_OPENCLAW_BASE_URL, DEFAULT_PROXY_ENVIRONMENT, DEFAULT_PROXY_LISTEN_PORT, @@ -40,6 +41,7 @@ describe("proxy config", () => { agentRateLimitRequestsPerMinute: DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, agentRateLimitWindowMs: DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, + injectIdentityIntoMessage: DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, }); }); @@ -52,6 +54,7 @@ describe("proxy config", () => { CRL_STALE_BEHAVIOR: "fail-closed", AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: "75", AGENT_RATE_LIMIT_WINDOW_MS: "90000", + INJECT_IDENTITY_INTO_MESSAGE: "true", }); expect(config.listenPort).toBe(4100); @@ -61,6 +64,7 @@ describe("proxy config", () => { expect(config.crlStaleBehavior).toBe("fail-closed"); expect(config.agentRateLimitRequestsPerMinute).toBe(75); expect(config.agentRateLimitWindowMs).toBe(90000); + expect(config.injectIdentityIntoMessage).toBe(true); }); it("parses allow list object and override env lists", () => { @@ -137,6 +141,15 @@ describe("proxy config", () => { }), 
).toThrow(ProxyConfigError); }); + + it("throws on invalid injectIdentityIntoMessage value", () => { + expect(() => + parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + INJECT_IDENTITY_INTO_MESSAGE: "maybe", + }), + ).toThrow(ProxyConfigError); + }); }); describe("proxy config loading", () => { @@ -144,14 +157,16 @@ describe("proxy config loading", () => { const root = mkdtempSync(join(tmpdir(), "clawdentity-proxy-config-")); const cwd = join(root, "workspace"); const stateDir = join(root, ".openclaw"); + const clawdentityDir = join(root, ".clawdentity"); mkdirSync(cwd, { recursive: true }); mkdirSync(stateDir, { recursive: true }); + mkdirSync(clawdentityDir, { recursive: true }); const cleanup = () => { rmSync(root, { recursive: true, force: true }); }; - return { root, cwd, stateDir, cleanup }; + return { root, cwd, stateDir, clawdentityDir, cleanup }; } it("loads cwd .env first, then state .env without overriding existing values", () => { @@ -191,6 +206,31 @@ describe("proxy config loading", () => { } }); + it("loads INJECT_IDENTITY_INTO_MESSAGE from .env", () => { + const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.cwd, ".env"), + [ + "OPENCLAW_HOOK_TOKEN=from-cwd-dotenv", + "INJECT_IDENTITY_INTO_MESSAGE=true", + ].join("\n"), + ); + + const config = loadProxyConfig( + {}, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.injectIdentityIntoMessage).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + it("treats empty env variables as missing and accepts dotenv fallback", () => { const sandbox = createSandbox(); try { @@ -244,6 +284,69 @@ describe("proxy config loading", () => { } }); + it("falls back to ~/.clawdentity/openclaw-relay.json when OPENCLAW_BASE_URL is missing", () => { + const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.clawdentityDir, "openclaw-relay.json"), + JSON.stringify( + { + openclawBaseUrl: "http://127.0.0.1:19111", + updatedAt: "2026-02-15T20:00:00.000Z", + 
}, + null, + 2, + ), + ); + + const config = loadProxyConfig( + { + OPENCLAW_HOOK_TOKEN: "token", + }, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawBaseUrl).toBe("http://127.0.0.1:19111"); + } finally { + sandbox.cleanup(); + } + }); + + it("prefers env OPENCLAW_BASE_URL over ~/.clawdentity/openclaw-relay.json", () => { + const sandbox = createSandbox(); + try { + writeFileSync( + join(sandbox.clawdentityDir, "openclaw-relay.json"), + JSON.stringify( + { + openclawBaseUrl: "http://127.0.0.1:19111", + updatedAt: "2026-02-15T20:00:00.000Z", + }, + null, + 2, + ), + ); + + const config = loadProxyConfig( + { + OPENCLAW_HOOK_TOKEN: "token", + OPENCLAW_BASE_URL: "http://127.0.0.1:19999", + }, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawBaseUrl).toBe("http://127.0.0.1:19999"); + } finally { + sandbox.cleanup(); + } + }); + it("uses legacy state directory when canonical .openclaw does not exist", () => { const sandbox = createSandbox(); try { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 30b9f85..817a67b 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -28,6 +28,7 @@ export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; export const DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE = 60; export const DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS = 60 * 1000; +export const DEFAULT_INJECT_IDENTITY_INTO_MESSAGE = false; export class ProxyConfigError extends Error { readonly code = "CONFIG_VALIDATION_FAILED"; @@ -43,8 +44,35 @@ export class ProxyConfigError extends Error { } const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; +const CLAWDENTITY_CONFIG_DIR = ".clawdentity"; +const OPENCLAW_RELAY_CONFIG_FILENAME = "openclaw-relay.json"; const LEGACY_STATE_DIR_NAMES = [".clawdbot", ".moldbot", ".moltbot"] as const; +const envBooleanSchema = z.preprocess((value) => { + if (typeof value === 
"string") { + const normalized = value.trim().toLowerCase(); + if ( + normalized === "true" || + normalized === "1" || + normalized === "yes" || + normalized === "on" + ) { + return true; + } + + if ( + normalized === "false" || + normalized === "0" || + normalized === "no" || + normalized === "off" + ) { + return false; + } + } + + return value; +}, z.boolean()); + const proxyRuntimeEnvSchema = z.object({ LISTEN_PORT: z.coerce .number() @@ -84,6 +112,9 @@ const proxyRuntimeEnvSchema = z.object({ .int() .positive() .default(DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS), + INJECT_IDENTITY_INTO_MESSAGE: envBooleanSchema.default( + DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, + ), }); const proxyAllowListSchema = z @@ -105,6 +136,7 @@ export const proxyConfigSchema = z.object({ crlStaleBehavior: z.enum(["fail-open", "fail-closed"]), agentRateLimitRequestsPerMinute: z.number().int().positive(), agentRateLimitWindowMs: z.number().int().positive(), + injectIdentityIntoMessage: z.boolean(), }); export type ProxyConfig = z.infer; @@ -128,6 +160,7 @@ type RuntimeEnvInput = { CRL_STALE_BEHAVIOR?: unknown; AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: unknown; AGENT_RATE_LIMIT_WINDOW_MS?: unknown; + INJECT_IDENTITY_INTO_MESSAGE?: unknown; OPENCLAW_STATE_DIR?: unknown; CLAWDBOT_STATE_DIR?: unknown; OPENCLAW_CONFIG_PATH?: unknown; @@ -282,6 +315,14 @@ function resolveOpenClawConfigPath( return join(stateDir, OPENCLAW_CONFIG_FILENAME); } +function resolveOpenclawRelayConfigPath( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string { + const home = resolveHomeDir(env, options.homeDir); + return join(home, CLAWDENTITY_CONFIG_DIR, OPENCLAW_RELAY_CONFIG_FILENAME); +} + function mergeMissingEnvValues( target: MutableEnv, values: Record, @@ -392,6 +433,87 @@ function resolveHookTokenFromOpenClawConfig( return trimmedToken.length > 0 ? 
trimmedToken : undefined; } +function resolveBaseUrlFromRelayConfig( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string | undefined { + const configPath = resolveOpenclawRelayConfigPath(env, options); + if (!existsSync(configPath)) { + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(readFileSync(configPath, "utf8")); + } catch (error) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: [ + `Unable to parse relay config at ${configPath}`, + ], + }, + formErrors: [ + error instanceof Error ? error.message : "Unknown relay parse error", + ], + }); + } + + if (typeof parsed !== "object" || parsed === null) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: ["Relay config root must be a JSON object"], + }, + formErrors: [], + }); + } + + const baseUrlValue = (parsed as Record).openclawBaseUrl; + if (typeof baseUrlValue !== "string" || baseUrlValue.trim().length === 0) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: [ + "openclawBaseUrl must be a non-empty string", + ], + }, + formErrors: [], + }); + } + + const trimmed = baseUrlValue.trim(); + let parsedUrl: URL; + try { + parsedUrl = new URL(trimmed); + } catch { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: [ + "openclawBaseUrl must be a valid absolute URL", + ], + }, + formErrors: [], + }); + } + + if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: ["openclawBaseUrl must use http or https"], + }, + formErrors: [], + }); + } + + if ( + parsedUrl.pathname === "/" && + parsedUrl.search.length === 0 && + parsedUrl.hash.length === 0 + ) { + return parsedUrl.origin; + } + + return parsedUrl.toString(); +} + function normalizeRuntimeEnv(input: unknown): Record { const env: RuntimeEnvInput = isRuntimeEnvInput(input) ? 
input : {}; @@ -419,6 +541,9 @@ function normalizeRuntimeEnv(input: unknown): Record { AGENT_RATE_LIMIT_WINDOW_MS: firstNonEmpty(env, [ "AGENT_RATE_LIMIT_WINDOW_MS", ]), + INJECT_IDENTITY_INTO_MESSAGE: firstNonEmpty(env, [ + "INJECT_IDENTITY_INTO_MESSAGE", + ]), }; } @@ -520,6 +645,25 @@ function loadHookTokenFromFallback( } } +function loadOpenclawBaseUrlFromFallback( + env: MutableEnv, + options: ProxyConfigLoadOptions, +): void { + if ( + firstNonEmpty(env as RuntimeEnvInput, ["OPENCLAW_BASE_URL"]) !== undefined + ) { + return; + } + + const openclawBaseUrl = resolveBaseUrlFromRelayConfig( + env as RuntimeEnvInput, + options, + ); + if (openclawBaseUrl !== undefined) { + env.OPENCLAW_BASE_URL = openclawBaseUrl; + } +} + export function parseProxyConfig(env: unknown): ProxyConfig { const inputEnv: RuntimeEnvInput = isRuntimeEnvInput(env) ? env : {}; assertNoDeprecatedAllowAllVerified(inputEnv); @@ -547,6 +691,8 @@ export function parseProxyConfig(env: unknown): ProxyConfig { agentRateLimitRequestsPerMinute: parsedRuntimeEnv.data.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, agentRateLimitWindowMs: parsedRuntimeEnv.data.AGENT_RATE_LIMIT_WINDOW_MS, + injectIdentityIntoMessage: + parsedRuntimeEnv.data.INJECT_IDENTITY_INTO_MESSAGE, }; const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); @@ -565,6 +711,7 @@ export function loadProxyConfig( options: ProxyConfigLoadOptions = {}, ): ProxyConfig { const mergedEnv = loadEnvWithDotEnvFallback(env, options); + loadOpenclawBaseUrlFromFallback(mergedEnv, options); loadHookTokenFromFallback(mergedEnv, options); return parseProxyConfig(mergedEnv); } diff --git a/apps/proxy/src/node-server.ts b/apps/proxy/src/node-server.ts new file mode 100644 index 0000000..6a8145c --- /dev/null +++ b/apps/proxy/src/node-server.ts @@ -0,0 +1,53 @@ +import { createLogger, type Logger } from "@clawdentity/sdk"; +import { type ServerType, serve } from "@hono/node-server"; +import type { ProxyConfig } from "./config.js"; +import { 
loadProxyConfig } from "./config.js"; +import { PROXY_VERSION } from "./index.js"; +import { createProxyApp, type ProxyApp } from "./server.js"; + +type StartProxyServerOptions = { + env?: unknown; + config?: ProxyConfig; + logger?: Logger; + port?: number; +}; + +export type StartedProxyServer = { + app: ProxyApp; + config: ProxyConfig; + port: number; + server: ServerType; +}; + +function resolveLogger(logger?: Logger): Logger { + return logger ?? createLogger({ service: "proxy" }); +} + +export function startProxyServer( + options: StartProxyServerOptions = {}, +): StartedProxyServer { + const config = options.config ?? loadProxyConfig(options.env); + const logger = resolveLogger(options.logger); + const app = createProxyApp({ + config, + logger, + }); + const port = options.port ?? config.listenPort; + const server = serve({ + fetch: app.fetch, + port, + }); + + logger.info("proxy.server_started", { + port, + version: PROXY_VERSION, + environment: config.environment, + }); + + return { + app, + config, + port, + server, + }; +} diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts index 4d47082..3dfb0a1 100644 --- a/apps/proxy/src/server.test.ts +++ b/apps/proxy/src/server.test.ts @@ -5,7 +5,8 @@ import { parseProxyConfig, } from "./config.js"; import { PROXY_VERSION } from "./index.js"; -import { createProxyApp, startProxyServer } from "./server.js"; +import { startProxyServer } from "./node-server.js"; +import { createProxyApp } from "./server.js"; describe("proxy server", () => { it("returns health response with status, version, and environment", async () => { diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index d3f320c..3d413ef 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -7,7 +7,6 @@ import { type Logger, type NonceCache, } from "@clawdentity/sdk"; -import { serve } from "@hono/node-server"; import { Hono } from "hono"; import { type AgentHookRuntimeOptions, @@ -19,7 +18,6 @@ import { type 
ProxyRequestVariables, } from "./auth-middleware.js"; import type { ProxyConfig } from "./config.js"; -import { loadProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; type ProxyAuthRuntimeOptions = { @@ -42,24 +40,10 @@ type CreateProxyAppOptions = { hooks?: AgentHookRuntimeOptions; }; -type StartProxyServerOptions = { - env?: unknown; - config?: ProxyConfig; - logger?: Logger; - port?: number; -}; - export type ProxyApp = Hono<{ Variables: ProxyRequestVariables; }>; -export type StartedProxyServer = { - app: ProxyApp; - config: ProxyConfig; - port: number; - server: ReturnType; -}; - function resolveLogger(logger?: Logger): Logger { return logger ?? createLogger({ service: "proxy" }); } @@ -103,6 +87,7 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { logger, openclawBaseUrl: options.config.openclawBaseUrl, openclawHookToken: options.config.openclawHookToken, + injectIdentityIntoMessage: options.config.injectIdentityIntoMessage, ...options.hooks, }), ); @@ -110,32 +95,3 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { return app; } - -export function startProxyServer( - options: StartProxyServerOptions = {}, -): StartedProxyServer { - const config = options.config ?? loadProxyConfig(options.env); - const logger = resolveLogger(options.logger); - const app = createProxyApp({ - config, - logger, - }); - const port = options.port ?? 
config.listenPort; - const server = serve({ - fetch: app.fetch, - port, - }); - - logger.info("proxy.server_started", { - port, - version: PROXY_VERSION, - environment: config.environment, - }); - - return { - app, - config, - port, - server, - }; -} diff --git a/apps/proxy/src/worker.test.ts b/apps/proxy/src/worker.test.ts new file mode 100644 index 0000000..52ca289 --- /dev/null +++ b/apps/proxy/src/worker.test.ts @@ -0,0 +1,97 @@ +import { describe, expect, it, vi } from "vitest"; +import { PROXY_VERSION } from "./index.js"; +import worker, { type ProxyWorkerBindings } from "./worker.js"; + +function createExecutionContext(): ExecutionContext { + return { + waitUntil: vi.fn(), + passThroughOnException: vi.fn(), + props: {}, + } as unknown as ExecutionContext; +} + +describe("proxy worker", () => { + it("serves /health with parsed runtime config from bindings", async () => { + const response = await worker.fetch( + new Request("https://proxy.example.test/health"), + { + ENVIRONMENT: "local", + OPENCLAW_HOOK_TOKEN: "proxy-hook-token", + } satisfies ProxyWorkerBindings, + createExecutionContext(), + ); + + expect(response.status).toBe(200); + const payload = (await response.json()) as { + status: string; + version: string; + environment: string; + }; + expect(payload).toEqual({ + status: "ok", + version: PROXY_VERSION, + environment: "local", + }); + }); + + it("returns config validation error when required bindings are missing", async () => { + const response = await worker.fetch( + new Request("https://proxy.example.test/health"), + {} satisfies ProxyWorkerBindings, + createExecutionContext(), + ); + + expect(response.status).toBe(500); + const payload = (await response.json()) as { + error: { + code: string; + }; + }; + expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + }); + + it("returns config validation error when deployed env uses loopback upstream", async () => { + const response = await worker.fetch( + new 
Request("https://proxy.example.test/health"), + { + ENVIRONMENT: "development", + OPENCLAW_HOOK_TOKEN: "proxy-hook-token", + } satisfies ProxyWorkerBindings, + createExecutionContext(), + ); + + expect(response.status).toBe(500); + const payload = (await response.json()) as { + error: { + code: string; + details: { + fieldErrors?: Record; + }; + }; + }; + expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(payload.error.details.fieldErrors?.OPENCLAW_BASE_URL?.[0]).toContain( + "externally reachable URL", + ); + }); + + it("accepts non-loopback upstream in deployed env", async () => { + const response = await worker.fetch( + new Request("https://proxy.example.test/health"), + { + ENVIRONMENT: "development", + OPENCLAW_HOOK_TOKEN: "proxy-hook-token", + OPENCLAW_BASE_URL: "https://openclaw-dev.internal.example", + } satisfies ProxyWorkerBindings, + createExecutionContext(), + ); + + expect(response.status).toBe(200); + const payload = (await response.json()) as { + status: string; + environment: string; + }; + expect(payload.status).toBe("ok"); + expect(payload.environment).toBe("development"); + }); +}); diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts new file mode 100644 index 0000000..913b1e9 --- /dev/null +++ b/apps/proxy/src/worker.ts @@ -0,0 +1,184 @@ +import { createLogger } from "@clawdentity/sdk"; +import { + DEFAULT_OPENCLAW_BASE_URL, + type ProxyConfig, + ProxyConfigError, + parseProxyConfig, +} from "./config.js"; +import { createProxyApp, type ProxyApp } from "./server.js"; + +export type ProxyWorkerBindings = { + LISTEN_PORT?: string; + PORT?: string; + OPENCLAW_BASE_URL?: string; + OPENCLAW_HOOK_TOKEN?: string; + OPENCLAW_HOOKS_TOKEN?: string; + REGISTRY_URL?: string; + CLAWDENTITY_REGISTRY_URL?: string; + ENVIRONMENT?: string; + ALLOW_LIST?: string; + ALLOWLIST_OWNERS?: string; + ALLOWLIST_AGENTS?: string; + ALLOW_ALL_VERIFIED?: string; + CRL_REFRESH_INTERVAL_MS?: string; + CRL_MAX_AGE_MS?: string; + 
CRL_STALE_BEHAVIOR?: string; + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: string; + AGENT_RATE_LIMIT_WINDOW_MS?: string; + INJECT_IDENTITY_INTO_MESSAGE?: string; + [key: string]: unknown; +}; + +type CachedProxyRuntime = { + key: string; + app: ProxyApp; + config: ProxyConfig; +}; + +const logger = createLogger({ service: "proxy" }); +let cachedRuntime: CachedProxyRuntime | undefined; + +function toCacheKey(env: ProxyWorkerBindings): string { + const keyParts = [ + env.OPENCLAW_BASE_URL, + env.OPENCLAW_HOOK_TOKEN, + env.OPENCLAW_HOOKS_TOKEN, + env.REGISTRY_URL, + env.CLAWDENTITY_REGISTRY_URL, + env.ENVIRONMENT, + env.ALLOW_LIST, + env.ALLOWLIST_OWNERS, + env.ALLOWLIST_AGENTS, + env.CRL_REFRESH_INTERVAL_MS, + env.CRL_MAX_AGE_MS, + env.CRL_STALE_BEHAVIOR, + env.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + env.AGENT_RATE_LIMIT_WINDOW_MS, + env.INJECT_IDENTITY_INTO_MESSAGE, + ]; + + return keyParts.map((value) => String(value ?? "")).join("|"); +} + +function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { + const key = toCacheKey(env); + if (cachedRuntime && cachedRuntime.key === key) { + return cachedRuntime; + } + + const config = parseProxyConfig(env); + assertWorkerOpenclawBaseUrl(config); + const app = createProxyApp({ config, logger }); + + cachedRuntime = { + key, + app, + config, + }; + return cachedRuntime; +} + +function isLoopbackHostname(hostname: string): boolean { + const normalized = hostname.toLowerCase(); + if ( + normalized === "localhost" || + normalized === "::1" || + normalized === "0.0.0.0" + ) { + return true; + } + + const ipv4Match = normalized.match(/^(\d{1,3})(?:\.(\d{1,3})){3}$/); + if (!ipv4Match) { + return false; + } + + const segments = normalized.split(".").map(Number); + if (segments.some((segment) => Number.isNaN(segment) || segment > 255)) { + return false; + } + + return segments[0] === 127; +} + +function assertWorkerOpenclawBaseUrl(config: ProxyConfig): void { + if (config.environment === "local" || config.environment === 
"test") { + return; + } + + let parsed: URL; + try { + parsed = new URL(config.openclawBaseUrl); + } catch { + throw new ProxyConfigError("Proxy configuration is invalid", { + fieldErrors: { + OPENCLAW_BASE_URL: ["OPENCLAW_BASE_URL must be a valid absolute URL"], + }, + formErrors: [], + }); + } + + if ( + config.openclawBaseUrl === DEFAULT_OPENCLAW_BASE_URL || + isLoopbackHostname(parsed.hostname) + ) { + throw new ProxyConfigError("Proxy configuration is invalid", { + fieldErrors: { + OPENCLAW_BASE_URL: [ + "OPENCLAW_BASE_URL must be an externally reachable URL for deployed Worker environments", + ], + }, + formErrors: [], + }); + } +} + +function toConfigErrorResponse(error: ProxyConfigError): Response { + logger.error(error.message, { + code: error.code, + details: error.details, + }); + + return Response.json( + { + error: { + code: error.code, + message: error.message, + details: error.details, + }, + }, + { status: error.status }, + ); +} + +const worker = { + fetch( + request: Request, + env: ProxyWorkerBindings, + executionCtx: ExecutionContext, + ): Response | Promise { + try { + const runtime = buildRuntime(env); + return runtime.app.fetch(request, env, executionCtx); + } catch (error) { + if (error instanceof ProxyConfigError) { + return toConfigErrorResponse(error); + } + + logger.error("Unhandled proxy worker startup error", { + errorName: error instanceof Error ? 
error.name : "unknown", + }); + return Response.json( + { + error: { + code: "PROXY_WORKER_STARTUP_FAILED", + message: "Proxy worker startup failed", + }, + }, + { status: 500 }, + ); + } + }, +}; + +export default worker; diff --git a/apps/proxy/tsconfig.json b/apps/proxy/tsconfig.json index b0fced2..c6ba935 100644 --- a/apps/proxy/tsconfig.json +++ b/apps/proxy/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.base.json", "compilerOptions": { + "lib": ["esnext"], + "types": ["@cloudflare/workers-types", "node"], "outDir": "./dist" }, "include": ["src"] diff --git a/apps/proxy/tsup.config.ts b/apps/proxy/tsup.config.ts index 88f8edd..b194042 100644 --- a/apps/proxy/tsup.config.ts +++ b/apps/proxy/tsup.config.ts @@ -1,7 +1,13 @@ import { defineConfig } from "tsup"; export default defineConfig({ - entry: ["src/index.ts", "src/server.ts", "src/bin.ts"], + entry: [ + "src/index.ts", + "src/server.ts", + "src/node-server.ts", + "src/worker.ts", + "src/bin.ts", + ], format: ["esm"], dts: true, clean: true, diff --git a/apps/proxy/wrangler.jsonc b/apps/proxy/wrangler.jsonc new file mode 100644 index 0000000..3e0b85d --- /dev/null +++ b/apps/proxy/wrangler.jsonc @@ -0,0 +1,34 @@ +{ + "$schema": "../../node_modules/wrangler/config-schema.json", + "name": "clawdentity-proxy", + "main": "src/worker.ts", + "compatibility_date": "2025-09-01", + "compatibility_flags": ["nodejs_compat"], + "env": { + "local": { + "name": "clawdentity-proxy-local", + "vars": { + "ENVIRONMENT": "local", + "REGISTRY_URL": "https://dev.api.clawdentity.com", + "OPENCLAW_BASE_URL": "http://127.0.0.1:18789", + "INJECT_IDENTITY_INTO_MESSAGE": "false" + } + }, + "development": { + "name": "clawdentity-proxy-development", + "vars": { + "ENVIRONMENT": "development", + "REGISTRY_URL": "https://dev.api.clawdentity.com", + "INJECT_IDENTITY_INTO_MESSAGE": "false" + } + }, + "production": { + "name": "clawdentity-proxy", + "vars": { + "ENVIRONMENT": "production", + "REGISTRY_URL": 
"https://api.clawdentity.com", + "INJECT_IDENTITY_INTO_MESSAGE": "false" + } + } + } +} diff --git a/apps/registry/.env.example b/apps/registry/.env.example index 8c8ec6d..48dcb85 100644 --- a/apps/registry/.env.example +++ b/apps/registry/.env.example @@ -1,7 +1,15 @@ -# Cloudflare (set via wrangler secret) -# BOOTSTRAP_SECRET= -# REGISTRY_SIGNING_KEY= -# REGISTRY_SIGNING_KEYS=","status":"active"}]> +# Registry local/development template +# For local Wrangler development, place active values in .dev.vars. +# For cloud deploys, set secrets with: +# wrangler secret put BOOTSTRAP_SECRET --env +# wrangler secret put REGISTRY_SIGNING_KEY --env +# wrangler secret put REGISTRY_SIGNING_KEYS --env -# wrangler.jsonc vars (non-secret) -# ENVIRONMENT=production +# Wrangler vars (non-secret) +ENVIRONMENT=development +APP_VERSION=local-dev + +# Secrets +BOOTSTRAP_SECRET=replace-with-random-secret +REGISTRY_SIGNING_KEY=replace-with-base64url-ed25519-private-key +REGISTRY_SIGNING_KEYS=[{"kid":"reg-key-1","alg":"EdDSA","crv":"Ed25519","x":"replace-with-base64url-ed25519-public-key","status":"active"}] diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 91a3b06..b47a77f 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -23,6 +23,7 @@ - Preserve `/health` response contract: `{ status, version, environment }`. - Keep the worker entrypoint in `src/server.ts`; use `src/index.ts` only as the package export wrapper. - Keep environment variables non-secret in `wrangler.jsonc` and secret values out of git. +- Keep `.dev.vars` and `.env.example` synchronized when adding/changing runtime config fields (`ENVIRONMENT`, `APP_VERSION`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). ## Validation - Validate config changes with `wrangler check` before deployment. 
diff --git a/package.json b/package.json index a8dc08a..4155b47 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,10 @@ "affected:test:local": "nx affected -t lint,format,typecheck,test --base=origin/main --head=HEAD", "affected:ci": "nx affected -t lint,format,typecheck,test,build --base=$NX_BASE --head=$NX_HEAD", "issues:validate": "node issues/scripts/validate-ticket-set.mjs", - "dev:registry:local": "pnpm -F @clawdentity/registry run dev:local" + "dev:registry:local": "pnpm -F @clawdentity/registry run dev:local", + "dev:proxy": "pnpm -F @clawdentity/proxy run dev", + "dev:proxy:development": "pnpm -F @clawdentity/proxy run dev:development", + "dev:proxy:fresh": "pnpm -F @clawdentity/proxy run dev:fresh" }, "devDependencies": { "@biomejs/biome": "^2.3.14", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5c23fd9..f1d0137 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -85,6 +85,13 @@ importers: zod: specifier: ^4.1.12 version: 4.3.6 + devDependencies: + '@cloudflare/workers-types': + specifier: ^4.20260210.0 + version: 4.20260210.0 + '@types/node': + specifier: ^22.17.2 + version: 22.19.11 apps/registry: dependencies: From 92b1144b6a681a484d342ceba153c1eace81726c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 12:48:44 +0530 Subject: [PATCH 058/190] feat: canonicalize bootstrap endpoint path and remove issues validator script --- AGENTS.md | 2 +- README.md | 2 +- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/admin.test.ts | 3 +- apps/cli/src/commands/admin.ts | 2 +- apps/registry/src/AGENTS.md | 1 + apps/registry/src/server.test.ts | 100 +++++++++++++++++++++++++--- apps/registry/src/server.ts | 8 ++- package.json | 1 - packages/protocol/AGENTS.md | 1 + packages/protocol/src/endpoints.ts | 1 + packages/protocol/src/index.test.ts | 5 ++ packages/protocol/src/index.ts | 1 + 13 files changed, 110 insertions(+), 18 deletions(-) create mode 100644 packages/protocol/src/endpoints.ts diff --git a/AGENTS.md b/AGENTS.md index 
94725da..54554ad 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -42,7 +42,7 @@ ## Validation Baseline - Run and pass: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build` for implementation changes. - Lint runs at root (`pnpm lint` via `biome check .`), not per-package. -- For planning/doc changes, run dependency/order consistency checks in `issues/EXECUTION_PLAN.md`. +- For planning/doc changes, verify dependency/order consistency against the current execution source of truth (the in-repo execution plan if present, otherwise the active issue tracker plan). ## Cloudflare Worker & Wrangler Conventions - Registry is a **Hono** app deployed as a Cloudflare Worker. Wrangler handles bundling — tsup is only for type generation and local build validation. diff --git a/README.md b/README.md index 7b33cc2..2c73eaa 100644 --- a/README.md +++ b/README.md @@ -286,7 +286,7 @@ No one shares keys/files between agents. Identity is presented per request. - **PRD:** see [`PRD.md`](./PRD.md) (MVP product requirements + execution plan) - **Issue execution plan:** see [`issues/EXECUTION_PLAN.md`](./issues/EXECUTION_PLAN.md) (deployment-first ordering + waves) - **Issue authoring rules:** see [`issues/AGENTS.md`](./issues/AGENTS.md) (required issue schema + blockers policy) -- **Canonical ticket specs:** `issues/T00.md` through `issues/T38.md` are versioned in-repo and should be validated with `pnpm issues:validate` in local checks and CI. +- **Canonical ticket specs:** `issues/T00.md` through `issues/T38.md` are versioned in-repo. --- diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 6328c2e..c4b9d98 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -25,6 +25,7 @@ ## Admin Command Rules - `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. 
+- `admin bootstrap` must import `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` instead of duplicating endpoint literals in command code/tests. - Treat bootstrap API key token as write-once secret: print once, persist via config manager, and never log token contents. - Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. diff --git a/apps/cli/src/commands/admin.test.ts b/apps/cli/src/commands/admin.test.ts index d565ab6..f99cbbb 100644 --- a/apps/cli/src/commands/admin.test.ts +++ b/apps/cli/src/commands/admin.test.ts @@ -1,3 +1,4 @@ +import { ADMIN_BOOTSTRAP_PATH } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; import { bootstrapAdmin, persistBootstrapConfig } from "./admin.js"; @@ -53,7 +54,7 @@ describe("admin bootstrap helper", () => { RequestInit, ]; expect(calledInput.toString()).toBe( - "https://api.example.com/v1/admin/bootstrap", + `https://api.example.com${ADMIN_BOOTSTRAP_PATH}`, ); expect(calledInit.method).toBe("POST"); expect( diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index 80a2966..84da21f 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -1,3 +1,4 @@ +import { ADMIN_BOOTSTRAP_PATH } from "@clawdentity/protocol"; import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; import { @@ -9,7 +10,6 @@ import { writeStdoutLine } from "../io.js"; import { withErrorHandling } from "./helpers.js"; const logger = createLogger({ service: "cli", module: "admin" }); -const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; type AdminBootstrapOptions = { bootstrapSecret: string; diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 11f9f74..53e8e47 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -13,6 
+13,7 @@ ## Admin Bootstrap Contract - `POST /v1/admin/bootstrap` is a one-time bootstrap endpoint gated by `BOOTSTRAP_SECRET`. +- Use `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` for route registration and test requests; do not hardcode bootstrap path literals in registry code. - Require `x-bootstrap-secret` header and compare with constant-time semantics; invalid/missing secret must return `401 ADMIN_BOOTSTRAP_UNAUTHORIZED`. - If `BOOTSTRAP_SECRET` is not configured, return `503 ADMIN_BOOTSTRAP_DISABLED`. - If any admin human already exists, return `409 ADMIN_BOOTSTRAP_ALREADY_COMPLETED`. diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index f6a301d..534b54b 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,4 +1,5 @@ import { + ADMIN_BOOTSTRAP_PATH, type AitClaims, encodeBase64url, generateUlid, @@ -1174,11 +1175,11 @@ describe("GET /health", () => { }); }); -describe("POST /v1/admin/bootstrap", () => { +describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { it("returns 503 when bootstrap secret is not configured", async () => { const { database } = createFakeDb([]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1207,7 +1208,7 @@ describe("POST /v1/admin/bootstrap", () => { it("returns 401 when bootstrap secret header is missing", async () => { const { database } = createFakeDb([]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1230,7 +1231,7 @@ describe("POST /v1/admin/bootstrap", () => { it("returns 401 when bootstrap secret is invalid", async () => { const { database } = createFakeDb([]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1254,7 +1255,7 @@ describe("POST /v1/admin/bootstrap", () => { it("returns 400 when payload is 
not valid JSON", async () => { const { database } = createFakeDb([]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1278,7 +1279,7 @@ describe("POST /v1/admin/bootstrap", () => { it("returns 400 when payload fields are invalid", async () => { const { database } = createFakeDb([]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1305,7 +1306,7 @@ describe("POST /v1/admin/bootstrap", () => { const { authRow } = await makeValidPatContext(); const { database } = createFakeDb([authRow]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1330,7 +1331,7 @@ describe("POST /v1/admin/bootstrap", () => { const { database, humanInserts, apiKeyInserts } = createFakeDb([]); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1384,13 +1385,90 @@ describe("POST /v1/admin/bootstrap", () => { ); }); + it("returns PAT that authenticates GET /v1/me on same app and database", async () => { + const { database } = createFakeDb([]); + const appInstance = createRegistryApp(); + + const bootstrapResponse = await appInstance.request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(bootstrapResponse.status).toBe(201); + const bootstrapBody = (await bootstrapResponse.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + }; + apiKey: { + id: string; + name: string; + token: string; + }; + }; + + const meResponse = await 
appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${bootstrapBody.apiKey.token}`, + }, + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(meResponse.status).toBe(200); + const meBody = (await meResponse.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + apiKey: { + id: string; + name: string; + }; + }; + }; + expect(meBody.human).toEqual({ + id: bootstrapBody.human.id, + did: bootstrapBody.human.did, + displayName: bootstrapBody.human.displayName, + role: bootstrapBody.human.role, + apiKey: { + id: bootstrapBody.apiKey.id, + name: bootstrapBody.apiKey.name, + }, + }); + }); + it("falls back to manual mutation when transactions are unavailable", async () => { const { database, humanInserts, apiKeyInserts } = createFakeDb([], [], { failBeginTransaction: true, }); const response = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1421,7 +1499,7 @@ describe("POST /v1/admin/bootstrap", () => { }); const firstResponse = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { @@ -1444,7 +1522,7 @@ describe("POST /v1/admin/bootstrap", () => { expect(humanRows).toHaveLength(0); const secondResponse = await createRegistryApp().request( - "/v1/admin/bootstrap", + ADMIN_BOOTSTRAP_PATH, { method: "POST", headers: { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 167fe22..2627776 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,4 +1,8 @@ -import { generateUlid, makeHumanDid } from "@clawdentity/protocol"; +import { + ADMIN_BOOTSTRAP_PATH, + generateUlid, + makeHumanDid, +} from "@clawdentity/protocol"; import { AppError, createHonoErrorHandler, @@ -319,7 +323,7 @@ function createRegistryApp() { }); }); - app.post("/v1/admin/bootstrap", async (c) => { + app.post(ADMIN_BOOTSTRAP_PATH, async (c) => { const config = 
getConfig(c.env); const expectedBootstrapSecret = requireBootstrapSecret( config.BOOTSTRAP_SECRET, diff --git a/package.json b/package.json index 4155b47..7a52095 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,6 @@ "affected:typecheck:local": "nx affected -t typecheck --base=origin/main --head=HEAD", "affected:test:local": "nx affected -t lint,format,typecheck,test --base=origin/main --head=HEAD", "affected:ci": "nx affected -t lint,format,typecheck,test,build --base=$NX_BASE --head=$NX_HEAD", - "issues:validate": "node issues/scripts/validate-ticket-set.mjs", "dev:registry:local": "pnpm -F @clawdentity/registry run dev:local", "dev:proxy": "pnpm -F @clawdentity/proxy run dev", "dev:proxy:development": "pnpm -F @clawdentity/proxy run dev:development", diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index 237c7f3..937bc7d 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -17,6 +17,7 @@ - Reuse cross-module helpers (e.g., `text.ts`’s `hasControlChars`) so control-character checks stay consistent across AIT and CRL validation. - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). +- Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. ## Testing - Add focused Vitest tests per helper module and one root export test in `src/index.test.ts`. 
diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts new file mode 100644 index 0000000..589fad7 --- /dev/null +++ b/packages/protocol/src/endpoints.ts @@ -0,0 +1 @@ +export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index 8720d17..eb5b2f1 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "vitest"; import { + ADMIN_BOOTSTRAP_PATH, AGENT_NAME_REGEX, aitClaimsSchema, CLAW_PROOF_CANONICAL_VERSION, @@ -26,6 +27,10 @@ describe("protocol", () => { expect(PROTOCOL_VERSION).toBe("0.0.0"); }); + it("exports shared endpoint constants", () => { + expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); + }); + it("exports helpers from package root", () => { const ulid = generateUlid(1700000000000); const humanDid = makeHumanDid(ulid); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 3144b31..f5ba58a 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -14,6 +14,7 @@ export type { CrlClaims } from "./crl.js"; export { crlClaimsSchema, parseCrlClaims } from "./crl.js"; export type { ClawDidKind } from "./did.js"; export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; +export { ADMIN_BOOTSTRAP_PATH } from "./endpoints.js"; export type { ProtocolParseErrorCode } from "./errors.js"; export { ProtocolParseError } from "./errors.js"; export type { CanonicalRequestInput } from "./http-signing.js"; From 95f67cb346a3063ce0610bbed7b88a86ac926683 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 12:49:49 +0530 Subject: [PATCH 059/190] chore: remove legacy issues ticket docs and validator artifacts --- issues/AGENTS.md | 54 ----- issues/EXECUTION_PLAN.md | 82 -------- issues/T00.md | 48 ----- issues/T01.md | 50 ----- issues/T02.md | 48 ----- issues/T03.md | 48 ----- issues/T04.md | 
49 ----- issues/T05.md | 50 ----- issues/T06.md | 50 ----- issues/T07.md | 52 ----- issues/T08.md | 48 ----- issues/T09.md | 49 ----- issues/T10.md | 49 ----- issues/T11.md | 50 ----- issues/T12.md | 52 ----- issues/T13.md | 49 ----- issues/T14.md | 55 ----- issues/T15.md | 50 ----- issues/T16.md | 50 ----- issues/T17.md | 51 ----- issues/T18.md | 52 ----- issues/T19.md | 51 ----- issues/T20.md | 49 ----- issues/T21.md | 52 ----- issues/T22.md | 48 ----- issues/T23.md | 48 ----- issues/T24.md | 51 ----- issues/T25.md | 48 ----- issues/T26.md | 49 ----- issues/T27.md | 53 ----- issues/T28.md | 50 ----- issues/T29.md | 49 ----- issues/T30.md | 49 ----- issues/T31.md | 50 ----- issues/T32.md | 48 ----- issues/T33.md | 48 ----- issues/T34.md | 51 ----- issues/T35.md | 50 ----- issues/T36.md | 53 ----- issues/T37.md | 48 ----- issues/T38.md | 51 ----- issues/scripts/AGENTS.md | 18 -- issues/scripts/validate-ticket-set.mjs | 277 ------------------------- 43 files changed, 2377 deletions(-) delete mode 100644 issues/AGENTS.md delete mode 100644 issues/EXECUTION_PLAN.md delete mode 100644 issues/T00.md delete mode 100644 issues/T01.md delete mode 100644 issues/T02.md delete mode 100644 issues/T03.md delete mode 100644 issues/T04.md delete mode 100644 issues/T05.md delete mode 100644 issues/T06.md delete mode 100644 issues/T07.md delete mode 100644 issues/T08.md delete mode 100644 issues/T09.md delete mode 100644 issues/T10.md delete mode 100644 issues/T11.md delete mode 100644 issues/T12.md delete mode 100644 issues/T13.md delete mode 100644 issues/T14.md delete mode 100644 issues/T15.md delete mode 100644 issues/T16.md delete mode 100644 issues/T17.md delete mode 100644 issues/T18.md delete mode 100644 issues/T19.md delete mode 100644 issues/T20.md delete mode 100644 issues/T21.md delete mode 100644 issues/T22.md delete mode 100644 issues/T23.md delete mode 100644 issues/T24.md delete mode 100644 issues/T25.md delete mode 100644 issues/T26.md delete mode 100644 
issues/T27.md delete mode 100644 issues/T28.md delete mode 100644 issues/T29.md delete mode 100644 issues/T30.md delete mode 100644 issues/T31.md delete mode 100644 issues/T32.md delete mode 100644 issues/T33.md delete mode 100644 issues/T34.md delete mode 100644 issues/T35.md delete mode 100644 issues/T36.md delete mode 100644 issues/T37.md delete mode 100644 issues/T38.md delete mode 100644 issues/scripts/AGENTS.md delete mode 100644 issues/scripts/validate-ticket-set.mjs diff --git a/issues/AGENTS.md b/issues/AGENTS.md deleted file mode 100644 index de5367b..0000000 --- a/issues/AGENTS.md +++ /dev/null @@ -1,54 +0,0 @@ -# AGENTS.md (issues folder) - -## Purpose -- This folder contains implementation-ready issue specifications for the Clawdentity support plugin roadmap. -- Each issue must be decision-complete so an engineer can execute it without guessing scope or acceptance criteria. - -## Required Issue Schema -Every `T*.md` file must include these sections in this order: -1. `## Goal` -2. `## In Scope` -3. `## Out of Scope` -4. `## Dependencies` -5. `## Execution Mode` -6. `## Parallel Wave` -7. `## Required Skills` -8. `## Deliverables` -9. `## Refactor Opportunities` -10. `## Definition of Done` -11. `## Validation Steps` - -## Dependency Rules -- `Dependencies` must list only valid ticket IDs (`T00` format) that exist in this folder. -- `Dependencies` must include a `Blockers` line. -- Before marking an issue complete, validate that all blockers are resolved. -- Run `pnpm issues:validate` before closing deployment-gate tickets (`T37`, `T38`) or changing dependency/wave metadata. -- Do not reorder dependency logic without updating `EXECUTION_PLAN.md`. - -## Deployment-First Rule -- `T00` scaffolds the workspace. -- `T37` and `T38` are deployment gate tickets. -- Feature tickets (`T01`-`T36`) must depend on `T38`. -- No feature implementation begins before `T38` is complete. - -## Quality Rules -- Keep acceptance criteria unique and non-duplicative. 
-- Add at least one refactor opportunity, or explicitly state `None`. -- Add concrete validation commands with expected outcomes. -- Keep scope narrow: one issue should represent one coherent unit of delivery. -- For CLI deliverables, argument placeholders must match supported identifier semantics (for example `` for local filesystem lookups) and avoid ambiguous `` unless resolution rules are explicitly defined. - -## Skill Rules -- Every issue must declare required skills. -- Use the defined defaults from `EXECUTION_PLAN.md` when no issue-specific override is needed. -- There is currently no dedicated `openclaw support-plugin` skill; use mapped fallback skills by issue group. - -## Change Management -- If a dependency is ambiguous, preserve current dependency links and add a note in that issue. -- Prefer small, maintainable updates over broad speculative rewrites. -- If a change affects sequencing or parallel waves, update both the issue file and `EXECUTION_PLAN.md` in the same change. - -## Audit Best Practices -- Confirm each feature ticket (`T01`-`T36`) lists `T38` under `Dependencies` and in the `Blockers` line; document any gaps before capturing new wave assignments. -- When sequencing or wave assignments evolve, update `EXECUTION_PLAN.md` in the same commit so the deployment-first narrative stays accurate and blockers remain visible to reviewers. -- Use `pnpm issues:validate` as the final audit step after editing any `issues/T*.md` file. diff --git a/issues/EXECUTION_PLAN.md b/issues/EXECUTION_PLAN.md deleted file mode 100644 index c09d934..0000000 --- a/issues/EXECUTION_PLAN.md +++ /dev/null @@ -1,82 +0,0 @@ -# Clawdentity Support Plugin Execution Plan - -## Scope -- This plan governs issue specification and implementation sequencing for `issues/T00.md` through `issues/T38.md`. -- Execution is deployment-first: scaffold and deploy baseline before feature tickets. -- Scope excludes deployment mechanics in `~/Workdir/clawdbot`. 
- -## Deployment-First Gate -- `T00` establishes workspace scaffolding. -- `T37` defines deployment scaffolding and configuration contract. -- `T38` performs baseline deployment verification. -- Feature tickets (`T01`-`T36`) must not start until `T38` is complete. - -## Canonical Sequential Order -- `T00 -> T37 -> T38 -> T01 -> T02 -> T03 -> T04 -> T05 -> T06 -> T07 -> T08 -> T09 -> T10 -> T11 -> T12 -> T13 -> T14 -> T15 -> T16 -> T17 -> T18 -> T19 -> T20 -> T21 -> T22 -> T23 -> T24 -> T25 -> T26 -> T27 -> T28 -> T29 -> T30 -> T31 -> T32 -> T33 -> T34 -> T35 -> T36` - -## Parallel Waves -- Wave 0: `T00` -- Wave 1: `T37` -- Wave 2: `T38` -- Wave 3: `T01, T10, T20, T25` -- Wave 4: `T02, T03, T04, T11, T26` -- Wave 5: `T05, T06, T07, T12, T13, T19` -- Wave 6: `T08, T09, T14, T15, T22` -- Wave 7: `T16, T21, T24, T27, T34` -- Wave 8: `T17, T18, T23, T28, T30, T31, T32, T35` -- Wave 9: `T29, T36` -- Wave 10: `T33` - -## Skill Mapping Defaults -- Foundation (`T00`-`T09`): `code-quality`, `testing-framework`, `validation-schema` -- Deployment scaffolding (`T37`, `T38`): `deployment`, `configuration-management`, `observability` -- Registry (`T10`-`T19`, `T34`): `database`, `api-standards`, `identity-service`, `error-handling` -- CLI (`T20`-`T24`, `T35`): `command-development`, `code-quality`, `testing-framework` -- Proxy (`T25`-`T31`, `T36`): `api-client`, `data-fetching`, `logging`, `error-handling` -- UI/docs (`T32`, `T33`): `frontend-design`, `web-design-guidelines`, `hld-generator` - -## Validation Scenarios -0. Ticket-set validator (authoritative gate check): -- Command: -```bash -pnpm issues:validate -``` -- Expected output: `issues:validate passed (...)` - -1. 
Schema consistency: -- Command: -```bash -for f in issues/T*.md; do - for s in "## Goal" "## In Scope" "## Out of Scope" "## Dependencies" "## Execution Mode" "## Parallel Wave" "## Required Skills" "## Deliverables" "## Refactor Opportunities" "## Definition of Done" "## Validation Steps"; do - rg -q "^${s}$" "$f" || { echo "Missing ${s} in $f"; exit 1; } - done -done -echo "Schema check passed" -``` -- Expected output: `Schema check passed` - -2. Deployment gate enforcement: -- Test: every feature ticket (`T01`-`T36`) declares `T38` in `Dependencies` and `Blockers`. -- Expected output: no feature ticket bypasses deployment gate. - -3. Dependency integrity (IDs exist + acyclic graph): -- Test: every dependency listed in a ticket exists as a `T*.md` file and no dependency cycle is present. -- Expected output: all dependency references resolve; topological sort succeeds. - -4. Sequential order validity: -- Test: each ticket in the sequential list appears only after all dependencies. -- Expected output: no ticket appears before one of its blockers. - -5. Parallel wave safety: -- Test: a ticket must not be in the same wave as one of its dependencies. -- Expected output: zero dependency conflicts within each wave. - -6. Quality completeness: -- Test: each ticket has at least one refactor opportunity bullet or explicit `None`. -- Expected output: every `T*.md` passes this quality gate. - -## Working Rules -- Preserve existing dependency links unless there is a clear correctness issue. -- Deployment gate (`T38`) is mandatory for all feature tickets. -- If dependency ambiguity appears, keep current links and add a clarification note in the affected issue. -- Update this plan whenever wave assignment or dependency shape changes. 
diff --git a/issues/T00.md b/issues/T00.md deleted file mode 100644 index 3f485f1..0000000 --- a/issues/T00.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T00.md` - -## Goal -Create a monorepo workspace for registry, SDK, CLI, and proxy with consistent tooling. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. - -## Dependencies -- None -- Blockers: None - -## Execution Mode -- Sequential-ready - -## Parallel Wave -- Wave 0 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- pnpm workspace setup (`pnpm-workspace.yaml`, root `package.json` scripts: lint/test/build) -- Shared TS config (`tsconfig.base.json`) -- Folders: `packages/protocol`, `packages/sdk`, `apps/registry`, `apps/cli`, `apps/proxy` -- CI-friendly scripts: `pnpm -r build`, `pnpm -r test` (can be placeholders) - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- `pnpm -r build` runs without errors (even if builds are stubbed) -- Repo has consistent formatting/linting configuration (ESLint/Prettier or Biome) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T00 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (None) before implementation; expected: unresolved blockers keep this ticket in blocked state. 
diff --git a/issues/T01.md b/issues/T01.md deleted file mode 100644 index bdfe215..0000000 --- a/issues/T01.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T01.md` - -## Goal -Implement shared encoding + identifier helpers used across SDK/registry/CLI. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. - -## Dependencies -- T00 -- T38 -- Blockers: T00, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 3 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/protocol/src/base64url.ts` (encode/decode) -- `packages/protocol/src/ulid.ts` (generate/parse wrapper) -- `packages/protocol/src/did.ts` (make/parse human + agent DIDs) -- Unit tests for roundtrips and parsing - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- base64url roundtrip tests pass -- DID parsing rejects invalid formats - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T01 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. 
diff --git a/issues/T02.md b/issues/T02.md deleted file mode 100644 index 417d6ca..0000000 --- a/issues/T02.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T02.md` - -## Goal -Define the canonical string format used for PoP request signing (stable and unambiguous). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. - -## Dependencies -- T01 -- T38 -- Blockers: T01, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 4 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/protocol/src/http-signing.ts` with `canonicalizeRequest()` -- Tests asserting canonical output is stable (snapshot) - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- Given same inputs, canonical string is identical across runs -- Canonical format includes method/path/ts/nonce/body-hash - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T02 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T01, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T03.md b/issues/T03.md deleted file mode 100644 index 609e400..0000000 --- a/issues/T03.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T03.md` - -## Goal -Implement Ed25519 keypair generation and sign/verify utilities. 
- -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. - -## Dependencies -- T01 -- T38 -- Blockers: T01, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 4 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/sdk/src/crypto/ed25519.ts` (generate/sign/verify, base64url helpers) -- Unit tests: sign/verify happy path + negative cases - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- Sign/verify works for known vectors or generated keys -- Wrong message/signature fails verification - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T03 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T01, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T04.md b/issues/T04.md deleted file mode 100644 index 339cdbe..0000000 --- a/issues/T04.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T04.md` - -## Goal -Define a strict schema for AIT claims and validate risky fields (name, description). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. 
-- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. - -## Dependencies -- T01 -- T38 -- Blockers: T01, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 4 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/protocol/src/ait.ts` (schema + types) -- `validateAgentName()` (strict regex + max length) -- Unit tests for valid/invalid names - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- Invalid names (control chars, too long) are rejected -- Schema covers required claims for MVP - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T04 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T01, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T05.md b/issues/T05.md deleted file mode 100644 index 6ef414e..0000000 --- a/issues/T05.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T05.md` - -## Goal -Implement JWT (JWS) encoding/decoding and EdDSA signing for AIT tokens. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T03 -- T04 -- T38 -- Blockers: T03, T04, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 5 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/sdk/src/jwt/ait-jwt.ts` (`signAIT`, `verifyAIT`) -- Support `kid` lookup from registry keys -- Unit tests: sign then verify; wrong issuer/kid fails - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- AIT created by `signAIT` verifies with published public key -- Verifier enforces `alg=EdDSA` and `typ=AIT` - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T05 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T03, T04, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T06.md b/issues/T06.md deleted file mode 100644 index c8019cf..0000000 --- a/issues/T06.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T06.md` - -## Goal -Define signed CRL format and verification logic. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T01 -- T03 -- T38 -- Blockers: T01, T03, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 5 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/protocol/src/crl.ts` schema -- `packages/sdk/src/jwt/crl-jwt.ts` (`signCRL`, `verifyCRL`) -- Unit tests: tampering invalidates signature - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- CRL signature verification fails on tampered payload -- Schema enforces expected fields (iss, iat, revocations) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T06 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T01, T03, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T07.md b/issues/T07.md deleted file mode 100644 index 7febd03..0000000 --- a/issues/T07.md +++ /dev/null @@ -1,52 +0,0 @@ -Source: `T07.md` - -## Goal -Sign outbound HTTP requests and verify inbound requests using PoP headers (replay-resistant). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T02 -- T03 -- T04 -- T38 -- Blockers: T02, T03, T04, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 5 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/sdk/src/http/sign.ts` (hash body, produce headers) -- `packages/sdk/src/http/verify.ts` (verify headers + proof) -- Bind signature to method/path/timestamp/nonce/body hash -- Unit tests for verification success/failure - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- Signed request verifies successfully -- Altering method/path/body/timestamp causes verification failure - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T07 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T02, T03, T04, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T08.md b/issues/T08.md deleted file mode 100644 index a8f68a1..0000000 --- a/issues/T08.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T08.md` - -## Goal -Prevent request replay by tracking seen nonces per agent DID within a TTL window. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T07 -- T38 -- Blockers: T07, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 6 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/sdk/src/security/nonce-cache.ts` (TTL store keyed by agent+nonce) -- Unit tests: duplicate nonce within TTL is rejected - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- Second request with same nonce is rejected -- Expired nonces are pruned (or treated as unseen) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T08 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T07, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T09.md b/issues/T09.md deleted file mode 100644 index 5246027..0000000 --- a/issues/T09.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T09.md` - -## Goal -Implement a CRL cache that refreshes periodically and reports staleness. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T06 -- T38 -- Blockers: T06, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 6 - -## Required Skills -- `code-quality` -- `testing-framework` -- `validation-schema` - -## Deliverables -- `packages/sdk/src/crl/cache.ts` (`refreshIfStale`, `isRevoked`) -- Config: refresh interval, max age, fail-open/fail-closed -- Unit tests for stale behavior and revoked lookup - -## Refactor Opportunities -- Consolidate shared protocol helpers to avoid duplicate encoding/signing logic across packages. - -## Definition of Done -- Revoked `jti` is rejected -- Stale CRL triggers refresh attempt; warnings surfaced - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T09 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T06, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T10.md b/issues/T10.md deleted file mode 100644 index 73cea8e..0000000 --- a/issues/T10.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T10.md` - -## Goal -Create minimal database schema for humans, agents, revocations, and PAT API keys. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T00 -- T38 -- Blockers: T00, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 3 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- SQL migrations for: `humans`, `agents`, `revocations`, `api_keys` -- Indexes on agent owner/status and revocations jti - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Migration applies cleanly on a fresh DB -- Tables match the PRD MVP needs - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T10 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T11.md b/issues/T11.md deleted file mode 100644 index 0d3cc5e..0000000 --- a/issues/T11.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T11.md` - -## Goal -Bootstrap the registry API service with health endpoint and configuration. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T10 -- T38 -- Blockers: T10, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 4 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- `apps/registry/src/server.ts` server entry -- `GET /health` returns JSON + 200 -- Centralized config loading (env validation) - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Local dev server starts successfully -- Health endpoint returns expected JSON - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T11 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T12.md b/issues/T12.md deleted file mode 100644 index e0556e6..0000000 --- a/issues/T12.md +++ /dev/null @@ -1,52 +0,0 @@ -Source: `T12.md` - -## Goal -Implement simple PAT authentication for MVP (Authorization: Bearer ). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T11 -- T10 -- T38 -- Blockers: T11, T10, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 5 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- `apps/registry/src/auth/apiKeyAuth.ts` -- Token hashing + constant-time compare -- Context injection (`ctx.human`) - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Valid PAT authenticates successfully -- Invalid/missing PAT returns 401 -- Tests cover both cases - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T12 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T11, T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T13.md b/issues/T13.md deleted file mode 100644 index ebb01d6..0000000 --- a/issues/T13.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T13.md` - -## Goal -Expose registry signing public keys for offline verification. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T11 -- T38 -- Blockers: T11, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 5 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- `GET /.well-known/claw-keys.json` endpoint -- Response includes keys: kid/alg/crv/x/status -- Cache headers (reasonable) - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- SDK can fetch keys and verify a signed AIT using them - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T13 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T11, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T14.md b/issues/T14.md deleted file mode 100644 index 470a400..0000000 --- a/issues/T14.md +++ /dev/null @@ -1,55 +0,0 @@ -Source: `T14.md` - -## Goal -Allow an authenticated human to register an agent public key and receive a registry-signed AIT. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T12 -- T13 -- T10 -- T05 -- T38 -- Blockers: T12, T13, T10, T05, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 6 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- Endpoint `POST /v1/agents` (validates name/framework/publicKey/ttlDays) -- Stores agent row with `current_jti` + expiry -- Signs AIT with registry issuer key -- Returns `{ agent, ait }` - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Creating an agent returns an AIT that SDK verifies locally -- Name validation rejects unsafe strings -- Only authenticated humans can create agents - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T14 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T12, T13, T10, T05, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T15.md b/issues/T15.md deleted file mode 100644 index 1b42c8d..0000000 --- a/issues/T15.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T15.md` - -## Goal -List agents owned by the authenticated human. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T12 -- T10 -- T38 -- Blockers: T12, T10, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 6 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- Endpoint `GET /v1/agents` with filters (status/framework) optional -- Returns minimal fields (id/did/name/status/expires) - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Only returns caller-owned agents -- Pagination pattern established (even if minimal) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T15 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T12, T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T16.md b/issues/T16.md deleted file mode 100644 index 19af68e..0000000 --- a/issues/T16.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T16.md` - -## Goal -Revoke an agent (kill switch) and publish revocation via CRL. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T14 -- T38 -- Blockers: T14, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 7 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- Endpoint `DELETE /v1/agents/:id` -- Transaction: mark agent revoked + insert revocation for current_jti -- Idempotent behavior for repeat revoke - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Revoked agent’s `current_jti` appears in CRL -- SDK/proxy rejects revoked AIT - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T16 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T14, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T17.md b/issues/T17.md deleted file mode 100644 index 04d23b6..0000000 --- a/issues/T17.md +++ /dev/null @@ -1,51 +0,0 @@ -Source: `T17.md` - -## Goal -Reissue an AIT for the same agent and revoke the previous token to enforce 'one active token'. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T14 -- T16 -- T38 -- Blockers: T14, T16, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- Endpoint `POST /v1/agents/:id/reissue` -- Revokes previous `current_jti` -- Issues new AIT with new `jti` and updates agent row - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Old AIT becomes invalid due to CRL revocation -- New AIT verifies successfully - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T17 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T14, T16, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T18.md b/issues/T18.md deleted file mode 100644 index 542e7ec..0000000 --- a/issues/T18.md +++ /dev/null @@ -1,52 +0,0 @@ -Source: `T18.md` - -## Goal -Serve a signed CRL snapshot (MVP: full list). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T16 -- T06 -- T13 -- T38 -- Blockers: T16, T06, T13, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- Endpoint `GET /v1/crl` returning `{ crl: }` -- CRL includes all revocations (MVP) -- Cache headers set appropriately - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- SDK verifies CRL signature -- CRL contains expected revocations - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T18 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T16, T06, T13, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T19.md b/issues/T19.md deleted file mode 100644 index bedbafc..0000000 --- a/issues/T19.md +++ /dev/null @@ -1,51 +0,0 @@ -Source: `T19.md` - -## Goal -Resolve an agent ID to a public profile (no PII). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T11 -- T10 -- T38 -- Blockers: T11, T10, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 5 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- Endpoint `GET /v1/resolve/:id` -- Returns `{ did, name, framework, status, ownerDid }` - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- No auth required -- Does not leak email or private user data -- Rate limiting enabled (basic) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T19 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T11, T10, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T20.md b/issues/T20.md deleted file mode 100644 index 89efc9b..0000000 --- a/issues/T20.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T20.md` - -## Goal -Create CLI framework and local config storage. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T00 -- T38 -- Blockers: T00, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 3 - -## Required Skills -- `command-development` -- `code-quality` -- `testing-framework` - -## Deliverables -- `apps/cli/src/index.ts` with command router -- Config at `~/.clawdentity/config.json` (registryUrl, apiKey) -- `claw --help` and `claw config set` - -## Refactor Opportunities -- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. - -## Definition of Done -- CLI runs on macOS/Linux -- Config read/write works - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T20 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T21.md b/issues/T21.md deleted file mode 100644 index c461906..0000000 --- a/issues/T21.md +++ /dev/null @@ -1,52 +0,0 @@ -Source: `T21.md` - -## Goal -Generate keypair locally, register public key, and save AIT + key to disk. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T20 -- T14 -- T03 -- T38 -- Blockers: T20, T14, T03, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 7 - -## Required Skills -- `command-development` -- `code-quality` -- `testing-framework` - -## Deliverables -- Command `claw agent create ` -- Generates Ed25519 keypair -- Calls registry `POST /v1/agents` -- Writes identity files under `~/.clawdentity/agents//` - -## Refactor Opportunities -- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. - -## Definition of Done -- Command prints agent DID + expiry -- Files created with secure permissions (0600 where applicable) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T21 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T20, T14, T03, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T22.md b/issues/T22.md deleted file mode 100644 index c12002c..0000000 --- a/issues/T22.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T22.md` - -## Goal -Print decoded AIT fields for an existing local identity. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T20 -- T05 -- T38 -- Blockers: T20, T05, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 6 - -## Required Skills -- `command-development` -- `code-quality` -- `testing-framework` - -## Deliverables -- Command `claw agent inspect ` -- Displays: did, owner, exp, kid, pubkey, framework - -## Refactor Opportunities -- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. - -## Definition of Done -- Works offline (no registry call needed) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T22 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T20, T05, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T23.md b/issues/T23.md deleted file mode 100644 index 302b77c..0000000 --- a/issues/T23.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T23.md` - -## Goal -Revoke an agent via registry and print confirmation. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T20 -- T16 -- T38 -- Blockers: T20, T16, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `command-development` -- `code-quality` -- `testing-framework` - -## Deliverables -- Command `claw agent revoke ` -- Calls `DELETE /v1/agents/:id` - -## Refactor Opportunities -- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. - -## Definition of Done -- Revocation succeeds and is visible via CRL after refresh - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T23 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T20, T16, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T24.md b/issues/T24.md deleted file mode 100644 index 647ddfa..0000000 --- a/issues/T24.md +++ /dev/null @@ -1,51 +0,0 @@ -Source: `T24.md` - -## Goal -Verify an AIT locally (signature + expiry + CRL). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T20 -- T05 -- T09 -- T38 -- Blockers: T20, T05, T09, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 7 - -## Required Skills -- `command-development` -- `code-quality` -- `testing-framework` - -## Deliverables -- Command `claw verify ` -- Fetches keys/CRL if needed; caches them -- Outputs ✅/❌ with reason - -## Refactor Opportunities -- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. - -## Definition of Done -- Valid token verifies -- Revoked token fails with reason 'revoked' - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T24 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T20, T05, T09, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T25.md b/issues/T25.md deleted file mode 100644 index 3cbbdd2..0000000 --- a/issues/T25.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T25.md` - -## Goal -Define proxy runtime configuration and validation. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T00 -- T38 -- Blockers: T00, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 3 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- `apps/proxy/src/config.ts` with schema and defaults -- Config: listenPort, openclawBaseUrl, openclawHookToken, registryUrl, allowList, crl refresh, stale policy - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Proxy refuses to start with invalid config - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T25 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T00, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T26.md b/issues/T26.md deleted file mode 100644 index 89be86f..0000000 --- a/issues/T26.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T26.md` - -## Goal -Start proxy server with health endpoint and basic logging. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T25 -- T38 -- Blockers: T25, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 4 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- `apps/proxy/src/server.ts` -- `GET /health` -- Structured request logging - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Proxy starts and responds to /health - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T26 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T25, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T27.md b/issues/T27.md deleted file mode 100644 index 58208be..0000000 --- a/issues/T27.md +++ /dev/null @@ -1,53 +0,0 @@ -Source: `T27.md` - -## Goal -Verify Clawdentity auth headers on inbound requests (AIT + CRL + PoP + nonce). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T26 -- T07 -- T09 -- T13 -- T38 -- Blockers: T26, T07, T09, T13, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 7 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- Middleware integrating SDK verifier -- Returns 401 on invalid/expired/revoked/proof failures -- Maintains nonce replay cache - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Invalid requests rejected with 401 -- Replay requests rejected (nonce reuse) - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T27 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T26, T07, T09, T13, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T28.md b/issues/T28.md deleted file mode 100644 index 3c64380..0000000 --- a/issues/T28.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T28.md` - -## Goal -Enforce allowlist by agent DID after verification. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T27 -- T38 -- Blockers: T27, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- Allowlist config: owners[], agents[] -- Remove support for `allowAllVerified`/`ALLOW_ALL_VERIFIED` -- Return 403 when verified but not allowed - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Verified-but-not-allowed returns 403 -- Allowed callers proceed to forwarding - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T28 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T27, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T29.md b/issues/T29.md deleted file mode 100644 index 1831ed4..0000000 --- a/issues/T29.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T29.md` - -## Goal -Forward verified requests to OpenClaw webhooks using OpenClaw hook token internally. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T28 -- T38 -- Blockers: T28, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 9 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- Proxy route `POST /hooks/agent` -- Forward JSON body to `${openclawBaseUrl}/hooks/agent` -- Add header `x-openclaw-token: ` - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Valid call yields OpenClaw 202 for `/hooks/agent` - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T29 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T28, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T30.md b/issues/T30.md deleted file mode 100644 index d7c8c41..0000000 --- a/issues/T30.md +++ /dev/null @@ -1,49 +0,0 @@ -Source: `T30.md` - -## Goal -Add basic rate limiting per verified agent DID to reduce abuse. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T27 -- T38 -- Blockers: T27, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- In-memory limiter (default 60 req/min per agent DID) -- Return 429 when exceeded - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Excess requests return 429 -- Limit is configurable - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T30 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T27, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T31.md b/issues/T31.md deleted file mode 100644 index 330d7a7..0000000 --- a/issues/T31.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T31.md` - -## Goal -Optionally prepend a sanitized identity block into webhook `message` (off by default). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T27 -- T38 -- Blockers: T27, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- Config `injectIdentityIntoMessage: boolean` (default false) -- Sanitize identity fields (no control chars, length limits) -- Document clearly as optional - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- When enabled, OpenClaw receives augmented message -- When disabled, payload unchanged - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T31 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T27, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T32.md b/issues/T32.md deleted file mode 100644 index 889e187..0000000 --- a/issues/T32.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T32.md` - -## Goal -Provide a minimal web UI for mobile-friendly agent revocation (kill switch). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T15 -- T16 -- T38 -- Blockers: T15, T16, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `frontend-design` -- `web-design-guidelines` -- `hld-generator` - -## Deliverables -- Minimal app (Next.js or simple UI) for login/auth and listing agents -- Revoke button triggers registry revoke - -## Refactor Opportunities -- Reuse common UI/documentation templates to reduce repeated structure and maintenance overhead. - -## Definition of Done -- Operator can revoke from mobile browser and see effect via CRL - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T32 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T15, T16, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T33.md b/issues/T33.md deleted file mode 100644 index 9e9c5ef..0000000 --- a/issues/T33.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T33.md` - -## Goal -Write a reproducible README walkthrough for the end-to-end MVP demo. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T29 -- T21 -- T24 -- T38 -- Blockers: T29, T21, T24, T38 - -## Execution Mode -- Sequential-ready - -## Parallel Wave -- Wave 10 - -## Required Skills -- `frontend-design` -- `web-design-guidelines` -- `hld-generator` - -## Deliverables -- Step-by-step: enable OpenClaw hooks, run proxy, create agent, send signed call, revoke and retry -- Provide curl examples and expected outputs - -## Refactor Opportunities -- Reuse common UI/documentation templates to reduce repeated structure and maintenance overhead. - -## Definition of Done -- A new machine can reproduce first verified call in < 10 minutes - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T33 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T29, T21, T24, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T34.md b/issues/T34.md deleted file mode 100644 index 1d38a14..0000000 --- a/issues/T34.md +++ /dev/null @@ -1,51 +0,0 @@ -Source: `T34.md` - -## Goal -Enable endpoint discovery by allowing agents to publish a callable endpoint (`gateway_hint`). - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T14 -- T19 -- T38 -- Blockers: T14, T19, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 7 - -## Required Skills -- `database` -- `api-standards` -- `identity-service` -- `error-handling` - -## Deliverables -- DB migration: add `gateway_hint` to agents table -- Endpoint `PATCH /v1/agents/:id` to set/unset gateway_hint with strict URL validation -- `GET /v1/resolve/:id` includes `gatewayHint` when present - -## Refactor Opportunities -- Centralize request validation and authorization checks into reusable middleware/utilities. - -## Definition of Done -- Valid gateway_hint is stored and returned via resolve -- Invalid URLs are rejected with 400 - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T34 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T14, T19, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T35.md b/issues/T35.md deleted file mode 100644 index 67a2609..0000000 --- a/issues/T35.md +++ /dev/null @@ -1,50 +0,0 @@ -Source: `T35.md` - -## Goal -Make sharing easy: print a copy/paste contact card (verify URL + endpoint) without sharing secrets. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T34 -- T20 -- T38 -- Blockers: T34, T20, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 8 - -## Required Skills -- `command-development` -- `code-quality` -- `testing-framework` - -## Deliverables -- Command `claw share [--json] [--qr]` -- Fetches gateway_hint from registry if not present locally -- Prints contact card with DID + verify URL + endpoint - -## Refactor Opportunities -- Extract shared CLI option parsing and output formatting helpers to keep commands small and testable. - -## Definition of Done -- Contact card prints even if endpoint missing (with guidance) -- `--json` outputs valid machine-readable JSON - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T35 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T34, T20, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T36.md b/issues/T36.md deleted file mode 100644 index c4ceb58..0000000 --- a/issues/T36.md +++ /dev/null @@ -1,53 +0,0 @@ -Source: `T36.md` - -## Goal -Add pairing code flow so operators can approve first contact and auto-update allowlist. - -## In Scope -- Implement the ticket objective described in `Goal`. -- Complete all items listed in `Deliverables`. -- Keep changes compatible with the declared dependency chain. - -## Out of Scope -- Features not explicitly required by this ticket. -- Reordering dependencies without updating the issue dependency graph. -- Cross-ticket changes that are not required for this ticket to pass. 
- -## Dependencies -- T27 -- T28 -- T34 -- T38 -- Blockers: T27, T28, T34, T38 - -## Execution Mode -- Sequential-ready -- Parallel-ready - -## Parallel Wave -- Wave 9 - -## Required Skills -- `api-client` -- `data-fetching` -- `logging` -- `error-handling` - -## Deliverables -- Persistent allowlist file store (`allowlist.json`) -- `POST /pair/start` protected by admin token; returns time-limited code -- `POST /pair/confirm` requires Claw auth; adds caller owner DID (or agent DID) to allowlist - -## Refactor Opportunities -- Break proxy verification/forwarding flow into composable middleware stages with single responsibilities. - -## Definition of Done -- Pairing code expires and cannot be reused -- Paired caller can send /hooks/agent successfully after pairing -- Allowlist survives proxy restarts - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T36 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T27, T28, T34, T38) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T37.md b/issues/T37.md deleted file mode 100644 index a6cf57f..0000000 --- a/issues/T37.md +++ /dev/null @@ -1,48 +0,0 @@ -Source: `T37.md` - -## Goal -Define deployment scaffolding for the support plugin so deployments are repeatable before feature delivery. - -## In Scope -- Create deployment scaffolding artifacts (environment template, release/deploy workflow outline, and runbook placeholders). -- Define required secrets and configuration contract for staging and production. -- Establish health check and rollback expectations for the first deployment. - -## Out of Scope -- Implementing feature endpoints or business logic. -- Production traffic cutover without passing staging checks. 
-- Environment-specific tweaks not documented in the scaffolding contract. - -## Dependencies -- T00 -- Blockers: T00 - -## Execution Mode -- Sequential-ready - -## Parallel Wave -- Wave 1 - -## Required Skills -- `deployment` -- `configuration-management` -- `code-quality` - -## Deliverables -- Deployment scaffolding checklist document under `issues/` references. -- CI/CD deployment workflow skeleton with explicit staging gate. -- Environment variable contract listing required secrets and defaults. - -## Refactor Opportunities -- Consolidate deployment configuration into shared templates to avoid environment drift. - -## Definition of Done -- Deployment scaffolding is documented and versioned. -- CI can validate presence/shape of required deployment configuration. -- Team has a clear staging deploy path before feature work starts. - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including checks tied to T37 scaffolding changes. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Confirm blocker status (T00) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/T38.md b/issues/T38.md deleted file mode 100644 index cfef7b7..0000000 --- a/issues/T38.md +++ /dev/null @@ -1,51 +0,0 @@ -Source: `T38.md` - -## Goal -Deploy the scaffolded support plugin baseline to staging and verify it is healthy before feature implementation. - -## In Scope -- Execute staging deployment using the scaffolding from T37. -- Verify health checks, basic connectivity, and rollback path. -- Capture deployment verification evidence and operator checklist. - -## Out of Scope -- Shipping feature tickets prior to staging deploy validation. -- Performance tuning beyond baseline health checks. -- Non-essential infrastructure changes unrelated to baseline deployment. 
- -## Dependencies -- T37 -- Blockers: T37 - -## Execution Mode -- Sequential-ready - -## Parallel Wave -- Wave 2 - -## Required Skills -- `deployment` -- `observability` -- `testing-framework` - -## Deliverables -- Successful staging deployment run using the agreed workflow. -- Health verification checklist with pass/fail evidence. -- Rollback procedure validated at least once in non-production. - -## Refactor Opportunities -- Automate repetitive deployment verification steps into reusable CI jobs/scripts. - -## Definition of Done -- Staging deployment is successful and repeatable. -- Health checks pass for all baseline services. -- Deployment sign-off recorded; feature tickets may proceed. - -## Validation Steps -- Run `pnpm -r lint`; expected: exit code 0 and no new lint violations in touched workspaces. -- Run `pnpm -r test`; expected: all tests pass, including tests covering T38 acceptance criteria. -- Run `pnpm -r build`; expected: build completes without errors for all affected workspaces. -- Run deployment workflow in staging; expected: deployment completes without manual patching. -- Execute health checks; expected: all required endpoints return healthy status. -- Execute rollback drill; expected: previous stable version restores successfully. -- Confirm blocker status (T37) before implementation; expected: unresolved blockers keep this ticket in blocked state. diff --git a/issues/scripts/AGENTS.md b/issues/scripts/AGENTS.md deleted file mode 100644 index 35ec73a..0000000 --- a/issues/scripts/AGENTS.md +++ /dev/null @@ -1,18 +0,0 @@ -# AGENTS.md (issues/scripts) - -## Purpose -- Keep issue-governance scripts deterministic and local-only. -- Ensure dependency/order checks remain stable as backlog metadata evolves. - -## Script Rules -- Scripts in this folder must run without network access. -- Prefer read-only checks that fail with actionable messages. -- Treat `issues/T00.md` through `issues/T38.md` as canonical ticket inputs. 
- -## Validation Expectations -- `validate-ticket-set.mjs` must verify schema order, dependency integrity, deployment gate (`T38`) requirements, sequential order, and parallel-wave safety. -- Exit with non-zero status on any violation and print each violation on its own line. - -## Maintenance -- When `issues/EXECUTION_PLAN.md` wave/order format changes, update parser logic in the same commit. -- Keep checks strict enough to block drift, but avoid coupling to cosmetic markdown formatting. diff --git a/issues/scripts/validate-ticket-set.mjs b/issues/scripts/validate-ticket-set.mjs deleted file mode 100644 index 96f1ed1..0000000 --- a/issues/scripts/validate-ticket-set.mjs +++ /dev/null @@ -1,277 +0,0 @@ -import { readFileSync } from "node:fs"; -import { existsSync } from "node:fs"; -import { resolve } from "node:path"; - -const REQUIRED_HEADINGS = [ - "Goal", - "In Scope", - "Out of Scope", - "Dependencies", - "Execution Mode", - "Parallel Wave", - "Required Skills", - "Deliverables", - "Refactor Opportunities", - "Definition of Done", - "Validation Steps", -]; - -const TICKET_CODES = Array.from({ length: 39 }, (_, index) => - `T${String(index).padStart(2, "0")}`, -); - -const projectRoot = process.cwd(); -const issuesDir = resolve(projectRoot, "issues"); -const executionPlanPath = resolve(issuesDir, "EXECUTION_PLAN.md"); - -const errors = []; -const dependencyGraph = new Map(); -const blockerGraph = new Map(); - -function readUtf8(path) { - return readFileSync(path, "utf8"); -} - -function parseHeadings(markdown) { - return [...markdown.matchAll(/^##\s+(.+)$/gm)].map((match) => ({ - heading: match[1]?.trim() ?? "", - index: match.index ?? -1, - fullLength: match[0].length, - })); -} - -function extractSection(markdown, headings, sectionName) { - const sectionIndex = headings.findIndex( - (entry) => entry.heading === sectionName, - ); - if (sectionIndex < 0) { - return null; - } - - const sectionStart = - (headings[sectionIndex]?.index ?? 
0) + - (headings[sectionIndex]?.fullLength ?? 0); - const sectionEnd = - sectionIndex < headings.length - 1 - ? (headings[sectionIndex + 1]?.index ?? markdown.length) - : markdown.length; - - return markdown.slice(sectionStart, sectionEnd).trim(); -} - -function parseTicketReferences(text) { - return new Set(text.match(/\bT\d{2}\b/g) ?? []); -} - -for (const code of TICKET_CODES) { - const ticketPath = resolve(issuesDir, `${code}.md`); - if (!existsSync(ticketPath)) { - errors.push(`Missing ticket file: issues/${code}.md`); - continue; - } - - const markdown = readUtf8(ticketPath); - const headings = parseHeadings(markdown); - let previousRequiredIndex = -1; - - for (const heading of REQUIRED_HEADINGS) { - const currentIndex = headings.findIndex((entry) => entry.heading === heading); - if (currentIndex < 0) { - errors.push(`issues/${code}.md is missing section: "## ${heading}"`); - continue; - } - if (currentIndex < previousRequiredIndex) { - errors.push( - `issues/${code}.md has out-of-order section: "## ${heading}"`, - ); - } - previousRequiredIndex = currentIndex; - } - - const dependenciesSection = extractSection(markdown, headings, "Dependencies"); - if (dependenciesSection === null) { - dependencyGraph.set(code, new Set()); - blockerGraph.set(code, new Set()); - continue; - } - - const blockersMatch = dependenciesSection.match( - /^\s*-\s*Blockers:\s*(.+)$/im, - ); - if (!blockersMatch) { - errors.push(`issues/${code}.md is missing a "- Blockers:" line`); - } - - const dependencyIds = new Set(); - for (const line of dependenciesSection.split("\n")) { - const trimmed = line.trim(); - if (!trimmed.startsWith("-")) { - continue; - } - if (/^-\s*Blockers:/i.test(trimmed)) { - continue; - } - for (const ticketId of parseTicketReferences(trimmed)) { - dependencyIds.add(ticketId); - } - } - - const blockerIds = blockersMatch - ? parseTicketReferences(blockersMatch[1] ?? 
"") - : new Set(); - - dependencyGraph.set(code, dependencyIds); - blockerGraph.set(code, blockerIds); -} - -for (const [ticket, dependencies] of dependencyGraph.entries()) { - for (const dep of dependencies) { - if (!TICKET_CODES.includes(dep)) { - errors.push(`issues/${ticket}.md references unknown dependency: ${dep}`); - } - } -} - -for (const [ticket, blockers] of blockerGraph.entries()) { - for (const blocker of blockers) { - if (!TICKET_CODES.includes(blocker)) { - errors.push(`issues/${ticket}.md references unknown blocker: ${blocker}`); - } - } -} - -for (let ticketNumber = 1; ticketNumber <= 36; ticketNumber += 1) { - const ticket = `T${String(ticketNumber).padStart(2, "0")}`; - const dependencies = dependencyGraph.get(ticket) ?? new Set(); - const blockers = blockerGraph.get(ticket) ?? new Set(); - - if (!dependencies.has("T38")) { - errors.push(`issues/${ticket}.md must include T38 under Dependencies`); - } - if (!blockers.has("T38")) { - errors.push(`issues/${ticket}.md must include T38 in Blockers`); - } -} - -const dfsState = new Map(); -const recursionStack = []; - -function visit(ticket) { - const state = dfsState.get(ticket) ?? 0; - if (state === 1) { - const cycleStart = recursionStack.indexOf(ticket); - const cyclePath = [...recursionStack.slice(cycleStart), ticket].join(" -> "); - errors.push(`Dependency cycle detected: ${cyclePath}`); - return; - } - if (state === 2) { - return; - } - - dfsState.set(ticket, 1); - recursionStack.push(ticket); - for (const dep of dependencyGraph.get(ticket) ?? []) { - if (TICKET_CODES.includes(dep)) { - visit(dep); - } - } - recursionStack.pop(); - dfsState.set(ticket, 2); -} - -for (const ticket of TICKET_CODES) { - visit(ticket); -} - -const executionPlan = readUtf8(executionPlanPath); -const sequenceMatch = executionPlan.match(/`(T\d{2}\s*->[^`]+)`/); -if (!sequenceMatch) { - errors.push("issues/EXECUTION_PLAN.md is missing canonical sequential order"); -} - -const sequentialOrder = sequenceMatch - ? 
sequenceMatch[1] - .split("->") - .map((item) => item.trim()) - .filter((item) => item.length > 0) - : []; -const sequentialIndex = new Map(); -for (const [index, ticket] of sequentialOrder.entries()) { - if (sequentialIndex.has(ticket)) { - errors.push(`Sequential order duplicates ticket ${ticket}`); - } else { - sequentialIndex.set(ticket, index); - } -} - -for (const ticket of TICKET_CODES) { - if (!sequentialIndex.has(ticket)) { - errors.push(`Sequential order is missing ticket ${ticket}`); - } -} - -for (const [ticket, dependencies] of dependencyGraph.entries()) { - const ticketOrder = sequentialIndex.get(ticket); - if (ticketOrder === undefined) { - continue; - } - for (const dep of dependencies) { - const dependencyOrder = sequentialIndex.get(dep); - if (dependencyOrder === undefined) { - continue; - } - if (dependencyOrder >= ticketOrder) { - errors.push( - `Sequential order violation: ${ticket} appears before dependency ${dep}`, - ); - } - } -} - -const waveMatches = [...executionPlan.matchAll(/^- Wave \d+:\s*`([^`]+)`$/gm)]; -const waveByTicket = new Map(); -for (const [waveIndex, match] of waveMatches.entries()) { - const tickets = (match[1] ?? 
"") - .split(",") - .map((ticket) => ticket.trim()) - .filter((ticket) => ticket.length > 0); - for (const ticket of tickets) { - if (waveByTicket.has(ticket)) { - errors.push( - `Parallel waves duplicate ticket ${ticket} (wave ${waveByTicket.get(ticket)} and wave ${waveIndex})`, - ); - continue; - } - waveByTicket.set(ticket, waveIndex); - } -} - -for (const [ticket, dependencies] of dependencyGraph.entries()) { - const ticketWave = waveByTicket.get(ticket); - if (ticketWave === undefined) { - continue; - } - for (const dep of dependencies) { - const dependencyWave = waveByTicket.get(dep); - if (dependencyWave === undefined) { - continue; - } - if (dependencyWave === ticketWave) { - errors.push( - `Parallel wave conflict: ${ticket} and dependency ${dep} are both in wave ${ticketWave}`, - ); - } - } -} - -if (errors.length > 0) { - console.error("issues:validate failed"); - for (const error of errors) { - console.error(`- ${error}`); - } - process.exit(1); -} - -console.log( - `issues:validate passed (${TICKET_CODES.length} tickets, ${waveMatches.length} waves checked)`, -); From 949b1e357ab522c3cf359894548799af9e2ae09d Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 15:14:24 +0530 Subject: [PATCH 060/190] feat(registry): require signed challenge for agent registration --- apps/cli/src/commands/AGENTS.md | 6 + apps/cli/src/commands/agent.test.ts | 57 +- apps/cli/src/commands/agent.ts | 118 ++- .../0001_agent_registration_challenge.sql | 15 + apps/registry/drizzle/meta/_journal.json | 9 +- apps/registry/src/AGENTS.md | 12 + apps/registry/src/agent-registration.ts | 349 ++++++++- apps/registry/src/db/schema.ts | 26 + apps/registry/src/server.test.ts | 709 +++++++++++++++++- apps/registry/src/server.ts | 229 +++++- packages/protocol/AGENTS.md | 2 + .../protocol/src/agent-registration-proof.ts | 44 ++ packages/protocol/src/endpoints.ts | 1 + packages/protocol/src/index.test.ts | 32 + packages/protocol/src/index.ts | 11 +- 15 files changed, 1578 
insertions(+), 42 deletions(-) create mode 100644 apps/registry/drizzle/0001_agent_registration_challenge.sql create mode 100644 packages/protocol/src/agent-registration-proof.ts diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index c4b9d98..d1f0710 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -31,6 +31,12 @@ - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. - Config persistence failures after successful bootstrap must not hide the returned PAT token; print token first, then surface recovery instructions. +## Agent Command Rules +- `agent create` must use a two-step registration handshake: request challenge from registry, sign canonical challenge message locally with agent private key, then submit registration with `challengeId` + `challengeSignature`. +- Never send or log agent private keys; only send public key and proof signature. +- Keep proof canonicalization sourced from `@clawdentity/protocol` helper exports to avoid CLI/registry signature drift. +- Keep registry error mapping stable for both challenge and register requests so users receive deterministic remediation output. + ## Testing Rules - Mock network and filesystem dependencies in command tests. - Include success and failure scenarios for external calls, parsing, and cache behavior. 
diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index 1f1112e..9d0390e 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -24,15 +24,19 @@ vi.mock("@clawdentity/sdk", () => ({ error: vi.fn(), })), decodeAIT: vi.fn(), + encodeEd25519SignatureBase64url: vi.fn(), encodeEd25519KeypairBase64url: vi.fn(), generateEd25519Keypair: vi.fn(), + signEd25519: vi.fn(), })); import { type DecodedAit, decodeAIT, encodeEd25519KeypairBase64url, + encodeEd25519SignatureBase64url, generateEd25519Keypair, + signEd25519, } from "@clawdentity/sdk"; import { resolveConfig } from "../config/manager.js"; import { createAgentCommand } from "./agent.js"; @@ -44,6 +48,10 @@ const mockedReadFile = vi.mocked(readFile); const mockedWriteFile = vi.mocked(writeFile); const mockedResolveConfig = vi.mocked(resolveConfig); const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); +const mockedSignEd25519 = vi.mocked(signEd25519); +const mockedEncodeEd25519SignatureBase64url = vi.mocked( + encodeEd25519SignatureBase64url, +); const mockedEncodeEd25519KeypairBase64url = vi.mocked( encodeEd25519KeypairBase64url, ); @@ -115,6 +123,7 @@ const runAgentCommand = async (args: string[]) => { describe("agent create command", () => { beforeEach(() => { vi.clearAllMocks(); + mockFetch.mockReset(); vi.stubGlobal("fetch", mockFetch); mockedResolveConfig.mockResolvedValue({ @@ -137,8 +146,23 @@ describe("agent create command", () => { secretKey: "secret-key-b64url", }); - mockFetch.mockResolvedValue( - createJsonResponse(201, { + mockedSignEd25519.mockResolvedValue(Uint8Array.from([1, 2, 3])); + mockedEncodeEd25519SignatureBase64url.mockReturnValue( + "challenge-signature-b64url", + ); + + mockFetch.mockImplementation(async (input) => { + const url = String(input); + if (url.endsWith("/v1/agents/challenge")) { + return createJsonResponse(201, { + challengeId: "01JCHALLENGEID1234567890ABC", + nonce: 
"challenge-nonce-b64url", + ownerDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + expiresAt: "2030-01-01T00:05:00.000Z", + }); + } + + return createJsonResponse(201, { agent: { did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", name: "agent-01", @@ -146,8 +170,8 @@ describe("agent create command", () => { expiresAt: "2030-01-01T00:00:00.000Z", }, ait: "ait.jwt.value", - }), - ); + }); + }); }); afterEach(() => { @@ -159,6 +183,23 @@ describe("agent create command", () => { const result = await runAgentCommand(["create", "agent-01"]); expect(mockedGenerateEd25519Keypair).toHaveBeenCalled(); + expect(mockedSignEd25519).toHaveBeenCalledWith( + expect.any(Uint8Array), + expect.any(Uint8Array), + ); + expect(mockedEncodeEd25519SignatureBase64url).toHaveBeenCalledWith( + Uint8Array.from([1, 2, 3]), + ); + expect(mockFetch).toHaveBeenCalledWith( + "https://api.clawdentity.com/v1/agents/challenge", + expect.objectContaining({ + method: "POST", + headers: expect.objectContaining({ + authorization: "Bearer pat_123", + "content-type": "application/json", + }), + }), + ); expect(mockFetch).toHaveBeenCalledWith( "https://api.clawdentity.com/v1/agents", expect.objectContaining({ @@ -294,14 +335,18 @@ describe("agent create command", () => { "45", ]); - const request = mockFetch.mock.calls[0] as [string, RequestInit]; + const request = mockFetch.mock.calls[1] as [string, RequestInit]; const requestBody = JSON.parse(String(request[1]?.body)) as { framework?: string; ttlDays?: number; + challengeId?: string; + challengeSignature?: string; }; expect(requestBody.framework).toBe("langgraph"); expect(requestBody.ttlDays).toBe(45); + expect(requestBody.challengeId).toBe("01JCHALLENGEID1234567890ABC"); + expect(requestBody.challengeSignature).toBe("challenge-signature-b64url"); }); it("rejects dot-segment agent names before hitting the filesystem", async () => { @@ -321,6 +366,7 @@ describe("agent revoke command", () => { beforeEach(() => { vi.clearAllMocks(); + mockFetch.mockReset(); 
vi.stubGlobal("fetch", mockFetch); mockedResolveConfig.mockResolvedValue({ @@ -525,6 +571,7 @@ describe("agent inspect command", () => { beforeEach(() => { vi.clearAllMocks(); + mockFetch.mockReset(); mockedReadFile.mockResolvedValue("mock-ait-token"); mockedDecodeAIT.mockReturnValue(decodedAit); }); diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index 4d7336a..9daea9e 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -1,12 +1,18 @@ import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { join } from "node:path"; -import { parseDid } from "@clawdentity/protocol"; +import { + AGENT_REGISTRATION_CHALLENGE_PATH, + canonicalizeAgentRegistrationProof, + parseDid, +} from "@clawdentity/protocol"; import { createLogger, type DecodedAit, decodeAIT, encodeEd25519KeypairBase64url, + encodeEd25519SignatureBase64url, generateEd25519Keypair, + signEd25519, } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; @@ -36,6 +42,13 @@ type AgentRegistrationResponse = { ait: string; }; +type AgentRegistrationChallengeResponse = { + challengeId: string; + nonce: string; + ownerDid: string; + expiresAt: string; +}; + type LocalAgentIdentity = { did: string; }; @@ -211,6 +224,17 @@ const toRegistryAgentsRequestUrl = ( return new URL(path, normalizedBaseUrl).toString(); }; +const toRegistryAgentChallengeRequestUrl = (registryUrl: string): string => { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? 
registryUrl + : `${registryUrl}/`; + + return new URL( + AGENT_REGISTRATION_CHALLENGE_PATH.slice(1), + normalizedBaseUrl, + ).toString(); +}; + const toHttpErrorMessage = (status: number, responseBody: unknown): string => { const registryMessage = extractRegistryErrorMessage(responseBody); @@ -276,6 +300,35 @@ const parseAgentRegistrationResponse = ( }; }; +const parseAgentRegistrationChallengeResponse = ( + payload: unknown, +): AgentRegistrationChallengeResponse => { + if (!isRecord(payload)) { + throw new Error("Registry returned an invalid response payload"); + } + + const challengeId = payload.challengeId; + const nonce = payload.nonce; + const ownerDid = payload.ownerDid; + const expiresAt = payload.expiresAt; + + if ( + typeof challengeId !== "string" || + typeof nonce !== "string" || + typeof ownerDid !== "string" || + typeof expiresAt !== "string" + ) { + throw new Error("Registry returned an invalid response payload"); + } + + return { + challengeId, + nonce, + ownerDid, + expiresAt, + }; +}; + const ensureAgentDirectoryAvailable = async ( agentName: string, agentDirectory: string, @@ -357,22 +410,84 @@ const writeAgentIdentity = async (input: { await writeSecureFile(join(input.agentDirectory, "ait.jwt"), input.ait); }; +const requestAgentRegistrationChallenge = async (input: { + apiKey: string; + registryUrl: string; + publicKey: string; +}): Promise => { + let response: Response; + try { + response = await fetch( + toRegistryAgentChallengeRequestUrl(input.registryUrl), + { + method: "POST", + headers: { + authorization: `Bearer ${input.apiKey}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: input.publicKey, + }), + }, + ); + } catch { + throw new Error( + "Unable to connect to the registry. 
Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw new Error(toHttpErrorMessage(response.status, responseBody)); + } + + return parseAgentRegistrationChallengeResponse(responseBody); +}; + const registerAgent = async (input: { apiKey: string; registryUrl: string; name: string; publicKey: string; + secretKey: Uint8Array; framework?: string; ttlDays?: number; }): Promise => { + const challenge = await requestAgentRegistrationChallenge({ + apiKey: input.apiKey, + registryUrl: input.registryUrl, + publicKey: input.publicKey, + }); + + const canonicalProof = canonicalizeAgentRegistrationProof({ + challengeId: challenge.challengeId, + nonce: challenge.nonce, + ownerDid: challenge.ownerDid, + publicKey: input.publicKey, + name: input.name, + framework: input.framework, + ttlDays: input.ttlDays, + }); + const challengeSignature = encodeEd25519SignatureBase64url( + await signEd25519( + new TextEncoder().encode(canonicalProof), + input.secretKey, + ), + ); + const requestBody: { name: string; publicKey: string; + challengeId: string; + challengeSignature: string; framework?: string; ttlDays?: number; } = { name: input.name, publicKey: input.publicKey, + challengeId: challenge.challengeId, + challengeSignature, }; if (input.framework) { @@ -530,6 +645,7 @@ export const createAgentCommand = (): Command => { registryUrl: config.registryUrl, name: agentName, publicKey: encoded.publicKey, + secretKey: keypair.secretKey, framework, ttlDays, }); diff --git a/apps/registry/drizzle/0001_agent_registration_challenge.sql b/apps/registry/drizzle/0001_agent_registration_challenge.sql new file mode 100644 index 0000000..1a4c2a7 --- /dev/null +++ b/apps/registry/drizzle/0001_agent_registration_challenge.sql @@ -0,0 +1,15 @@ +CREATE TABLE `agent_registration_challenges` ( + `id` text PRIMARY KEY NOT NULL, + `owner_id` text NOT NULL, + `public_key` text NOT NULL, + `nonce` text NOT NULL, + `status` text 
DEFAULT 'pending' NOT NULL, + `expires_at` text NOT NULL, + `used_at` text, + `created_at` text NOT NULL, + `updated_at` text NOT NULL, + FOREIGN KEY (`owner_id`) REFERENCES `humans`(`id`) ON UPDATE no action ON DELETE no action +); +--> statement-breakpoint +CREATE INDEX `idx_agent_registration_challenges_owner_status` ON `agent_registration_challenges` (`owner_id`,`status`);--> statement-breakpoint +CREATE INDEX `idx_agent_registration_challenges_expires_at` ON `agent_registration_challenges` (`expires_at`); diff --git a/apps/registry/drizzle/meta/_journal.json b/apps/registry/drizzle/meta/_journal.json index 8388264..901f93b 100644 --- a/apps/registry/drizzle/meta/_journal.json +++ b/apps/registry/drizzle/meta/_journal.json @@ -8,6 +8,13 @@ "when": 1770816277696, "tag": "0000_common_marrow", "breakpoints": true + }, + { + "idx": 1, + "version": "7", + "when": 1771278200000, + "tag": "0001_agent_registration_challenge", + "breakpoints": true } ] -} \ No newline at end of file +} diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 53e8e47..2322f7c 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -62,6 +62,13 @@ - Keep ordering deterministic (`id` descending) and compute `nextCursor` from the last item in the returned page. - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors`: generic 400 message in `production`, detailed `fieldErrors` in `development`/`test`. +## POST /v1/agents/challenge Contract +- Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. +- Accept only `{ publicKey }` and validate it as base64url Ed25519 (32-byte decode). +- Persist challenge state in D1 (`agent_registration_challenges`) with owner binding, nonce, expiry, and status. +- Return challenge metadata needed for deterministic proof signing: `challengeId`, `nonce`, `ownerDid`, `expiresAt`, algorithm marker, and canonical message template. 
+- Keep challenge TTL short-lived (5 minutes) and make replay protection stateful (pending -> used). + ## POST /v1/agents Contract - Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. - Validate request payload fields with explicit rules: @@ -69,9 +76,14 @@ - `framework`: optional; default to `openclaw` when omitted. - `publicKey`: base64url Ed25519 key that decodes to 32 bytes. - `ttlDays`: optional; default `30`; allow only integer range `1..90`. +- Require ownership-proof fields: + - `challengeId`: ULID from `/v1/agents/challenge`. + - `challengeSignature`: base64url Ed25519 signature over the canonical proof message. - Keep request parsing and validation in a reusable helper module (`agent-registration.ts`) so future routes can share the same constraints without duplicating schema logic. - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors` (shared SDK helper path): return generic messages without internals in `production`, but include validation/config details in `development`/`test` for debugging. - Persist `agents.current_jti` and `agents.expires_at` on insert; generated AIT claims (`jti`, `exp`) must stay in sync with those persisted values. +- Verify challenge ownership before signing AIT: challenge must exist for the caller, be unexpired, remain `pending`, and match the request public key + signature. +- Consume challenge with guarded state transition (`pending` -> `used`) in the same mutation unit as agent insert; reject zero-row updates as replayed challenge. - Use shared SDK datetime helpers (`nowIso`, `addSeconds`) for issuance/expiry math instead of ad-hoc `Date.now()` arithmetic in route logic. - Resolve signing material through a reusable signer helper (`registry-signer.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. 
- Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.api.clawdentity.com`, `production` -> `https://api.clawdentity.com`) rather than request-origin inference. diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index b0ba517..51af0f2 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -1,8 +1,12 @@ import { + AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, type AitClaims, + canonicalizeAgentRegistrationProof, decodeBase64url, + encodeBase64url, generateUlid, makeAgentDid, + parseUlid, validateAgentName, } from "@clawdentity/protocol"; import { @@ -11,6 +15,7 @@ import { nowIso, type RegistryConfig, shouldExposeVerboseErrors, + verifyEd25519, } from "@clawdentity/sdk"; const DEFAULT_AGENT_FRAMEWORK = "openclaw"; @@ -20,6 +25,9 @@ const MIN_AGENT_TTL_DAYS = 1; const MAX_AGENT_TTL_DAYS = 90; const DAY_IN_SECONDS = 24 * 60 * 60; const ED25519_PUBLIC_KEY_LENGTH = 32; +const ED25519_SIGNATURE_LENGTH = 64; +const AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS = 5 * 60; +const AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH = 24; const REGISTRY_ISSUER_BY_ENVIRONMENT: Record< RegistryConfig["ENVIRONMENT"], string @@ -34,6 +42,46 @@ type AgentRegistrationBody = { framework?: string; publicKey: string; ttlDays?: number; + challengeId: string; + challengeSignature: string; +}; + +type AgentRegistrationChallengeBody = { + publicKey: string; +}; + +export type AgentRegistrationChallenge = { + id: string; + ownerId: string; + publicKey: string; + nonce: string; + status: "pending"; + expiresAt: string; + usedAt: null; + createdAt: string; + updatedAt: string; +}; + +export type AgentRegistrationChallengeResult = { + challenge: AgentRegistrationChallenge; + response: { + challengeId: string; + nonce: string; + ownerDid: string; + expiresAt: string; + algorithm: "Ed25519"; + messageTemplate: string; + }; +}; + +export type PersistedAgentRegistrationChallenge = { + 
id: string; + ownerId: string; + publicKey: string; + nonce: string; + status: "pending" | "used"; + expiresAt: string; + usedAt: string | null; }; export type AgentRegistrationResult = { @@ -90,6 +138,43 @@ function invalidRegistration(options: { }); } +function invalidRegistrationChallenge(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_INVALID", + message: exposeDetails + ? "Agent registration challenge payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +function registrationProofError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + code: + | "AGENT_REGISTRATION_CHALLENGE_EXPIRED" + | "AGENT_REGISTRATION_CHALLENGE_REPLAYED" + | "AGENT_REGISTRATION_PROOF_MISMATCH" + | "AGENT_REGISTRATION_PROOF_INVALID"; + message: string; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: options.code, + message: exposeDetails ? 
options.message : "Request could not be processed", + status: 400, + expose: true, + }); +} + function addFieldError( fieldErrors: Record, field: string, @@ -238,6 +323,158 @@ function parseTtlDays( return input; } +function parseChallengeId( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError(fieldErrors, "challengeId", "challengeId is required"); + return ""; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "challengeId", "challengeId is required"); + return ""; + } + + try { + parseUlid(value); + } catch { + addFieldError(fieldErrors, "challengeId", "challengeId must be a ULID"); + } + + return value; +} + +function parseChallengeSignature( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature is required", + ); + return ""; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature is required", + ); + return ""; + } + + let decodedSignature: Uint8Array; + try { + decodedSignature = decodeBase64url(value); + } catch { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature must be a base64url-encoded Ed25519 signature", + ); + return value; + } + + if (decodedSignature.length !== ED25519_SIGNATURE_LENGTH) { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature must be a base64url-encoded Ed25519 signature", + ); + } + + return value; +} + +export function parseAgentRegistrationChallengeBody( + payload: unknown, + environment: RegistryConfig["ENVIRONMENT"], +): AgentRegistrationChallengeBody { + const fieldErrors: Record = {}; + + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw invalidRegistrationChallenge({ + environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, 
+ formErrors: [], + }, + }); + } + + const objectPayload = payload as Record; + + const parsed: AgentRegistrationChallengeBody = { + publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), + }; + + if (Object.keys(fieldErrors).length > 0) { + throw invalidRegistrationChallenge({ + environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return parsed; +} + +export function buildAgentRegistrationChallenge(input: { + payload: unknown; + ownerId: string; + ownerDid: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): AgentRegistrationChallengeResult { + const parsedBody = parseAgentRegistrationChallengeBody( + input.payload, + input.environment, + ); + + const createdAt = nowIso(); + const createdAtMs = Date.parse(createdAt); + const challengeId = generateUlid(createdAtMs); + const nonceBytes = crypto.getRandomValues( + new Uint8Array(AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH), + ); + const nonce = encodeBase64url(nonceBytes); + const expiresAt = addSeconds( + createdAt, + AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS, + ); + + const challenge: AgentRegistrationChallenge = { + id: challengeId, + ownerId: input.ownerId, + publicKey: parsedBody.publicKey, + nonce, + status: "pending", + expiresAt, + usedAt: null, + createdAt, + updatedAt: createdAt, + }; + + return { + challenge, + response: { + challengeId, + nonce, + ownerDid: input.ownerDid, + expiresAt, + algorithm: "Ed25519", + messageTemplate: AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, + }, + }; +} + export function parseAgentRegistrationBody( payload: unknown, environment: RegistryConfig["ENVIRONMENT"], @@ -263,6 +500,11 @@ export function parseAgentRegistrationBody( framework: parseFramework(objectPayload.framework, fieldErrors), publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), ttlDays: parseTtlDays(objectPayload.ttlDays, fieldErrors), + challengeId: parseChallengeId(objectPayload.challengeId, fieldErrors), + challengeSignature: parseChallengeSignature( + 
objectPayload.challengeSignature, + fieldErrors, + ), }; if (Object.keys(fieldErrors).length > 0) { @@ -275,22 +517,85 @@ export function parseAgentRegistrationBody( return parsed; } -export function buildAgentRegistration(input: { - payload: unknown; +export async function verifyAgentRegistrationOwnershipProof(input: { + parsedBody: AgentRegistrationBody; + challenge: PersistedAgentRegistrationChallenge; ownerDid: string; - issuer: string; environment: RegistryConfig["ENVIRONMENT"]; -}): AgentRegistrationResult { - const parsedBody = parseAgentRegistrationBody( - input.payload, - input.environment, +}): Promise { + if (input.challenge.status !== "pending") { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_CHALLENGE_REPLAYED", + message: "Registration challenge has already been used", + }); + } + + const expiresAtMs = Date.parse(input.challenge.expiresAt); + if (!Number.isFinite(expiresAtMs) || expiresAtMs <= Date.now()) { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_CHALLENGE_EXPIRED", + message: "Registration challenge has expired", + }); + } + + if (input.challenge.publicKey !== input.parsedBody.publicKey) { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_PROOF_MISMATCH", + message: "Registration challenge does not match the provided public key", + }); + } + + let signatureBytes: Uint8Array; + let publicKeyBytes: Uint8Array; + try { + signatureBytes = decodeBase64url(input.parsedBody.challengeSignature); + publicKeyBytes = decodeBase64url(input.parsedBody.publicKey); + } catch { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_PROOF_INVALID", + message: "Registration challenge signature is invalid", + }); + } + + const canonical = canonicalizeAgentRegistrationProof({ + challengeId: input.challenge.id, + nonce: input.challenge.nonce, + ownerDid: input.ownerDid, + publicKey: 
input.parsedBody.publicKey, + name: input.parsedBody.name, + framework: input.parsedBody.framework, + ttlDays: input.parsedBody.ttlDays, + }); + + const verified = await verifyEd25519( + signatureBytes, + new TextEncoder().encode(canonical), + publicKeyBytes, ); + if (!verified) { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_PROOF_INVALID", + message: "Registration challenge signature is invalid", + }); + } +} + +export function buildAgentRegistrationFromParsed(input: { + parsedBody: AgentRegistrationBody; + ownerDid: string; + issuer: string; +}): AgentRegistrationResult { const issuedAt = nowIso(); const issuedAtMs = Date.parse(issuedAt); const issuedAtSeconds = Math.floor(issuedAtMs / 1000); - const ttlDays = parsedBody.ttlDays ?? DEFAULT_AGENT_TTL_DAYS; - const framework = parsedBody.framework ?? DEFAULT_AGENT_FRAMEWORK; + const ttlDays = input.parsedBody.ttlDays ?? DEFAULT_AGENT_TTL_DAYS; + const framework = input.parsedBody.framework ?? 
DEFAULT_AGENT_FRAMEWORK; const ttlSeconds = ttlDays * DAY_IN_SECONDS; const expiresAt = addSeconds(issuedAt, ttlSeconds); @@ -304,9 +609,9 @@ export function buildAgentRegistration(input: { id: agentId, did: agentDid, ownerDid: input.ownerDid, - name: parsedBody.name, + name: input.parsedBody.name, framework, - publicKey: parsedBody.publicKey, + publicKey: input.parsedBody.publicKey, currentJti, ttlDays, status: "active", @@ -318,13 +623,13 @@ export function buildAgentRegistration(input: { iss: input.issuer, sub: agentDid, ownerDid: input.ownerDid, - name: parsedBody.name, + name: input.parsedBody.name, framework, cnf: { jwk: { kty: "OKP", crv: "Ed25519", - x: parsedBody.publicKey, + x: input.parsedBody.publicKey, }, }, iat: issuedAtSeconds, @@ -335,6 +640,24 @@ export function buildAgentRegistration(input: { }; } +export function buildAgentRegistration(input: { + payload: unknown; + ownerDid: string; + issuer: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): AgentRegistrationResult { + const parsedBody = parseAgentRegistrationBody( + input.payload, + input.environment, + ); + + return buildAgentRegistrationFromParsed({ + parsedBody, + ownerDid: input.ownerDid, + issuer: input.issuer, + }); +} + function resolveReissueExpiry(input: { previousExpiresAt: string | null; issuedAt: string; diff --git a/apps/registry/src/db/schema.ts b/apps/registry/src/db/schema.ts index 38f561e..aa3ed8c 100644 --- a/apps/registry/src/db/schema.ts +++ b/apps/registry/src/db/schema.ts @@ -72,6 +72,32 @@ export const api_keys = sqliteTable( (table) => [index("idx_api_keys_key_hash").on(table.key_hash)], ); +export const agent_registration_challenges = sqliteTable( + "agent_registration_challenges", + { + id: text("id").primaryKey(), + owner_id: text("owner_id") + .notNull() + .references(() => humans.id), + public_key: text("public_key").notNull(), + nonce: text("nonce").notNull(), + status: text("status", { enum: ["pending", "used"] }) + .notNull() + .default("pending"), + 
expires_at: text("expires_at").notNull(), + used_at: text("used_at"), + created_at: text("created_at").notNull(), + updated_at: text("updated_at").notNull(), + }, + (table) => [ + index("idx_agent_registration_challenges_owner_status").on( + table.owner_id, + table.status, + ), + index("idx_agent_registration_challenges_expires_at").on(table.expires_at), + ], +); + export const invites = sqliteTable("invites", { id: text("id").primaryKey(), code: text("code").notNull().unique(), diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 534b54b..9cc8e8c 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,15 +1,19 @@ import { ADMIN_BOOTSTRAP_PATH, + AGENT_REGISTRATION_CHALLENGE_PATH, type AitClaims, + canonicalizeAgentRegistrationProof, encodeBase64url, generateUlid, makeAgentDid, makeHumanDid, } from "@clawdentity/protocol"; import { + encodeEd25519SignatureBase64url, generateEd25519Keypair, REQUEST_ID_HEADER, signAIT, + signEd25519, verifyAIT, verifyCRL, } from "@clawdentity/sdk"; @@ -88,6 +92,8 @@ type FakeHumanInsertRow = Record; type FakeApiKeyInsertRow = Record; type FakeAgentUpdateRow = Record; type FakeRevocationInsertRow = Record; +type FakeAgentRegistrationChallengeInsertRow = Record; +type FakeAgentRegistrationChallengeUpdateRow = Record; type FakeRevocationRow = { id: string; jti: string; @@ -108,6 +114,17 @@ type FakeAgentRow = { createdAt?: string; updatedAt?: string; }; +type FakeAgentRegistrationChallengeRow = { + id: string; + ownerId: string; + publicKey: string; + nonce: string; + status: "pending" | "used"; + expiresAt: string; + usedAt: string | null; + createdAt: string; + updatedAt: string; +}; type FakeAgentSelectRow = { id: string; @@ -129,6 +146,7 @@ type FakeDbOptions = { failApiKeyInsertCount?: number; failBeginTransaction?: boolean; revocationRows?: FakeRevocationRow[]; + registrationChallengeRows?: FakeAgentRegistrationChallengeRow[]; }; type FakeCrlSelectRow = { @@ 
-334,6 +352,40 @@ function getAgentSelectColumnValue( return undefined; } +function getAgentRegistrationChallengeSelectColumnValue( + row: FakeAgentRegistrationChallengeRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "owner_id") { + return row.ownerId; + } + if (column === "public_key") { + return row.publicKey; + } + if (column === "nonce") { + return row.nonce; + } + if (column === "status") { + return row.status; + } + if (column === "expires_at") { + return row.expiresAt; + } + if (column === "used_at") { + return row.usedAt; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + function getHumanSelectColumnValue(row: FakeHumanRow, column: string): unknown { if (column === "id") { return row.id; @@ -497,6 +549,48 @@ function resolveAgentSelectRows(options: { return filteredRows; } +function resolveAgentRegistrationChallengeSelectRows(options: { + query: string; + params: unknown[]; + challengeRows: FakeAgentRegistrationChallengeRow[]; +}): FakeAgentRegistrationChallengeRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasOwnerFilter = hasFilter(whereClause, "owner_id"); + const hasChallengeIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const ownerId = + hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id[0]) + : undefined; + const challengeId = + hasChallengeIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? 
String(equalityParams.values.status[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.challengeRows.length; + + return options.challengeRows + .filter((row) => (ownerId ? row.ownerId === ownerId : true)) + .filter((row) => (challengeId ? row.id === challengeId : true)) + .filter((row) => (status ? row.status === status : true)) + .slice(0, limit); +} + function getCrlSelectColumnValue( row: FakeCrlSelectRow, column: string, @@ -568,7 +662,14 @@ function createFakeDb( const agentInserts: FakeAgentInsertRow[] = []; const agentUpdates: FakeAgentUpdateRow[] = []; const revocationInserts: FakeRevocationInsertRow[] = []; + const agentRegistrationChallengeInserts: FakeAgentRegistrationChallengeInsertRow[] = + []; + const agentRegistrationChallengeUpdates: FakeAgentRegistrationChallengeUpdateRow[] = + []; const revocationRows = [...(options.revocationRows ?? [])]; + const registrationChallengeRows = [ + ...(options.registrationChallengeRows ?? 
[]), + ]; const humanRows = rows.reduce((acc, row) => { if (acc.some((item) => item.id === row.humanId)) { return acc; @@ -702,6 +803,39 @@ function createFakeDb( }), }; } + if ( + (normalizedQuery.includes('from "agent_registration_challenges"') || + normalizedQuery.includes("from agent_registration_challenges")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveAgentRegistrationChallengeSelectRows({ + query, + params, + challengeRows: registrationChallengeRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = + getAgentRegistrationChallengeSelectColumnValue( + row, + column, + ); + return acc; + }, + {}, + ); + }), + }; + } if ( (normalizedQuery.includes('from "revocations"') || normalizedQuery.includes("from revocations")) && @@ -783,6 +917,22 @@ function createFakeDb( ), ); } + if ( + normalizedQuery.includes('from "agent_registration_challenges"') || + normalizedQuery.includes("from agent_registration_challenges") + ) { + const resultRows = resolveAgentRegistrationChallengeSelectRows({ + query, + params, + challengeRows: registrationChallengeRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getAgentRegistrationChallengeSelectColumnValue(row, column), + ), + ); + } if ( normalizedQuery.includes('from "revocations"') || normalizedQuery.includes("from revocations") @@ -960,6 +1110,128 @@ function createFakeDb( agentInserts.push(row); changes = 1; } + if ( + normalizedQuery.includes( + 'insert into "agent_registration_challenges"', + ) || + normalizedQuery.includes( + "insert into agent_registration_challenges", + ) + ) { + const columns = parseInsertColumns( + query, + "agent_registration_challenges", + ); + const row = 
columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + agentRegistrationChallengeInserts.push(row); + + if ( + typeof row.id === "string" && + typeof row.owner_id === "string" && + typeof row.public_key === "string" && + typeof row.nonce === "string" && + (row.status === "pending" || row.status === "used") && + typeof row.expires_at === "string" && + typeof row.created_at === "string" && + typeof row.updated_at === "string" + ) { + registrationChallengeRows.push({ + id: row.id, + ownerId: row.owner_id, + publicKey: row.public_key, + nonce: row.nonce, + status: row.status, + expiresAt: row.expires_at, + usedAt: + typeof row.used_at === "string" ? String(row.used_at) : null, + createdAt: row.created_at, + updatedAt: row.updated_at, + }); + } + + changes = 1; + } + if ( + normalizedQuery.includes( + 'update "agent_registration_challenges"', + ) || + normalizedQuery.includes("update agent_registration_challenges") + ) { + const setColumns = parseUpdateSetColumns( + query, + "agent_registration_challenges", + ); + const nextValues = setColumns.reduce>( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + const whereClause = extractWhereClause(query); + const whereParams = params.slice(setColumns.length); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: whereParams, + }); + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const ownerFilter = + typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? 
String(equalityParams.values.status[0]) + : undefined; + + let matchedRows = 0; + for (const row of registrationChallengeRows) { + if (idFilter && row.id !== idFilter) { + continue; + } + if (ownerFilter && row.ownerId !== ownerFilter) { + continue; + } + if (statusFilter && row.status !== statusFilter) { + continue; + } + + matchedRows += 1; + if ( + nextValues.status === "pending" || + nextValues.status === "used" + ) { + row.status = nextValues.status; + } + if ( + typeof nextValues.used_at === "string" || + nextValues.used_at === null + ) { + row.usedAt = nextValues.used_at; + } + if (typeof nextValues.updated_at === "string") { + row.updatedAt = nextValues.updated_at; + } + } + + agentRegistrationChallengeUpdates.push({ + ...nextValues, + id: idFilter, + owner_id: ownerFilter, + status_where: statusFilter, + matched_rows: matchedRows, + }); + changes = matchedRows; + } if ( normalizedQuery.includes('update "agents"') || normalizedQuery.includes("update agents") @@ -1103,7 +1375,10 @@ function createFakeDb( apiKeyInserts, agentInserts, agentUpdates, + agentRegistrationChallengeInserts, + agentRegistrationChallengeUpdates, revocationInserts, + registrationChallengeRows, }; } @@ -1126,6 +1401,32 @@ function makeValidPatContext(token = "clw_pat_valid-token-value") { }); } +async function signRegistrationChallenge(options: { + challengeId: string; + nonce: string; + ownerDid: string; + publicKey: string; + name: string; + secretKey: Uint8Array; + framework?: string; + ttlDays?: number; +}): Promise { + const canonical = canonicalizeAgentRegistrationProof({ + challengeId: options.challengeId, + nonce: options.nonce, + ownerDid: options.ownerDid, + publicKey: options.publicKey, + name: options.name, + framework: options.framework, + ttlDays: options.ttlDays, + }); + const signature = await signEd25519( + new TextEncoder().encode(canonical), + options.secretKey, + ); + return encodeEd25519SignatureBase64url(signature); +} + describe("GET /health", () => { it("returns 
status ok with fallback version", async () => { const res = await app.request( @@ -3165,6 +3466,109 @@ describe("POST /v1/agents/:id/reissue", () => { }); }); +describe(`POST ${AGENT_REGISTRATION_CHALLENGE_PATH}`, () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 400 when payload is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: "not-base64url", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_INVALID"); + expect(body.error.details?.fieldErrors).toMatchObject({ + publicKey: expect.any(Array), + }); + }); + + it("creates and persists challenge for authenticated owner", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentRegistrationChallengeInserts } = createFakeDb([ + authRow, + ]); + const agentKeypair = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + 
"content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(201); + const body = (await res.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + expiresAt: string; + algorithm: string; + messageTemplate: string; + }; + expect(body.challengeId).toEqual(expect.any(String)); + expect(body.nonce).toEqual(expect.any(String)); + expect(body.ownerDid).toBe(authRow.humanDid); + expect(body.algorithm).toBe("Ed25519"); + expect(body.messageTemplate).toContain("challengeId:{challengeId}"); + expect(Date.parse(body.expiresAt)).toBeGreaterThan(Date.now()); + + expect(agentRegistrationChallengeInserts).toHaveLength(1); + expect(agentRegistrationChallengeInserts[0]).toMatchObject({ + id: body.challengeId, + owner_id: "human-1", + public_key: encodeBase64url(agentKeypair.publicKey), + nonce: body.nonce, + status: "pending", + used_at: null, + }); + }); +}); + describe("POST /v1/agents", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( @@ -3238,6 +3642,8 @@ describe("POST /v1/agents", () => { framework: expect.any(Array), publicKey: expect.any(Array), ttlDays: expect.any(Array), + challengeId: expect.any(Array), + challengeSignature: expect.any(Array), }); }); @@ -3354,13 +3760,237 @@ describe("POST /v1/agents", () => { expect(body.error.details).toBeUndefined(); }); + it("returns 400 when registration challenge is missing", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const challengeSignature = encodeEd25519SignatureBase64url( + Uint8Array.from({ length: 64 }, (_, index) => index + 1), + ); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + 
headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-missing-challenge", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: generateUlid(1700000000000), + challengeSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_NOT_FOUND"); + }); + + it("returns 400 when challenge signature is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const challengeId = generateUlid(1700000010000); + const challengeNonce = encodeBase64url( + Uint8Array.from({ length: 24 }, (_, index) => index + 3), + ); + const { database } = createFakeDb([authRow], [], { + registrationChallengeRows: [ + { + id: challengeId, + ownerId: "human-1", + publicKey: encodeBase64url(agentKeypair.publicKey), + nonce: challengeNonce, + status: "pending", + expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), + usedAt: null, + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const invalidSignature = await signRegistrationChallenge({ + challengeId, + nonce: challengeNonce, + ownerDid: authRow.humanDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "wrong-name", + secretKey: agentKeypair.secretKey, + }); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ 
+ name: "agent-proof-invalid", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId, + challengeSignature: invalidSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_PROOF_INVALID"); + }); + + it("returns 400 when challenge has already been used", async () => { + const { token, authRow } = await makeValidPatContext(); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const challengeId = generateUlid(1700000011000); + const challengeNonce = encodeBase64url( + Uint8Array.from({ length: 24 }, (_, index) => index + 5), + ); + const { database } = createFakeDb([authRow], [], { + registrationChallengeRows: [ + { + id: challengeId, + ownerId: "human-1", + publicKey: encodeBase64url(agentKeypair.publicKey), + nonce: challengeNonce, + status: "used", + expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), + usedAt: new Date(Date.now() - 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const signature = await signRegistrationChallenge({ + challengeId, + nonce: challengeNonce, + ownerDid: authRow.humanDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-challenge-replayed", + secretKey: agentKeypair.secretKey, + }); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-challenge-replayed", + publicKey: 
encodeBase64url(agentKeypair.publicKey), + challengeId, + challengeSignature: signature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_REPLAYED"); + }); + it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { const { token, authRow } = await makeValidPatContext(); const { database, agentInserts } = createFakeDb([authRow]); const signer = await generateEd25519Keypair(); const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); - const res = await createRegistryApp().request( + const challengeResponse = await appInstance.request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + expect(challengeResponse.status).toBe(201); + const challengeBody = (await challengeResponse.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + }; + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-01", + secretKey: 
agentKeypair.secretKey, + }); + + const res = await appInstance.request( "/v1/agents", { method: "POST", @@ -3371,6 +4001,8 @@ describe("POST /v1/agents", () => { body: JSON.stringify({ name: "agent-01", publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, }), }, { @@ -3441,6 +4073,42 @@ describe("POST /v1/agents", () => { }, ]); + const challengeResponse = await appInstance.request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + expect(challengeResponse.status).toBe(201); + const challengeBody = (await challengeResponse.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + }; + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-registry-verify", + framework: "openclaw", + ttlDays: 10, + secretKey: agentKeypair.secretKey, + }); + const registerResponse = await appInstance.request( "/v1/agents", { @@ -3454,6 +4122,8 @@ describe("POST /v1/agents", () => { framework: "openclaw", ttlDays: 10, publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, }), }, { @@ -3527,8 +4197,41 @@ describe("POST /v1/agents", () => { const signer = await generateEd25519Keypair(); const wrongPublishedKey = await generateEd25519Keypair(); const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); - const res = await createRegistryApp().request( + const challengeResponse = await 
appInstance.request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + expect(challengeResponse.status).toBe(201); + const challengeBody = (await challengeResponse.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + }; + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-signer-mismatch", + secretKey: agentKeypair.secretKey, + }); + + const res = await appInstance.request( "/v1/agents", { method: "POST", @@ -3539,6 +4242,8 @@ describe("POST /v1/agents", () => { body: JSON.stringify({ name: "agent-signer-mismatch", publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, }), }, { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 2627776..12236b2 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,5 +1,6 @@ import { ADMIN_BOOTSTRAP_PATH, + AGENT_REGISTRATION_CHALLENGE_PATH, generateUlid, makeHumanDid, } from "@clawdentity/protocol"; @@ -21,9 +22,12 @@ import { Hono } from "hono"; import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; import { - buildAgentRegistration, + buildAgentRegistrationChallenge, + buildAgentRegistrationFromParsed, buildAgentReissue, + parseAgentRegistrationBody, resolveRegistryIssuer, + verifyAgentRegistrationOwnershipProof, } from "./agent-registration.js"; import { agentResolveNotFoundError, @@ -47,7 +51,13 @@ import { hashApiKeyToken, } from "./auth/api-key-token.js"; import { createDb } from 
"./db/client.js"; -import { agents, api_keys, humans, revocations } from "./db/schema.js"; +import { + agent_registration_challenges, + agents, + api_keys, + humans, + revocations, +} from "./db/schema.js"; import { createInMemoryRateLimit, RESOLVE_RATE_LIMIT_MAX_REQUESTS, @@ -87,6 +97,16 @@ type OwnedAgent = { current_jti: string | null; }; +type OwnedAgentRegistrationChallenge = { + id: string; + owner_id: string; + public_key: string; + nonce: string; + status: "pending" | "used"; + expires_at: string; + used_at: string | null; +}; + type CrlSnapshotRow = { id: string; jti: string; @@ -198,6 +218,33 @@ async function findOwnedAgent(input: { return rows[0]; } +async function findOwnedAgentRegistrationChallenge(input: { + db: ReturnType; + ownerId: string; + challengeId: string; +}): Promise { + const rows = await input.db + .select({ + id: agent_registration_challenges.id, + owner_id: agent_registration_challenges.owner_id, + public_key: agent_registration_challenges.public_key, + nonce: agent_registration_challenges.nonce, + status: agent_registration_challenges.status, + expires_at: agent_registration_challenges.expires_at, + used_at: agent_registration_challenges.used_at, + }) + .from(agent_registration_challenges) + .where( + and( + eq(agent_registration_challenges.owner_id, input.ownerId), + eq(agent_registration_challenges.id, input.challengeId), + ), + ) + .limit(1); + + return rows[0]; +} + function requireCurrentJti(input: { currentJti: string | null; onInvalid: (reason: string) => AppError; @@ -595,6 +642,48 @@ function createRegistryApp() { }); }); + app.post(AGENT_REGISTRATION_CHALLENGE_PATH, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_INVALID", + message: exposeDetails + ? 
"Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const human = c.get("human"); + const challenge = buildAgentRegistrationChallenge({ + payload, + ownerId: human.id, + ownerDid: human.did, + environment: config.ENVIRONMENT, + }); + + const db = createDb(c.env.DB); + await db.insert(agent_registration_challenges).values({ + id: challenge.challenge.id, + owner_id: challenge.challenge.ownerId, + public_key: challenge.challenge.publicKey, + nonce: challenge.challenge.nonce, + status: challenge.challenge.status, + expires_at: challenge.challenge.expiresAt, + used_at: challenge.challenge.usedAt, + created_at: challenge.challenge.createdAt, + updated_at: challenge.challenge.updatedAt, + }); + + return c.json(challenge.response, 201); + }); + app.post("/v1/agents", createApiKeyAuth(), async (c) => { const config = getConfig(c.env); const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); @@ -614,12 +703,45 @@ function createRegistryApp() { } const human = c.get("human"); - const registration = buildAgentRegistration({ - payload, + const parsedBody = parseAgentRegistrationBody(payload, config.ENVIRONMENT); + const db = createDb(c.env.DB); + + const challenge = await findOwnedAgentRegistrationChallenge({ + db, + ownerId: human.id, + challengeId: parsedBody.challengeId, + }); + + if (!challenge) { + throw new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_NOT_FOUND", + message: exposeDetails + ? 
"Registration challenge was not found" + : "Request could not be processed", + status: 400, + expose: true, + }); + } + + await verifyAgentRegistrationOwnershipProof({ + parsedBody, + challenge: { + id: challenge.id, + ownerId: challenge.owner_id, + publicKey: challenge.public_key, + nonce: challenge.nonce, + status: challenge.status, + expiresAt: challenge.expires_at, + usedAt: challenge.used_at, + }, ownerDid: human.did, - issuer: resolveRegistryIssuer(config.ENVIRONMENT), environment: config.ENVIRONMENT, }); + const registration = buildAgentRegistrationFromParsed({ + parsedBody, + ownerDid: human.did, + issuer: resolveRegistryIssuer(config.ENVIRONMENT), + }); const signer = await resolveRegistrySigner(config); const ait = await signAIT({ claims: registration.claims, @@ -627,20 +749,89 @@ function createRegistryApp() { signerKeypair: signer.signerKeypair, }); - const db = createDb(c.env.DB); - await db.insert(agents).values({ - id: registration.agent.id, - did: registration.agent.did, - owner_id: human.id, - name: registration.agent.name, - framework: registration.agent.framework, - public_key: registration.agent.publicKey, - current_jti: registration.agent.currentJti, - status: registration.agent.status, - expires_at: registration.agent.expiresAt, - created_at: registration.agent.createdAt, - updated_at: registration.agent.updatedAt, - }); + const challengeUsedAt = nowIso(); + const applyRegistrationMutation = async ( + executor: typeof db, + options: { rollbackOnAgentInsertFailure: boolean }, + ): Promise => { + const challengeUpdateResult = await executor + .update(agent_registration_challenges) + .set({ + status: "used", + used_at: challengeUsedAt, + updated_at: challengeUsedAt, + }) + .where( + and( + eq(agent_registration_challenges.id, challenge.id), + eq(agent_registration_challenges.owner_id, human.id), + eq(agent_registration_challenges.status, "pending"), + ), + ); + + const updatedRows = getMutationRowCount(challengeUpdateResult); + if (updatedRows 
=== 0) { + throw new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_REPLAYED", + message: exposeDetails + ? "Registration challenge has already been used" + : "Request could not be processed", + status: 400, + expose: true, + }); + } + + try { + await executor.insert(agents).values({ + id: registration.agent.id, + did: registration.agent.did, + owner_id: human.id, + name: registration.agent.name, + framework: registration.agent.framework, + public_key: registration.agent.publicKey, + current_jti: registration.agent.currentJti, + status: registration.agent.status, + expires_at: registration.agent.expiresAt, + created_at: registration.agent.createdAt, + updated_at: registration.agent.updatedAt, + }); + } catch (error) { + if (options.rollbackOnAgentInsertFailure) { + await executor + .update(agent_registration_challenges) + .set({ + status: "pending", + used_at: null, + updated_at: nowIso(), + }) + .where( + and( + eq(agent_registration_challenges.id, challenge.id), + eq(agent_registration_challenges.owner_id, human.id), + eq(agent_registration_challenges.status, "used"), + ), + ); + } + + throw error; + } + }; + + try { + await db.transaction(async (tx) => { + await applyRegistrationMutation(tx as unknown as typeof db, { + rollbackOnAgentInsertFailure: false, + }); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRegistrationMutation(db, { + rollbackOnAgentInsertFailure: true, + }); + } return c.json({ agent: registration.agent, ait }, 201); }); diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index 937bc7d..b361f49 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -18,6 +18,8 @@ - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). 
- Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). - Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. +- Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. +- Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. ## Testing - Add focused Vitest tests per helper module and one root export test in `src/index.test.ts`. diff --git a/packages/protocol/src/agent-registration-proof.ts b/packages/protocol/src/agent-registration-proof.ts new file mode 100644 index 0000000..36a1ab7 --- /dev/null +++ b/packages/protocol/src/agent-registration-proof.ts @@ -0,0 +1,44 @@ +export const AGENT_REGISTRATION_PROOF_VERSION = "clawdentity.register.v1"; + +export const AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE = + `${AGENT_REGISTRATION_PROOF_VERSION}\\n` + + "challengeId:{challengeId}\\n" + + "nonce:{nonce}\\n" + + "ownerDid:{ownerDid}\\n" + + "publicKey:{publicKey}\\n" + + "name:{name}\\n" + + "framework:{framework}\\n" + + "ttlDays:{ttlDays}"; + +export type AgentRegistrationProofMessageInput = { + challengeId: string; + nonce: string; + ownerDid: string; + publicKey: string; + name: string; + framework?: string; + ttlDays?: number; +}; + +function normalizeOptionalField(value: string | number | undefined): string { + if (value === undefined) { + return ""; + } + + return String(value); +} + +export function canonicalizeAgentRegistrationProof( + input: AgentRegistrationProofMessageInput, +): string { + return [ + AGENT_REGISTRATION_PROOF_VERSION, + `challengeId:${input.challengeId}`, + `nonce:${input.nonce}`, + `ownerDid:${input.ownerDid}`, + `publicKey:${input.publicKey}`, + 
`name:${input.name}`, + `framework:${normalizeOptionalField(input.framework)}`, + `ttlDays:${normalizeOptionalField(input.ttlDays)}`, + ].join("\n"); +} diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 589fad7..cda3452 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -1 +1,2 @@ export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; +export const AGENT_REGISTRATION_CHALLENGE_PATH = "/v1/agents/challenge"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index eb5b2f1..7fdfd93 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -2,8 +2,12 @@ import { describe, expect, it } from "vitest"; import { ADMIN_BOOTSTRAP_PATH, AGENT_NAME_REGEX, + AGENT_REGISTRATION_CHALLENGE_PATH, + AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, + AGENT_REGISTRATION_PROOF_VERSION, aitClaimsSchema, CLAW_PROOF_CANONICAL_VERSION, + canonicalizeAgentRegistrationProof, canonicalizeRequest, crlClaimsSchema, decodeBase64url, @@ -29,6 +33,7 @@ describe("protocol", () => { it("exports shared endpoint constants", () => { expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); + expect(AGENT_REGISTRATION_CHALLENGE_PATH).toBe("/v1/agents/challenge"); }); it("exports helpers from package root", () => { @@ -67,6 +72,33 @@ describe("protocol", () => { ); }); + it("exports agent registration proof canonicalization helpers", () => { + const canonical = canonicalizeAgentRegistrationProof({ + challengeId: "01JCHALLENGEABC", + nonce: "nonce123", + ownerDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + name: "agent_01", + }); + + expect(AGENT_REGISTRATION_PROOF_VERSION).toBe("clawdentity.register.v1"); + expect(AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE).toContain( + "challengeId:{challengeId}", + ); + expect(canonical).toBe( + [ + "clawdentity.register.v1", + 
"challengeId:01JCHALLENGEABC", + "nonce:nonce123", + "ownerDid:did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + "publicKey:AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + "name:agent_01", + "framework:", + "ttlDays:", + ].join("\n"), + ); + }); + it("exports AIT helpers from package root", () => { const agentUlid = generateUlid(1700000000000); const ownerUlid = generateUlid(1700000000100); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index f5ba58a..3f5ffc5 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -1,5 +1,11 @@ export const PROTOCOL_VERSION = "0.0.0"; +export type { AgentRegistrationProofMessageInput } from "./agent-registration-proof.js"; +export { + AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, + AGENT_REGISTRATION_PROOF_VERSION, + canonicalizeAgentRegistrationProof, +} from "./agent-registration-proof.js"; export type { AitClaims, AitCnfJwk } from "./ait.js"; export { AGENT_NAME_REGEX, @@ -14,7 +20,10 @@ export type { CrlClaims } from "./crl.js"; export { crlClaimsSchema, parseCrlClaims } from "./crl.js"; export type { ClawDidKind } from "./did.js"; export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; -export { ADMIN_BOOTSTRAP_PATH } from "./endpoints.js"; +export { + ADMIN_BOOTSTRAP_PATH, + AGENT_REGISTRATION_CHALLENGE_PATH, +} from "./endpoints.js"; export type { ProtocolParseErrorCode } from "./errors.js"; export { ProtocolParseError } from "./errors.js"; export type { CanonicalRequestInput } from "./http-signing.js"; From 5558c855bbcf8dd202d503258be79bcb220d1aa7 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 15:27:56 +0530 Subject: [PATCH 061/190] fix: align proof template newlines and add drizzle 0001 snapshot --- apps/registry/drizzle/meta/0001_snapshot.json | 586 ++++++++++++++++++ apps/registry/drizzle/meta/_journal.json | 2 +- .../protocol/src/agent-registration-proof.ts | 14 +- packages/protocol/src/index.test.ts | 13 +- 4 files changed, 605 insertions(+), 
10 deletions(-) create mode 100644 apps/registry/drizzle/meta/0001_snapshot.json diff --git a/apps/registry/drizzle/meta/0001_snapshot.json b/apps/registry/drizzle/meta/0001_snapshot.json new file mode 100644 index 0000000..081f334 --- /dev/null +++ b/apps/registry/drizzle/meta/0001_snapshot.json @@ -0,0 +1,586 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "24c0c1a7-7fe4-4b8f-b1d1-4050997231cf", + "prevId": "42ce4eff-bbcb-41b5-b739-1d746e1bd162", + "tables": { + "agent_registration_challenges": { + "name": "agent_registration_challenges", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "owner_id": { + "name": "owner_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "nonce": { + "name": "nonce", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "expires_at": { + "name": "expires_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "used_at": { + "name": "used_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "idx_agent_registration_challenges_owner_status": { + "name": "idx_agent_registration_challenges_owner_status", + "columns": [ + "owner_id", + "status" + ], + "isUnique": false + }, + "idx_agent_registration_challenges_expires_at": { + 
"name": "idx_agent_registration_challenges_expires_at", + "columns": [ + "expires_at" + ], + "isUnique": false + } + }, + "foreignKeys": { + "agent_registration_challenges_owner_id_humans_id_fk": { + "name": "agent_registration_challenges_owner_id_humans_id_fk", + "tableFrom": "agent_registration_challenges", + "tableTo": "humans", + "columnsFrom": [ + "owner_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "did": { + "name": "did", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "owner_id": { + "name": "owner_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "framework": { + "name": "framework", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "current_jti": { + "name": "current_jti", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "expires_at": { + "name": "expires_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "gateway_hint": { + "name": "gateway_hint", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_did_unique": { + "name": "agents_did_unique", + "columns": [ + "did" + ], + "isUnique": true + }, + "idx_agents_owner_status": { + "name": "idx_agents_owner_status", + "columns": [ + "owner_id", + "status" + ], + "isUnique": false + } + }, + "foreignKeys": { + "agents_owner_id_humans_id_fk": { + "name": "agents_owner_id_humans_id_fk", + "tableFrom": "agents", + "tableTo": "humans", + "columnsFrom": [ + "owner_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "api_keys": { + "name": "api_keys", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "human_id": { + "name": "human_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "key_hash": { + "name": "key_hash", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "key_prefix": { + "name": "key_prefix", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "last_used_at": { + "name": "last_used_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": { + "idx_api_keys_key_hash": { + "name": "idx_api_keys_key_hash", + "columns": [ + "key_hash" + ], 
+ "isUnique": false + } + }, + "foreignKeys": { + "api_keys_human_id_humans_id_fk": { + "name": "api_keys_human_id_humans_id_fk", + "tableFrom": "api_keys", + "tableTo": "humans", + "columnsFrom": [ + "human_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "humans": { + "name": "humans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "did": { + "name": "did", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "display_name": { + "name": "display_name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "role": { + "name": "role", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'user'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "humans_did_unique": { + "name": "humans_did_unique", + "columns": [ + "did" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "invites": { + "name": "invites", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "code": { + "name": "code", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_by": { + "name": "created_by", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false + }, + "redeemed_by": { + "name": "redeemed_by", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "agent_id": { + "name": "agent_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "expires_at": { + "name": "expires_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "invites_code_unique": { + "name": "invites_code_unique", + "columns": [ + "code" + ], + "isUnique": true + } + }, + "foreignKeys": { + "invites_created_by_humans_id_fk": { + "name": "invites_created_by_humans_id_fk", + "tableFrom": "invites", + "tableTo": "humans", + "columnsFrom": [ + "created_by" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + }, + "invites_redeemed_by_humans_id_fk": { + "name": "invites_redeemed_by_humans_id_fk", + "tableFrom": "invites", + "tableTo": "humans", + "columnsFrom": [ + "redeemed_by" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + }, + "invites_agent_id_agents_id_fk": { + "name": "invites_agent_id_agents_id_fk", + "tableFrom": "invites", + "tableTo": "agents", + "columnsFrom": [ + "agent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "revocations": { + "name": "revocations", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "jti": { + "name": "jti", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "agent_id": { + "name": "agent_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"reason": { + "name": "reason", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "revoked_at": { + "name": "revoked_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "revocations_jti_unique": { + "name": "revocations_jti_unique", + "columns": [ + "jti" + ], + "isUnique": true + }, + "idx_revocations_agent_id": { + "name": "idx_revocations_agent_id", + "columns": [ + "agent_id" + ], + "isUnique": false + } + }, + "foreignKeys": { + "revocations_agent_id_agents_id_fk": { + "name": "revocations_agent_id_agents_id_fk", + "tableFrom": "revocations", + "tableTo": "agents", + "columnsFrom": [ + "agent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/apps/registry/drizzle/meta/_journal.json b/apps/registry/drizzle/meta/_journal.json index 901f93b..3b672be 100644 --- a/apps/registry/drizzle/meta/_journal.json +++ b/apps/registry/drizzle/meta/_journal.json @@ -11,7 +11,7 @@ }, { "idx": 1, - "version": "7", + "version": "6", "when": 1771278200000, "tag": "0001_agent_registration_challenge", "breakpoints": true diff --git a/packages/protocol/src/agent-registration-proof.ts b/packages/protocol/src/agent-registration-proof.ts index 36a1ab7..eee0dd7 100644 --- a/packages/protocol/src/agent-registration-proof.ts +++ b/packages/protocol/src/agent-registration-proof.ts @@ -1,13 +1,13 @@ export const AGENT_REGISTRATION_PROOF_VERSION = "clawdentity.register.v1"; export const AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE = - `${AGENT_REGISTRATION_PROOF_VERSION}\\n` + - "challengeId:{challengeId}\\n" + - "nonce:{nonce}\\n" + - "ownerDid:{ownerDid}\\n" + - 
"publicKey:{publicKey}\\n" + - "name:{name}\\n" + - "framework:{framework}\\n" + + `${AGENT_REGISTRATION_PROOF_VERSION}\n` + + "challengeId:{challengeId}\n" + + "nonce:{nonce}\n" + + "ownerDid:{ownerDid}\n" + + "publicKey:{publicKey}\n" + + "name:{name}\n" + + "framework:{framework}\n" + "ttlDays:{ttlDays}"; export type AgentRegistrationProofMessageInput = { diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index 7fdfd93..ca16946 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -82,8 +82,17 @@ describe("protocol", () => { }); expect(AGENT_REGISTRATION_PROOF_VERSION).toBe("clawdentity.register.v1"); - expect(AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE).toContain( - "challengeId:{challengeId}", + expect(AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE).toBe( + [ + "clawdentity.register.v1", + "challengeId:{challengeId}", + "nonce:{nonce}", + "ownerDid:{ownerDid}", + "publicKey:{publicKey}", + "name:{name}", + "framework:{framework}", + "ttlDays:{ttlDays}", + ].join("\n"), ); expect(canonical).toBe( [ From 30d8834351d6566c32bfc9989f3b432d3343046b Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 16:37:31 +0530 Subject: [PATCH 062/190] feat: implement api key lifecycle endpoints and cli commands --- PRD.md | 10 + README.md | 5 + apps/cli/AGENTS.md | 3 +- apps/cli/src/AGENTS.md | 1 + apps/cli/src/commands/AGENTS.md | 6 + apps/cli/src/commands/api-key.test.ts | 329 ++++++++++++ apps/cli/src/commands/api-key.ts | 494 ++++++++++++++++++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/cli/vitest.config.ts | 11 + apps/registry/AGENTS.md | 4 + apps/registry/src/AGENTS.md | 20 + apps/registry/src/api-key-lifecycle.ts | 189 +++++++ apps/registry/src/server.test.ts | 680 ++++++++++++++++++++++--- apps/registry/src/server.ts | 125 +++++ packages/protocol/AGENTS.md | 1 + packages/protocol/src/endpoints.ts | 1 + packages/protocol/src/index.test.ts | 2 + 
packages/protocol/src/index.ts | 1 + 19 files changed, 1830 insertions(+), 62 deletions(-) create mode 100644 apps/cli/src/commands/api-key.test.ts create mode 100644 apps/cli/src/commands/api-key.ts create mode 100644 apps/registry/src/api-key-lifecycle.ts diff --git a/PRD.md b/PRD.md index 5b70f89..5e7829d 100644 --- a/PRD.md +++ b/PRD.md @@ -71,6 +71,7 @@ Because OpenClaw requires `hooks.token` and expects Bearer/token auth for `/hook - Revoke agent (`claw agent revoke`) - Inspect token (`claw agent inspect`) - Verify token (`claw verify`) + - Personal PAT lifecycle (`clawdentity api-key create|list|revoke`) - Share contact card (`claw share`) - **Proxy** @@ -146,6 +147,15 @@ Verifier must enforce: - One invite maps to one agent slot - Admin may suspend a human, which triggers agent revocation flow +### 6.7 Personal PAT lifecycle (post-bootstrap) +- Authenticated humans can mint additional PATs for safe key rotation. +- Registry exposes: + - `POST /v1/me/api-keys` (create, plaintext token returned once) + - `GET /v1/me/api-keys` (metadata only) + - `DELETE /v1/me/api-keys/:id` (revoke) +- Revoked PATs must fail auth with `401 API_KEY_REVOKED`. +- Unrelated active PATs must continue to authenticate after targeted key revocation. + --- ## 7) Non-functional requirements diff --git a/README.md b/README.md index 2c73eaa..add79be 100644 --- a/README.md +++ b/README.md @@ -202,6 +202,9 @@ This repo is a monorepo: - `clawdentity agent create` for local keypair + registry registration. - `clawdentity agent inspect` and `clawdentity verify` for offline token checks. - `clawdentity agent revoke` for kill switch workflows. +- `clawdentity api-key create` to mint a new PAT (token shown once). +- `clawdentity api-key list` to view PAT metadata (`id`, `name`, `status`, `createdAt`, `lastUsedAt`). +- `clawdentity api-key revoke ` to invalidate a PAT without rotating unrelated keys. - `clawdentity share` for contact-card exchange (DID, verify URL, endpoint). 
### 5) Onboarding and control model @@ -278,6 +281,8 @@ No one shares keys/files between agents. Identity is presented per request. - Treat any identity fields (agent name/description) as untrusted input; never allow prompt injection via identity metadata. - Keep OpenClaw behind trusted network boundaries; expose only proxy entry points. - Rotate PATs and audit allowlist entries regularly. +- Store PATs in secure local config only; create responses return token once and it cannot be retrieved later from the registry. +- Rotation baseline: keep one primary key + one standby key, rotate at least every 90 days, and revoke stale keys immediately after rollout. --- diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index b0e2fac..cf5e629 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -8,7 +8,7 @@ - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Implement command groups under `src/commands/*` and register them from `createProgram()`. -- Keep top-level command contracts stable (`config`, `agent`, `verify`) so automation and docs do not drift. +- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `verify`, `openclaw`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. @@ -26,6 +26,7 @@ - Resolve values with explicit precedence: environment variables > config file > built-in defaults. - Keep API tokens masked in human-facing output (`show`, success logs, debug prints). - Write config and identity artifacts with restrictive permissions (`0600`) and never commit secrets or generated local config. 
+- API-key lifecycle commands must print plaintext PATs only at creation time and never persist newly-created tokens automatically without explicit user action. ## Testing Rules - Use Vitest for all tests. diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 49f9a82..b821de6 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -10,6 +10,7 @@ - Prefer SDK shared primitives (`AppError`, `nowIso`) for new command error/date logic instead of ad-hoc equivalents. - Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. - Admin bootstrap must print the one-time PAT before attempting to persist it and depend on `persistBootstrapConfig` so config write failures are surfaced via CLI errors while the operator still sees the PAT. +- API-key lifecycle command logic should stay in `commands/api-key.ts`; keep create/list/revoke request mapping explicit and keep token exposure limited to create output only. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index d1f0710..60576ee 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -31,6 +31,12 @@ - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. - Config persistence failures after successful bootstrap must not hide the returned PAT token; print token first, then surface recovery instructions. +## API Key Command Rules +- `api-key create` must call registry `POST /v1/me/api-keys` and print the plaintext PAT token once without persisting it into local config automatically. +- `api-key list` must call registry `GET /v1/me/api-keys` and print metadata only (`id`, `name`, `status`, `createdAt`, `lastUsedAt`), never token/hash/prefix values. 
+- `api-key revoke` must call registry `DELETE /v1/me/api-keys/:id` using ULID path validation before network calls. +- Keep API-key command error mapping stable for `401`, `400`, `404`, and `5xx` responses so rotation workflows are deterministic for operators. + ## Agent Command Rules - `agent create` must use a two-step registration handshake: request challenge from registry, sign canonical challenge message locally with agent private key, then submit registration with `challengeId` + `challengeSignature`. - Never send or log agent private keys; only send public key and proof signature. diff --git a/apps/cli/src/commands/api-key.test.ts b/apps/cli/src/commands/api-key.test.ts new file mode 100644 index 0000000..0f8e01c --- /dev/null +++ b/apps/cli/src/commands/api-key.test.ts @@ -0,0 +1,329 @@ +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +vi.mock("../config/manager.js", () => ({ + resolveConfig: vi.fn(), +})); + +import { resolveConfig } from "../config/manager.js"; +import { + createApiKey, + createApiKeyCommand, + listApiKeys, + revokeApiKey, +} from "./api-key.js"; + +const mockedResolveConfig = vi.mocked(resolveConfig); + +const mockFetch = vi.fn(); + +const createJsonResponse = (status: number, body: unknown): Response => { + return { + ok: status >= 200 && status < 300, + status, + json: vi.fn(async () => body), + } as unknown as Response; +}; + +async function runApiKeyCommand(args: string[]) { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createApiKeyCommand({ + fetchImpl: 
mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }), + }); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "api-key", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stdout: stdout.join(""), + stderr: stderr.join(""), + }; +} + +describe("api-key command helpers", () => { + beforeEach(() => { + vi.clearAllMocks(); + mockFetch.mockReset(); + + mockedResolveConfig.mockResolvedValue({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("creates API key and returns metadata with token", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(201, { + apiKey: { + id: "01KJ8E2A4F8B10V8R8A6T8XKZ9", + name: "workstation", + status: "active", + createdAt: "2026-02-16T00:00:00.000Z", + lastUsedAt: null, + token: "clw_pat_created", + }, + }), + ); + + const result = await createApiKey( + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }), + }, + ); + + expect(result.apiKey.token).toBe("clw_pat_created"); + expect(mockFetch).toHaveBeenCalledWith( + "https://api.clawdentity.com/v1/me/api-keys", + expect.objectContaining({ + method: "POST", + headers: expect.objectContaining({ + authorization: "Bearer clw_pat_local", + }), + }), + ); + }); + + it("lists API key metadata entries", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { 
+ apiKeys: [ + { + id: "01KJ8E2A4F8B10V8R8A6T8XKZ9", + name: "workstation", + status: "active", + createdAt: "2026-02-16T00:00:00.000Z", + lastUsedAt: "2026-02-16T01:00:00.000Z", + }, + { + id: "01KJ8E2A4F8B10V8R8A6T8XKZA", + name: "old-key", + status: "revoked", + createdAt: "2026-02-15T00:00:00.000Z", + lastUsedAt: null, + }, + ], + }), + ); + + const result = await listApiKeys( + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }), + }, + ); + + expect(result.apiKeys).toHaveLength(2); + expect(result.apiKeys[0]?.status).toBe("active"); + expect(result.apiKeys[1]?.status).toBe("revoked"); + }); + + it("revokes API key by id", async () => { + mockFetch.mockResolvedValueOnce(createJsonResponse(204, {})); + + const result = await revokeApiKey( + "01KJ8E2A4F8B10V8R8A6T8XKZ9", + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }), + }, + ); + + expect(result.apiKeyId).toBe("01KJ8E2A4F8B10V8R8A6T8XKZ9"); + expect(mockFetch).toHaveBeenCalledWith( + "https://api.clawdentity.com/v1/me/api-keys/01KJ8E2A4F8B10V8R8A6T8XKZ9", + expect.objectContaining({ + method: "DELETE", + headers: expect.objectContaining({ + authorization: "Bearer clw_pat_local", + }), + }), + ); + }); + + it("fails create when local API key is not configured", async () => { + mockedResolveConfig.mockResolvedValueOnce({ + registryUrl: "https://api.clawdentity.com", + }); + + await expect( + createApiKey( + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: mockedResolveConfig, + }, + ), + ).rejects.toMatchObject({ + code: "CLI_API_KEY_MISSING_LOCAL_CREDENTIALS", + }); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails revoke when key id is invalid", async () => { + await expect( + revokeApiKey( + "not-a-ulid", + {}, + { + 
fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_API_KEY_ID_INVALID", + message: "API key id must be a valid ULID", + }); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("maps revoke 404 to stable message", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(404, { + error: { + code: "API_KEY_NOT_FOUND", + message: "API key not found", + }, + }), + ); + + await expect( + revokeApiKey( + "01KJ8E2A4F8B10V8R8A6T8XKZ9", + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + }), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_API_KEY_REVOKE_FAILED", + message: "API key (404): API key not found", + }); + }); + + it("sets command exit code and stderr on create failure", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(401, { + error: { + code: "API_KEY_INVALID", + message: "API key is invalid", + }, + }), + ); + + const result = await runApiKeyCommand(["create"]); + + expect(result.exitCode).toBe(1); + expect(result.stderr).toContain("Registry authentication failed"); + }); + + it("prints token once for create command", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(201, { + apiKey: { + id: "01KJ8E2A4F8B10V8R8A6T8XKZ9", + name: "workstation", + status: "active", + createdAt: "2026-02-16T00:00:00.000Z", + lastUsedAt: null, + token: "clw_pat_created", + }, + }), + ); + + const result = await runApiKeyCommand(["create", "--name", "workstation"]); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("API key created"); + expect(result.stdout).toContain("Token (shown once):"); + expect(result.stdout).toContain("clw_pat_created"); + }); + + it("prints empty state for list command", async () => { + 
mockFetch.mockResolvedValueOnce( + createJsonResponse(200, { + apiKeys: [], + }), + ); + + const result = await runApiKeyCommand(["list"]); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("No API keys found."); + }); + + it("prints revoke success message", async () => { + mockFetch.mockResolvedValueOnce(createJsonResponse(204, {})); + + const result = await runApiKeyCommand([ + "revoke", + "01KJ8E2A4F8B10V8R8A6T8XKZ9", + ]); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain( + "API key revoked: 01KJ8E2A4F8B10V8R8A6T8XKZ9", + ); + }); +}); diff --git a/apps/cli/src/commands/api-key.ts b/apps/cli/src/commands/api-key.ts new file mode 100644 index 0000000..52b0f92 --- /dev/null +++ b/apps/cli/src/commands/api-key.ts @@ -0,0 +1,494 @@ +import { ME_API_KEYS_PATH, parseUlid } from "@clawdentity/protocol"; +import { AppError, createLogger } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { type CliConfig, resolveConfig } from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "api-key" }); + +type ApiKeyCreateOptions = { + name?: string; + registryUrl?: string; +}; + +type ApiKeyListOptions = { + registryUrl?: string; +}; + +type ApiKeyRevokeOptions = { + registryUrl?: string; +}; + +type ApiKeyMetadata = { + id: string; + name: string; + status: "active" | "revoked"; + createdAt: string; + lastUsedAt: string | null; +}; + +type ApiKeyCreateResult = { + apiKey: ApiKeyMetadata & { + token: string; + }; + registryUrl: string; +}; + +type ApiKeyListResult = { + apiKeys: ApiKeyMetadata[]; + registryUrl: string; +}; + +type RegistryErrorEnvelope = { + error?: { + message?: string; + }; +}; + +type ApiKeyDependencies = { + fetchImpl?: typeof fetch; + resolveConfigImpl?: () => Promise; +}; + +type ApiKeyRuntime = { + fetchImpl: typeof fetch; + registryUrl: string; + apiKey: 
string; +}; + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +function parseNonEmptyString(value: unknown): string { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +} + +function createCliError(code: string, message: string): AppError { + return new AppError({ + code, + message, + status: 400, + }); +} + +function resolveRegistryUrl(input: { + overrideRegistryUrl: string | undefined; + configRegistryUrl: string; +}): string { + const candidate = + parseNonEmptyString(input.overrideRegistryUrl) || input.configRegistryUrl; + + try { + return new URL(candidate).toString(); + } catch { + throw createCliError( + "CLI_API_KEY_INVALID_REGISTRY_URL", + "Registry URL is invalid", + ); + } +} + +function requireApiKey(config: CliConfig): string { + if (typeof config.apiKey === "string" && config.apiKey.trim().length > 0) { + return config.apiKey; + } + + throw createCliError( + "CLI_API_KEY_MISSING_LOCAL_CREDENTIALS", + "API key is not configured. Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", + ); +} + +function toApiKeyRequestUrl(registryUrl: string, apiKeyId?: string): string { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + const path = apiKeyId + ? `${ME_API_KEYS_PATH.slice(1)}/${encodeURIComponent(apiKeyId)}` + : ME_API_KEYS_PATH.slice(1); + + return new URL(path, normalizedBaseUrl).toString(); +} + +function extractRegistryErrorMessage(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const trimmed = envelope.error.message.trim(); + return trimmed.length > 0 ? 
trimmed : undefined; +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function toHttpErrorMessage(options: { + status: number; + responseBody: unknown; + notFoundPrefix: string; +}): string { + const registryMessage = extractRegistryErrorMessage(options.responseBody); + + if (options.status === 401) { + return registryMessage + ? `Registry authentication failed (401): ${registryMessage}` + : "Registry authentication failed (401). Check your API key."; + } + + if (options.status === 400) { + return registryMessage + ? `Registry rejected the request (400): ${registryMessage}` + : "Registry rejected the request (400)."; + } + + if (options.status === 404) { + return registryMessage + ? `${options.notFoundPrefix} (404): ${registryMessage}` + : `${options.notFoundPrefix} not found in the registry (404).`; + } + + if (options.status >= 500) { + return `Registry server error (${options.status}). 
Try again later.`; + } + + if (registryMessage) { + return `Registry request failed (${options.status}): ${registryMessage}`; + } + + return `Registry request failed (${options.status})`; +} + +function parseApiKeyMetadata(payload: unknown): ApiKeyMetadata { + if (!isRecord(payload)) { + throw createCliError( + "CLI_API_KEY_INVALID_RESPONSE", + "API key response is invalid", + ); + } + + const id = parseNonEmptyString(payload.id); + const name = parseNonEmptyString(payload.name); + const status = payload.status; + const createdAt = parseNonEmptyString(payload.createdAt); + const lastUsedAt = payload.lastUsedAt; + + if ( + id.length === 0 || + name.length === 0 || + (status !== "active" && status !== "revoked") || + createdAt.length === 0 || + (lastUsedAt !== null && typeof lastUsedAt !== "string") + ) { + throw createCliError( + "CLI_API_KEY_INVALID_RESPONSE", + "API key response is invalid", + ); + } + + return { + id, + name, + status, + createdAt, + lastUsedAt, + }; +} + +function parseApiKeyCreateResponse( + payload: unknown, +): ApiKeyCreateResult["apiKey"] { + if (!isRecord(payload) || !isRecord(payload.apiKey)) { + throw createCliError( + "CLI_API_KEY_INVALID_RESPONSE", + "API key response is invalid", + ); + } + + const metadata = parseApiKeyMetadata(payload.apiKey); + const token = parseNonEmptyString(payload.apiKey.token); + if (token.length === 0) { + throw createCliError( + "CLI_API_KEY_INVALID_RESPONSE", + "API key response is invalid", + ); + } + + return { + ...metadata, + token, + }; +} + +function parseApiKeyListResponse(payload: unknown): ApiKeyMetadata[] { + if (!isRecord(payload) || !Array.isArray(payload.apiKeys)) { + throw createCliError( + "CLI_API_KEY_INVALID_RESPONSE", + "API key response is invalid", + ); + } + + return payload.apiKeys.map((item) => parseApiKeyMetadata(item)); +} + +function parseApiKeyId(id: string): string { + const trimmed = id.trim(); + if (trimmed.length === 0) { + throw createCliError("CLI_API_KEY_ID_REQUIRED", "API 
key id is required"); + } + + try { + return parseUlid(trimmed).value; + } catch { + throw createCliError( + "CLI_API_KEY_ID_INVALID", + "API key id must be a valid ULID", + ); + } +} + +async function resolveApiKeyRuntime( + overrideRegistryUrl: string | undefined, + dependencies: ApiKeyDependencies, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; + const config = await resolveConfigImpl(); + const apiKey = requireApiKey(config); + const registryUrl = resolveRegistryUrl({ + overrideRegistryUrl, + configRegistryUrl: config.registryUrl, + }); + + return { + fetchImpl, + registryUrl, + apiKey, + }; +} + +async function executeApiKeyRequest(input: { + fetchImpl: typeof fetch; + url: string; + init: RequestInit; +}): Promise { + try { + return await input.fetchImpl(input.url, input.init); + } catch { + throw createCliError( + "CLI_API_KEY_REQUEST_FAILED", + "Unable to connect to the registry. Check network access and registryUrl.", + ); + } +} + +export async function createApiKey( + options: ApiKeyCreateOptions, + dependencies: ApiKeyDependencies = {}, +): Promise { + const runtime = await resolveApiKeyRuntime(options.registryUrl, dependencies); + + const response = await executeApiKeyRequest({ + fetchImpl: runtime.fetchImpl, + url: toApiKeyRequestUrl(runtime.registryUrl), + init: { + method: "POST", + headers: { + authorization: `Bearer ${runtime.apiKey}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: parseNonEmptyString(options.name) || undefined, + }), + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_API_KEY_CREATE_FAILED", + toHttpErrorMessage({ + status: response.status, + responseBody, + notFoundPrefix: "API key", + }), + ); + } + + return { + apiKey: parseApiKeyCreateResponse(responseBody), + registryUrl: runtime.registryUrl, + }; +} + +export async function 
listApiKeys( + options: ApiKeyListOptions, + dependencies: ApiKeyDependencies = {}, +): Promise { + const runtime = await resolveApiKeyRuntime(options.registryUrl, dependencies); + + const response = await executeApiKeyRequest({ + fetchImpl: runtime.fetchImpl, + url: toApiKeyRequestUrl(runtime.registryUrl), + init: { + method: "GET", + headers: { + authorization: `Bearer ${runtime.apiKey}`, + }, + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_API_KEY_LIST_FAILED", + toHttpErrorMessage({ + status: response.status, + responseBody, + notFoundPrefix: "API key", + }), + ); + } + + return { + apiKeys: parseApiKeyListResponse(responseBody), + registryUrl: runtime.registryUrl, + }; +} + +export async function revokeApiKey( + id: string, + options: ApiKeyRevokeOptions, + dependencies: ApiKeyDependencies = {}, +): Promise<{ apiKeyId: string; registryUrl: string }> { + const runtime = await resolveApiKeyRuntime(options.registryUrl, dependencies); + const apiKeyId = parseApiKeyId(id); + + const response = await executeApiKeyRequest({ + fetchImpl: runtime.fetchImpl, + url: toApiKeyRequestUrl(runtime.registryUrl, apiKeyId), + init: { + method: "DELETE", + headers: { + authorization: `Bearer ${runtime.apiKey}`, + }, + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_API_KEY_REVOKE_FAILED", + toHttpErrorMessage({ + status: response.status, + responseBody, + notFoundPrefix: "API key", + }), + ); + } + + return { apiKeyId, registryUrl: runtime.registryUrl }; +} + +export const createApiKeyCommand = ( + dependencies: ApiKeyDependencies = {}, +): Command => { + const apiKeyCommand = new Command("api-key").description( + "Manage personal API keys for registry access", + ); + + apiKeyCommand + .command("create") + .description("Create a new personal API key") + .option("--name ", "API key label") + .option("--registry-url ", "Override 
registry URL") + .action( + withErrorHandling( + "api-key create", + async (options: ApiKeyCreateOptions) => { + const result = await createApiKey(options, dependencies); + + logger.info("cli.api_key_created", { + id: result.apiKey.id, + name: result.apiKey.name, + status: result.apiKey.status, + registryUrl: result.registryUrl, + }); + + writeStdoutLine("API key created"); + writeStdoutLine(`ID: ${result.apiKey.id}`); + writeStdoutLine(`Name: ${result.apiKey.name}`); + writeStdoutLine(`Status: ${result.apiKey.status}`); + writeStdoutLine(`Created At: ${result.apiKey.createdAt}`); + writeStdoutLine( + `Last Used At: ${result.apiKey.lastUsedAt ?? "never"}`, + ); + writeStdoutLine("Token (shown once):"); + writeStdoutLine(result.apiKey.token); + }, + ), + ); + + apiKeyCommand + .command("list") + .description("List personal API keys") + .option("--registry-url ", "Override registry URL") + .action( + withErrorHandling("api-key list", async (options: ApiKeyListOptions) => { + const result = await listApiKeys(options, dependencies); + + logger.info("cli.api_key_listed", { + count: result.apiKeys.length, + registryUrl: result.registryUrl, + }); + + if (result.apiKeys.length === 0) { + writeStdoutLine("No API keys found."); + return; + } + + for (const apiKey of result.apiKeys) { + writeStdoutLine( + `${apiKey.id} | ${apiKey.name} | ${apiKey.status} | created ${apiKey.createdAt} | last used ${apiKey.lastUsedAt ?? 
"never"}`, + ); + } + }), + ); + + apiKeyCommand + .command("revoke ") + .description("Revoke a personal API key by id") + .option("--registry-url ", "Override registry URL") + .action( + withErrorHandling( + "api-key revoke", + async (id: string, options: ApiKeyRevokeOptions) => { + const result = await revokeApiKey(id, options, dependencies); + + logger.info("cli.api_key_revoked", { + id: result.apiKeyId, + registryUrl: result.registryUrl, + }); + + writeStdoutLine(`API key revoked: ${result.apiKeyId}`); + }, + ), + ); + + return apiKeyCommand; +}; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index f31bff1..4476945 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -42,6 +42,14 @@ describe("cli", () => { expect(hasVerifyCommand).toBe(true); }); + it("registers the api-key command", () => { + const hasApiKeyCommand = createProgram() + .commands.map((command) => command.name()) + .includes("api-key"); + + expect(hasApiKeyCommand).toBe(true); + }); + it("registers the openclaw command", () => { const hasOpenclawCommand = createProgram() .commands.map((command) => command.name()) diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index ab57772..71c0c74 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,6 +1,7 @@ import { Command } from "commander"; import { createAdminCommand } from "./commands/admin.js"; import { createAgentCommand } from "./commands/agent.js"; +import { createApiKeyCommand } from "./commands/api-key.js"; import { createConfigCommand } from "./commands/config.js"; import { createOpenclawCommand } from "./commands/openclaw.js"; import { createVerifyCommand } from "./commands/verify.js"; @@ -13,6 +14,7 @@ export const createProgram = (): Command => { .version(CLI_VERSION) .addCommand(createAdminCommand()) .addCommand(createAgentCommand()) + .addCommand(createApiKeyCommand()) .addCommand(createConfigCommand()) .addCommand(createOpenclawCommand()) 
.addCommand(createVerifyCommand()); diff --git a/apps/cli/vitest.config.ts b/apps/cli/vitest.config.ts index e2ec332..a42bac4 100644 --- a/apps/cli/vitest.config.ts +++ b/apps/cli/vitest.config.ts @@ -1,6 +1,17 @@ +import { fileURLToPath } from "node:url"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + alias: { + "@clawdentity/protocol": fileURLToPath( + new URL("../../packages/protocol/src/index.ts", import.meta.url), + ), + "@clawdentity/sdk": fileURLToPath( + new URL("../../packages/sdk/src/index.ts", import.meta.url), + ), + }, + }, test: { globals: true, }, diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index b47a77f..b0599a7 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -66,6 +66,10 @@ - Use constant-time comparison when checking the header-derived hash against `api_keys.key_hash`, only allow `status = 'active'`, and surface failures through `AppError` codes such as `API_KEY_MISSING`, `API_KEY_INVALID`, or `API_KEY_REVOKED` so the shared SDK error handler can produce consistent envelopes. - Enrich the request context with `humanId`, `apiKeyId`, and `apiKeyName` for downstream handlers and update `last_used_at` as part of the auth middleware/handler so analytics and revocation tooling stay honest. - Keep the middleware reversible: a no-auth `GET /health` can stay open but any future `/v1/*` endpoints should extend this middleware so unauthorized access never reaches the DB layer. +- PAT lifecycle endpoints live under `/v1/me/api-keys`: + - `POST` creates a key and returns plaintext token once, + - `GET` returns metadata only, + - `DELETE /:id` revokes owner-scoped keys with idempotent `204` for already-revoked owned rows. ## Public Key Discovery - `GET /.well-known/claw-keys.json` is the canonical public key discovery endpoint for offline AIT verification. 
diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 2322f7c..0b44e6b 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -62,6 +62,26 @@ - Keep ordering deterministic (`id` descending) and compute `nextCursor` from the last item in the returned page. - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors`: generic 400 message in `production`, detailed `fieldErrors` in `development`/`test`. +## POST /v1/me/api-keys Contract +- Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. +- Accept optional `{ name }`; default to `api-key` when omitted. +- Validate `name` as trimmed, non-empty when provided, max 64 chars, and free of control characters. +- Return plaintext token only in create response; never persist plaintext token in DB. +- Persist only hashed lookup materials (`key_hash`, `key_prefix`) with `status=active` and `last_used_at=null`. + +## GET /v1/me/api-keys Contract +- Require PAT auth via `createApiKeyAuth`. +- Return caller-owned key metadata for both active and revoked keys. +- Response must include only `{ id, name, status, createdAt, lastUsedAt }`. +- Never expose `key_hash`, `key_prefix`, or plaintext token in list responses. + +## DELETE /v1/me/api-keys/:id Contract +- Require PAT auth via `createApiKeyAuth`. +- Validate `:id` as ULID and return `400 API_KEY_REVOKE_INVALID_PATH` for invalid path input. +- Enforce owner scoping: unknown or foreign key IDs must return `404 API_KEY_NOT_FOUND`. +- Revoke by setting `status=revoked`; return `204` for successful revoke and already-revoked owned keys. +- Revoked PATs must fail subsequent auth with `401 API_KEY_REVOKED`, while unrelated active PATs continue to authenticate. + ## POST /v1/agents/challenge Contract - Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. 
- Accept only `{ publicKey }` and validate it as base64url Ed25519 (32-byte decode). diff --git a/apps/registry/src/api-key-lifecycle.ts b/apps/registry/src/api-key-lifecycle.ts new file mode 100644 index 0000000..7f834c5 --- /dev/null +++ b/apps/registry/src/api-key-lifecycle.ts @@ -0,0 +1,189 @@ +import { parseUlid } from "@clawdentity/protocol"; +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const DEFAULT_API_KEY_NAME = "api-key"; +const MAX_API_KEY_NAME_LENGTH = 64; + +type ApiKeyMetadataRow = { + id: string; + name: string; + status: "active" | "revoked"; + created_at: string; + last_used_at: string | null; +}; + +type ApiKeyCreatePayload = { + name: string; +}; + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function apiKeyCreateInvalidError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "API_KEY_CREATE_INVALID", + message: exposeDetails + ? "API key create payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +function apiKeyRevokeInvalidPathError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "API_KEY_REVOKE_INVALID_PATH", + message: exposeDetails + ? "API key revoke path is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +function parseOptionalTrimmedString(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + if (typeof value !== "string") { + return undefined; + } + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +export function parseApiKeyCreatePayload(input: { + payload: unknown; + environment: RegistryConfig["ENVIRONMENT"]; +}): ApiKeyCreatePayload { + const exposeDetails = shouldExposeVerboseErrors(input.environment); + if ( + typeof input.payload !== "object" || + input.payload === null || + Array.isArray(input.payload) + ) { + throw new AppError({ + code: "API_KEY_CREATE_INVALID", + message: exposeDetails + ? "API key create payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails + ? { + fieldErrors: { body: ["body must be a JSON object"] }, + formErrors: [], + } + : undefined, + }); + } + + const payload = input.payload as Record; + const fieldErrors: Record = {}; + + if (payload.name !== undefined && typeof payload.name !== "string") { + fieldErrors.name = ["name must be a string"]; + } + + const nameInput = parseOptionalTrimmedString(payload.name); + if ( + payload.name !== undefined && + nameInput === undefined && + !fieldErrors.name + ) { + fieldErrors.name = ["name must not be empty"]; + } + + const name = nameInput ?? 
DEFAULT_API_KEY_NAME; + if (name.length > MAX_API_KEY_NAME_LENGTH) { + fieldErrors.name = [ + `name must be at most ${MAX_API_KEY_NAME_LENGTH} characters`, + ]; + } else if (hasControlChars(name)) { + fieldErrors.name = ["name contains control characters"]; + } + + if (Object.keys(fieldErrors).length > 0) { + throw apiKeyCreateInvalidError({ + environment: input.environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return { name }; +} + +export function parseApiKeyRevokePath(input: { + id: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): string { + const id = input.id.trim(); + if (id.length === 0) { + throw apiKeyRevokeInvalidPathError({ + environment: input.environment, + details: { + fieldErrors: { id: ["id is required"] }, + formErrors: [], + }, + }); + } + + try { + return parseUlid(id).value; + } catch { + throw apiKeyRevokeInvalidPathError({ + environment: input.environment, + details: { + fieldErrors: { id: ["id must be a valid ULID"] }, + formErrors: [], + }, + }); + } +} + +export function apiKeyNotFoundError(): AppError { + return new AppError({ + code: "API_KEY_NOT_FOUND", + message: "API key not found", + status: 404, + expose: true, + }); +} + +export function mapApiKeyMetadataRow(row: ApiKeyMetadataRow) { + return { + id: row.id, + name: row.name, + status: row.status, + createdAt: row.created_at, + lastUsedAt: row.last_used_at, + }; +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 9cc8e8c..2852bda 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -5,6 +5,7 @@ import { canonicalizeAgentRegistrationProof, encodeBase64url, generateUlid, + ME_API_KEYS_PATH, makeAgentDid, makeHumanDid, } from "@clawdentity/protocol"; @@ -86,6 +87,16 @@ type FakeApiKeyRow = { createdAt: string; lastUsedAt: string | null; }; +type FakeApiKeySelectRow = { + id: string; + human_id: string; + key_hash: string; + key_prefix: string; + name: string; + status: "active" | 
"revoked"; + created_at: string; + last_used_at: string | null; +}; type FakeAgentInsertRow = Record; type FakeHumanInsertRow = Record; @@ -455,6 +466,110 @@ function resolveHumanSelectRows(options: { .slice(0, limit); } +function getApiKeySelectColumnValue( + row: FakeApiKeySelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "human_id") { + return row.human_id; + } + if (column === "key_hash") { + return row.key_hash; + } + if (column === "key_prefix") { + return row.key_prefix; + } + if (column === "name") { + return row.name; + } + if (column === "status") { + return row.status; + } + if (column === "created_at") { + return row.created_at; + } + if (column === "last_used_at") { + return row.last_used_at; + } + return undefined; +} + +function resolveApiKeySelectRows(options: { + query: string; + params: unknown[]; + apiKeyRows: FakeApiKeyRow[]; +}): FakeApiKeySelectRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasHumanIdFilter = hasFilter(whereClause, "human_id"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasPrefixFilter = hasFilter(whereClause, "key_prefix"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const orderByCreatedAtDesc = + options.query.toLowerCase().includes("order by") && + options.query.toLowerCase().includes("created_at") && + options.query.toLowerCase().includes("desc"); + + const humanId = + hasHumanIdFilter && typeof equalityParams.values.human_id?.[0] === "string" + ? String(equalityParams.values.human_id[0]) + : undefined; + const id = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? 
String(equalityParams.values.status[0]) + : undefined; + const keyPrefix = + hasPrefixFilter && typeof equalityParams.values.key_prefix?.[0] === "string" + ? String(equalityParams.values.key_prefix[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.apiKeyRows.length; + + const rows = options.apiKeyRows + .filter((row) => (humanId ? row.humanId === humanId : true)) + .filter((row) => (id ? row.id === id : true)) + .filter((row) => (status ? row.status === status : true)) + .filter((row) => (keyPrefix ? row.keyPrefix === keyPrefix : true)) + .map((row) => ({ + id: row.id, + human_id: row.humanId, + key_hash: row.keyHash, + key_prefix: row.keyPrefix, + name: row.name, + status: row.status, + created_at: row.createdAt, + last_used_at: row.lastUsedAt, + })); + + if (orderByCreatedAtDesc) { + rows.sort((left, right) => { + const createdAtCompare = right.created_at.localeCompare(left.created_at); + if (createdAtCompare !== 0) { + return createdAtCompare; + } + return right.id.localeCompare(left.id); + }); + } + + return rows.slice(0, limit); +} + function resolveAgentSelectRows(options: { query: string; params: unknown[]; @@ -714,35 +829,63 @@ function createFakeDb( normalizedQuery.includes('from "api_keys"') || normalizedQuery.includes("from api_keys") ) { - const requestedKeyPrefix = - typeof params[0] === "string" ? params[0] : ""; - const matchingRows = apiKeyRows.filter( - (row) => row.keyPrefix === requestedKeyPrefix, - ); + const requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + normalizedQuery.includes("join humans"); + + if (requiresHumanJoin) { + const requestedKeyPrefix = + typeof params[0] === "string" ? 
params[0] : ""; + const matchingRows = apiKeyRows.filter( + (row) => row.keyPrefix === requestedKeyPrefix, + ); + + return { + results: matchingRows + .map((row) => { + const human = humanRows.find( + (humanRow) => humanRow.id === row.humanId, + ); + if (!human) { + return undefined; + } + + return { + api_key_id: row.id, + key_hash: row.keyHash, + api_key_status: row.status, + api_key_name: row.name, + human_id: human.id, + human_did: human.did, + human_display_name: human.displayName, + human_role: human.role, + human_status: human.status, + }; + }) + .filter(isDefined), + }; + } + const resultRows = resolveApiKeySelectRows({ + query, + params, + apiKeyRows, + }); + const selectedColumns = parseSelectedColumns(query); return { - results: matchingRows - .map((row) => { - const human = humanRows.find( - (humanRow) => humanRow.id === row.humanId, - ); - if (!human) { - return undefined; - } + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } - return { - api_key_id: row.id, - key_hash: row.keyHash, - api_key_status: row.status, - api_key_name: row.name, - human_id: human.id, - human_did: human.did, - human_display_name: human.displayName, - human_role: human.role, - human_status: human.status, - }; - }) - .filter(isDefined), + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getApiKeySelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), }; } if ( @@ -855,34 +998,52 @@ function createFakeDb( normalizedQuery.includes('from "api_keys"') || normalizedQuery.includes("from api_keys") ) { - const requestedKeyPrefix = - typeof params[0] === "string" ? params[0] : ""; - const matchingRows = apiKeyRows.filter( - (row) => row.keyPrefix === requestedKeyPrefix, - ); + const requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + normalizedQuery.includes("join humans"); + + if (requiresHumanJoin) { + const requestedKeyPrefix = + typeof params[0] === "string" ? 
params[0] : ""; + const matchingRows = apiKeyRows.filter( + (row) => row.keyPrefix === requestedKeyPrefix, + ); + + return matchingRows + .map((row) => { + const human = humanRows.find( + (humanRow) => humanRow.id === row.humanId, + ); + if (!human) { + return undefined; + } - return matchingRows - .map((row) => { - const human = humanRows.find( - (humanRow) => humanRow.id === row.humanId, - ); - if (!human) { - return undefined; - } + return [ + row.id, + row.keyHash, + row.status, + row.name, + human.id, + human.did, + human.displayName, + human.role, + human.status, + ]; + }) + .filter(isDefined); + } - return [ - row.id, - row.keyHash, - row.status, - row.name, - human.id, - human.did, - human.displayName, - human.role, - human.status, - ]; - }) - .filter(isDefined); + const resultRows = resolveApiKeySelectRows({ + query, + params, + apiKeyRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getApiKeySelectColumnValue(row, column), + ), + ); } if ( normalizedQuery.includes('from "humans"') || @@ -964,17 +1125,67 @@ function createFakeDb( normalizedQuery.includes('update "api_keys"') || normalizedQuery.includes("update api_keys") ) { - updates.push({ - lastUsedAt: String(params[0] ?? ""), - apiKeyId: String(params[1] ?? ""), - }); - const apiKey = apiKeyRows.find( - (row) => row.id === String(params[1]), + const setColumns = parseUpdateSetColumns(query, "api_keys"); + const nextValues = setColumns.reduce>( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, ); - if (apiKey) { - apiKey.lastUsedAt = String(params[0] ?? ""); + const whereClause = extractWhereClause(query); + const whereParams = params.slice(setColumns.length); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: whereParams, + }); + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? 
String(equalityParams.values.id[0]) + : undefined; + const humanIdFilter = + typeof equalityParams.values.human_id?.[0] === "string" + ? String(equalityParams.values.human_id[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + + let matchedRows = 0; + for (const row of apiKeyRows) { + if (idFilter && row.id !== idFilter) { + continue; + } + if (humanIdFilter && row.humanId !== humanIdFilter) { + continue; + } + if (statusFilter && row.status !== statusFilter) { + continue; + } + + matchedRows += 1; + if ( + nextValues.status === "active" || + nextValues.status === "revoked" + ) { + row.status = nextValues.status; + } + if ( + typeof nextValues.last_used_at === "string" || + nextValues.last_used_at === null + ) { + row.lastUsedAt = nextValues.last_used_at; + } } - changes = 1; + + if (typeof nextValues.last_used_at === "string" && idFilter) { + updates.push({ + lastUsedAt: nextValues.last_used_at, + apiKeyId: idFilter, + }); + } + changes = matchedRows; } if ( normalizedQuery.includes('insert into "humans"') || @@ -2451,6 +2662,353 @@ describe("GET /v1/me", () => { }); }); +describe(`POST ${ME_API_KEYS_PATH}`, () => { + it("returns 401 when PAT is missing", async () => { + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ name: "workstation" }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("creates key and returns plaintext token once", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, apiKeyInserts } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + 
method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "workstation", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(201); + const body = (await response.json()) as { + apiKey: { + id: string; + name: string; + status: "active" | "revoked"; + createdAt: string; + lastUsedAt: string | null; + token: string; + }; + }; + expect(body.apiKey.name).toBe("workstation"); + expect(body.apiKey.status).toBe("active"); + expect(body.apiKey.token).toMatch(/^clw_pat_/); + expect(body.apiKey.lastUsedAt).toBeNull(); + + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.name).toBe("workstation"); + expect(apiKeyInserts[0]?.key_hash).not.toBe(body.apiKey.token); + expect(apiKeyInserts[0]?.key_prefix).toBe( + deriveApiKeyLookupPrefix(body.apiKey.token), + ); + }); + + it("accepts empty body and uses default key name", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, apiKeyInserts } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(201); + const body = (await response.json()) as { + apiKey: { + name: string; + token: string; + }; + }; + expect(body.apiKey.name).toBe("api-key"); + expect(body.apiKey.token).toMatch(/^clw_pat_/); + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.name).toBe("api-key"); + }); +}); + +describe(`GET ${ME_API_KEYS_PATH}`, () => { + it("returns metadata for caller-owned keys only", async () => { + const authToken = "clw_pat_valid-token-value"; + const authTokenHash = await hashApiKeyToken(authToken); + const revokedToken = "clw_pat_revoked-token-value"; + const revokedTokenHash = await hashApiKeyToken(revokedToken); + const foreignToken = 
"clw_pat_foreign-token-value"; + const foreignTokenHash = await hashApiKeyToken(foreignToken); + + const authRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000000001", + keyPrefix: deriveApiKeyLookupPrefix(authToken), + keyHash: authTokenHash, + apiKeyStatus: "active", + apiKeyName: "primary", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const revokedOwnedRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000000002", + keyPrefix: deriveApiKeyLookupPrefix(revokedToken), + keyHash: revokedTokenHash, + apiKeyStatus: "revoked", + apiKeyName: "old-laptop", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const foreignRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000000003", + keyPrefix: deriveApiKeyLookupPrefix(foreignToken), + keyHash: foreignTokenHash, + apiKeyStatus: "active", + apiKeyName: "foreign", + humanId: "human-2", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB8", + humanDisplayName: "Ira", + humanRole: "user", + humanStatus: "active", + }; + const { database } = createFakeDb([authRow, revokedOwnedRow, foreignRow]); + + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + apiKeys: Array<{ + id: string; + name: string; + status: "active" | "revoked"; + createdAt: string; + lastUsedAt: string | null; + token?: string; + keyHash?: string; + keyPrefix?: string; + }>; + }; + expect(body.apiKeys).toEqual([ + { + id: "01KJ0000000000000000000002", + name: "old-laptop", + status: "revoked", + createdAt: "2026-01-01T00:00:00.000Z", + lastUsedAt: null, + }, + { + id: "01KJ0000000000000000000001", + name: "primary", + 
status: "active", + createdAt: "2026-01-01T00:00:00.000Z", + lastUsedAt: expect.any(String), + }, + ]); + for (const apiKey of body.apiKeys) { + expect(apiKey).not.toHaveProperty("token"); + expect(apiKey).not.toHaveProperty("keyHash"); + expect(apiKey).not.toHaveProperty("keyPrefix"); + } + }); +}); + +describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { + it("returns 400 for invalid id path", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + `${ME_API_KEYS_PATH}/invalid-id`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_REVOKE_INVALID_PATH"); + }); + + it("returns 404 when key is not found for owner", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + `${ME_API_KEYS_PATH}/${generateUlid(1700300000000)}`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(404); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_NOT_FOUND"); + }); + + it("revokes target key but keeps unrelated key active", async () => { + const authToken = "clw_pat_valid-token-value"; + const authTokenHash = await hashApiKeyToken(authToken); + const rotateToken = "clw_pat_rotation-token-value"; + const rotateTokenHash = await hashApiKeyToken(rotateToken); + const targetApiKeyId = generateUlid(1700300000000); + + const authRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000001001", + keyPrefix: deriveApiKeyLookupPrefix(authToken), + keyHash: 
authTokenHash, + apiKeyStatus: "active", + apiKeyName: "primary", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const revokableRow: FakeD1Row = { + apiKeyId: targetApiKeyId, + keyPrefix: deriveApiKeyLookupPrefix(rotateToken), + keyHash: rotateTokenHash, + apiKeyStatus: "active", + apiKeyName: "rotate-me", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const { database } = createFakeDb([authRow, revokableRow]); + const appInstance = createRegistryApp(); + + const revokeResponse = await appInstance.request( + `${ME_API_KEYS_PATH}/${targetApiKeyId}`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(revokeResponse.status).toBe(204); + + const revokedAuth = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${rotateToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(revokedAuth.status).toBe(401); + const revokedBody = (await revokedAuth.json()) as { + error: { code: string }; + }; + expect(revokedBody.error.code).toBe("API_KEY_REVOKED"); + + const activeAuth = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(activeAuth.status).toBe(200); + }); + + it("returns 204 when key is already revoked", async () => { + const authToken = "clw_pat_valid-token-value"; + const authTokenHash = await hashApiKeyToken(authToken); + const revokedToken = "clw_pat_already-revoked-token-value"; + const revokedTokenHash = await hashApiKeyToken(revokedToken); + const targetApiKeyId = generateUlid(1700300000100); + + const authRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000002001", + keyPrefix: 
deriveApiKeyLookupPrefix(authToken), + keyHash: authTokenHash, + apiKeyStatus: "active", + apiKeyName: "primary", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const alreadyRevokedRow: FakeD1Row = { + apiKeyId: targetApiKeyId, + keyPrefix: deriveApiKeyLookupPrefix(revokedToken), + keyHash: revokedTokenHash, + apiKeyStatus: "revoked", + apiKeyName: "already-revoked", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const { database } = createFakeDb([authRow, alreadyRevokedRow]); + + const response = await createRegistryApp().request( + `${ME_API_KEYS_PATH}/${targetApiKeyId}`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(204); + }); +}); + describe("GET /v1/agents", () => { it("returns 401 when PAT is missing", async () => { const res = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 12236b2..6e91a9d 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -2,6 +2,7 @@ import { ADMIN_BOOTSTRAP_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, generateUlid, + ME_API_KEYS_PATH, makeHumanDid, } from "@clawdentity/protocol"; import { @@ -40,6 +41,12 @@ import { invalidAgentRevokeStateError, parseAgentRevokePath, } from "./agent-revocation.js"; +import { + apiKeyNotFoundError, + mapApiKeyMetadataRow, + parseApiKeyCreatePayload, + parseApiKeyRevokePath, +} from "./api-key-lifecycle.js"; import { type AuthenticatedHuman, createApiKeyAuth, @@ -594,6 +601,124 @@ function createRegistryApp() { return c.json({ human: c.get("human") }); }); + app.post(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = 
shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown = {}; + try { + const rawBody = await c.req.text(); + if (rawBody.trim().length > 0) { + payload = JSON.parse(rawBody); + } + } catch { + throw new AppError({ + code: "API_KEY_CREATE_INVALID", + message: exposeDetails + ? "Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const parsedPayload = parseApiKeyCreatePayload({ + payload, + environment: config.ENVIRONMENT, + }); + + const human = c.get("human"); + const apiKeyToken = generateApiKeyToken(); + const apiKeyHash = await hashApiKeyToken(apiKeyToken); + const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); + const apiKeyId = generateUlid(Date.now() + 1); + const createdAt = nowIso(); + + const db = createDb(c.env.DB); + await db.insert(api_keys).values({ + id: apiKeyId, + human_id: human.id, + key_hash: apiKeyHash, + key_prefix: apiKeyPrefix, + name: parsedPayload.name, + status: "active", + created_at: createdAt, + last_used_at: null, + }); + + return c.json( + { + apiKey: { + id: apiKeyId, + name: parsedPayload.name, + status: "active", + createdAt, + lastUsedAt: null, + token: apiKeyToken, + }, + }, + 201, + ); + }); + + app.get(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { + const human = c.get("human"); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: api_keys.id, + name: api_keys.name, + status: api_keys.status, + created_at: api_keys.created_at, + last_used_at: api_keys.last_used_at, + }) + .from(api_keys) + .where(eq(api_keys.human_id, human.id)) + .orderBy(desc(api_keys.created_at), desc(api_keys.id)); + + return c.json({ + apiKeys: rows.map(mapApiKeyMetadataRow), + }); + }); + + app.delete(`${ME_API_KEYS_PATH}/:id`, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const apiKeyId = parseApiKeyRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = 
c.get("human"); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: api_keys.id, + status: api_keys.status, + }) + .from(api_keys) + .where(and(eq(api_keys.id, apiKeyId), eq(api_keys.human_id, human.id))) + .limit(1); + + const existingKey = rows[0]; + if (!existingKey) { + throw apiKeyNotFoundError(); + } + + if (existingKey.status === "revoked") { + return c.body(null, 204); + } + + await db + .update(api_keys) + .set({ + status: "revoked", + }) + .where(and(eq(api_keys.id, apiKeyId), eq(api_keys.human_id, human.id))); + + return c.body(null, 204); + }); + app.get("/v1/agents", createApiKeyAuth(), async (c) => { const config = getConfig(c.env); const query = parseAgentListQuery({ diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index b361f49..d875eab 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -18,6 +18,7 @@ - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). - Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. +- Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `ME_API_KEYS_PATH`) so registry and CLI stay contract-synchronized. - Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. - Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. 
diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index cda3452..4dde676 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -1,2 +1,3 @@ export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; export const AGENT_REGISTRATION_CHALLENGE_PATH = "/v1/agents/challenge"; +export const ME_API_KEYS_PATH = "/v1/me/api-keys"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index ca16946..f5a204e 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -15,6 +15,7 @@ import { generateUlid, MAX_AGENT_DESCRIPTION_LENGTH, MAX_AGENT_NAME_LENGTH, + ME_API_KEYS_PATH, makeAgentDid, makeHumanDid, PROTOCOL_VERSION, @@ -34,6 +35,7 @@ describe("protocol", () => { it("exports shared endpoint constants", () => { expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); expect(AGENT_REGISTRATION_CHALLENGE_PATH).toBe("/v1/agents/challenge"); + expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); }); it("exports helpers from package root", () => { diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 3f5ffc5..017f4f0 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -23,6 +23,7 @@ export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export { ADMIN_BOOTSTRAP_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, + ME_API_KEYS_PATH, } from "./endpoints.js"; export type { ProtocolParseErrorCode } from "./errors.js"; export { ProtocolParseError } from "./errors.js"; From 4c58748a8245bac375cee10b32361df151465ff3 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 17:12:47 +0530 Subject: [PATCH 063/190] feat: add admin invite create/redeem onboarding flow and CLI commands --- AGENTS.md | 28 +- PRD.md | 41 +- README.md | 42 +- apps/cli/AGENTS.md | 2 +- apps/cli/src/AGENTS.md | 4 + apps/cli/src/commands/AGENTS.md | 7 + apps/cli/src/commands/invite.test.ts | 325 ++++++++++++++ 
apps/cli/src/commands/invite.ts | 505 +++++++++++++++++++++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/registry/src/AGENTS.md | 15 + apps/registry/src/invite-lifecycle.ts | 287 ++++++++++++ apps/registry/src/server.test.ts | 625 ++++++++++++++++++++++++++ apps/registry/src/server.ts | 340 +++++++++++++- packages/protocol/src/endpoints.ts | 2 + packages/protocol/src/index.test.ts | 4 + packages/protocol/src/index.ts | 2 + 17 files changed, 2162 insertions(+), 77 deletions(-) create mode 100644 apps/cli/src/commands/invite.test.ts create mode 100644 apps/cli/src/commands/invite.ts create mode 100644 apps/registry/src/invite-lifecycle.ts diff --git a/AGENTS.md b/AGENTS.md index 54554ad..a180049 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -2,26 +2,22 @@ ## Purpose - Define repository-wide engineering and documentation guardrails for Clawdentity. -- Keep product docs, issue specs, and execution order in sync. +- Keep product docs and issue governance in sync with the active GitHub tracker. ## Core Rules - Ship maintainable, non-duplicative changes. - Prefer small, testable increments tied to explicit issue IDs. - If a simplification/refactor is obvious, include it in the plan and ticket notes. -## Deployment-First Execution -- Enforce `T00 -> T37 -> T38` before feature implementation. -- Feature tickets `T01`-`T36` must not proceed until `T38` is complete. -- Source of truth for sequencing: `issues/EXECUTION_PLAN.md`. - -## Issue Governance -- Ticket schema and quality rules are maintained in `issues/AGENTS.md`. -- Any dependency/wave changes must update both affected `T*.md` files and `issues/EXECUTION_PLAN.md` in the same change. +## Execution Governance +- GitHub issues are the source of truth for sequencing, blockers, and rollout updates. +- Primary execution tracker: https://github.com/vrknetha/clawdentity/issues/74. +- Do not use local execution-order files as governance source. 
## Ticket Lifecycle Workflow - Operate in a self-serve loop for ticket delivery: pick an issue, execute, and keep GitHub status accurate without waiting for manual reminders. - Standard sequence for every ticket: - - Select the target issue and confirm blockers from `issues/EXECUTION_PLAN.md` and `issues/T*.md`. + - Select the target issue and confirm blockers from the GitHub issue tracker. - Start from latest `develop`: `git checkout develop && git pull --ff-only`. - Create a feature branch with `feature/` prefix scoped to the ticket. - Implement the ticket with tests/docs updates required by the issue definition. @@ -37,12 +33,12 @@ ## Documentation Sync - `README.md` must reflect current execution model and links to issue governance. - `PRD.md` must reflect current rollout order, deployment gating, and verification strategy. -- If backlog shape changes (`Txx` additions/removals), update README + PRD + execution plan together. +- If backlog shape changes, update README + PRD + the relevant GitHub issue threads in the same change. ## Validation Baseline - Run and pass: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build` for implementation changes. - Lint runs at root (`pnpm lint` via `biome check .`), not per-package. -- For planning/doc changes, verify dependency/order consistency against the current execution source of truth (the in-repo execution plan if present, otherwise the active issue tracker plan). +- For planning/doc changes, verify dependency/order consistency against the active GitHub issue tracker. ## Cloudflare Worker & Wrangler Conventions - Registry is a **Hono** app deployed as a Cloudflare Worker. Wrangler handles bundling — tsup is only for type generation and local build validation. @@ -105,13 +101,13 @@ - Use a full reset only when required for identity reprovisioning, and then also clear `~/.clawdentity/agents//` before re-onboarding. 
- Skill-only policy: no direct `clawdentity openclaw setup` execution by humans during E2E validation; the agent must run the skill flow and prompt the human only for missing invite code or confirmations. -## T00 Scaffold Best Practices -- Start T00 by confirming the deployment-first order (`T00 -> T37 -> T38`) and reviewing README/PRD/`issues/EXECUTION_PLAN.md` so documentation mirrors the execution model. +## Scaffold Best Practices +- Start by reviewing README, PRD, and the active execution tracker issue so documentation mirrors the execution model. - Define the workspace layout now: `apps/registry`, `apps/proxy`, `apps/cli`, `packages/sdk`, and `packages/protocol` (with shared tooling such as `pnpm-workspace.yaml`, `tsconfig.base.json`, and `biome.json`) so downstream tickets have a known structure. - Declare placeholder scripts for lint/test/build (e.g., `pnpm -r lint`, `pnpm -r test`, `pnpm -r build`) and identify the expected toolchain (Biome, Vitest, tsup, etc.) so future work can fill implementations without duplication. -- Document the CI entrypoints (GitHub Actions or another pipeline) that will run the above scripts, so deployment scaffolding (T37/T38) can wire the baseline checks without guessing what belongs in T00. +- Document the CI entrypoints (GitHub Actions or another pipeline) that will run the above scripts, so deployment scaffolding can wire the baseline checks without guessing what belongs in initial setup. -## T37/T38 Deployment Scaffold Best Practices +## Deployment Scaffold Best Practices - Always separate dev and production via wrangler environments — never use a single top-level D1 binding. - Keep `wrangler.jsonc` database IDs in version control (they are not secrets). Secrets go via `wrangler secret put`. - Deploy scripts should always run migrations before deploy (`db:migrate:remote && wrangler deploy`) for atomic one-touch deploys. 
diff --git a/PRD.md b/PRD.md index 5e7829d..8915883 100644 --- a/PRD.md +++ b/PRD.md @@ -179,40 +179,23 @@ Verifier must enforce: ## 9) Rollout plan -1) **Scaffold baseline (`T00`)** -2) **Define deployment scaffolding (`T37`)** -3) **Deploy and verify baseline (`T38`)** -4) Implement feature backlog (`T01`-`T36`) after deploy gate passes -5) Execute Phase 2/3 enhancements from HLD after MVP stability +1) Establish workspace and deployment baseline +2) Deploy and verify baseline environments and health checks +3) Execute MVP feature backlog after the deployment gate passes +4) Execute Phase 2/3 enhancements from HLD after MVP stability --- ## 10) Execution plan -Execution plan is defined in [`issues/EXECUTION_PLAN.md`](./issues/EXECUTION_PLAN.md). - -### Canonical sequential order -`T00 -> T37 -> T38 -> T01 -> T02 -> T03 -> T04 -> T05 -> T06 -> T07 -> T08 -> T09 -> T10 -> T11 -> T12 -> T13 -> T14 -> T15 -> T16 -> T17 -> T18 -> T19 -> T20 -> T21 -> T22 -> T23 -> T24 -> T25 -> T26 -> T27 -> T28 -> T29 -> T30 -> T31 -> T32 -> T33 -> T34 -> T35 -> T36` - -### Parallel waves (after deployment gate) -- Wave 0: `T00` -- Wave 1: `T37` -- Wave 2: `T38` -- Wave 3: `T01, T10, T20, T25` -- Wave 4: `T02, T03, T04, T11, T26` -- Wave 5: `T05, T06, T07, T12, T13, T19` -- Wave 6: `T08, T09, T14, T15, T22` -- Wave 7: `T16, T21, T24, T27, T34` -- Wave 8: `T17, T18, T23, T28, T30, T31, T32, T35` -- Wave 9: `T29, T36` -- Wave 10: `T33` - -### Issue governance -Issue authoring and quality rules are enforced in [`issues/AGENTS.md`](./issues/AGENTS.md): -- standardized schema for each ticket -- dependency blockers line required -- refactor opportunities required -- validation commands required +Execution sequencing, dependency management, and wave planning are maintained in the GitHub issue tracker. + +Primary tracker: https://github.com/vrknetha/clawdentity/issues/74. + +Governance rules: +- Treat GitHub issues as the source of truth for rollout order and blockers. 
+- Record dependency or wave changes in tracker issues at the time of change. +- Keep this PRD and `README.md` aligned with tracker-level execution decisions. --- diff --git a/README.md b/README.md index add79be..21b04f7 100644 --- a/README.md +++ b/README.md @@ -212,7 +212,7 @@ This repo is a monorepo: - Handled by: `apps/registry`, `apps/cli` - Invite-gated registration model with admin-issued invite codes. - One-agent-per-invite policy for simple quota and abuse control. -- Feature work is deployment-gated (`T00 -> T37 -> T38`) before backlog execution. +- Feature work follows a deployment-first gate tracked in GitHub issues. ### 6) Discovery and first-contact options @@ -288,43 +288,25 @@ No one shares keys/files between agents. Identity is presented per request. ## Documentation -- **PRD:** see [`PRD.md`](./PRD.md) (MVP product requirements + execution plan) -- **Issue execution plan:** see [`issues/EXECUTION_PLAN.md`](./issues/EXECUTION_PLAN.md) (deployment-first ordering + waves) -- **Issue authoring rules:** see [`issues/AGENTS.md`](./issues/AGENTS.md) (required issue schema + blockers policy) -- **Canonical ticket specs:** `issues/T00.md` through `issues/T38.md` are versioned in-repo. +- **PRD:** see [`PRD.md`](./PRD.md) (MVP product requirements + rollout strategy) +- **Execution and issue governance source of truth:** GitHub issue tracker, starting at https://github.com/vrknetha/clawdentity/issues/74. --- ## Contributing / Execution -This repo is built as a sequence of small issues with a **deployment-first gate**: +This repo is delivered through small GitHub issues with a **deployment-first gate**: -1. `T00` — workspace scaffolding -2. `T37` — deployment scaffolding contract -3. `T38` — baseline deployment verification -4. `T01`–`T36` — feature implementation after deploy gate passes +1. Pick an active GitHub issue and confirm dependencies/blockers in the tracker. +2. Implement in a feature branch with tests/docs updates. +3. 
Run required validation commands. +4. Open a PR to `develop` and post implementation evidence back on the issue. -### Backlog shape +### Governance expectations -- Total issue set: `T00`–`T38` -- Feature tickets `T01`–`T36` explicitly depend on `T38` -- Parallel execution starts only after Wave 2 (`T38`) completes - -### Issue schema - -Every issue in [`issues/`](./issues) is standardized to include: - -- `Goal` -- `In Scope` -- `Out of Scope` -- `Dependencies` + `Blockers` -- `Execution Mode` -- `Parallel Wave` -- `Required Skills` -- `Deliverables` -- `Refactor Opportunities` -- `Definition of Done` -- `Validation Steps` +- Keep issue status aligned with reality (`OPEN` while active, close with evidence when complete). +- Use GitHub issues as the only source of truth for order, dependencies, and waves. +- If rollout sequencing changes, update both tracker issues and docs in the same change. --- diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index cf5e629..6b2db44 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -8,7 +8,7 @@ - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Implement command groups under `src/commands/*` and register them from `createProgram()`. -- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `verify`, `openclaw`) so automation and docs do not drift. +- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. 
diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index b821de6..693d894 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -11,6 +11,9 @@ - Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. - Admin bootstrap must print the one-time PAT before attempting to persist it and depend on `persistBootstrapConfig` so config write failures are surfaced via CLI errors while the operator still sees the PAT. - API-key lifecycle command logic should stay in `commands/api-key.ts`; keep create/list/revoke request mapping explicit and keep token exposure limited to create output only. +- Registry invite lifecycle command logic should stay in `commands/invite.ts`; keep it strictly scoped to registry onboarding invites and separate from `commands/openclaw.ts` peer-relay invite codes. +- `invite redeem` must print the returned PAT once, then persist config in deterministic order (`registryUrl`, then `apiKey`) so bootstrap/onboarding state is predictable. +- `invite` command routes must use endpoint constants from `@clawdentity/protocol` (`INVITES_PATH`, `INVITES_REDEEM_PATH`) instead of inline path literals. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. @@ -27,4 +30,5 @@ - Command tests must capture `stdout`/`stderr` and assert exit-code behavior. - Include success, revoked, invalid token, keyset failure, CRL failure, and cache-hit scenarios for `verify`. - For OpenClaw invite/setup flow, cover invite encode/decode, config patch idempotency, and missing-file validation. +- For registry invite flow, cover admin-auth create path, public redeem path, config persistence failures, and command exit-code behavior. - Keep tests deterministic by mocking network and filesystem dependencies. 
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 60576ee..63e3d3d 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -23,6 +23,13 @@ - `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. - Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. +## Registry Invite Command Rules +- `invite create` is for registry onboarding invites only (admin-authenticated), not peer-relay invite-code generation. +- `invite create` must call `INVITES_PATH` from `@clawdentity/protocol` and include PAT bearer auth from resolved CLI config. +- `invite redeem` must call `INVITES_REDEEM_PATH` from `@clawdentity/protocol` without PAT auth and must persist returned PAT to local config. +- `invite redeem` must print the plaintext PAT token once before config persistence so operators can recover from local write failures. +- Keep registry invite error mapping stable for `400`, `401`, `403`, `404`, `409`, and `5xx` responses. + ## Admin Command Rules - `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. - `admin bootstrap` must import `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` instead of duplicating endpoint literals in command code/tests. 
diff --git a/apps/cli/src/commands/invite.test.ts b/apps/cli/src/commands/invite.test.ts new file mode 100644 index 0000000..839b8a9 --- /dev/null +++ b/apps/cli/src/commands/invite.test.ts @@ -0,0 +1,325 @@ +import { INVITES_PATH, INVITES_REDEEM_PATH } from "@clawdentity/protocol"; +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import type { setConfigValue } from "../config/manager.js"; +import { + createInvite, + createInviteCommand, + persistRedeemConfig, + redeemInvite, +} from "./invite.js"; + +const mockFetch = vi.fn(); + +const createJsonResponse = (status: number, body: unknown): Response => { + return { + ok: status >= 200 && status < 300, + status, + json: vi.fn(async () => body), + } as unknown as Response; +}; + +async function runInviteCommand( + args: string[], + input: { + fetchImpl?: typeof fetch; + resolveConfigImpl?: () => Promise<{ registryUrl: string; apiKey?: string }>; + setConfigValueImpl?: typeof setConfigValue; + } = {}, +) { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createInviteCommand({ + fetchImpl: input.fetchImpl ?? (mockFetch as unknown as typeof fetch), + resolveConfigImpl: + input.resolveConfigImpl ?? 
+ (async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_local", + })), + setConfigValueImpl: input.setConfigValueImpl, + }); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "invite", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stdout: stdout.join(""), + stderr: stderr.join(""), + }; +} + +describe("invite command helpers", () => { + beforeEach(() => { + vi.clearAllMocks(); + mockFetch.mockReset(); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("creates invite with PAT auth", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(201, { + invite: { + id: "01KJ8E2A4F8B10V8R8A6T8XKZ9", + code: "clw_invite_123", + createdAt: "2026-02-16T00:00:00.000Z", + expiresAt: null, + }, + }), + ); + + const result = await createInvite( + { + expiresAt: "2026-02-20T00:00:00.000Z", + }, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + apiKey: "clw_pat_admin", + }), + }, + ); + + expect(result.invite.code).toBe("clw_invite_123"); + expect(result.registryUrl).toBe("https://api.clawdentity.com/"); + expect(mockFetch).toHaveBeenCalledTimes(1); + const [calledUrl, calledInit] = mockFetch.mock.calls[0] as [ + string, + RequestInit, + ]; + expect(calledUrl).toBe(`https://api.clawdentity.com${INVITES_PATH}`); + expect(calledInit.method).toBe("POST"); + expect((calledInit.headers as Record).authorization).toBe( + "Bearer clw_pat_admin", + ); + expect(JSON.parse(String(calledInit.body))).toEqual({ + expiresAt: "2026-02-20T00:00:00.000Z", + 
}); + }); + + it("fails invite create when local API key is missing", async () => { + await expect( + createInvite( + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_INVITE_MISSING_LOCAL_CREDENTIALS", + }); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("redeems invite and returns PAT payload", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(201, { + apiKey: { + id: "01KJ8E2A4F8B10V8R8A6T8XKZA", + name: "invite-issued", + token: "clw_pat_invite_token", + }, + }), + ); + + const result = await redeemInvite( + "clw_invite_123", + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }, + ); + + expect(result.apiKeyToken).toBe("clw_pat_invite_token"); + expect(result.apiKeyName).toBe("invite-issued"); + const [calledUrl, calledInit] = mockFetch.mock.calls[0] as [ + string, + RequestInit, + ]; + expect(calledUrl).toBe(`https://api.clawdentity.com${INVITES_REDEEM_PATH}`); + expect(calledInit.method).toBe("POST"); + expect((calledInit.headers as Record).authorization).toBe( + undefined, + ); + expect(JSON.parse(String(calledInit.body))).toEqual({ + code: "clw_invite_123", + }); + }); + + it("maps invalid invite redeem response", async () => { + mockFetch.mockResolvedValueOnce(createJsonResponse(201, { apiKey: {} })); + + await expect( + redeemInvite( + "clw_invite_123", + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_INVITE_REDEEM_INVALID_RESPONSE", + message: "Invite redeem response is invalid", + }); + }); +}); + +describe("persist redeem config", () => { + it("saves registry url and api key sequentially", async () => { + const setConfigValueMock = 
vi.fn(async () => {}); + + await persistRedeemConfig("https://api.clawdentity.com/", "token", { + setConfigValueImpl: setConfigValueMock, + }); + + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 1, + "registryUrl", + "https://api.clawdentity.com/", + ); + expect(setConfigValueMock).toHaveBeenNthCalledWith(2, "apiKey", "token"); + }); + + it("throws CLI error when config persistence fails", async () => { + const setConfigValueMock = vi.fn(async () => { + throw new Error("disk-full"); + }); + + await expect( + persistRedeemConfig("https://api.clawdentity.com/", "token", { + setConfigValueImpl: setConfigValueMock, + }), + ).rejects.toMatchObject({ + code: "CLI_INVITE_REDEEM_CONFIG_PERSISTENCE_FAILED", + message: "Failed to save redeemed API key locally", + }); + }); +}); + +describe("invite command output", () => { + beforeEach(() => { + vi.clearAllMocks(); + mockFetch.mockReset(); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("prints invite create output", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(201, { + invite: { + id: "01KJ8E2A4F8B10V8R8A6T8XKZ9", + code: "clw_invite_123", + expiresAt: null, + }, + }), + ); + + const result = await runInviteCommand(["create"]); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Invite created"); + expect(result.stdout).toContain("Code: clw_invite_123"); + expect(result.stdout).toContain("Expires At: never"); + }); + + it("prints token once and saves config for redeem", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(201, { + apiKey: { + id: "01KJ8E2A4F8B10V8R8A6T8XKZA", + name: "invite-issued", + token: "clw_pat_invite_token", + }, + }), + ); + const setConfigValueMock = vi.fn(async () => {}); + + const result = await runInviteCommand(["redeem", "clw_invite_123"], { + setConfigValueImpl: setConfigValueMock, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }); + + 
expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Invite redeemed"); + expect(result.stdout).toContain("API key token (shown once):"); + expect(result.stdout).toContain("clw_pat_invite_token"); + expect(result.stdout).toContain("API key saved to local config"); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 1, + "registryUrl", + "https://api.clawdentity.com/", + ); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 2, + "apiKey", + "clw_pat_invite_token", + ); + }); + + it("sets exit code and stderr on create failure", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(403, { + error: { + code: "ADMIN_ONLY", + message: "admin role required", + }, + }), + ); + + const result = await runInviteCommand(["create"]); + + expect(result.exitCode).toBe(1); + expect(result.stderr).toContain("Invite creation requires admin access"); + }); +}); diff --git a/apps/cli/src/commands/invite.ts b/apps/cli/src/commands/invite.ts new file mode 100644 index 0000000..1320cc5 --- /dev/null +++ b/apps/cli/src/commands/invite.ts @@ -0,0 +1,505 @@ +import { INVITES_PATH, INVITES_REDEEM_PATH } from "@clawdentity/protocol"; +import { AppError, createLogger } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { + type CliConfig, + resolveConfig, + setConfigValue, +} from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "invite" }); + +type InviteCreateOptions = { + expiresAt?: string; + registryUrl?: string; +}; + +type InviteRedeemOptions = { + registryUrl?: string; +}; + +type InviteRecord = { + code: string; + id?: string; + createdAt?: string; + expiresAt?: string | null; +}; + +export type InviteCreateResult = { + invite: InviteRecord; + registryUrl: string; +}; + +export type InviteRedeemResult = { + apiKeyToken: string; + apiKeyId?: string; + apiKeyName?: string; + registryUrl: 
string; +}; + +type RegistryErrorEnvelope = { + error?: { + code?: string; + message?: string; + }; +}; + +type InviteDependencies = { + fetchImpl?: typeof fetch; + resolveConfigImpl?: () => Promise; +}; + +type InvitePersistenceDependencies = { + setConfigValueImpl?: typeof setConfigValue; +}; + +type InviteCommandDependencies = InviteDependencies & + InvitePersistenceDependencies; + +type InviteRuntime = { + fetchImpl: typeof fetch; + registryUrl: string; + config: CliConfig; +}; + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +function parseNonEmptyString(value: unknown): string { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +} + +function createCliError(code: string, message: string): AppError { + return new AppError({ + code, + message, + status: 400, + }); +} + +function resolveRegistryUrl(input: { + overrideRegistryUrl: string | undefined; + configRegistryUrl: string; +}): string { + const candidate = + parseNonEmptyString(input.overrideRegistryUrl) || input.configRegistryUrl; + + try { + return new URL(candidate).toString(); + } catch { + throw createCliError( + "CLI_INVITE_INVALID_REGISTRY_URL", + "Registry URL is invalid", + ); + } +} + +function requireApiKey(config: CliConfig): string { + if (typeof config.apiKey === "string" && config.apiKey.trim().length > 0) { + return config.apiKey; + } + + throw createCliError( + "CLI_INVITE_MISSING_LOCAL_CREDENTIALS", + "API key is not configured. Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", + ); +} + +function toRegistryRequestUrl(registryUrl: string, path: string): string { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? 
registryUrl + : `${registryUrl}/`; + + return new URL(path.slice(1), normalizedBaseUrl).toString(); +} + +function extractRegistryErrorCode(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.code !== "string") { + return undefined; + } + + const trimmed = envelope.error.code.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +function extractRegistryErrorMessage(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const trimmed = envelope.error.message.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +async function executeInviteRequest(input: { + fetchImpl: typeof fetch; + url: string; + init: RequestInit; +}): Promise { + try { + return await input.fetchImpl(input.url, input.init); + } catch { + throw createCliError( + "CLI_INVITE_REQUEST_FAILED", + "Unable to connect to the registry. Check network access and registryUrl.", + ); + } +} + +function mapCreateInviteError(status: number, payload: unknown): string { + const errorCode = extractRegistryErrorCode(payload); + const registryMessage = extractRegistryErrorMessage(payload); + + if (status === 401) { + return registryMessage + ? `Registry authentication failed (401): ${registryMessage}` + : "Registry authentication failed (401). Check your API key."; + } + + if (status === 403) { + return registryMessage + ? `Invite creation requires admin access (403): ${registryMessage}` + : "Invite creation requires admin access (403)."; + } + + if (status === 400) { + return registryMessage + ? 
`Registry rejected invite request (400): ${registryMessage}` + : "Registry rejected invite request (400)."; + } + + if (status >= 500) { + return `Registry server error (${status}). Try again later.`; + } + + if (errorCode && registryMessage) { + return `Invite creation failed (${errorCode}): ${registryMessage}`; + } + + if (registryMessage) { + return `Invite creation failed (${status}): ${registryMessage}`; + } + + return `Invite creation failed (${status})`; +} + +function mapRedeemInviteError(status: number, payload: unknown): string { + const errorCode = extractRegistryErrorCode(payload); + const registryMessage = extractRegistryErrorMessage(payload); + + if ( + errorCode === "INVITE_REDEEM_ALREADY_USED" || + errorCode === "INVITE_REDEEM_ALREADY_REDEEMED" + ) { + return "Invite code has already been redeemed"; + } + + if (errorCode === "INVITE_REDEEM_EXPIRED") { + return "Invite code has expired"; + } + + if ( + errorCode === "INVITE_REDEEM_CODE_INVALID" || + errorCode === "INVITE_REDEEM_INVALID_CODE" + ) { + return "Invite code is invalid"; + } + + if (status === 400 || status === 404 || status === 409) { + return registryMessage + ? `Invite redeem failed (${status}): ${registryMessage}` + : "Invite code is invalid or unavailable"; + } + + if (status >= 500) { + return `Registry server error (${status}). Try again later.`; + } + + if (registryMessage) { + return `Invite redeem failed (${status}): ${registryMessage}`; + } + + return `Invite redeem failed (${status})`; +} + +function parseInviteRecord(payload: unknown): InviteRecord { + if (!isRecord(payload)) { + throw createCliError( + "CLI_INVITE_CREATE_INVALID_RESPONSE", + "Invite response is invalid", + ); + } + + const source = isRecord(payload.invite) ? 
payload.invite : payload; + const code = parseNonEmptyString(source.code); + if (code.length === 0) { + throw createCliError( + "CLI_INVITE_CREATE_INVALID_RESPONSE", + "Invite response is invalid", + ); + } + + const invite: InviteRecord = { + code, + }; + + const id = parseNonEmptyString(source.id); + if (id.length > 0) { + invite.id = id; + } + + const createdAt = parseNonEmptyString(source.createdAt); + if (createdAt.length > 0) { + invite.createdAt = createdAt; + } + + if (source.expiresAt === null || typeof source.expiresAt === "string") { + invite.expiresAt = source.expiresAt; + } + + return invite; +} + +function parseInviteRedeemResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_INVITE_REDEEM_INVALID_RESPONSE", + "Invite redeem response is invalid", + ); + } + + const apiKeySource = isRecord(payload.apiKey) ? payload.apiKey : payload; + const apiKeyToken = parseNonEmptyString( + isRecord(payload.apiKey) ? payload.apiKey.token : payload.token, + ); + if (apiKeyToken.length === 0) { + throw createCliError( + "CLI_INVITE_REDEEM_INVALID_RESPONSE", + "Invite redeem response is invalid", + ); + } + + const apiKeyId = parseNonEmptyString(apiKeySource.id); + const apiKeyName = parseNonEmptyString(apiKeySource.name); + + return { + apiKeyToken, + apiKeyId: apiKeyId.length > 0 ? apiKeyId : undefined, + apiKeyName: apiKeyName.length > 0 ? apiKeyName : undefined, + }; +} + +async function resolveInviteRuntime( + overrideRegistryUrl: string | undefined, + dependencies: InviteDependencies, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? 
resolveConfig; + const config = await resolveConfigImpl(); + const registryUrl = resolveRegistryUrl({ + overrideRegistryUrl, + configRegistryUrl: config.registryUrl, + }); + + return { + fetchImpl, + registryUrl, + config, + }; +} + +export async function createInvite( + options: InviteCreateOptions, + dependencies: InviteDependencies = {}, +): Promise { + const runtime = await resolveInviteRuntime(options.registryUrl, dependencies); + const apiKey = requireApiKey(runtime.config); + + const response = await executeInviteRequest({ + fetchImpl: runtime.fetchImpl, + url: toRegistryRequestUrl(runtime.registryUrl, INVITES_PATH), + init: { + method: "POST", + headers: { + authorization: `Bearer ${apiKey}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + expiresAt: parseNonEmptyString(options.expiresAt) || undefined, + }), + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_INVITE_CREATE_FAILED", + mapCreateInviteError(response.status, responseBody), + ); + } + + return { + invite: parseInviteRecord(responseBody), + registryUrl: runtime.registryUrl, + }; +} + +export async function redeemInvite( + code: string, + options: InviteRedeemOptions, + dependencies: InviteDependencies = {}, +): Promise { + const inviteCode = parseNonEmptyString(code); + if (inviteCode.length === 0) { + throw createCliError( + "CLI_INVITE_REDEEM_CODE_REQUIRED", + "Invite code is required", + ); + } + + const runtime = await resolveInviteRuntime(options.registryUrl, dependencies); + const response = await executeInviteRequest({ + fetchImpl: runtime.fetchImpl, + url: toRegistryRequestUrl(runtime.registryUrl, INVITES_REDEEM_PATH), + init: { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ code: inviteCode }), + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_INVITE_REDEEM_FAILED", + 
mapRedeemInviteError(response.status, responseBody), + ); + } + + return { + ...parseInviteRedeemResponse(responseBody), + registryUrl: runtime.registryUrl, + }; +} + +export async function persistRedeemConfig( + registryUrl: string, + apiKeyToken: string, + dependencies: InvitePersistenceDependencies = {}, +): Promise { + const setConfigValueImpl = dependencies.setConfigValueImpl ?? setConfigValue; + + try { + await setConfigValueImpl("registryUrl", registryUrl); + await setConfigValueImpl("apiKey", apiKeyToken); + } catch (error) { + logger.warn("cli.invite_redeem_config_persist_failed", { + errorName: error instanceof Error ? error.name : "unknown", + }); + throw createCliError( + "CLI_INVITE_REDEEM_CONFIG_PERSISTENCE_FAILED", + "Failed to save redeemed API key locally", + ); + } +} + +export const createInviteCommand = ( + dependencies: InviteCommandDependencies = {}, +): Command => { + const inviteCommand = new Command("invite").description( + "Manage registry onboarding invites (not OpenClaw peer relay invites)", + ); + + inviteCommand + .command("create") + .description("Create a registry invite code (admin only)") + .option("--expires-at ", "Optional invite expiry (ISO-8601)") + .option("--registry-url ", "Override registry URL") + .action( + withErrorHandling( + "invite create", + async (options: InviteCreateOptions) => { + const result = await createInvite(options, dependencies); + + logger.info("cli.invite_created", { + code: result.invite.code, + id: result.invite.id, + registryUrl: result.registryUrl, + }); + + writeStdoutLine("Invite created"); + writeStdoutLine(`Code: ${result.invite.code}`); + if (result.invite.id) { + writeStdoutLine(`ID: ${result.invite.id}`); + } + + writeStdoutLine(`Expires At: ${result.invite.expiresAt ?? 
"never"}`); + }, + ), + ); + + inviteCommand + .command("redeem ") + .description("Redeem a registry invite code and store PAT locally") + .option("--registry-url ", "Override registry URL") + .action( + withErrorHandling( + "invite redeem", + async (code: string, options: InviteRedeemOptions) => { + const result = await redeemInvite(code, options, dependencies); + + logger.info("cli.invite_redeemed", { + apiKeyId: result.apiKeyId, + apiKeyName: result.apiKeyName, + registryUrl: result.registryUrl, + }); + + writeStdoutLine("Invite redeemed"); + if (result.apiKeyName) { + writeStdoutLine(`API key name: ${result.apiKeyName}`); + } + + writeStdoutLine("API key token (shown once):"); + writeStdoutLine(result.apiKeyToken); + + await persistRedeemConfig( + result.registryUrl, + result.apiKeyToken, + dependencies, + ); + writeStdoutLine("API key saved to local config"); + }, + ), + ); + + return inviteCommand; +}; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 4476945..0535877 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -58,6 +58,14 @@ describe("cli", () => { expect(hasOpenclawCommand).toBe(true); }); + it("registers the invite command", () => { + const hasInviteCommand = createProgram() + .commands.map((command) => command.name()) + .includes("invite"); + + expect(hasInviteCommand).toBe(true); + }); + it("prints version output", async () => { const output: string[] = []; const program = createProgram(); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 71c0c74..c2724df 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -3,6 +3,7 @@ import { createAdminCommand } from "./commands/admin.js"; import { createAgentCommand } from "./commands/agent.js"; import { createApiKeyCommand } from "./commands/api-key.js"; import { createConfigCommand } from "./commands/config.js"; +import { createInviteCommand } from "./commands/invite.js"; import { createOpenclawCommand } from 
"./commands/openclaw.js"; import { createVerifyCommand } from "./commands/verify.js"; @@ -16,6 +17,7 @@ export const createProgram = (): Command => { .addCommand(createAgentCommand()) .addCommand(createApiKeyCommand()) .addCommand(createConfigCommand()) + .addCommand(createInviteCommand()) .addCommand(createOpenclawCommand()) .addCommand(createVerifyCommand()); }; diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 0b44e6b..d900fcf 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -62,6 +62,21 @@ - Keep ordering deterministic (`id` descending) and compute `nextCursor` from the last item in the returned page. - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors`: generic 400 message in `production`, detailed `fieldErrors` in `development`/`test`. +## POST /v1/invites Contract +- Require PAT auth via `createApiKeyAuth`. +- Enforce admin-only access with explicit `403 INVITE_CREATE_FORBIDDEN` for authenticated non-admin callers. +- Validate payload in a dedicated helper module (`invite-lifecycle.ts`) and keep malformed-json handling environment-aware (`INVITE_CREATE_INVALID`). +- Generate invite codes server-side only; never accept client-supplied codes for create. +- Persist one invite row per request in `invites` with `redeemed_by = null` and optional `expires_at`. + +## POST /v1/invites/redeem Contract +- Public endpoint: no PAT required. +- Validate payload in `invite-lifecycle.ts` with explicit error code `INVITE_REDEEM_INVALID`. +- One-time semantics are enforced by guarded update (`redeemed_by IS NULL`); repeated redeem attempts must return explicit invite lifecycle errors. +- Expired invites must be rejected with `INVITE_REDEEM_EXPIRED` before token issuance. +- Successful redeem must create a new active user human and mint a PAT in the same mutation unit as invite consumption. 
+- Keep mutation flow transaction-first; on local fallback (no transaction support), apply compensation rollback so failed redeem attempts do not leave partially-created humans or consumed invites. + ## POST /v1/me/api-keys Contract - Require PAT auth via `createApiKeyAuth`; unauthenticated calls must fail before payload parsing. - Accept optional `{ name }`; default to `api-key` when omitted. diff --git a/apps/registry/src/invite-lifecycle.ts b/apps/registry/src/invite-lifecycle.ts new file mode 100644 index 0000000..b6d0075 --- /dev/null +++ b/apps/registry/src/invite-lifecycle.ts @@ -0,0 +1,287 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +const DEFAULT_INVITE_REDEEM_DISPLAY_NAME = "User"; +const DEFAULT_INVITE_REDEEM_API_KEY_NAME = "invite"; +const MAX_DISPLAY_NAME_LENGTH = 64; +const MAX_API_KEY_NAME_LENGTH = 64; +const MAX_INVITE_CODE_LENGTH = 128; +const INVITE_CODE_PREFIX = "clw_inv_"; +const INVITE_CODE_RANDOM_BYTES = 24; + +type InviteCreatePayload = { + expiresAt: string | null; +}; + +type InviteRedeemPayload = { + code: string; + displayName: string; + apiKeyName: string; +}; + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function parseOptionalTrimmedString(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + if (typeof value !== "string") { + return undefined; + } + + const trimmed = value.trim(); + return trimmed.length > 0 ? 
trimmed : undefined; +} + +function inviteCreateInvalidError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "INVITE_CREATE_INVALID", + message: exposeDetails + ? "Invite create payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +function inviteRedeemInvalidError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "INVITE_REDEEM_INVALID", + message: exposeDetails + ? "Invite redeem payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +export function parseInviteCreatePayload(input: { + payload: unknown; + environment: RegistryConfig["ENVIRONMENT"]; + now: Date; +}): InviteCreatePayload { + if ( + typeof input.payload !== "object" || + input.payload === null || + Array.isArray(input.payload) + ) { + throw inviteCreateInvalidError({ + environment: input.environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, + formErrors: [], + }, + }); + } + + const payload = input.payload as Record; + const fieldErrors: Record = {}; + + if ( + payload.expiresAt !== undefined && + payload.expiresAt !== null && + typeof payload.expiresAt !== "string" + ) { + fieldErrors.expiresAt = ["expiresAt must be a string or null"]; + } + + let expiresAt: string | null = null; + if (typeof payload.expiresAt === "string") { + const expiresAtInput = payload.expiresAt.trim(); + if (expiresAtInput.length === 0) { + fieldErrors.expiresAt = ["expiresAt must not be empty"]; + } else { + const expiresAtMillis = Date.parse(expiresAtInput); + if (!Number.isFinite(expiresAtMillis)) { + fieldErrors.expiresAt = ["expiresAt must be a valid ISO-8601 datetime"]; + } else if (expiresAtMillis <= input.now.getTime()) { + fieldErrors.expiresAt = ["expiresAt must be in the future"]; + } else { + expiresAt = new Date(expiresAtMillis).toISOString(); + } + } + } + + if (Object.keys(fieldErrors).length > 0) { + throw inviteCreateInvalidError({ + environment: input.environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return { expiresAt }; +} + +export function parseInviteRedeemPayload(input: { + payload: unknown; + environment: RegistryConfig["ENVIRONMENT"]; +}): InviteRedeemPayload { + if ( + typeof input.payload !== "object" || + input.payload === null || + Array.isArray(input.payload) + ) { + throw inviteRedeemInvalidError({ + environment: input.environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, + formErrors: [], + }, + 
}); + } + + const payload = input.payload as Record; + const fieldErrors: Record = {}; + + if (typeof payload.code !== "string") { + fieldErrors.code = ["code is required"]; + } + + const code = typeof payload.code === "string" ? payload.code.trim() : ""; + if (code.length === 0 && !fieldErrors.code) { + fieldErrors.code = ["code is required"]; + } else if (code.length > MAX_INVITE_CODE_LENGTH) { + fieldErrors.code = [ + `code must be at most ${MAX_INVITE_CODE_LENGTH} characters`, + ]; + } + + if ( + payload.displayName !== undefined && + typeof payload.displayName !== "string" + ) { + fieldErrors.displayName = ["displayName must be a string"]; + } + + if ( + payload.apiKeyName !== undefined && + typeof payload.apiKeyName !== "string" + ) { + fieldErrors.apiKeyName = ["apiKeyName must be a string"]; + } + + const displayNameInput = parseOptionalTrimmedString(payload.displayName); + if ( + payload.displayName !== undefined && + displayNameInput === undefined && + !fieldErrors.displayName + ) { + fieldErrors.displayName = ["displayName must not be empty"]; + } + + const apiKeyNameInput = parseOptionalTrimmedString(payload.apiKeyName); + if ( + payload.apiKeyName !== undefined && + apiKeyNameInput === undefined && + !fieldErrors.apiKeyName + ) { + fieldErrors.apiKeyName = ["apiKeyName must not be empty"]; + } + + const displayName = displayNameInput ?? DEFAULT_INVITE_REDEEM_DISPLAY_NAME; + const apiKeyName = apiKeyNameInput ?? 
DEFAULT_INVITE_REDEEM_API_KEY_NAME; + + if (displayName.length > MAX_DISPLAY_NAME_LENGTH) { + fieldErrors.displayName = [ + `displayName must be at most ${MAX_DISPLAY_NAME_LENGTH} characters`, + ]; + } else if (hasControlChars(displayName)) { + fieldErrors.displayName = ["displayName contains control characters"]; + } + + if (apiKeyName.length > MAX_API_KEY_NAME_LENGTH) { + fieldErrors.apiKeyName = [ + `apiKeyName must be at most ${MAX_API_KEY_NAME_LENGTH} characters`, + ]; + } else if (hasControlChars(apiKeyName)) { + fieldErrors.apiKeyName = ["apiKeyName contains control characters"]; + } + + if (Object.keys(fieldErrors).length > 0) { + throw inviteRedeemInvalidError({ + environment: input.environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return { + code, + displayName, + apiKeyName, + }; +} + +export function generateInviteCode(): string { + const bytes = new Uint8Array(INVITE_CODE_RANDOM_BYTES); + crypto.getRandomValues(bytes); + return `${INVITE_CODE_PREFIX}${encodeBase64url(bytes)}`; +} + +export function inviteCreateForbiddenError(): AppError { + return new AppError({ + code: "INVITE_CREATE_FORBIDDEN", + message: "Admin role is required", + status: 403, + expose: true, + }); +} + +export function inviteRedeemCodeInvalidError(): AppError { + return new AppError({ + code: "INVITE_REDEEM_CODE_INVALID", + message: "Invite code is invalid", + status: 400, + expose: true, + }); +} + +export function inviteRedeemExpiredError(): AppError { + return new AppError({ + code: "INVITE_REDEEM_EXPIRED", + message: "Invite code has expired", + status: 400, + expose: true, + }); +} + +export function inviteRedeemAlreadyUsedError(): AppError { + return new AppError({ + code: "INVITE_REDEEM_ALREADY_USED", + message: "Invite code has already been redeemed", + status: 409, + expose: true, + }); +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 2852bda..a323e27 100644 --- a/apps/registry/src/server.test.ts +++ 
b/apps/registry/src/server.test.ts @@ -5,6 +5,8 @@ import { canonicalizeAgentRegistrationProof, encodeBase64url, generateUlid, + INVITES_PATH, + INVITES_REDEEM_PATH, ME_API_KEYS_PATH, makeAgentDid, makeHumanDid, @@ -105,6 +107,8 @@ type FakeAgentUpdateRow = Record; type FakeRevocationInsertRow = Record; type FakeAgentRegistrationChallengeInsertRow = Record; type FakeAgentRegistrationChallengeUpdateRow = Record; +type FakeInviteInsertRow = Record; +type FakeInviteUpdateRow = Record; type FakeRevocationRow = { id: string; jti: string; @@ -136,6 +140,15 @@ type FakeAgentRegistrationChallengeRow = { createdAt: string; updatedAt: string; }; +type FakeInviteRow = { + id: string; + code: string; + createdBy: string; + redeemedBy: string | null; + agentId: string | null; + expiresAt: string | null; + createdAt: string; +}; type FakeAgentSelectRow = { id: string; @@ -156,6 +169,7 @@ type FakeDbOptions = { beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; failApiKeyInsertCount?: number; failBeginTransaction?: boolean; + inviteRows?: FakeInviteRow[]; revocationRows?: FakeRevocationRow[]; registrationChallengeRows?: FakeAgentRegistrationChallengeRow[]; }; @@ -706,6 +720,83 @@ function resolveAgentRegistrationChallengeSelectRows(options: { .slice(0, limit); } +function getInviteSelectColumnValue( + row: FakeInviteRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "code") { + return row.code; + } + if (column === "created_by") { + return row.createdBy; + } + if (column === "redeemed_by") { + return row.redeemedBy; + } + if (column === "agent_id") { + return row.agentId; + } + if (column === "expires_at") { + return row.expiresAt; + } + if (column === "created_at") { + return row.createdAt; + } + return undefined; +} + +function resolveInviteSelectRows(options: { + query: string; + params: unknown[]; + inviteRows: FakeInviteRow[]; +}): FakeInviteRow[] { + const whereClause = extractWhereClause(options.query); + const 
equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasCodeFilter = hasFilter(whereClause, "code"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasRedeemedByFilter = hasFilter(whereClause, "redeemed_by"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const codeFilter = + hasCodeFilter && typeof equalityParams.values.code?.[0] === "string" + ? String(equalityParams.values.code[0]) + : undefined; + const idFilter = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const redeemedByFilter = hasRedeemedByFilter + ? (equalityParams.values.redeemed_by?.[0] as string | null | undefined) + : undefined; + + const requiresRedeemedByNull = + whereClause.includes("redeemed_by") && whereClause.includes("is null"); + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.inviteRows.length; + + return options.inviteRows + .filter((row) => (codeFilter ? row.code === codeFilter : true)) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => + redeemedByFilter !== undefined + ? row.redeemedBy === redeemedByFilter + : true, + ) + .filter((row) => (requiresRedeemedByNull ? row.redeemedBy === null : true)) + .slice(0, limit); +} + function getCrlSelectColumnValue( row: FakeCrlSelectRow, column: string, @@ -781,10 +872,13 @@ function createFakeDb( []; const agentRegistrationChallengeUpdates: FakeAgentRegistrationChallengeUpdateRow[] = []; + const inviteInserts: FakeInviteInsertRow[] = []; + const inviteUpdates: FakeInviteUpdateRow[] = []; const revocationRows = [...(options.revocationRows ?? [])]; const registrationChallengeRows = [ ...(options.registrationChallengeRows ?? []), ]; + const inviteRows = [...(options.inviteRows ?? 
[])]; const humanRows = rows.reduce((acc, row) => { if (acc.some((item) => item.id === row.humanId)) { return acc; @@ -979,6 +1073,35 @@ function createFakeDb( }), }; } + if ( + (normalizedQuery.includes('from "invites"') || + normalizedQuery.includes("from invites")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveInviteSelectRows({ + query, + params, + inviteRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getInviteSelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), + }; + } if ( (normalizedQuery.includes('from "revocations"') || normalizedQuery.includes("from revocations")) && @@ -1094,6 +1217,22 @@ function createFakeDb( ), ); } + if ( + normalizedQuery.includes('from "invites"') || + normalizedQuery.includes("from invites") + ) { + const resultRows = resolveInviteSelectRows({ + query, + params, + inviteRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getInviteSelectColumnValue(row, column), + ), + ); + } if ( normalizedQuery.includes('from "revocations"') || normalizedQuery.includes("from revocations") @@ -1277,6 +1416,107 @@ function createFakeDb( changes = 1; } + if ( + normalizedQuery.includes('insert into "invites"') || + normalizedQuery.includes("insert into invites") + ) { + const columns = parseInsertColumns(query, "invites"); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + inviteInserts.push(row); + + if ( + typeof row.id === "string" && + typeof row.code === "string" && + typeof row.created_by === "string" && + typeof row.created_at === "string" + ) { + inviteRows.push({ + id: row.id, + code: row.code, + createdBy: 
row.created_by, + redeemedBy: + typeof row.redeemed_by === "string" ? row.redeemed_by : null, + agentId: typeof row.agent_id === "string" ? row.agent_id : null, + expiresAt: + typeof row.expires_at === "string" ? row.expires_at : null, + createdAt: row.created_at, + }); + } + + changes = 1; + } + if ( + normalizedQuery.includes('update "invites"') || + normalizedQuery.includes("update invites") + ) { + const setColumns = parseUpdateSetColumns(query, "invites"); + const nextValues = setColumns.reduce>( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + const whereClause = extractWhereClause(query); + const whereParams = params.slice(setColumns.length); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: whereParams, + }); + + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const redeemedByFilter = hasFilter(whereClause, "redeemed_by") + ? (equalityParams.values.redeemed_by?.[0] as + | string + | null + | undefined) + : undefined; + const requiresRedeemedByNull = + whereClause.includes("redeemed_by") && + whereClause.includes("is null"); + + let matchedRows = 0; + for (const row of inviteRows) { + if (idFilter && row.id !== idFilter) { + continue; + } + if (requiresRedeemedByNull && row.redeemedBy !== null) { + continue; + } + if ( + redeemedByFilter !== undefined && + row.redeemedBy !== redeemedByFilter + ) { + continue; + } + + matchedRows += 1; + if ( + typeof nextValues.redeemed_by === "string" || + nextValues.redeemed_by === null + ) { + row.redeemedBy = nextValues.redeemed_by; + } + } + + inviteUpdates.push({ + ...nextValues, + id: idFilter, + redeemed_by_where: redeemedByFilter, + redeemed_by_is_null_where: requiresRedeemedByNull, + matched_rows: matchedRows, + }); + changes = matchedRows; + } if ( normalizedQuery.includes('delete from "humans"') || normalizedQuery.includes("delete from humans") @@ -1588,6 +1828,9 @@ 
function createFakeDb( agentUpdates, agentRegistrationChallengeInserts, agentRegistrationChallengeUpdates, + inviteInserts, + inviteUpdates, + inviteRows, revocationInserts, registrationChallengeRows, }; @@ -2662,6 +2905,388 @@ describe("GET /v1/me", () => { }); }); +describe(`POST ${INVITES_PATH}`, () => { + it("returns 401 when PAT is missing", async () => { + const response = await createRegistryApp().request( + INVITES_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 403 when PAT owner is not an admin", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([ + { + ...authRow, + humanRole: "user", + }, + ]); + + const response = await createRegistryApp().request( + INVITES_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("INVITE_CREATE_FORBIDDEN"); + }); + + it("returns 400 when payload is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + INVITES_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + expiresAt: "not-an-iso-date", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { + error: { + code: string; + 
details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("INVITE_CREATE_INVALID"); + expect(body.error.details?.fieldErrors?.expiresAt).toEqual([ + "expiresAt must be a valid ISO-8601 datetime", + ]); + }); + + it("creates invite code and persists invite row", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, inviteInserts } = createFakeDb([authRow]); + const expiresAt = new Date(Date.now() + 60 * 60 * 1000).toISOString(); + + const response = await createRegistryApp().request( + INVITES_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + expiresAt, + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(201); + const body = (await response.json()) as { + invite: { + id: string; + code: string; + createdBy: string; + expiresAt: string | null; + createdAt: string; + }; + }; + expect(body.invite.code.startsWith("clw_inv_")).toBe(true); + expect(body.invite.createdBy).toBe("human-1"); + expect(body.invite.expiresAt).toBe(expiresAt); + expect(body.invite.createdAt).toEqual(expect.any(String)); + + expect(inviteInserts).toHaveLength(1); + expect(inviteInserts[0]?.id).toBe(body.invite.id); + expect(inviteInserts[0]?.code).toBe(body.invite.code); + expect(inviteInserts[0]?.created_by).toBe("human-1"); + expect(inviteInserts[0]?.expires_at).toBe(expiresAt); + }); +}); + +describe(`POST ${INVITES_REDEEM_PATH}`, () => { + it("returns 400 when payload is invalid", async () => { + const response = await createRegistryApp().request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + 
expect(body.error.code).toBe("INVITE_REDEEM_INVALID"); + expect(body.error.details?.fieldErrors?.code).toEqual(["code is required"]); + }); + + it("returns 400 when invite code does not exist", async () => { + const { database } = createFakeDb([]); + + const response = await createRegistryApp().request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: "clw_inv_missing", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("INVITE_REDEEM_CODE_INVALID"); + }); + + it("returns 400 when invite is expired", async () => { + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow], [], { + inviteRows: [ + { + id: generateUlid(1700700000000), + code: "clw_inv_expired", + createdBy: "human-1", + redeemedBy: null, + agentId: null, + expiresAt: new Date(Date.now() - 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + + const response = await createRegistryApp().request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: "clw_inv_expired", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("INVITE_REDEEM_EXPIRED"); + }); + + it("returns 409 when invite is already redeemed", async () => { + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow], [], { + inviteRows: [ + { + id: generateUlid(1700700001000), + code: "clw_inv_redeemed", + createdBy: "human-1", + redeemedBy: "human-2", + agentId: null, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); 
+ + const response = await createRegistryApp().request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: "clw_inv_redeemed", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(409); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("INVITE_REDEEM_ALREADY_USED"); + }); + + it("redeems invite and returns PAT that authenticates /v1/me", async () => { + const { authRow } = await makeValidPatContext(); + const inviteCode = "clw_inv_redeem_success"; + const { database, humanInserts, apiKeyInserts, inviteRows, inviteUpdates } = + createFakeDb([authRow], [], { + inviteRows: [ + { + id: generateUlid(1700700002000), + code: inviteCode, + createdBy: "human-1", + redeemedBy: null, + agentId: null, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const appInstance = createRegistryApp(); + + const redeemResponse = await appInstance.request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: inviteCode, + displayName: "Invitee Alpha", + apiKeyName: "primary-invite-key", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(redeemResponse.status).toBe(201); + const redeemBody = (await redeemResponse.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: "admin" | "user"; + status: "active" | "suspended"; + }; + apiKey: { + id: string; + name: string; + token: string; + }; + }; + expect(redeemBody.human.displayName).toBe("Invitee Alpha"); + expect(redeemBody.human.role).toBe("user"); + expect(redeemBody.apiKey.name).toBe("primary-invite-key"); + expect(redeemBody.apiKey.token.startsWith("clw_pat_")).toBe(true); + + expect(humanInserts).toHaveLength(1); + expect(apiKeyInserts).toHaveLength(1); + 
expect(apiKeyInserts[0]?.human_id).toBe(redeemBody.human.id); + expect(inviteUpdates).toHaveLength(1); + expect(inviteRows[0]?.redeemedBy).toBe(redeemBody.human.id); + + const meResponse = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${redeemBody.apiKey.token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(meResponse.status).toBe(200); + const meBody = (await meResponse.json()) as { + human: { + id: string; + displayName: string; + role: "admin" | "user"; + }; + }; + expect(meBody.human.id).toBe(redeemBody.human.id); + expect(meBody.human.displayName).toBe("Invitee Alpha"); + expect(meBody.human.role).toBe("user"); + }); + + it("rolls back fallback mutations when api key insert fails", async () => { + const { authRow } = await makeValidPatContext(); + const inviteCode = "clw_inv_fallback_rollback"; + const { database, humanRows, inviteRows } = createFakeDb([authRow], [], { + failBeginTransaction: true, + failApiKeyInsertCount: 1, + inviteRows: [ + { + id: generateUlid(1700700003000), + code: inviteCode, + createdBy: "human-1", + redeemedBy: null, + agentId: null, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const appInstance = createRegistryApp(); + + const firstResponse = await appInstance.request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: inviteCode, + displayName: "Fallback Invitee", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(firstResponse.status).toBe(500); + expect(humanRows).toHaveLength(1); + expect(inviteRows[0]?.redeemedBy).toBeNull(); + + const secondResponse = await appInstance.request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: inviteCode, + displayName: "Fallback Invitee", + }), + }, + { DB: database, 
ENVIRONMENT: "test" }, + ); + + expect(secondResponse.status).toBe(201); + expect(humanRows).toHaveLength(2); + expect(inviteRows[0]?.redeemedBy).toEqual(expect.any(String)); + }); +}); + describe(`POST ${ME_API_KEYS_PATH}`, () => { it("returns 401 when PAT is missing", async () => { const response = await createRegistryApp().request( diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 6e91a9d..7391c58 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -2,6 +2,8 @@ import { ADMIN_BOOTSTRAP_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, generateUlid, + INVITES_PATH, + INVITES_REDEEM_PATH, ME_API_KEYS_PATH, makeHumanDid, } from "@clawdentity/protocol"; @@ -18,7 +20,7 @@ import { signAIT, signCRL, } from "@clawdentity/sdk"; -import { and, desc, eq, lt } from "drizzle-orm"; +import { and, desc, eq, isNull, lt } from "drizzle-orm"; import { Hono } from "hono"; import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; @@ -63,8 +65,18 @@ import { agents, api_keys, humans, + invites, revocations, } from "./db/schema.js"; +import { + generateInviteCode, + inviteCreateForbiddenError, + inviteRedeemAlreadyUsedError, + inviteRedeemCodeInvalidError, + inviteRedeemExpiredError, + parseInviteCreatePayload, + parseInviteRedeemPayload, +} from "./invite-lifecycle.js"; import { createInMemoryRateLimit, RESOLVE_RATE_LIMIT_MAX_REQUESTS, @@ -114,6 +126,15 @@ type OwnedAgentRegistrationChallenge = { used_at: string | null; }; +type InviteRow = { + id: string; + code: string; + created_by: string; + redeemed_by: string | null; + expires_at: string | null; + created_at: string; +}; + type CrlSnapshotRow = { id: string; jti: string; @@ -252,6 +273,92 @@ async function findOwnedAgentRegistrationChallenge(input: { return rows[0]; } +async function findInviteByCode(input: { + db: ReturnType; + code: string; +}): Promise { + const rows = await input.db + .select({ + 
id: invites.id, + code: invites.code, + created_by: invites.created_by, + redeemed_by: invites.redeemed_by, + expires_at: invites.expires_at, + created_at: invites.created_at, + }) + .from(invites) + .where(eq(invites.code, input.code)) + .limit(1); + + return rows[0]; +} + +async function findInviteById(input: { + db: ReturnType; + id: string; +}): Promise { + const rows = await input.db + .select({ + id: invites.id, + code: invites.code, + created_by: invites.created_by, + redeemed_by: invites.redeemed_by, + expires_at: invites.expires_at, + created_at: invites.created_at, + }) + .from(invites) + .where(eq(invites.id, input.id)) + .limit(1); + + return rows[0]; +} + +function isInviteExpired(input: { + expiresAt: string | null; + nowMillis: number; +}) { + if (typeof input.expiresAt !== "string") { + return false; + } + + const expiresAtMillis = Date.parse(input.expiresAt); + if (!Number.isFinite(expiresAtMillis)) { + return true; + } + + return expiresAtMillis <= input.nowMillis; +} + +async function resolveInviteRedeemStateError(input: { + db: ReturnType; + inviteId: string; + nowMillis: number; +}): Promise { + const latestInvite = await findInviteById({ + db: input.db, + id: input.inviteId, + }); + + if (!latestInvite) { + return inviteRedeemCodeInvalidError(); + } + + if (latestInvite.redeemed_by !== null) { + return inviteRedeemAlreadyUsedError(); + } + + if ( + isInviteExpired({ + expiresAt: latestInvite.expires_at, + nowMillis: input.nowMillis, + }) + ) { + return inviteRedeemExpiredError(); + } + + return inviteRedeemCodeInvalidError(); +} + function requireCurrentJti(input: { currentJti: string | null; onInvalid: (reason: string) => AppError; @@ -601,6 +708,237 @@ function createRegistryApp() { return c.json({ human: c.get("human") }); }); + app.post(INVITES_PATH, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await 
c.req.json(); + } catch { + throw new AppError({ + code: "INVITE_CREATE_INVALID", + message: exposeDetails + ? "Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const human = c.get("human"); + if (human.role !== "admin") { + throw inviteCreateForbiddenError(); + } + + const parsedPayload = parseInviteCreatePayload({ + payload, + environment: config.ENVIRONMENT, + now: new Date(), + }); + + const inviteId = generateUlid(Date.now()); + const inviteCode = generateInviteCode(); + const createdAt = nowIso(); + const db = createDb(c.env.DB); + await db.insert(invites).values({ + id: inviteId, + code: inviteCode, + created_by: human.id, + redeemed_by: null, + agent_id: null, + expires_at: parsedPayload.expiresAt, + created_at: createdAt, + }); + + return c.json( + { + invite: { + id: inviteId, + code: inviteCode, + createdBy: human.id, + expiresAt: parsedPayload.expiresAt, + createdAt, + }, + }, + 201, + ); + }); + + app.post(INVITES_REDEEM_PATH, async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "INVITE_REDEEM_INVALID", + message: exposeDetails + ? 
"Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const parsedPayload = parseInviteRedeemPayload({ + payload, + environment: config.ENVIRONMENT, + }); + + const db = createDb(c.env.DB); + const invite = await findInviteByCode({ + db, + code: parsedPayload.code, + }); + + if (!invite) { + throw inviteRedeemCodeInvalidError(); + } + + const nowMillis = Date.now(); + if (invite.redeemed_by !== null) { + throw inviteRedeemAlreadyUsedError(); + } + + if ( + isInviteExpired({ + expiresAt: invite.expires_at, + nowMillis, + }) + ) { + throw inviteRedeemExpiredError(); + } + + const humanId = generateUlid(nowMillis); + const humanDid = makeHumanDid(humanId); + const apiKeyToken = generateApiKeyToken(); + const apiKeyHash = await hashApiKeyToken(apiKeyToken); + const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); + const apiKeyId = generateUlid(nowMillis + 1); + const createdAt = nowIso(); + + const applyRedeemMutation = async ( + executor: typeof db, + options: { rollbackOnFailure: boolean }, + ): Promise => { + await executor.insert(humans).values({ + id: humanId, + did: humanDid, + display_name: parsedPayload.displayName, + role: "user", + status: "active", + created_at: createdAt, + updated_at: createdAt, + }); + + let inviteRedeemed = false; + try { + const inviteUpdateResult = await executor + .update(invites) + .set({ + redeemed_by: humanId, + }) + .where(and(eq(invites.id, invite.id), isNull(invites.redeemed_by))); + + const updatedRows = getMutationRowCount(inviteUpdateResult); + if (updatedRows === 0) { + throw await resolveInviteRedeemStateError({ + db: executor, + inviteId: invite.id, + nowMillis, + }); + } + inviteRedeemed = true; + + await executor.insert(api_keys).values({ + id: apiKeyId, + human_id: humanId, + key_hash: apiKeyHash, + key_prefix: apiKeyPrefix, + name: parsedPayload.apiKeyName, + status: "active", + created_at: createdAt, + last_used_at: null, + }); + } catch 
(error) { + if (options.rollbackOnFailure) { + if (inviteRedeemed) { + try { + await executor + .update(invites) + .set({ + redeemed_by: null, + }) + .where( + and( + eq(invites.id, invite.id), + eq(invites.redeemed_by, humanId), + ), + ); + } catch (rollbackError) { + logger.error("registry.invite_redeem_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error + ? rollbackError.name + : "unknown", + stage: "invite_unlink", + }); + } + } + + try { + await executor.delete(humans).where(eq(humans.id, humanId)); + } catch (rollbackError) { + logger.error("registry.invite_redeem_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? rollbackError.name : "unknown", + stage: "human_delete", + }); + } + } + + throw error; + } + }; + + try { + await db.transaction(async (tx) => { + await applyRedeemMutation(tx as unknown as typeof db, { + rollbackOnFailure: false, + }); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRedeemMutation(db, { + rollbackOnFailure: true, + }); + } + + return c.json( + { + human: { + id: humanId, + did: humanDid, + displayName: parsedPayload.displayName, + role: "user", + status: "active", + }, + apiKey: { + id: apiKeyId, + name: parsedPayload.apiKeyName, + token: apiKeyToken, + }, + }, + 201, + ); + }); + app.post(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { const config = getConfig(c.env); const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 4dde676..98aa369 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -1,3 +1,5 @@ export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; export const AGENT_REGISTRATION_CHALLENGE_PATH = "/v1/agents/challenge"; +export const INVITES_PATH = "/v1/invites"; +export const INVITES_REDEEM_PATH = "/v1/invites/redeem"; export const ME_API_KEYS_PATH = 
"/v1/me/api-keys"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index f5a204e..c232d82 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -13,6 +13,8 @@ import { decodeBase64url, encodeBase64url, generateUlid, + INVITES_PATH, + INVITES_REDEEM_PATH, MAX_AGENT_DESCRIPTION_LENGTH, MAX_AGENT_NAME_LENGTH, ME_API_KEYS_PATH, @@ -35,6 +37,8 @@ describe("protocol", () => { it("exports shared endpoint constants", () => { expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); expect(AGENT_REGISTRATION_CHALLENGE_PATH).toBe("/v1/agents/challenge"); + expect(INVITES_PATH).toBe("/v1/invites"); + expect(INVITES_REDEEM_PATH).toBe("/v1/invites/redeem"); expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); }); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 017f4f0..afcf910 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -23,6 +23,8 @@ export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export { ADMIN_BOOTSTRAP_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, + INVITES_PATH, + INVITES_REDEEM_PATH, ME_API_KEYS_PATH, } from "./endpoints.js"; export type { ProtocolParseErrorCode } from "./errors.js"; From a2bded5cf7496b86e8799da4014f3f9f47bff7c5 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 18:06:00 +0530 Subject: [PATCH 064/190] feat: implement agent auth refresh lifecycle and rotation --- apps/cli/src/AGENTS.md | 2 + apps/cli/src/commands/AGENTS.md | 3 + apps/cli/src/commands/agent.test.ts | 220 ++++- apps/cli/src/commands/agent.ts | 356 ++++++- .../drizzle/0002_agent_auth_refresh.sql | 38 + apps/registry/src/AGENTS.md | 27 +- apps/registry/src/agent-auth-lifecycle.ts | 205 ++++ apps/registry/src/auth/AGENTS.md | 7 + apps/registry/src/auth/agent-auth-token.ts | 110 +++ apps/registry/src/auth/agent-claw-auth.ts | 147 +++ apps/registry/src/db/AGENTS.md | 4 +- apps/registry/src/db/schema.contract.test.ts | 25 +- 
apps/registry/src/db/schema.ts | 68 +- apps/registry/src/server.test.ts | 891 +++++++++++++++++- apps/registry/src/server.ts | 464 ++++++++- packages/protocol/AGENTS.md | 2 +- packages/protocol/src/endpoints.ts | 1 + packages/protocol/src/index.test.ts | 2 + packages/protocol/src/index.ts | 1 + 19 files changed, 2557 insertions(+), 16 deletions(-) create mode 100644 apps/registry/drizzle/0002_agent_auth_refresh.sql create mode 100644 apps/registry/src/agent-auth-lifecycle.ts create mode 100644 apps/registry/src/auth/agent-auth-token.ts create mode 100644 apps/registry/src/auth/agent-claw-auth.ts diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 693d894..82cdb5b 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -14,6 +14,8 @@ - Registry invite lifecycle command logic should stay in `commands/invite.ts`; keep it strictly scoped to registry onboarding invites and separate from `commands/openclaw.ts` peer-relay invite codes. - `invite redeem` must print the returned PAT once, then persist config in deterministic order (`registryUrl`, then `apiKey`) so bootstrap/onboarding state is predictable. - `invite` command routes must use endpoint constants from `@clawdentity/protocol` (`INVITES_PATH`, `INVITES_REDEEM_PATH`) instead of inline path literals. +- Agent auth refresh state is stored per-agent at `~/.clawdentity/agents//registry-auth.json` and must be written with secure file permissions. +- `agent auth refresh` must use `Authorization: Claw ` + PoP headers from local agent keys and must not require PAT config. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. 
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 63e3d3d..a2fb05d 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -46,6 +46,9 @@ ## Agent Command Rules - `agent create` must use a two-step registration handshake: request challenge from registry, sign canonical challenge message locally with agent private key, then submit registration with `challengeId` + `challengeSignature`. +- `agent create` must persist returned `agentAuth` bootstrap credentials to `registry-auth.json` alongside `identity.json`, `secret.key`, `public.key`, and `ait.jwt`. +- `agent auth refresh` must call `AGENT_AUTH_REFRESH_PATH` from `@clawdentity/protocol` using Claw + PoP headers and local refresh token payload, and PoP signing must use the resolved request path (including any registry base path prefix). +- `agent auth refresh` must rewrite `registry-auth.json` atomically on success and keep error mapping stable for `400`, `401`, `409`, and `5xx`. - Never send or log agent private keys; only send public key and proof signature. - Keep proof canonicalization sourced from `@clawdentity/protocol` helper exports to avoid CLI/registry signature drift. - Keep registry error mapping stable for both challenge and register requests so users receive deterministic remediation output. 
diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index 9d0390e..acb1239 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -1,4 +1,12 @@ -import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { + access, + chmod, + mkdir, + readFile, + rename, + unlink, + writeFile, +} from "node:fs/promises"; import { Command } from "commander"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; @@ -7,6 +15,8 @@ vi.mock("node:fs/promises", () => ({ chmod: vi.fn(), mkdir: vi.fn(), readFile: vi.fn(), + rename: vi.fn(), + unlink: vi.fn(), writeFile: vi.fn(), })); @@ -27,6 +37,7 @@ vi.mock("@clawdentity/sdk", () => ({ encodeEd25519SignatureBase64url: vi.fn(), encodeEd25519KeypairBase64url: vi.fn(), generateEd25519Keypair: vi.fn(), + signHttpRequest: vi.fn(), signEd25519: vi.fn(), })); @@ -37,6 +48,7 @@ import { encodeEd25519SignatureBase64url, generateEd25519Keypair, signEd25519, + signHttpRequest, } from "@clawdentity/sdk"; import { resolveConfig } from "../config/manager.js"; import { createAgentCommand } from "./agent.js"; @@ -45,9 +57,12 @@ const mockedAccess = vi.mocked(access); const mockedChmod = vi.mocked(chmod); const mockedMkdir = vi.mocked(mkdir); const mockedReadFile = vi.mocked(readFile); +const mockedRename = vi.mocked(rename); +const mockedUnlink = vi.mocked(unlink); const mockedWriteFile = vi.mocked(writeFile); const mockedResolveConfig = vi.mocked(resolveConfig); const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); +const mockedSignHttpRequest = vi.mocked(signHttpRequest); const mockedSignEd25519 = vi.mocked(signEd25519); const mockedEncodeEd25519SignatureBase64url = vi.mocked( encodeEd25519SignatureBase64url, @@ -134,6 +149,8 @@ describe("agent create command", () => { mockedAccess.mockRejectedValue(buildErrnoError("ENOENT")); mockedMkdir.mockResolvedValue(undefined); mockedWriteFile.mockResolvedValue(undefined); + 
mockedRename.mockResolvedValue(undefined); + mockedUnlink.mockResolvedValue(undefined); mockedChmod.mockResolvedValue(undefined); mockedGenerateEd25519Keypair.mockResolvedValue({ @@ -147,6 +164,16 @@ describe("agent create command", () => { }); mockedSignEd25519.mockResolvedValue(Uint8Array.from([1, 2, 3])); + mockedSignHttpRequest.mockResolvedValue({ + canonicalRequest: "canonical", + proof: "proof", + headers: { + "X-Claw-Timestamp": "1739364000", + "X-Claw-Nonce": "nonce-value", + "X-Claw-Body-SHA256": "body-sha", + "X-Claw-Proof": "proof", + }, + }); mockedEncodeEd25519SignatureBase64url.mockReturnValue( "challenge-signature-b64url", ); @@ -170,6 +197,13 @@ describe("agent create command", () => { expiresAt: "2030-01-01T00:00:00.000Z", }, ait: "ait.jwt.value", + agentAuth: { + tokenType: "Bearer", + accessToken: "clw_agt_access_token", + accessExpiresAt: "2030-01-01T00:15:00.000Z", + refreshToken: "clw_rft_refresh_token", + refreshExpiresAt: "2030-01-31T00:00:00.000Z", + }, }); }); }); @@ -211,7 +245,7 @@ describe("agent create command", () => { }), ); - expect(mockedWriteFile).toHaveBeenCalledTimes(4); + expect(mockedWriteFile).toHaveBeenCalledTimes(5); expect(mockedWriteFile).toHaveBeenCalledWith( "/mock-home/.clawdentity/agents/agent-01/secret.key", "secret-key-b64url", @@ -234,6 +268,11 @@ describe("agent create command", () => { "ait.jwt.value", "utf-8", ); + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", + expect.stringContaining('"refreshToken": "clw_rft_refresh_token"'), + "utf-8", + ); expect(result.stdout).toContain( "Agent DID: did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", @@ -306,7 +345,7 @@ describe("agent create command", () => { it("sets 0600 permissions on every identity file", async () => { await runAgentCommand(["create", "agent-01"]); - expect(mockedChmod).toHaveBeenCalledTimes(4); + expect(mockedChmod).toHaveBeenCalledTimes(5); expect(mockedChmod).toHaveBeenCalledWith( 
"/mock-home/.clawdentity/agents/agent-01/secret.key", 0o600, @@ -323,6 +362,10 @@ describe("agent create command", () => { "/mock-home/.clawdentity/agents/agent-01/ait.jwt", 0o600, ); + expect(mockedChmod).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", + 0o600, + ); }); it("sends optional framework and ttl-days values", async () => { @@ -360,6 +403,177 @@ describe("agent create command", () => { }); }); +describe("agent auth refresh command", () => { + beforeEach(() => { + vi.clearAllMocks(); + mockFetch.mockReset(); + vi.stubGlobal("fetch", mockFetch); + mockedSignHttpRequest.mockResolvedValue({ + canonicalRequest: "canonical", + proof: "proof", + headers: { + "X-Claw-Timestamp": "1739364000", + "X-Claw-Nonce": "nonce-value", + "X-Claw-Body-SHA256": "body-sha", + "X-Claw-Proof": "proof", + }, + }); + + mockedReadFile.mockImplementation(async (path) => { + const filePath = String(path); + if (filePath.endsWith("/ait.jwt")) { + return "ait.jwt.value"; + } + if (filePath.endsWith("/identity.json")) { + return JSON.stringify({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + registryUrl: "https://api.clawdentity.com", + }); + } + if (filePath.endsWith("/secret.key")) { + return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; + } + if (filePath.endsWith("/registry-auth.json")) { + return JSON.stringify({ + tokenType: "Bearer", + accessToken: "clw_agt_old_access", + accessExpiresAt: "2030-01-01T00:15:00.000Z", + refreshToken: "clw_rft_old_refresh", + refreshExpiresAt: "2030-01-31T00:00:00.000Z", + }); + } + + throw buildErrnoError("ENOENT"); + }); + + mockFetch.mockResolvedValue( + createJsonResponse(200, { + agentAuth: { + tokenType: "Bearer", + accessToken: "clw_agt_new_access", + accessExpiresAt: "2030-01-02T00:15:00.000Z", + refreshToken: "clw_rft_new_refresh", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }, + }), + ); + }); + + afterEach(() => { + process.exitCode = undefined; + vi.unstubAllGlobals(); + }); + + 
it("refreshes agent auth and rewrites registry-auth.json", async () => { + const result = await runAgentCommand(["auth", "refresh", "agent-01"]); + + expect(mockedSignHttpRequest).toHaveBeenCalledWith( + expect.objectContaining({ + method: "POST", + pathWithQuery: "/v1/agents/auth/refresh", + }), + ); + expect(mockFetch).toHaveBeenCalledWith( + "https://api.clawdentity.com/v1/agents/auth/refresh", + expect.objectContaining({ + method: "POST", + headers: expect.objectContaining({ + authorization: "Claw ait.jwt.value", + "content-type": "application/json", + }), + }), + ); + const [tempPath, tempContents, tempEncoding] = mockedWriteFile.mock + .calls[0] as [string, string, BufferEncoding]; + expect(tempPath).toContain( + "/mock-home/.clawdentity/agents/agent-01/registry-auth.json.tmp-", + ); + expect(tempContents).toContain('"refreshToken": "clw_rft_new_refresh"'); + expect(tempEncoding).toBe("utf-8"); + expect(mockedRename).toHaveBeenCalledWith( + tempPath, + "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", + ); + expect(mockedWriteFile).not.toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", + expect.stringContaining('"refreshToken": "clw_rft_new_refresh"'), + "utf-8", + ); + expect(result.stdout).toContain("Agent auth refreshed: agent-01"); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when registry-auth.json is missing", async () => { + mockedReadFile.mockImplementation(async (path) => { + const filePath = String(path); + if (filePath.endsWith("/registry-auth.json")) { + throw buildErrnoError("ENOENT"); + } + if (filePath.endsWith("/ait.jwt")) { + return "ait.jwt.value"; + } + if (filePath.endsWith("/identity.json")) { + return JSON.stringify({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + registryUrl: "https://api.clawdentity.com", + }); + } + if (filePath.endsWith("/secret.key")) { + return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; + } + + throw buildErrnoError("ENOENT"); + }); + + 
const result = await runAgentCommand(["auth", "refresh", "agent-01"]); + + expect(result.stderr).toContain("registry-auth.json"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("signs refresh proof with the resolved endpoint path for base-path registries", async () => { + mockedReadFile.mockImplementation(async (path) => { + const filePath = String(path); + if (filePath.endsWith("/ait.jwt")) { + return "ait.jwt.value"; + } + if (filePath.endsWith("/identity.json")) { + return JSON.stringify({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + registryUrl: "https://api.clawdentity.com/registry", + }); + } + if (filePath.endsWith("/secret.key")) { + return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; + } + if (filePath.endsWith("/registry-auth.json")) { + return JSON.stringify({ + tokenType: "Bearer", + accessToken: "clw_agt_old_access", + accessExpiresAt: "2030-01-01T00:15:00.000Z", + refreshToken: "clw_rft_old_refresh", + refreshExpiresAt: "2030-01-31T00:00:00.000Z", + }); + } + + throw buildErrnoError("ENOENT"); + }); + + await runAgentCommand(["auth", "refresh", "agent-01"]); + + expect(mockedSignHttpRequest).toHaveBeenCalledWith( + expect.objectContaining({ + pathWithQuery: "/registry/v1/agents/auth/refresh", + }), + ); + expect(mockFetch).toHaveBeenCalledWith( + "https://api.clawdentity.com/registry/v1/agents/auth/refresh", + expect.any(Object), + ); + }); +}); + describe("agent revoke command", () => { const agentDid = "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"; const agentId = "01HF7YAT00W6W7CM7N3W5FDXT4"; diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index 9daea9e..ce43753 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -1,8 +1,19 @@ -import { access, chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { + access, + chmod, + mkdir, + readFile, + rename, + unlink, + writeFile, +} from "node:fs/promises"; import { join } from 
"node:path"; import { + AGENT_AUTH_REFRESH_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, canonicalizeAgentRegistrationProof, + decodeBase64url, + encodeBase64url, parseDid, } from "@clawdentity/protocol"; import { @@ -13,6 +24,7 @@ import { encodeEd25519SignatureBase64url, generateEd25519Keypair, signEd25519, + signHttpRequest, } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; @@ -25,6 +37,7 @@ const logger = createLogger({ service: "cli", module: "agent" }); const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; const IDENTITY_FILE_NAME = "identity.json"; +const REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; const FILE_MODE = 0o600; type AgentCreateOptions = { @@ -40,6 +53,7 @@ type AgentRegistrationResponse = { expiresAt: string; }; ait: string; + agentAuth: AgentAuthBundle; }; type AgentRegistrationChallengeResponse = { @@ -51,6 +65,19 @@ type AgentRegistrationChallengeResponse = { type LocalAgentIdentity = { did: string; + registryUrl?: string; +}; + +type AgentAuthBundle = { + tokenType: "Bearer"; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; +}; + +type LocalAgentRegistryAuth = { + refreshToken: string; }; type RegistryErrorEnvelope = { @@ -63,6 +90,14 @@ const isRecord = (value: unknown): value is Record => { return typeof value === "object" && value !== null; }; +const parseNonEmptyString = (value: unknown): string => { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +}; + const getAgentDirectory = (name: string): string => { return join(getConfigDir(), AGENTS_DIR_NAME, name); }; @@ -75,6 +110,14 @@ const getAgentIdentityPath = (name: string): string => { return join(getAgentDirectory(name), IDENTITY_FILE_NAME); }; +const getAgentSecretKeyPath = (name: string): string => { + return join(getAgentDirectory(name), "secret.key"); +}; + +const getAgentRegistryAuthPath = (name: string): 
string => { + return join(getAgentDirectory(name), REGISTRY_AUTH_FILE_NAME); +}; + const readAgentAitToken = async (agentName: string): Promise => { const aitPath = getAgentAitPath(agentName); @@ -137,7 +180,82 @@ const readAgentIdentity = async ( ); } - return { did }; + const registryUrl = parseNonEmptyString(parsed.registryUrl); + return { + did, + registryUrl: registryUrl.length > 0 ? registryUrl : undefined, + }; +}; + +const readAgentSecretKey = async (agentName: string): Promise => { + const secretKeyPath = getAgentSecretKeyPath(agentName); + + let rawSecretKey: string; + try { + rawSecretKey = await readFile(secretKeyPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error(`Agent "${agentName}" not found (${secretKeyPath})`); + } + throw error; + } + + const encodedSecretKey = rawSecretKey.trim(); + if (encodedSecretKey.length === 0) { + throw new Error(`Agent "${agentName}" has an empty secret.key`); + } + + try { + return decodeBase64url(encodedSecretKey); + } catch { + throw new Error(`Agent "${agentName}" has invalid secret.key format`); + } +}; + +const readAgentRegistryAuth = async ( + agentName: string, +): Promise => { + const registryAuthPath = getAgentRegistryAuthPath(agentName); + + let rawRegistryAuth: string; + try { + rawRegistryAuth = await readFile(registryAuthPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error( + `Agent "${agentName}" has no ${REGISTRY_AUTH_FILE_NAME}. 
Recreate agent identity or re-run auth bootstrap.`, + ); + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawRegistryAuth); + } catch { + throw new Error( + `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME} (must be valid JSON)`, + ); + } + + if (!isRecord(parsed)) { + throw new Error( + `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME}`, + ); + } + + const refreshToken = parseNonEmptyString(parsed.refreshToken); + if (refreshToken.length === 0) { + throw new Error( + `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME} (missing refreshToken)`, + ); + } + + return { + refreshToken, + }; }; const parseAgentIdFromDid = (agentName: string, did: string): string => { @@ -235,6 +353,22 @@ const toRegistryAgentChallengeRequestUrl = (registryUrl: string): string => { ).toString(); }; +const toRegistryAgentAuthRefreshRequestUrl = (registryUrl: string): string => { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + + return new URL( + AGENT_AUTH_REFRESH_PATH.slice(1), + normalizedBaseUrl, + ).toString(); +}; + +const toPathWithQuery = (requestUrl: string): string => { + const parsed = new URL(requestUrl); + return `${parsed.pathname}${parsed.search}`; +}; + const toHttpErrorMessage = (status: number, responseBody: unknown): string => { const registryMessage = extractRegistryErrorMessage(responseBody); @@ -261,6 +395,36 @@ const toHttpErrorMessage = (status: number, responseBody: unknown): string => { return `Registry request failed (${status})`; }; +const parseAgentAuthBundle = (value: unknown): AgentAuthBundle => { + if (!isRecord(value)) { + throw new Error("Registry returned an invalid response payload"); + } + + const tokenType = value.tokenType; + const accessToken = value.accessToken; + const accessExpiresAt = value.accessExpiresAt; + const refreshToken = value.refreshToken; + const refreshExpiresAt = value.refreshExpiresAt; + + if ( + tokenType !== "Bearer" || + 
typeof accessToken !== "string" || + typeof accessExpiresAt !== "string" || + typeof refreshToken !== "string" || + typeof refreshExpiresAt !== "string" + ) { + throw new Error("Registry returned an invalid response payload"); + } + + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + }; +}; + const parseAgentRegistrationResponse = ( payload: unknown, ): AgentRegistrationResponse => { @@ -270,8 +434,13 @@ const parseAgentRegistrationResponse = ( const agentValue = payload.agent; const aitValue = payload.ait; + const agentAuthValue = payload.agentAuth; - if (!isRecord(agentValue) || typeof aitValue !== "string") { + if ( + !isRecord(agentValue) || + typeof aitValue !== "string" || + !isRecord(agentAuthValue) + ) { throw new Error("Registry returned an invalid response payload"); } @@ -297,6 +466,7 @@ const parseAgentRegistrationResponse = ( expiresAt, }, ait: aitValue, + agentAuth: parseAgentAuthBundle(agentAuthValue), }; }; @@ -354,6 +524,28 @@ const writeSecureFile = async ( await chmod(path, FILE_MODE); }; +const writeSecureFileAtomically = async ( + path: string, + content: string, +): Promise => { + const tempPath = `${path}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + + await writeFile(tempPath, content, "utf-8"); + await chmod(tempPath, FILE_MODE); + + try { + await rename(tempPath, path); + } catch (error) { + try { + await unlink(tempPath); + } catch { + // Best-effort cleanup only. 
+ } + + throw error; + } +}; + const ensureAgentDirectory = async ( agentName: string, agentDirectory: string, @@ -384,6 +576,7 @@ const writeAgentIdentity = async (input: { publicKey: string; secretKey: string; ait: string; + agentAuth: AgentAuthBundle; }): Promise => { await ensureAgentDirectory(input.name, input.agentDirectory); @@ -408,6 +601,20 @@ const writeAgentIdentity = async (input: { `${JSON.stringify(identityJson, null, 2)}\n`, ); await writeSecureFile(join(input.agentDirectory, "ait.jwt"), input.ait); + await writeSecureFile( + join(input.agentDirectory, REGISTRY_AUTH_FILE_NAME), + `${JSON.stringify(input.agentAuth, null, 2)}\n`, + ); +}; + +const writeAgentRegistryAuth = async (input: { + agentName: string; + agentAuth: AgentAuthBundle; +}): Promise => { + await writeSecureFileAtomically( + getAgentRegistryAuthPath(input.agentName), + `${JSON.stringify(input.agentAuth, null, 2)}\n`, + ); }; const requestAgentRegistrationChallenge = async (input: { @@ -558,6 +765,110 @@ const toRevokeHttpErrorMessage = ( return `Registry request failed (${status})`; }; +const toRefreshHttpErrorMessage = ( + status: number, + responseBody: unknown, +): string => { + const registryMessage = extractRegistryErrorMessage(responseBody); + + if (status === 400) { + return registryMessage + ? `Refresh request is invalid (400): ${registryMessage}` + : "Refresh request is invalid (400)."; + } + + if (status === 401) { + return registryMessage + ? `Refresh rejected (401): ${registryMessage}` + : "Refresh rejected (401). Agent credentials are invalid, revoked, or expired."; + } + + if (status === 409) { + return registryMessage + ? `Refresh conflict (409): ${registryMessage}` + : "Refresh conflict (409). Retry the command."; + } + + if (status >= 500) { + return `Registry server error (${status}). 
Try again later.`; + } + + if (registryMessage) { + return `Registry request failed (${status}): ${registryMessage}`; + } + + return `Registry request failed (${status})`; +}; + +const parseAgentAuthRefreshResponse = (payload: unknown): AgentAuthBundle => { + if (!isRecord(payload) || !isRecord(payload.agentAuth)) { + throw new Error("Registry returned an invalid response payload"); + } + + return parseAgentAuthBundle(payload.agentAuth); +}; + +const refreshAgentAuth = async (input: { + agentName: string; +}): Promise<{ + registryUrl: string; + agentAuth: AgentAuthBundle; +}> => { + const ait = await readAgentAitToken(input.agentName); + const identity = await readAgentIdentity(input.agentName); + const secretKey = await readAgentSecretKey(input.agentName); + const localAuth = await readAgentRegistryAuth(input.agentName); + + const registryUrl = identity.registryUrl?.trim(); + if (!registryUrl) { + throw new Error( + `Agent "${input.agentName}" identity is missing registryUrl in ${IDENTITY_FILE_NAME}`, + ); + } + + const refreshBody = JSON.stringify({ + refreshToken: localAuth.refreshToken, + }); + const refreshUrl = toRegistryAgentAuthRefreshRequestUrl(registryUrl); + const timestamp = String(Math.floor(Date.now() / 1000)); + const nonce = encodeBase64url(crypto.getRandomValues(new Uint8Array(16))); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(refreshUrl), + timestamp, + nonce, + body: new TextEncoder().encode(refreshBody), + secretKey, + }); + + let response: Response; + try { + response = await fetch(refreshUrl, { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signed.headers, + }, + body: refreshBody, + }); + } catch { + throw new Error( + "Unable to connect to the registry. 
Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw new Error(toRefreshHttpErrorMessage(response.status, responseBody)); + } + + return { + registryUrl, + agentAuth: parseAgentAuthRefreshResponse(responseBody), + }; +}; + const revokeAgent = async (input: { apiKey: string; registryUrl: string; @@ -660,6 +971,7 @@ export const createAgentCommand = (): Command => { publicKey: encoded.publicKey, secretKey: encoded.secretKey, ait: registration.ait, + agentAuth: registration.agentAuth, }); logger.info("cli.agent_created", { @@ -685,6 +997,44 @@ export const createAgentCommand = (): Command => { }), ); + const authCommand = new Command("auth").description( + "Manage local agent registry auth credentials", + ); + + authCommand + .command("refresh ") + .description("Refresh agent registry auth credentials with Claw proof") + .action( + withErrorHandling("agent auth refresh", async (name: string) => { + const agentName = assertValidAgentName(name); + const result = await refreshAgentAuth({ + agentName, + }); + + await writeAgentRegistryAuth({ + agentName, + agentAuth: result.agentAuth, + }); + + logger.info("cli.agent_auth_refreshed", { + name: agentName, + registryUrl: result.registryUrl, + accessExpiresAt: result.agentAuth.accessExpiresAt, + refreshExpiresAt: result.agentAuth.refreshExpiresAt, + }); + + writeStdoutLine(`Agent auth refreshed: ${agentName}`); + writeStdoutLine( + `Access Expires At: ${result.agentAuth.accessExpiresAt}`, + ); + writeStdoutLine( + `Refresh Expires At: ${result.agentAuth.refreshExpiresAt}`, + ); + }), + ); + + agentCommand.addCommand(authCommand); + agentCommand .command("revoke ") .description("Revoke a local agent identity via the registry") diff --git a/apps/registry/drizzle/0002_agent_auth_refresh.sql b/apps/registry/drizzle/0002_agent_auth_refresh.sql new file mode 100644 index 0000000..bac047a --- /dev/null +++ 
b/apps/registry/drizzle/0002_agent_auth_refresh.sql @@ -0,0 +1,38 @@ +CREATE TABLE `agent_auth_sessions` ( + `id` text PRIMARY KEY NOT NULL, + `agent_id` text NOT NULL, + `refresh_key_hash` text NOT NULL, + `refresh_key_prefix` text NOT NULL, + `refresh_issued_at` text NOT NULL, + `refresh_expires_at` text NOT NULL, + `refresh_last_used_at` text, + `access_key_hash` text NOT NULL, + `access_key_prefix` text NOT NULL, + `access_issued_at` text NOT NULL, + `access_expires_at` text NOT NULL, + `access_last_used_at` text, + `status` text DEFAULT 'active' NOT NULL, + `revoked_at` text, + `created_at` text NOT NULL, + `updated_at` text NOT NULL, + FOREIGN KEY (`agent_id`) REFERENCES `agents`(`id`) ON UPDATE no action ON DELETE no action +); +--> statement-breakpoint +CREATE UNIQUE INDEX `agent_auth_sessions_agent_id_unique` ON `agent_auth_sessions` (`agent_id`);--> statement-breakpoint +CREATE INDEX `idx_agent_auth_sessions_agent_status` ON `agent_auth_sessions` (`agent_id`,`status`);--> statement-breakpoint +CREATE INDEX `idx_agent_auth_sessions_refresh_prefix` ON `agent_auth_sessions` (`refresh_key_prefix`);--> statement-breakpoint +CREATE INDEX `idx_agent_auth_sessions_access_prefix` ON `agent_auth_sessions` (`access_key_prefix`);--> statement-breakpoint +CREATE TABLE `agent_auth_events` ( + `id` text PRIMARY KEY NOT NULL, + `agent_id` text NOT NULL, + `session_id` text NOT NULL, + `event_type` text NOT NULL, + `reason` text, + `metadata_json` text, + `created_at` text NOT NULL, + FOREIGN KEY (`agent_id`) REFERENCES `agents`(`id`) ON UPDATE no action ON DELETE no action, + FOREIGN KEY (`session_id`) REFERENCES `agent_auth_sessions`(`id`) ON UPDATE no action ON DELETE no action +); +--> statement-breakpoint +CREATE INDEX `idx_agent_auth_events_agent_created` ON `agent_auth_events` (`agent_id`,`created_at`);--> statement-breakpoint +CREATE INDEX `idx_agent_auth_events_session_created` ON `agent_auth_events` (`session_id`,`created_at`); diff --git 
a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index d900fcf..0837dc4 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -122,7 +122,20 @@ - Use shared SDK datetime helpers (`nowIso`, `addSeconds`) for issuance/expiry math instead of ad-hoc `Date.now()` arithmetic in route logic. - Resolve signing material through a reusable signer helper (`registry-signer.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. - Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.api.clawdentity.com`, `production` -> `https://api.clawdentity.com`) rather than request-origin inference. -- Response shape remains `{ agent, ait }`; the token must be verifiable with the public keyset returned by `/.well-known/claw-keys.json`. +- Bootstrap agent auth refresh material in the same mutation unit as agent creation by inserting an active `agent_auth_sessions` row. +- Response shape is `{ agent, ait, agentAuth }` where `agentAuth` returns short-lived access credentials and rotating refresh credentials. + +## POST /v1/agents/auth/refresh Contract +- Public endpoint (no PAT): auth is agent-scoped via `Authorization: Claw ` + PoP headers + refresh token payload. +- Verify AIT against active registry signing keys and enforce deterministic issuer mapping for environment. +- Verify PoP using canonical request inputs and public key from AIT `cnf`. +- Enforce timestamp skew checks for replay-window reduction. +- Require payload `{ refreshToken }` and validate marker format (`clw_rft_`). +- Enforce single-active-session rotation semantics: + - refresh token must match current active session hash/prefix + - expired refresh token transitions session to `revoked` + - successful refresh rotates both refresh/access credentials with a guarded update +- Insert audit events in `agent_auth_events` for `refreshed`, `revoked`, and `refresh_rejected`. 
## DELETE /v1/agents/:id Contract - Require PAT auth via `createApiKeyAuth`; only the caller-owned agent may be revoked. @@ -133,8 +146,16 @@ - return `204` after first successful revoke - If an owned active agent has no `current_jti`, fail with `409 AGENT_REVOKE_INVALID_STATE` rather than writing a partial revocation. - Perform state changes in one DB transaction: - - update `agents.status` to `revoked` and `agents.updated_at` to `nowIso()` - - insert `revocations` row using the previous `current_jti` +- update `agents.status` to `revoked` and `agents.updated_at` to `nowIso()` +- insert `revocations` row using the previous `current_jti` +- revoke active `agent_auth_sessions` row for the same agent and write `agent_auth_events` entry with reason `agent_revoked`. + +## DELETE /v1/agents/:id/auth/revoke Contract +- Require PAT auth via `createApiKeyAuth`; only the caller-owned agent may be targeted. +- Validate `:id` with the same ULID path parser used by revoke/reissue flows. +- Return `404 AGENT_NOT_FOUND` for unknown/foreign agents. +- Revoke active `agent_auth_sessions` rows idempotently (`204` if already revoked/missing). +- Write `agent_auth_events` entry with reason `owner_auth_revoke` on first successful revoke. ## POST /v1/agents/:id/reissue Contract - Require PAT auth via `createApiKeyAuth`; only the caller-owned agent may be reissued. 
diff --git a/apps/registry/src/agent-auth-lifecycle.ts b/apps/registry/src/agent-auth-lifecycle.ts new file mode 100644 index 0000000..257a135 --- /dev/null +++ b/apps/registry/src/agent-auth-lifecycle.ts @@ -0,0 +1,205 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { + AppError, + addSeconds, + nowIso, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; +import { + deriveAccessTokenLookupPrefix, + deriveRefreshTokenLookupPrefix, + generateAccessToken, + generateRefreshToken, + hashAgentToken, + parseRefreshToken, +} from "./auth/agent-auth-token.js"; + +export const DEFAULT_AGENT_ACCESS_TOKEN_TTL_SECONDS = 15 * 60; +export const DEFAULT_AGENT_REFRESH_TOKEN_TTL_SECONDS = 30 * 24 * 60 * 60; + +export type AgentAuthIssue = { + sessionId: string; + accessToken: string; + accessTokenHash: string; + accessTokenPrefix: string; + refreshToken: string; + refreshTokenHash: string; + refreshTokenPrefix: string; + accessIssuedAt: string; + accessExpiresAt: string; + refreshIssuedAt: string; + refreshExpiresAt: string; + createdAt: string; + updatedAt: string; +}; + +export type AgentAuthResponse = { + tokenType: "Bearer"; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; +}; + +function invalidRefreshPayloadError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: exposeDetails + ? "Refresh payload is invalid" + : "Request could not be processed", + status: 400, + expose: true, + details: exposeDetails ? 
options.details : undefined, + }); +} + +export function parseAgentAuthRefreshPayload(input: { + payload: unknown; + environment: RegistryConfig["ENVIRONMENT"]; +}): { refreshToken: string } { + if ( + typeof input.payload !== "object" || + input.payload === null || + Array.isArray(input.payload) + ) { + throw invalidRefreshPayloadError({ + environment: input.environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, + formErrors: [], + }, + }); + } + + const payload = input.payload as Record; + if (typeof payload.refreshToken !== "string") { + throw invalidRefreshPayloadError({ + environment: input.environment, + details: { + fieldErrors: { + refreshToken: ["refreshToken is required"], + }, + formErrors: [], + }, + }); + } + + let refreshToken: string; + try { + refreshToken = parseRefreshToken(payload.refreshToken); + } catch { + throw invalidRefreshPayloadError({ + environment: input.environment, + details: { + fieldErrors: { + refreshToken: ["refreshToken format is invalid"], + }, + formErrors: [], + }, + }); + } + + return { + refreshToken, + }; +} + +export async function issueAgentAuth(options?: { + nowMs?: number; + accessTtlSeconds?: number; + refreshTtlSeconds?: number; +}): Promise { + const nowMs = options?.nowMs ?? Date.now(); + const accessTtlSeconds = + options?.accessTtlSeconds ?? DEFAULT_AGENT_ACCESS_TOKEN_TTL_SECONDS; + const refreshTtlSeconds = + options?.refreshTtlSeconds ?? 
DEFAULT_AGENT_REFRESH_TOKEN_TTL_SECONDS; + const accessToken = generateAccessToken(); + const refreshToken = generateRefreshToken(); + + const [accessTokenHash, refreshTokenHash] = await Promise.all([ + hashAgentToken(accessToken), + hashAgentToken(refreshToken), + ]); + + const accessIssuedAt = nowIso(); + const refreshIssuedAt = accessIssuedAt; + const accessExpiresAt = addSeconds(new Date(nowMs), accessTtlSeconds); + const refreshExpiresAt = addSeconds(new Date(nowMs), refreshTtlSeconds); + const createdAt = accessIssuedAt; + const updatedAt = accessIssuedAt; + + return { + sessionId: generateUlid(nowMs), + accessToken, + accessTokenHash, + accessTokenPrefix: deriveAccessTokenLookupPrefix(accessToken), + refreshToken, + refreshTokenHash, + refreshTokenPrefix: deriveRefreshTokenLookupPrefix(refreshToken), + accessIssuedAt, + accessExpiresAt, + refreshIssuedAt, + refreshExpiresAt, + createdAt, + updatedAt, + }; +} + +export function toAgentAuthResponse(input: { + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; +}): AgentAuthResponse { + return { + tokenType: "Bearer", + accessToken: input.accessToken, + accessExpiresAt: input.accessExpiresAt, + refreshToken: input.refreshToken, + refreshExpiresAt: input.refreshExpiresAt, + }; +} + +export function agentAuthRefreshUnauthorizedError(): AppError { + return new AppError({ + code: "AGENT_AUTH_REFRESH_UNAUTHORIZED", + message: "Agent auth refresh is unauthorized", + status: 401, + expose: true, + }); +} + +export function agentAuthRefreshRejectedError(options: { + code: + | "AGENT_AUTH_REFRESH_REVOKED" + | "AGENT_AUTH_REFRESH_EXPIRED" + | "AGENT_AUTH_REFRESH_INVALID"; + message: string; +}): AppError { + return new AppError({ + code: options.code, + message: options.message, + status: 401, + expose: true, + }); +} + +export function agentAuthRefreshConflictError(): AppError { + return new AppError({ + code: "AGENT_AUTH_REFRESH_CONFLICT", + message: "Agent auth refresh 
state changed; retry request", + status: 409, + expose: true, + }); +} diff --git a/apps/registry/src/auth/AGENTS.md b/apps/registry/src/auth/AGENTS.md index 78609aa..cbc4db7 100644 --- a/apps/registry/src/auth/AGENTS.md +++ b/apps/registry/src/auth/AGENTS.md @@ -18,3 +18,10 @@ ## Verification - Cover valid, invalid, and missing PAT paths in `server.test.ts`. - Verify middleware updates `api_keys.last_used_at` on successful auth. + +## Agent Auth Refresh Rules +- Keep agent refresh token helpers (`clw_rft_`, `clw_agt_`, prefix derivation, hashing, token generation) centralized in `agent-auth-token.ts`. +- Verify agent-authenticated refresh requests using `Authorization: Claw ` and PoP headers; never trust refresh payload without AIT + PoP verification. +- Enforce issuer + keyset-based AIT verification against active registry signing keys only. +- Validate `X-Claw-Timestamp` skew and fail closed on malformed/expired signatures. +- Never log or persist plaintext refresh/access tokens server-side; persist only hash/prefix material. 
diff --git a/apps/registry/src/auth/agent-auth-token.ts b/apps/registry/src/auth/agent-auth-token.ts new file mode 100644 index 0000000..ada8957 --- /dev/null +++ b/apps/registry/src/auth/agent-auth-token.ts @@ -0,0 +1,110 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { AppError } from "@clawdentity/sdk"; + +export const AGENT_ACCESS_TOKEN_MARKER = "clw_agt_"; +export const AGENT_REFRESH_TOKEN_MARKER = "clw_rft_"; +const AGENT_TOKEN_LOOKUP_ENTROPY_LENGTH = 8; +const AGENT_TOKEN_RANDOM_BYTES_LENGTH = 32; + +function parseAgentToken(options: { + token: string | undefined; + marker: string; + field: "accessToken" | "refreshToken"; +}): string { + const trimmedToken = options.token?.trim(); + + if (!trimmedToken) { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: "Refresh payload is invalid", + status: 400, + expose: true, + details: { + fieldErrors: { + [options.field]: [`${options.field} is required`], + }, + formErrors: [], + }, + }); + } + + if ( + !trimmedToken.startsWith(options.marker) || + trimmedToken.length <= options.marker.length + ) { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: "Refresh payload is invalid", + status: 400, + expose: true, + details: { + fieldErrors: { + [options.field]: [`${options.field} format is invalid`], + }, + formErrors: [], + }, + }); + } + + return trimmedToken; +} + +export function parseAccessToken(token: string | undefined): string { + return parseAgentToken({ + token, + marker: AGENT_ACCESS_TOKEN_MARKER, + field: "accessToken", + }); +} + +export function parseRefreshToken(token: string | undefined): string { + return parseAgentToken({ + token, + marker: AGENT_REFRESH_TOKEN_MARKER, + field: "refreshToken", + }); +} + +function deriveTokenLookupPrefix(token: string, marker: string): string { + const entropyPrefix = token.slice( + marker.length, + marker.length + AGENT_TOKEN_LOOKUP_ENTROPY_LENGTH, + ); + + return `${marker}${entropyPrefix}`; +} + +export 
function deriveAccessTokenLookupPrefix(token: string): string { + return deriveTokenLookupPrefix(token, AGENT_ACCESS_TOKEN_MARKER); +} + +export function deriveRefreshTokenLookupPrefix(token: string): string { + return deriveTokenLookupPrefix(token, AGENT_REFRESH_TOKEN_MARKER); +} + +export async function hashAgentToken(token: string): Promise<string> { + const digest = await crypto.subtle.digest( + "SHA-256", + new TextEncoder().encode(token), + ); + + return Array.from(new Uint8Array(digest)) + .map((value) => value.toString(16).padStart(2, "0")) + .join(""); +} + +function generateToken(marker: string): string { + const randomBytes = crypto.getRandomValues( + new Uint8Array(AGENT_TOKEN_RANDOM_BYTES_LENGTH), + ); + + return `${marker}${encodeBase64url(randomBytes)}`; +} + +export function generateAccessToken(): string { + return generateToken(AGENT_ACCESS_TOKEN_MARKER); +} + +export function generateRefreshToken(): string { + return generateToken(AGENT_REFRESH_TOKEN_MARKER); +} diff --git a/apps/registry/src/auth/agent-claw-auth.ts b/apps/registry/src/auth/agent-claw-auth.ts new file mode 100644 index 0000000..39a09f2 --- /dev/null +++ b/apps/registry/src/auth/agent-claw-auth.ts @@ -0,0 +1,147 @@ +import { type AitClaims, decodeBase64url } from "@clawdentity/protocol"; +import { + AppError, + type RegistryAitVerificationKey, + type RegistryConfig, + verifyAIT, + verifyHttpRequest, +} from "@clawdentity/sdk"; +import { resolveRegistryIssuer } from "../agent-registration.js"; + +const DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS = 300; + +function unauthorizedError(message: string): AppError { + return new AppError({ + code: "AGENT_AUTH_REFRESH_UNAUTHORIZED", + message, + status: 401, + expose: true, + }); +} + +function parseClawAuthorizationHeader(authorization?: string): string { + if (typeof authorization !== "string" || authorization.trim().length === 0) { + throw unauthorizedError("Authorization header is required"); + } + + const parsed = 
authorization.trim().match(/^Claw\s+(\S+)$/); + if (!parsed || parsed[1].trim().length === 0) { + throw unauthorizedError("Authorization must be in the format 'Claw <ait>'"); + } + + return parsed[1].trim(); +} + +function parseUnixTimestamp(headerValue: string): number { + if (!/^\d+$/.test(headerValue)) { + throw unauthorizedError("X-Claw-Timestamp must be a unix seconds integer"); + } + + const timestamp = Number.parseInt(headerValue, 10); + if (!Number.isInteger(timestamp) || timestamp < 0) { + throw unauthorizedError("X-Claw-Timestamp must be a unix seconds integer"); + } + + return timestamp; +} + +function assertTimestampWithinSkew(options: { + nowMs: number; + maxSkewSeconds: number; + timestampSeconds: number; +}): void { + const nowSeconds = Math.floor(options.nowMs / 1000); + const skew = Math.abs(nowSeconds - options.timestampSeconds); + + if (skew > options.maxSkewSeconds) { + throw unauthorizedError( + "X-Claw-Timestamp is outside the allowed skew window", + ); + } +} + +function toPathWithQuery(url: string): string { + const parsed = new URL(url, "http://localhost"); + return `${parsed.pathname}${parsed.search}`; +} + +function buildRegistryVerificationKeys( + keys: RegistryConfig["REGISTRY_SIGNING_KEYS"], +): RegistryAitVerificationKey[] { + return (keys ?? []) + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP", + crv: "Ed25519", + x: key.x, + }, + })); +} + +export async function verifyAgentClawRequest(input: { + config: RegistryConfig; + request: Request; + bodyBytes: Uint8Array; + nowMs?: number; + maxTimestampSkewSeconds?: number; +}): Promise<AitClaims> { + const nowMs = input.nowMs ?? Date.now(); + const maxTimestampSkewSeconds = + input.maxTimestampSkewSeconds ?? DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS; + const token = parseClawAuthorizationHeader( + input.request.headers.get("authorization") ?? 
undefined, + ); + const expectedIssuer = resolveRegistryIssuer(input.config.ENVIRONMENT); + const verificationKeys = buildRegistryVerificationKeys( + input.config.REGISTRY_SIGNING_KEYS, + ); + + if (verificationKeys.length === 0) { + throw unauthorizedError("Registry signing keys are unavailable"); + } + + let claims: AitClaims; + try { + claims = await verifyAIT({ + token, + registryKeys: verificationKeys, + expectedIssuer, + }); + } catch { + throw unauthorizedError("AIT verification failed"); + } + + const timestampHeader = input.request.headers.get("x-claw-timestamp"); + if (!timestampHeader) { + throw unauthorizedError("X-Claw-Timestamp header is required"); + } + + assertTimestampWithinSkew({ + nowMs, + maxSkewSeconds: maxTimestampSkewSeconds, + timestampSeconds: parseUnixTimestamp(timestampHeader), + }); + + let cnfPublicKey: Uint8Array; + try { + cnfPublicKey = decodeBase64url(claims.cnf.jwk.x); + } catch { + throw unauthorizedError("AIT public key is invalid"); + } + + try { + await verifyHttpRequest({ + method: input.request.method, + pathWithQuery: toPathWithQuery(input.request.url), + headers: Object.fromEntries(input.request.headers.entries()), + body: input.bodyBytes, + publicKey: cnfPublicKey, + }); + } catch { + throw unauthorizedError("PoP verification failed"); + } + + return claims; +} diff --git a/apps/registry/src/db/AGENTS.md b/apps/registry/src/db/AGENTS.md index a9df4f4..19bd6ed 100644 --- a/apps/registry/src/db/AGENTS.md +++ b/apps/registry/src/db/AGENTS.md @@ -9,9 +9,11 @@ - Treat contract tests (for example `schema.contract.test.ts`) as executable checks for required table/index coverage. ## Baseline Requirements -- Required tables: `humans`, `agents`, `revocations`, `api_keys`. +- Required tables: `humans`, `agents`, `revocations`, `api_keys`, `agent_auth_sessions`, `agent_auth_events`. - Required index: `idx_agents_owner_status` on `agents(owner_id, status)`. 
- Revocation `jti` lookup can be unique or non-unique; current baseline uses `revocations_jti_unique`. +- Agent auth refresh lookups require prefix indexes on `agent_auth_sessions.refresh_key_prefix` and `agent_auth_sessions.access_key_prefix`. +- One session per agent is enforced by `agent_auth_sessions_agent_id_unique`. ## Query Rules - Prefer Drizzle (`createDb`) for application reads/writes. diff --git a/apps/registry/src/db/schema.contract.test.ts b/apps/registry/src/db/schema.contract.test.ts index 7400249..fb33a23 100644 --- a/apps/registry/src/db/schema.contract.test.ts +++ b/apps/registry/src/db/schema.contract.test.ts @@ -1,13 +1,23 @@ import { getTableName } from "drizzle-orm"; import { describe, expect, it } from "vitest"; import migrationSql from "../../drizzle/0000_common_marrow.sql?raw"; -import { agents, api_keys, humans, revocations } from "./schema.js"; +import authMigrationSql from "../../drizzle/0002_agent_auth_refresh.sql?raw"; +import { + agent_auth_events, + agent_auth_sessions, + agents, + api_keys, + humans, + revocations, +} from "./schema.js"; const t10RequiredTables = [ "humans", "agents", "revocations", "api_keys", + "agent_auth_sessions", + "agent_auth_events", ] as const; describe("T10 schema contract", () => { it("defines required table names in schema source", () => { @@ -15,11 +25,13 @@ describe("T10 schema contract", () => { expect(getTableName(agents)).toBe("agents"); expect(getTableName(revocations)).toBe("revocations"); expect(getTableName(api_keys)).toBe("api_keys"); + expect(getTableName(agent_auth_sessions)).toBe("agent_auth_sessions"); + expect(getTableName(agent_auth_events)).toBe("agent_auth_events"); }); it("contains required tables in baseline migration SQL", () => { for (const tableName of t10RequiredTables) { - expect(migrationSql).toMatch( + expect(`${migrationSql}\n${authMigrationSql}`).toMatch( new RegExp(String.raw`CREATE TABLE \`${tableName}\``), ); } @@ -36,4 +48,13 @@ describe("T10 schema contract", () => { 
/CREATE (?:UNIQUE )?INDEX `[^`]+` ON `revocations` \(`jti`\);/, ); }); + + it("creates required agent auth session indexes", () => { + expect(authMigrationSql).toMatch( + /CREATE UNIQUE INDEX `agent_auth_sessions_agent_id_unique` ON `agent_auth_sessions` \(`agent_id`\);/, + ); + expect(authMigrationSql).toMatch( + /CREATE INDEX `idx_agent_auth_sessions_refresh_prefix` ON `agent_auth_sessions` \(`refresh_key_prefix`\);/, + ); + }); }); diff --git a/apps/registry/src/db/schema.ts b/apps/registry/src/db/schema.ts index aa3ed8c..cdc36da 100644 --- a/apps/registry/src/db/schema.ts +++ b/apps/registry/src/db/schema.ts @@ -1,4 +1,4 @@ -import { index, sqliteTable, text } from "drizzle-orm/sqlite-core"; +import { index, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core"; export const humans = sqliteTable("humans", { id: text("id").primaryKey(), @@ -98,6 +98,72 @@ export const agent_registration_challenges = sqliteTable( ], ); +export const agent_auth_sessions = sqliteTable( + "agent_auth_sessions", + { + id: text("id").primaryKey(), + agent_id: text("agent_id") + .notNull() + .references(() => agents.id), + refresh_key_hash: text("refresh_key_hash").notNull(), + refresh_key_prefix: text("refresh_key_prefix").notNull(), + refresh_issued_at: text("refresh_issued_at").notNull(), + refresh_expires_at: text("refresh_expires_at").notNull(), + refresh_last_used_at: text("refresh_last_used_at"), + access_key_hash: text("access_key_hash").notNull(), + access_key_prefix: text("access_key_prefix").notNull(), + access_issued_at: text("access_issued_at").notNull(), + access_expires_at: text("access_expires_at").notNull(), + access_last_used_at: text("access_last_used_at"), + status: text("status", { enum: ["active", "revoked"] }) + .notNull() + .default("active"), + revoked_at: text("revoked_at"), + created_at: text("created_at").notNull(), + updated_at: text("updated_at").notNull(), + }, + (table) => [ + 
uniqueIndex("agent_auth_sessions_agent_id_unique").on(table.agent_id), + index("idx_agent_auth_sessions_agent_status").on( + table.agent_id, + table.status, + ), + index("idx_agent_auth_sessions_refresh_prefix").on( + table.refresh_key_prefix, + ), + index("idx_agent_auth_sessions_access_prefix").on(table.access_key_prefix), + ], +); + +export const agent_auth_events = sqliteTable( + "agent_auth_events", + { + id: text("id").primaryKey(), + agent_id: text("agent_id") + .notNull() + .references(() => agents.id), + session_id: text("session_id") + .notNull() + .references(() => agent_auth_sessions.id), + event_type: text("event_type", { + enum: ["issued", "refreshed", "revoked", "refresh_rejected"], + }).notNull(), + reason: text("reason"), + metadata_json: text("metadata_json"), + created_at: text("created_at").notNull(), + }, + (table) => [ + index("idx_agent_auth_events_agent_created").on( + table.agent_id, + table.created_at, + ), + index("idx_agent_auth_events_session_created").on( + table.session_id, + table.created_at, + ), + ], +); + export const invites = sqliteTable("invites", { id: text("id").primaryKey(), code: text("code").notNull().unique(), diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index a323e27..b8670d0 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,5 +1,6 @@ import { ADMIN_BOOTSTRAP_PATH, + AGENT_AUTH_REFRESH_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, type AitClaims, canonicalizeAgentRegistrationProof, @@ -17,6 +18,7 @@ import { REQUEST_ID_HEADER, signAIT, signEd25519, + signHttpRequest, verifyAIT, verifyCRL, } from "@clawdentity/sdk"; @@ -26,6 +28,10 @@ import { DEFAULT_AGENT_FRAMEWORK, DEFAULT_AGENT_TTL_DAYS, } from "./agent-registration.js"; +import { + deriveRefreshTokenLookupPrefix, + hashAgentToken, +} from "./auth/agent-auth-token.js"; import { deriveApiKeyLookupPrefix, hashApiKeyToken, @@ -89,6 +95,29 @@ type FakeApiKeyRow = { createdAt: string; lastUsedAt: 
string | null; }; + +type FakeAgentAuthSessionRow = { + id: string; + agentId: string; + refreshKeyHash: string; + refreshKeyPrefix: string; + refreshIssuedAt: string; + refreshExpiresAt: string; + refreshLastUsedAt: string | null; + accessKeyHash: string; + accessKeyPrefix: string; + accessIssuedAt: string; + accessExpiresAt: string; + accessLastUsedAt: string | null; + status: "active" | "revoked"; + revokedAt: string | null; + createdAt: string; + updatedAt: string; +}; + +type FakeAgentAuthEventInsertRow = Record; +type FakeAgentAuthSessionInsertRow = Record; +type FakeAgentAuthSessionUpdateRow = Record; type FakeApiKeySelectRow = { id: string; human_id: string; @@ -172,6 +201,7 @@ type FakeDbOptions = { inviteRows?: FakeInviteRow[]; revocationRows?: FakeRevocationRow[]; registrationChallengeRows?: FakeAgentRegistrationChallengeRow[]; + agentAuthSessionRows?: FakeAgentAuthSessionRow[]; }; type FakeCrlSelectRow = { @@ -584,6 +614,112 @@ function resolveApiKeySelectRows(options: { return rows.slice(0, limit); } +function getAgentAuthSessionSelectColumnValue( + row: FakeAgentAuthSessionRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "agent_id") { + return row.agentId; + } + if (column === "refresh_key_hash") { + return row.refreshKeyHash; + } + if (column === "refresh_key_prefix") { + return row.refreshKeyPrefix; + } + if (column === "refresh_issued_at") { + return row.refreshIssuedAt; + } + if (column === "refresh_expires_at") { + return row.refreshExpiresAt; + } + if (column === "refresh_last_used_at") { + return row.refreshLastUsedAt; + } + if (column === "access_key_hash") { + return row.accessKeyHash; + } + if (column === "access_key_prefix") { + return row.accessKeyPrefix; + } + if (column === "access_issued_at") { + return row.accessIssuedAt; + } + if (column === "access_expires_at") { + return row.accessExpiresAt; + } + if (column === "access_last_used_at") { + return row.accessLastUsedAt; + } + if 
(column === "status") { + return row.status; + } + if (column === "revoked_at") { + return row.revokedAt; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +function resolveAgentAuthSessionSelectRows(options: { + query: string; + params: unknown[]; + sessionRows: FakeAgentAuthSessionRow[]; +}): FakeAgentAuthSessionRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasAgentIdFilter = hasFilter(whereClause, "agent_id"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasRefreshPrefixFilter = hasFilter(whereClause, "refresh_key_prefix"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const agentId = + hasAgentIdFilter && typeof equalityParams.values.agent_id?.[0] === "string" + ? String(equalityParams.values.agent_id[0]) + : undefined; + const id = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const refreshPrefix = + hasRefreshPrefixFilter && + typeof equalityParams.values.refresh_key_prefix?.[0] === "string" + ? String(equalityParams.values.refresh_key_prefix[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.sessionRows.length; + + return options.sessionRows + .filter((row) => (agentId ? row.agentId === agentId : true)) + .filter((row) => (id ? row.id === id : true)) + .filter((row) => (status ? row.status === status : true)) + .filter((row) => + refreshPrefix ? 
row.refreshKeyPrefix === refreshPrefix : true, + ) + .slice(0, limit); +} + function resolveAgentSelectRows(options: { query: string; params: unknown[]; @@ -600,6 +736,7 @@ function resolveAgentSelectRows(options: { const hasStatusFilter = hasFilter(whereClause, "status"); const hasFrameworkFilter = hasFilter(whereClause, "framework"); const hasIdFilter = hasFilter(whereClause, "id"); + const hasDidFilter = hasFilter(whereClause, "did"); const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); const hasCursorFilter = hasFilter(whereClause, "id", "<"); const hasLimitClause = options.query.toLowerCase().includes(" limit "); @@ -624,6 +761,10 @@ function resolveAgentSelectRows(options: { hasIdFilter && typeof equalityParams.values.id?.[0] === "string" ? String(equalityParams.values.id?.[0]) : undefined; + const didFilter = + hasDidFilter && typeof equalityParams.values.did?.[0] === "string" + ? String(equalityParams.values.did?.[0]) + : undefined; const currentJtiFilter = hasCurrentJtiFilter ? (equalityParams.values.current_jti?.[0] as string | null | undefined) : undefined; @@ -645,6 +786,7 @@ function resolveAgentSelectRows(options: { frameworkFilter ? row.framework === frameworkFilter : true, ) .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => (didFilter ? row.did === didFilter : true)) .filter((row) => currentJtiFilter !== undefined ? (row.currentJti ?? null) === currentJtiFilter @@ -872,12 +1014,16 @@ function createFakeDb( []; const agentRegistrationChallengeUpdates: FakeAgentRegistrationChallengeUpdateRow[] = []; + const agentAuthSessionInserts: FakeAgentAuthSessionInsertRow[] = []; + const agentAuthSessionUpdates: FakeAgentAuthSessionUpdateRow[] = []; + const agentAuthEventInserts: FakeAgentAuthEventInsertRow[] = []; const inviteInserts: FakeInviteInsertRow[] = []; const inviteUpdates: FakeInviteUpdateRow[] = []; const revocationRows = [...(options.revocationRows ?? 
[])]; const registrationChallengeRows = [ ...(options.registrationChallengeRows ?? []), ]; + const agentAuthSessionRows = [...(options.agentAuthSessionRows ?? [])]; const inviteRows = [...(options.inviteRows ?? [])]; const humanRows = rows.reduce((acc, row) => { if (acc.some((item) => item.id === row.humanId)) { @@ -1073,6 +1219,38 @@ function createFakeDb( }), }; } + if ( + (normalizedQuery.includes('from "agent_auth_sessions"') || + normalizedQuery.includes("from agent_auth_sessions")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveAgentAuthSessionSelectRows({ + query, + params, + sessionRows: agentAuthSessionRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getAgentAuthSessionSelectColumnValue( + row, + column, + ); + return acc; + }, + {}, + ); + }), + }; + } if ( (normalizedQuery.includes('from "invites"') || normalizedQuery.includes("from invites")) && @@ -1168,6 +1346,22 @@ function createFakeDb( ), ); } + if ( + normalizedQuery.includes('from "agent_auth_sessions"') || + normalizedQuery.includes("from agent_auth_sessions") + ) { + const resultRows = resolveAgentAuthSessionSelectRows({ + query, + params, + sessionRows: agentAuthSessionRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getAgentAuthSessionSelectColumnValue(row, column), + ), + ); + } if ( normalizedQuery.includes('from "humans"') || normalizedQuery.includes("from humans") @@ -1416,6 +1610,235 @@ function createFakeDb( changes = 1; } + if ( + normalizedQuery.includes('insert into "agent_auth_sessions"') || + normalizedQuery.includes("insert into agent_auth_sessions") + ) { + const columns = parseInsertColumns(query, "agent_auth_sessions"); + const 
row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + agentAuthSessionInserts.push(row); + + if ( + typeof row.id === "string" && + typeof row.agent_id === "string" && + typeof row.refresh_key_hash === "string" && + typeof row.refresh_key_prefix === "string" && + typeof row.refresh_issued_at === "string" && + typeof row.refresh_expires_at === "string" && + typeof row.access_key_hash === "string" && + typeof row.access_key_prefix === "string" && + typeof row.access_issued_at === "string" && + typeof row.access_expires_at === "string" && + (row.status === "active" || row.status === "revoked") && + typeof row.created_at === "string" && + typeof row.updated_at === "string" + ) { + const existingIndex = agentAuthSessionRows.findIndex( + (sessionRow) => sessionRow.agentId === row.agent_id, + ); + const nextSession: FakeAgentAuthSessionRow = { + id: row.id, + agentId: row.agent_id, + refreshKeyHash: row.refresh_key_hash, + refreshKeyPrefix: row.refresh_key_prefix, + refreshIssuedAt: row.refresh_issued_at, + refreshExpiresAt: row.refresh_expires_at, + refreshLastUsedAt: + typeof row.refresh_last_used_at === "string" + ? row.refresh_last_used_at + : null, + accessKeyHash: row.access_key_hash, + accessKeyPrefix: row.access_key_prefix, + accessIssuedAt: row.access_issued_at, + accessExpiresAt: row.access_expires_at, + accessLastUsedAt: + typeof row.access_last_used_at === "string" + ? row.access_last_used_at + : null, + status: row.status, + revokedAt: + typeof row.revoked_at === "string" ? 
row.revoked_at : null, + createdAt: row.created_at, + updatedAt: row.updated_at, + }; + if (existingIndex >= 0) { + agentAuthSessionRows.splice(existingIndex, 1, nextSession); + } else { + agentAuthSessionRows.push(nextSession); + } + } + + changes = 1; + } + if ( + normalizedQuery.includes('insert into "agent_auth_events"') || + normalizedQuery.includes("insert into agent_auth_events") + ) { + const columns = parseInsertColumns(query, "agent_auth_events"); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + agentAuthEventInserts.push(row); + changes = 1; + } + if ( + normalizedQuery.includes('update "agent_auth_sessions"') || + normalizedQuery.includes("update agent_auth_sessions") + ) { + const setColumns = parseUpdateSetColumns( + query, + "agent_auth_sessions", + ); + const nextValues = setColumns.reduce>( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + const whereClause = extractWhereClause(query); + const whereParams = params.slice(setColumns.length); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: whereParams, + }); + + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const agentIdFilter = + typeof equalityParams.values.agent_id?.[0] === "string" + ? String(equalityParams.values.agent_id[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const refreshHashFilter = + typeof equalityParams.values.refresh_key_hash?.[0] === "string" + ? 
String(equalityParams.values.refresh_key_hash[0]) + : undefined; + + let matchedRows = 0; + for (const row of agentAuthSessionRows) { + if (idFilter && row.id !== idFilter) { + continue; + } + if (agentIdFilter && row.agentId !== agentIdFilter) { + continue; + } + if (statusFilter && row.status !== statusFilter) { + continue; + } + if ( + refreshHashFilter && + row.refreshKeyHash !== refreshHashFilter + ) { + continue; + } + + matchedRows += 1; + if (typeof nextValues.refresh_key_hash === "string") { + row.refreshKeyHash = nextValues.refresh_key_hash; + } + if (typeof nextValues.refresh_key_prefix === "string") { + row.refreshKeyPrefix = nextValues.refresh_key_prefix; + } + if (typeof nextValues.refresh_issued_at === "string") { + row.refreshIssuedAt = nextValues.refresh_issued_at; + } + if (typeof nextValues.refresh_expires_at === "string") { + row.refreshExpiresAt = nextValues.refresh_expires_at; + } + if ( + typeof nextValues.refresh_last_used_at === "string" || + nextValues.refresh_last_used_at === null + ) { + row.refreshLastUsedAt = nextValues.refresh_last_used_at; + } + if (typeof nextValues.access_key_hash === "string") { + row.accessKeyHash = nextValues.access_key_hash; + } + if (typeof nextValues.access_key_prefix === "string") { + row.accessKeyPrefix = nextValues.access_key_prefix; + } + if (typeof nextValues.access_issued_at === "string") { + row.accessIssuedAt = nextValues.access_issued_at; + } + if (typeof nextValues.access_expires_at === "string") { + row.accessExpiresAt = nextValues.access_expires_at; + } + if ( + typeof nextValues.access_last_used_at === "string" || + nextValues.access_last_used_at === null + ) { + row.accessLastUsedAt = nextValues.access_last_used_at; + } + if ( + nextValues.status === "active" || + nextValues.status === "revoked" + ) { + row.status = nextValues.status; + } + if ( + typeof nextValues.revoked_at === "string" || + nextValues.revoked_at === null + ) { + row.revokedAt = nextValues.revoked_at; + } + if (typeof 
nextValues.updated_at === "string") { + row.updatedAt = nextValues.updated_at; + } + } + + agentAuthSessionUpdates.push({ + ...nextValues, + id: idFilter, + agent_id: agentIdFilter, + status_where: statusFilter, + refresh_key_hash_where: refreshHashFilter, + matched_rows: matchedRows, + }); + changes = matchedRows; + } + if ( + normalizedQuery.includes('delete from "agent_auth_sessions"') || + normalizedQuery.includes("delete from agent_auth_sessions") + ) { + const whereClause = extractWhereClause(query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params, + }); + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + + if (idFilter) { + for ( + let index = agentAuthSessionRows.length - 1; + index >= 0; + index -= 1 + ) { + if (agentAuthSessionRows[index]?.id === idFilter) { + agentAuthSessionRows.splice(index, 1); + changes += 1; + } + } + } + } if ( normalizedQuery.includes('insert into "invites"') || normalizedQuery.includes("insert into invites") @@ -1824,6 +2247,10 @@ function createFakeDb( humanRows, humanInserts, apiKeyInserts, + agentAuthSessionRows, + agentAuthSessionInserts, + agentAuthSessionUpdates, + agentAuthEventInserts, agentInserts, agentUpdates, agentRegistrationChallengeInserts, @@ -1881,6 +2308,40 @@ async function signRegistrationChallenge(options: { return encodeEd25519SignatureBase64url(signature); } +async function createSignedAgentRefreshRequest(options: { + ait: string; + secretKey: Uint8Array; + refreshToken: string; + timestamp?: string; + nonce?: string; +}): Promise<{ + body: string; + headers: Record; +}> { + const timestamp = options.timestamp ?? String(Math.floor(Date.now() / 1000)); + const nonce = options.nonce ?? 
"nonce-agent-refresh"; + const body = JSON.stringify({ + refreshToken: options.refreshToken, + }); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: AGENT_AUTH_REFRESH_PATH, + timestamp, + nonce, + body: new TextEncoder().encode(body), + secretKey: options.secretKey, + }); + + return { + body, + headers: { + authorization: `Claw ${options.ait}`, + "content-type": "application/json", + ...signed.headers, + }, + }; +} + describe("GET /health", () => { it("returns status ok with fallback version", async () => { const res = await app.request( @@ -5126,7 +5587,12 @@ describe("POST /v1/agents", () => { it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { const { token, authRow } = await makeValidPatContext(); - const { database, agentInserts } = createFakeDb([authRow]); + const { + database, + agentInserts, + agentAuthSessionInserts, + agentAuthEventInserts, + } = createFakeDb([authRow]); const signer = await generateEd25519Keypair(); const agentKeypair = await generateEd25519Keypair(); const appInstance = createRegistryApp(); @@ -5221,6 +5687,13 @@ describe("POST /v1/agents", () => { updatedAt: string; }; ait: string; + agentAuth: { + tokenType: string; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; + }; }; expect(body.agent.name).toBe("agent-01"); @@ -5229,6 +5702,15 @@ describe("POST /v1/agents", () => { expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); expect(body.agent.status).toBe("active"); expect(body.ait).toEqual(expect.any(String)); + expect(body.agentAuth.tokenType).toBe("Bearer"); + expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); + expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); + expect(Date.parse(body.agentAuth.accessExpiresAt)).toBeGreaterThan( + Date.now(), + ); + expect(Date.parse(body.agentAuth.refreshExpiresAt)).toBeGreaterThan( + Date.now(), + ); 
expect(agentInserts).toHaveLength(1); const inserted = agentInserts[0]; @@ -5238,6 +5720,16 @@ describe("POST /v1/agents", () => { expect(inserted?.public_key).toBe(encodeBase64url(agentKeypair.publicKey)); expect(inserted?.current_jti).toBe(body.agent.currentJti); expect(inserted?.expires_at).toBe(body.agent.expiresAt); + expect(agentAuthSessionInserts).toHaveLength(1); + expect(agentAuthSessionInserts[0]).toMatchObject({ + agent_id: body.agent.id, + status: "active", + }); + expect(agentAuthEventInserts).toHaveLength(1); + expect(agentAuthEventInserts[0]).toMatchObject({ + agent_id: body.agent.id, + event_type: "issued", + }); }); it("returns verifiable AIT using published keyset", async () => { @@ -5460,3 +5952,400 @@ describe("POST /v1/agents", () => { }); }); }); + +describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { + async function buildRefreshFixture() { + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const nowSeconds = Math.floor(Date.now() / 1000); + const agentId = generateUlid(Date.now()); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 1); + const refreshToken = + "clw_rft_fixture_refresh_token_value_for_registry_tests"; + const refreshTokenHash = await hashAgentToken(refreshToken); + const ait = await signAIT({ + claims: { + iss: "https://dev.api.clawdentity.com", + sub: agentDid, + ownerDid: makeHumanDid(generateUlid(Date.now() + 2)), + name: "agent-refresh-01", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(agentKeypair.publicKey), + }, + }, + iat: nowSeconds - 10, + nbf: nowSeconds - 10, + exp: nowSeconds + 3600, + jti: aitJti, + }, + signerKid: "reg-key-1", + signerKeypair: signer, + }); + + return { + signer, + agentKeypair, + agentId, + agentDid, + aitJti, + ait, + refreshToken, + refreshTokenHash, + }; + } + + it("rotates refresh credentials and returns a new agent auth bundle", async () => { + const 
fixture = await buildRefreshFixture(); + const nowIso = new Date().toISOString(); + const refreshExpiresAt = new Date(Date.now() + 60_000).toISOString(); + const { + database, + agentAuthSessionRows, + agentAuthSessionUpdates, + agentAuthEventInserts, + } = createFakeDb( + [], + [ + { + id: fixture.agentId, + did: fixture.agentDid, + ownerId: "human-1", + name: "agent-refresh-01", + framework: "openclaw", + publicKey: encodeBase64url(fixture.agentKeypair.publicKey), + status: "active", + expiresAt: null, + currentJti: fixture.aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 3), + agentId: fixture.agentId, + refreshKeyHash: fixture.refreshTokenHash, + refreshKeyPrefix: deriveRefreshTokenLookupPrefix( + fixture.refreshToken, + ), + refreshIssuedAt: nowIso, + refreshExpiresAt, + refreshLastUsedAt: null, + accessKeyHash: "old-access-hash", + accessKeyPrefix: "clw_agt_old", + accessIssuedAt: nowIso, + accessExpiresAt: refreshExpiresAt, + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + const request = await createSignedAgentRefreshRequest({ + ait: fixture.ait, + secretKey: fixture.agentKeypair.secretKey, + refreshToken: fixture.refreshToken, + }); + + const response = await createRegistryApp().request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: request.headers, + body: request.body, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(fixture.signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + agentAuth: { + tokenType: string; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; + }; + }; + 
expect(body.agentAuth.tokenType).toBe("Bearer"); + expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); + expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); + expect(body.agentAuth.refreshToken).not.toBe(fixture.refreshToken); + expect(agentAuthSessionUpdates).toHaveLength(1); + expect(agentAuthSessionRows[0]?.refreshKeyPrefix).toBe( + deriveRefreshTokenLookupPrefix(body.agentAuth.refreshToken), + ); + expect(agentAuthEventInserts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ event_type: "refreshed" }), + ]), + ); + }); + + it("rejects refresh when session is revoked", async () => { + const fixture = await buildRefreshFixture(); + const nowIso = new Date().toISOString(); + const request = await createSignedAgentRefreshRequest({ + ait: fixture.ait, + secretKey: fixture.agentKeypair.secretKey, + refreshToken: fixture.refreshToken, + }); + const { database } = createFakeDb( + [], + [ + { + id: fixture.agentId, + did: fixture.agentDid, + ownerId: "human-1", + name: "agent-refresh-01", + framework: "openclaw", + publicKey: encodeBase64url(fixture.agentKeypair.publicKey), + status: "active", + expiresAt: null, + currentJti: fixture.aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 4), + agentId: fixture.agentId, + refreshKeyHash: fixture.refreshTokenHash, + refreshKeyPrefix: deriveRefreshTokenLookupPrefix( + fixture.refreshToken, + ), + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: "old-access-hash", + accessKeyPrefix: "clw_agt_old", + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "revoked", + revokedAt: nowIso, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: request.headers, + body: 
request.body, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(fixture.signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_REFRESH_REVOKED"); + }); + + it("marks expired refresh credentials revoked and returns expired error", async () => { + const fixture = await buildRefreshFixture(); + const nowIso = new Date().toISOString(); + const { + database, + agentAuthSessionRows, + agentAuthEventInserts, + agentAuthSessionUpdates, + } = createFakeDb( + [], + [ + { + id: fixture.agentId, + did: fixture.agentDid, + ownerId: "human-1", + name: "agent-refresh-01", + framework: "openclaw", + publicKey: encodeBase64url(fixture.agentKeypair.publicKey), + status: "active", + expiresAt: null, + currentJti: fixture.aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 5), + agentId: fixture.agentId, + refreshKeyHash: fixture.refreshTokenHash, + refreshKeyPrefix: deriveRefreshTokenLookupPrefix( + fixture.refreshToken, + ), + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() - 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: "old-access-hash", + accessKeyPrefix: "clw_agt_old", + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() - 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + const request = await createSignedAgentRefreshRequest({ + ait: fixture.ait, + secretKey: fixture.agentKeypair.secretKey, + refreshToken: fixture.refreshToken, + }); + + const response = await createRegistryApp().request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: 
request.headers, + body: request.body, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(fixture.signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_REFRESH_EXPIRED"); + expect(agentAuthSessionRows[0]?.status).toBe("revoked"); + expect(agentAuthSessionUpdates).toHaveLength(1); + expect(agentAuthEventInserts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ event_type: "revoked" }), + ]), + ); + }); +}); + +describe("DELETE /v1/agents/:id/auth/revoke", () => { + it("revokes active session for owned agent and is idempotent", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(Date.now() + 10); + const nowIso = new Date().toISOString(); + const { database, agentAuthSessionRows, agentAuthEventInserts } = + createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: authRow.humanId, + name: "agent-auth-revoke", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: generateUlid(Date.now() + 11), + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 12), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_test", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: "access-hash", + accessKeyPrefix: "clw_agt_test", + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const 
appInstance = createRegistryApp(); + const firstResponse = await appInstance.request( + `/v1/agents/${agentId}/auth/revoke`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(firstResponse.status).toBe(204); + expect(agentAuthSessionRows[0]?.status).toBe("revoked"); + expect(agentAuthEventInserts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + event_type: "revoked", + reason: "owner_auth_revoke", + }), + ]), + ); + + const secondResponse = await appInstance.request( + `/v1/agents/${agentId}/auth/revoke`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(secondResponse.status).toBe(204); + }); +}); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 7391c58..07d2721 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,5 +1,6 @@ import { ADMIN_BOOTSTRAP_PATH, + AGENT_AUTH_REFRESH_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, generateUlid, INVITES_PATH, @@ -23,6 +24,13 @@ import { import { and, desc, eq, isNull, lt } from "drizzle-orm"; import { Hono } from "hono"; import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; +import { + agentAuthRefreshConflictError, + agentAuthRefreshRejectedError, + issueAgentAuth, + parseAgentAuthRefreshPayload, + toAgentAuthResponse, +} from "./agent-auth-lifecycle.js"; import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; import { buildAgentRegistrationChallenge, @@ -49,6 +57,11 @@ import { parseApiKeyCreatePayload, parseApiKeyRevokePath, } from "./api-key-lifecycle.js"; +import { + deriveRefreshTokenLookupPrefix, + hashAgentToken, +} from "./auth/agent-auth-token.js"; +import { verifyAgentClawRequest } from "./auth/agent-claw-auth.js"; import { type AuthenticatedHuman, createApiKeyAuth, @@ -61,6 +74,8 @@ import { } from "./auth/api-key-token.js"; import { createDb } 
from "./db/client.js"; import { + agent_auth_events, + agent_auth_sessions, agent_registration_challenges, agents, api_keys, @@ -126,6 +141,25 @@ type OwnedAgentRegistrationChallenge = { used_at: string | null; }; +type OwnedAgentAuthSession = { + id: string; + agent_id: string; + refresh_key_hash: string; + refresh_key_prefix: string; + refresh_issued_at: string; + refresh_expires_at: string; + refresh_last_used_at: string | null; + access_key_hash: string; + access_key_prefix: string; + access_issued_at: string; + access_expires_at: string; + access_last_used_at: string | null; + status: "active" | "revoked"; + revoked_at: string | null; + created_at: string; + updated_at: string; +}; + type InviteRow = { id: string; code: string; @@ -246,6 +280,58 @@ async function findOwnedAgent(input: { return rows[0]; } +async function findAgentAuthSessionByAgentId(input: { + db: ReturnType; + agentId: string; +}): Promise { + const rows = await input.db + .select({ + id: agent_auth_sessions.id, + agent_id: agent_auth_sessions.agent_id, + refresh_key_hash: agent_auth_sessions.refresh_key_hash, + refresh_key_prefix: agent_auth_sessions.refresh_key_prefix, + refresh_issued_at: agent_auth_sessions.refresh_issued_at, + refresh_expires_at: agent_auth_sessions.refresh_expires_at, + refresh_last_used_at: agent_auth_sessions.refresh_last_used_at, + access_key_hash: agent_auth_sessions.access_key_hash, + access_key_prefix: agent_auth_sessions.access_key_prefix, + access_issued_at: agent_auth_sessions.access_issued_at, + access_expires_at: agent_auth_sessions.access_expires_at, + access_last_used_at: agent_auth_sessions.access_last_used_at, + status: agent_auth_sessions.status, + revoked_at: agent_auth_sessions.revoked_at, + created_at: agent_auth_sessions.created_at, + updated_at: agent_auth_sessions.updated_at, + }) + .from(agent_auth_sessions) + .where(eq(agent_auth_sessions.agent_id, input.agentId)) + .limit(1); + + return rows[0]; +} + +async function findOwnedAgentByDid(input: { 
+ db: ReturnType; + did: string; +}): Promise { + const rows = await input.db + .select({ + id: agents.id, + did: agents.did, + name: agents.name, + framework: agents.framework, + public_key: agents.public_key, + status: agents.status, + expires_at: agents.expires_at, + current_jti: agents.current_jti, + }) + .from(agents) + .where(eq(agents.did, input.did)) + .limit(1); + + return rows[0]; +} + async function findOwnedAgentRegistrationChallenge(input: { db: ReturnType; ownerId: string; @@ -329,6 +415,36 @@ function isInviteExpired(input: { return expiresAtMillis <= input.nowMillis; } +function isIsoExpired(expiresAtIso: string, nowMillis: number): boolean { + const parsed = Date.parse(expiresAtIso); + if (!Number.isFinite(parsed)) { + return true; + } + + return parsed <= nowMillis; +} + +async function insertAgentAuthEvent(input: { + db: ReturnType; + agentId: string; + sessionId: string; + eventType: "issued" | "refreshed" | "revoked" | "refresh_rejected"; + reason?: string; + metadata?: Record; + createdAt?: string; +}): Promise { + await input.db.insert(agent_auth_events).values({ + id: generateUlid(Date.now()), + agent_id: input.agentId, + session_id: input.sessionId, + event_type: input.eventType, + reason: input.reason ?? null, + metadata_json: + input.metadata === undefined ? null : JSON.stringify(input.metadata), + created_at: input.createdAt ?? 
nowIso(), + }); +} + async function resolveInviteRedeemStateError(input: { db: ReturnType; inviteId: string; @@ -1212,6 +1328,7 @@ function createRegistryApp() { signerKeypair: signer.signerKeypair, }); + const initialAuth = await issueAgentAuth(); const challengeUsedAt = nowIso(); const applyRegistrationMutation = async ( executor: typeof db, @@ -1258,8 +1375,62 @@ function createRegistryApp() { created_at: registration.agent.createdAt, updated_at: registration.agent.updatedAt, }); + + await executor.insert(agent_auth_sessions).values({ + id: initialAuth.sessionId, + agent_id: registration.agent.id, + refresh_key_hash: initialAuth.refreshTokenHash, + refresh_key_prefix: initialAuth.refreshTokenPrefix, + refresh_issued_at: initialAuth.refreshIssuedAt, + refresh_expires_at: initialAuth.refreshExpiresAt, + refresh_last_used_at: null, + access_key_hash: initialAuth.accessTokenHash, + access_key_prefix: initialAuth.accessTokenPrefix, + access_issued_at: initialAuth.accessIssuedAt, + access_expires_at: initialAuth.accessExpiresAt, + access_last_used_at: null, + status: "active", + revoked_at: null, + created_at: initialAuth.createdAt, + updated_at: initialAuth.updatedAt, + }); + + await insertAgentAuthEvent({ + db: executor, + agentId: registration.agent.id, + sessionId: initialAuth.sessionId, + eventType: "issued", + createdAt: initialAuth.createdAt, + metadata: { + actor: "agent_registration", + }, + }); } catch (error) { if (options.rollbackOnAgentInsertFailure) { + try { + await executor + .delete(agent_auth_sessions) + .where(eq(agent_auth_sessions.id, initialAuth.sessionId)); + } catch (rollbackError) { + logger.error("registry.agent_registration_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? 
rollbackError.name : "unknown", + stage: "auth_session_delete", + }); + } + + try { + await executor + .delete(agents) + .where(eq(agents.id, registration.agent.id)); + } catch (rollbackError) { + logger.error("registry.agent_registration_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? rollbackError.name : "unknown", + stage: "agent_delete", + }); + } + await executor .update(agent_registration_challenges) .set({ @@ -1296,7 +1467,269 @@ function createRegistryApp() { }); } - return c.json({ agent: registration.agent, ait }, 201); + return c.json( + { + agent: registration.agent, + ait, + agentAuth: toAgentAuthResponse({ + accessToken: initialAuth.accessToken, + accessExpiresAt: initialAuth.accessExpiresAt, + refreshToken: initialAuth.refreshToken, + refreshExpiresAt: initialAuth.refreshExpiresAt, + }), + }, + 201, + ); + }); + + app.post(AGENT_AUTH_REFRESH_PATH, async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + const bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer()); + + let payload: unknown; + try { + const rawBody = new TextDecoder().decode(bodyBytes); + payload = rawBody.trim().length === 0 ? {} : JSON.parse(rawBody); + } catch { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: exposeDetails + ? 
"Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const parsedPayload = parseAgentAuthRefreshPayload({ + payload, + environment: config.ENVIRONMENT, + }); + const claims = await verifyAgentClawRequest({ + config, + request: c.req.raw, + bodyBytes, + }); + const nowMillis = Date.now(); + const db = createDb(c.env.DB); + const existingAgent = await findOwnedAgentByDid({ + db, + did: claims.sub, + }); + + if (!existingAgent || existingAgent.status !== "active") { + throw agentAuthRefreshRejectedError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: "Agent auth refresh token is invalid", + }); + } + + if (existingAgent.current_jti !== claims.jti) { + throw agentAuthRefreshRejectedError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: "Agent auth refresh token is invalid", + }); + } + + const existingSession = await findAgentAuthSessionByAgentId({ + db, + agentId: existingAgent.id, + }); + + if (!existingSession) { + throw agentAuthRefreshRejectedError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: "Agent auth refresh token is invalid", + }); + } + + if (existingSession.status !== "active") { + throw agentAuthRefreshRejectedError({ + code: "AGENT_AUTH_REFRESH_REVOKED", + message: "Agent auth refresh token is revoked", + }); + } + + const refreshPrefix = deriveRefreshTokenLookupPrefix( + parsedPayload.refreshToken, + ); + const refreshHash = await hashAgentToken(parsedPayload.refreshToken); + const refreshTokenMatches = + existingSession.refresh_key_prefix === refreshPrefix && + constantTimeEqual(existingSession.refresh_key_hash, refreshHash); + + if (!refreshTokenMatches) { + await insertAgentAuthEvent({ + db, + agentId: existingAgent.id, + sessionId: existingSession.id, + eventType: "refresh_rejected", + reason: "invalid_refresh_token", + }); + throw agentAuthRefreshRejectedError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: "Agent auth refresh token is invalid", + }); + } + + if 
(isIsoExpired(existingSession.refresh_expires_at, nowMillis)) { + const revokedAt = nowIso(); + await db + .update(agent_auth_sessions) + .set({ + status: "revoked", + revoked_at: revokedAt, + updated_at: revokedAt, + }) + .where(eq(agent_auth_sessions.id, existingSession.id)); + await insertAgentAuthEvent({ + db, + agentId: existingAgent.id, + sessionId: existingSession.id, + eventType: "revoked", + reason: "refresh_token_expired", + createdAt: revokedAt, + }); + throw agentAuthRefreshRejectedError({ + code: "AGENT_AUTH_REFRESH_EXPIRED", + message: "Agent auth refresh token is expired", + }); + } + + const rotatedAuth = await issueAgentAuth({ + nowMs: nowMillis, + }); + const refreshedAt = nowIso(); + const applyRefreshMutation = async (executor: typeof db): Promise => { + const updateResult = await executor + .update(agent_auth_sessions) + .set({ + refresh_key_hash: rotatedAuth.refreshTokenHash, + refresh_key_prefix: rotatedAuth.refreshTokenPrefix, + refresh_issued_at: rotatedAuth.refreshIssuedAt, + refresh_expires_at: rotatedAuth.refreshExpiresAt, + refresh_last_used_at: refreshedAt, + access_key_hash: rotatedAuth.accessTokenHash, + access_key_prefix: rotatedAuth.accessTokenPrefix, + access_issued_at: rotatedAuth.accessIssuedAt, + access_expires_at: rotatedAuth.accessExpiresAt, + access_last_used_at: null, + status: "active", + revoked_at: null, + updated_at: refreshedAt, + }) + .where( + and( + eq(agent_auth_sessions.id, existingSession.id), + eq(agent_auth_sessions.status, "active"), + eq(agent_auth_sessions.refresh_key_hash, refreshHash), + ), + ); + + const updatedRows = getMutationRowCount(updateResult); + if (updatedRows === 0) { + throw agentAuthRefreshConflictError(); + } + + await insertAgentAuthEvent({ + db: executor, + agentId: existingAgent.id, + sessionId: existingSession.id, + eventType: "refreshed", + createdAt: refreshedAt, + }); + }; + + try { + await db.transaction(async (tx) => { + await applyRefreshMutation(tx as unknown as typeof db); + }); 
+ } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRefreshMutation(db); + } + + return c.json({ + agentAuth: toAgentAuthResponse({ + accessToken: rotatedAuth.accessToken, + accessExpiresAt: rotatedAuth.accessExpiresAt, + refreshToken: rotatedAuth.refreshToken, + refreshExpiresAt: rotatedAuth.refreshExpiresAt, + }), + }); + }); + + app.delete("/v1/agents/:id/auth/revoke", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const agentId = parseAgentRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + const existingAgent = await findOwnedAgent({ + db, + ownerId: human.id, + agentId, + }); + + if (!existingAgent) { + throw agentNotFoundError(); + } + + const existingSession = await findAgentAuthSessionByAgentId({ + db, + agentId: existingAgent.id, + }); + if (!existingSession || existingSession.status === "revoked") { + return c.body(null, 204); + } + + const revokedAt = nowIso(); + const applyAuthRevokeMutation = async ( + executor: typeof db, + ): Promise => { + await executor + .update(agent_auth_sessions) + .set({ + status: "revoked", + revoked_at: revokedAt, + updated_at: revokedAt, + }) + .where( + and( + eq(agent_auth_sessions.id, existingSession.id), + eq(agent_auth_sessions.status, "active"), + ), + ); + + await insertAgentAuthEvent({ + db: executor, + agentId: existingAgent.id, + sessionId: existingSession.id, + eventType: "revoked", + reason: "owner_auth_revoke", + createdAt: revokedAt, + }); + }; + + try { + await db.transaction(async (tx) => { + await applyAuthRevokeMutation(tx as unknown as typeof db); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyAuthRevokeMutation(db); + } + + return c.body(null, 204); }); app.delete("/v1/agents/:id", createApiKeyAuth(), async (c) => { @@ -1331,6 +1764,10 @@ function createRegistryApp() { 
}), }); + const existingSession = await findAgentAuthSessionByAgentId({ + db, + agentId: existingAgent.id, + }); const revokedAt = nowIso(); const applyRevokeMutation = async (executor: typeof db): Promise => { await executor @@ -1353,6 +1790,31 @@ function createRegistryApp() { .onConflictDoNothing({ target: revocations.jti, }); + + if (existingSession && existingSession.status === "active") { + await executor + .update(agent_auth_sessions) + .set({ + status: "revoked", + revoked_at: revokedAt, + updated_at: revokedAt, + }) + .where( + and( + eq(agent_auth_sessions.id, existingSession.id), + eq(agent_auth_sessions.status, "active"), + ), + ); + + await insertAgentAuthEvent({ + db: executor, + agentId: existingAgent.id, + sessionId: existingSession.id, + eventType: "revoked", + reason: "agent_revoked", + createdAt: revokedAt, + }); + } }; try { diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index d875eab..42035d9 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -18,7 +18,7 @@ - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). - Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. -- Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `ME_API_KEYS_PATH`) so registry and CLI stay contract-synchronized. +- Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `AGENT_AUTH_REFRESH_PATH`, `ME_API_KEYS_PATH`) so registry and CLI stay contract-synchronized. 
- Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. - Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 98aa369..8b36118 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -1,5 +1,6 @@ export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; export const AGENT_REGISTRATION_CHALLENGE_PATH = "/v1/agents/challenge"; +export const AGENT_AUTH_REFRESH_PATH = "/v1/agents/auth/refresh"; export const INVITES_PATH = "/v1/invites"; export const INVITES_REDEEM_PATH = "/v1/invites/redeem"; export const ME_API_KEYS_PATH = "/v1/me/api-keys"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index c232d82..b8a4c5d 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "vitest"; import { ADMIN_BOOTSTRAP_PATH, + AGENT_AUTH_REFRESH_PATH, AGENT_NAME_REGEX, AGENT_REGISTRATION_CHALLENGE_PATH, AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, @@ -37,6 +38,7 @@ describe("protocol", () => { it("exports shared endpoint constants", () => { expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); expect(AGENT_REGISTRATION_CHALLENGE_PATH).toBe("/v1/agents/challenge"); + expect(AGENT_AUTH_REFRESH_PATH).toBe("/v1/agents/auth/refresh"); expect(INVITES_PATH).toBe("/v1/invites"); expect(INVITES_REDEEM_PATH).toBe("/v1/invites/redeem"); expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index afcf910..491afef 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -22,6 +22,7 @@ export type { ClawDidKind } from "./did.js"; 
export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export { ADMIN_BOOTSTRAP_PATH, + AGENT_AUTH_REFRESH_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, INVITES_PATH, INVITES_REDEEM_PATH, From e33fb7f2f43b83a81c927859cca08b934ff2cc61 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 18:58:46 +0530 Subject: [PATCH 065/190] feat: enforce runtime agent auth validation flow --- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/agent.test.ts | 75 ++-- apps/cli/src/commands/agent.ts | 103 +----- apps/openclaw-skill/src/AGENTS.md | 5 + apps/openclaw-skill/src/index.ts | 6 + .../src/transforms/registry-auth.test.ts | 128 +++++++ .../src/transforms/registry-auth.ts | 195 +++++++++++ .../src/transforms/relay-to-peer.test.ts | 141 +++++++- .../src/transforms/relay-to-peer.ts | 194 +++++++++-- apps/proxy/src/AGENTS.md | 2 + apps/proxy/src/auth-middleware.test.ts | 84 +++++ apps/proxy/src/auth-middleware.ts | 64 +++- apps/registry/src/AGENTS.md | 8 + apps/registry/src/auth/AGENTS.md | 5 + apps/registry/src/server.test.ts | 269 +++++++++++++++ apps/registry/src/server.ts | 150 ++++++++ packages/protocol/AGENTS.md | 2 +- packages/protocol/src/endpoints.ts | 1 + packages/protocol/src/index.test.ts | 2 + packages/protocol/src/index.ts | 1 + packages/sdk/AGENTS.md | 5 + packages/sdk/src/agent-auth-client.test.ts | 154 +++++++++ packages/sdk/src/agent-auth-client.ts | 321 ++++++++++++++++++ packages/sdk/src/index.test.ts | 42 +++ packages/sdk/src/index.ts | 6 + 25 files changed, 1781 insertions(+), 183 deletions(-) create mode 100644 apps/openclaw-skill/src/transforms/registry-auth.test.ts create mode 100644 apps/openclaw-skill/src/transforms/registry-auth.ts create mode 100644 packages/sdk/src/agent-auth-client.test.ts create mode 100644 packages/sdk/src/agent-auth-client.ts diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index a2fb05d..83c5047 100644 --- a/apps/cli/src/commands/AGENTS.md +++ 
b/apps/cli/src/commands/AGENTS.md @@ -48,6 +48,7 @@ - `agent create` must use a two-step registration handshake: request challenge from registry, sign canonical challenge message locally with agent private key, then submit registration with `challengeId` + `challengeSignature`. - `agent create` must persist returned `agentAuth` bootstrap credentials to `registry-auth.json` alongside `identity.json`, `secret.key`, `public.key`, and `ait.jwt`. - `agent auth refresh` must call `AGENT_AUTH_REFRESH_PATH` from `@clawdentity/protocol` using Claw + PoP headers and local refresh token payload, and PoP signing must use the resolved request path (including any registry base path prefix). +- `agent auth refresh` should call the shared SDK refresh client (`refreshAgentAuthWithClawProof`) so refresh request signing/error mapping stays consistent with runtime integrations. - `agent auth refresh` must rewrite `registry-auth.json` atomically on success and keep error mapping stable for `400`, `401`, `409`, and `5xx`. - Never send or log agent private keys; only send public key and proof signature. - Keep proof canonicalization sourced from `@clawdentity/protocol` helper exports to avoid CLI/registry signature drift. 
diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index acb1239..d4b2d2d 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -37,7 +37,7 @@ vi.mock("@clawdentity/sdk", () => ({ encodeEd25519SignatureBase64url: vi.fn(), encodeEd25519KeypairBase64url: vi.fn(), generateEd25519Keypair: vi.fn(), - signHttpRequest: vi.fn(), + refreshAgentAuthWithClawProof: vi.fn(), signEd25519: vi.fn(), })); @@ -47,8 +47,8 @@ import { encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, generateEd25519Keypair, + refreshAgentAuthWithClawProof, signEd25519, - signHttpRequest, } from "@clawdentity/sdk"; import { resolveConfig } from "../config/manager.js"; import { createAgentCommand } from "./agent.js"; @@ -62,7 +62,9 @@ const mockedUnlink = vi.mocked(unlink); const mockedWriteFile = vi.mocked(writeFile); const mockedResolveConfig = vi.mocked(resolveConfig); const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); -const mockedSignHttpRequest = vi.mocked(signHttpRequest); +const mockedRefreshAgentAuthWithClawProof = vi.mocked( + refreshAgentAuthWithClawProof, +); const mockedSignEd25519 = vi.mocked(signEd25519); const mockedEncodeEd25519SignatureBase64url = vi.mocked( encodeEd25519SignatureBase64url, @@ -164,16 +166,6 @@ describe("agent create command", () => { }); mockedSignEd25519.mockResolvedValue(Uint8Array.from([1, 2, 3])); - mockedSignHttpRequest.mockResolvedValue({ - canonicalRequest: "canonical", - proof: "proof", - headers: { - "X-Claw-Timestamp": "1739364000", - "X-Claw-Nonce": "nonce-value", - "X-Claw-Body-SHA256": "body-sha", - "X-Claw-Proof": "proof", - }, - }); mockedEncodeEd25519SignatureBase64url.mockReturnValue( "challenge-signature-b64url", ); @@ -408,16 +400,6 @@ describe("agent auth refresh command", () => { vi.clearAllMocks(); mockFetch.mockReset(); vi.stubGlobal("fetch", mockFetch); - mockedSignHttpRequest.mockResolvedValue({ - canonicalRequest: "canonical", - 
proof: "proof", - headers: { - "X-Claw-Timestamp": "1739364000", - "X-Claw-Nonce": "nonce-value", - "X-Claw-Body-SHA256": "body-sha", - "X-Claw-Proof": "proof", - }, - }); mockedReadFile.mockImplementation(async (path) => { const filePath = String(path); @@ -446,17 +428,13 @@ describe("agent auth refresh command", () => { throw buildErrnoError("ENOENT"); }); - mockFetch.mockResolvedValue( - createJsonResponse(200, { - agentAuth: { - tokenType: "Bearer", - accessToken: "clw_agt_new_access", - accessExpiresAt: "2030-01-02T00:15:00.000Z", - refreshToken: "clw_rft_new_refresh", - refreshExpiresAt: "2030-02-01T00:00:00.000Z", - }, - }), - ); + mockedRefreshAgentAuthWithClawProof.mockResolvedValue({ + tokenType: "Bearer", + accessToken: "clw_agt_new_access", + accessExpiresAt: "2030-01-02T00:15:00.000Z", + refreshToken: "clw_rft_new_refresh", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }); }); afterEach(() => { @@ -467,20 +445,11 @@ describe("agent auth refresh command", () => { it("refreshes agent auth and rewrites registry-auth.json", async () => { const result = await runAgentCommand(["auth", "refresh", "agent-01"]); - expect(mockedSignHttpRequest).toHaveBeenCalledWith( + expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( expect.objectContaining({ - method: "POST", - pathWithQuery: "/v1/agents/auth/refresh", - }), - ); - expect(mockFetch).toHaveBeenCalledWith( - "https://api.clawdentity.com/v1/agents/auth/refresh", - expect.objectContaining({ - method: "POST", - headers: expect.objectContaining({ - authorization: "Claw ait.jwt.value", - "content-type": "application/json", - }), + registryUrl: "https://api.clawdentity.com", + ait: "ait.jwt.value", + refreshToken: "clw_rft_old_refresh", }), ); const [tempPath, tempContents, tempEncoding] = mockedWriteFile.mock @@ -529,10 +498,10 @@ describe("agent auth refresh command", () => { expect(result.stderr).toContain("registry-auth.json"); expect(result.exitCode).toBe(1); - 
expect(mockFetch).not.toHaveBeenCalled(); + expect(mockedRefreshAgentAuthWithClawProof).not.toHaveBeenCalled(); }); - it("signs refresh proof with the resolved endpoint path for base-path registries", async () => { + it("passes base-path registry urls through to shared refresh client", async () => { mockedReadFile.mockImplementation(async (path) => { const filePath = String(path); if (filePath.endsWith("/ait.jwt")) { @@ -562,15 +531,11 @@ describe("agent auth refresh command", () => { await runAgentCommand(["auth", "refresh", "agent-01"]); - expect(mockedSignHttpRequest).toHaveBeenCalledWith( + expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( expect.objectContaining({ - pathWithQuery: "/registry/v1/agents/auth/refresh", + registryUrl: "https://api.clawdentity.com/registry", }), ); - expect(mockFetch).toHaveBeenCalledWith( - "https://api.clawdentity.com/registry/v1/agents/auth/refresh", - expect.any(Object), - ); }); }); diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index ce43753..d37f2c1 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -9,11 +9,9 @@ import { } from "node:fs/promises"; import { join } from "node:path"; import { - AGENT_AUTH_REFRESH_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, canonicalizeAgentRegistrationProof, decodeBase64url, - encodeBase64url, parseDid, } from "@clawdentity/protocol"; import { @@ -23,8 +21,8 @@ import { encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, generateEd25519Keypair, + refreshAgentAuthWithClawProof, signEd25519, - signHttpRequest, } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; @@ -353,22 +351,6 @@ const toRegistryAgentChallengeRequestUrl = (registryUrl: string): string => { ).toString(); }; -const toRegistryAgentAuthRefreshRequestUrl = (registryUrl: string): string => { - const normalizedBaseUrl = registryUrl.endsWith("/") - ? 
registryUrl - : `${registryUrl}/`; - - return new URL( - AGENT_AUTH_REFRESH_PATH.slice(1), - normalizedBaseUrl, - ).toString(); -}; - -const toPathWithQuery = (requestUrl: string): string => { - const parsed = new URL(requestUrl); - return `${parsed.pathname}${parsed.search}`; -}; - const toHttpErrorMessage = (status: number, responseBody: unknown): string => { const registryMessage = extractRegistryErrorMessage(responseBody); @@ -765,49 +747,6 @@ const toRevokeHttpErrorMessage = ( return `Registry request failed (${status})`; }; -const toRefreshHttpErrorMessage = ( - status: number, - responseBody: unknown, -): string => { - const registryMessage = extractRegistryErrorMessage(responseBody); - - if (status === 400) { - return registryMessage - ? `Refresh request is invalid (400): ${registryMessage}` - : "Refresh request is invalid (400)."; - } - - if (status === 401) { - return registryMessage - ? `Refresh rejected (401): ${registryMessage}` - : "Refresh rejected (401). Agent credentials are invalid, revoked, or expired."; - } - - if (status === 409) { - return registryMessage - ? `Refresh conflict (409): ${registryMessage}` - : "Refresh conflict (409). Retry the command."; - } - - if (status >= 500) { - return `Registry server error (${status}). 
Try again later.`; - } - - if (registryMessage) { - return `Registry request failed (${status}): ${registryMessage}`; - } - - return `Registry request failed (${status})`; -}; - -const parseAgentAuthRefreshResponse = (payload: unknown): AgentAuthBundle => { - if (!isRecord(payload) || !isRecord(payload.agentAuth)) { - throw new Error("Registry returned an invalid response payload"); - } - - return parseAgentAuthBundle(payload.agentAuth); -}; - const refreshAgentAuth = async (input: { agentName: string; }): Promise<{ @@ -826,46 +765,16 @@ const refreshAgentAuth = async (input: { ); } - const refreshBody = JSON.stringify({ - refreshToken: localAuth.refreshToken, - }); - const refreshUrl = toRegistryAgentAuthRefreshRequestUrl(registryUrl); - const timestamp = String(Math.floor(Date.now() / 1000)); - const nonce = encodeBase64url(crypto.getRandomValues(new Uint8Array(16))); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: toPathWithQuery(refreshUrl), - timestamp, - nonce, - body: new TextEncoder().encode(refreshBody), + const agentAuth = await refreshAgentAuthWithClawProof({ + registryUrl, + ait, secretKey, + refreshToken: localAuth.refreshToken, }); - let response: Response; - try { - response = await fetch(refreshUrl, { - method: "POST", - headers: { - authorization: `Claw ${ait}`, - "content-type": "application/json", - ...signed.headers, - }, - body: refreshBody, - }); - } catch { - throw new Error( - "Unable to connect to the registry. 
Check network access and registryUrl.", - ); - } - - const responseBody = await parseJsonResponse(response); - if (!response.ok) { - throw new Error(toRefreshHttpErrorMessage(response.status, responseBody)); - } - return { registryUrl, - agentAuth: parseAgentAuthRefreshResponse(responseBody), + agentAuth, }; }; diff --git a/apps/openclaw-skill/src/AGENTS.md b/apps/openclaw-skill/src/AGENTS.md index bf13b77..0a23597 100644 --- a/apps/openclaw-skill/src/AGENTS.md +++ b/apps/openclaw-skill/src/AGENTS.md @@ -3,6 +3,7 @@ ## Source Layout - Keep package exports in `index.ts` only. - Keep peer config helpers in `transforms/peers-config.ts`. +- Keep local agent auth state helpers in `transforms/registry-auth.ts`. - Keep network relay behavior in `transforms/relay-to-peer.ts`. ## Safety Rules @@ -10,9 +11,13 @@ - Resolve selected agent in deterministic order: explicit option, env var, `~/.clawdentity/openclaw-agent-name`, then single-agent fallback. - Do not log or persist secret material from `secret.key` or `ait.jwt`. - Keep outbound peer requests as JSON POSTs with explicit auth + PoP headers. +- Require outbound relay requests to include `x-claw-agent-access` from local `registry-auth.json`. +- Keep refresh/write operations for `registry-auth.json` lock-protected and atomic. +- On relay `401` auth failures, use shared SDK refresh+retry orchestration and retry exactly once. - Keep peer schema strict (`did`, `proxyUrl`, optional `name`) and reject malformed values early. ## Testing Rules - Use temp directories for filesystem tests; no dependency on real user home state. - Mock `fetch` in relay tests and assert emitted headers/body. - Cover both happy path and failure paths (missing peer mapping, missing credentials, invalid config). +- Include refresh-retry tests: first relay `401` -> registry refresh -> one retry success. 
diff --git a/apps/openclaw-skill/src/index.ts b/apps/openclaw-skill/src/index.ts index 84a7e78..d93c2b0 100644 --- a/apps/openclaw-skill/src/index.ts +++ b/apps/openclaw-skill/src/index.ts @@ -9,6 +9,12 @@ export { resolvePeersConfigPath, savePeersConfig, } from "./transforms/peers-config.js"; +export { + readAgentRegistryAuth, + resolveAgentRegistryAuthPath, + withAgentRegistryAuthLock, + writeAgentRegistryAuthAtomic, +} from "./transforms/registry-auth.js"; export type { RelayToPeerOptions, diff --git a/apps/openclaw-skill/src/transforms/registry-auth.test.ts b/apps/openclaw-skill/src/transforms/registry-auth.test.ts new file mode 100644 index 0000000..18f5ff3 --- /dev/null +++ b/apps/openclaw-skill/src/transforms/registry-auth.test.ts @@ -0,0 +1,128 @@ +import { + mkdirSync, + mkdtempSync, + rmSync, + statSync, + writeFileSync, +} from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { + readAgentRegistryAuth, + resolveAgentRegistryAuthPath, + withAgentRegistryAuthLock, + writeAgentRegistryAuthAtomic, +} from "./registry-auth.js"; + +function createSandbox(agentName: string): { + cleanup: () => void; + homeDir: string; +} { + const homeDir = mkdtempSync( + join(tmpdir(), "clawdentity-openclaw-registry-auth-"), + ); + mkdirSync(join(homeDir, ".clawdentity", "agents", agentName), { + recursive: true, + }); + + return { + cleanup: () => { + rmSync(homeDir, { recursive: true, force: true }); + }, + homeDir, + }; +} + +describe("registry-auth store", () => { + it("reads an existing registry-auth bundle", async () => { + const sandbox = createSandbox("alpha-agent"); + const registryAuthPath = resolveAgentRegistryAuthPath({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + }); + writeFileSync( + registryAuthPath, + `${JSON.stringify( + { + tokenType: "Bearer", + accessToken: "clw_agt_access", + accessExpiresAt: "2030-01-01T00:00:00.000Z", + refreshToken: "clw_rft_refresh", + 
refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }, + null, + 2, + )}\n`, + "utf8", + ); + + try { + const auth = await readAgentRegistryAuth({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + }); + + expect(auth.accessToken).toBe("clw_agt_access"); + expect(auth.refreshToken).toBe("clw_rft_refresh"); + } finally { + sandbox.cleanup(); + } + }); + + it("writes registry-auth atomically with secure permissions", async () => { + const sandbox = createSandbox("alpha-agent"); + + try { + await writeAgentRegistryAuthAtomic({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + auth: { + tokenType: "Bearer", + accessToken: "clw_agt_new_access", + accessExpiresAt: "2030-03-01T00:00:00.000Z", + refreshToken: "clw_rft_new_refresh", + refreshExpiresAt: "2030-04-01T00:00:00.000Z", + }, + }); + + const registryAuthPath = resolveAgentRegistryAuthPath({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + }); + const mode = statSync(registryAuthPath).mode & 0o777; + expect(mode).toBe(0o600); + const auth = await readAgentRegistryAuth({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + }); + expect(auth.accessToken).toBe("clw_agt_new_access"); + } finally { + sandbox.cleanup(); + } + }); + + it("creates and removes lock around operations", async () => { + const sandbox = createSandbox("alpha-agent"); + const registryAuthPath = resolveAgentRegistryAuthPath({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + }); + const lockPath = `${registryAuthPath}.lock`; + + try { + await withAgentRegistryAuthLock({ + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + operation: async () => { + expect(() => statSync(lockPath)).not.toThrow(); + return undefined; + }, + }); + + expect(() => statSync(lockPath)).toThrow(); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/apps/openclaw-skill/src/transforms/registry-auth.ts b/apps/openclaw-skill/src/transforms/registry-auth.ts new file mode 100644 index 0000000..e636e73 --- /dev/null +++ 
b/apps/openclaw-skill/src/transforms/registry-auth.ts @@ -0,0 +1,195 @@ +import { + chmod, + open, + readFile, + rename, + stat, + unlink, + writeFile, +} from "node:fs/promises"; +import { join } from "node:path"; +import type { AgentAuthBundle } from "@clawdentity/sdk"; + +const CLAWDENTITY_DIR = ".clawdentity"; +const AGENTS_DIR = "agents"; +const REGISTRY_AUTH_FILENAME = "registry-auth.json"; +const FILE_MODE = 0o600; +const LOCK_RETRY_DELAY_MS = 50; +const LOCK_MAX_ATTEMPTS = 200; +const STALE_LOCK_AGE_MS = 30_000; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? error.code : undefined; +} + +function sleep(delayMs: number): Promise { + return new Promise((resolve) => { + setTimeout(resolve, delayMs); + }); +} + +function parseAgentAuthBundle( + payload: unknown, + options: { agentName: string }, +): AgentAuthBundle { + if (!isRecord(payload)) { + throw new Error( + `Agent "${options.agentName}" has invalid ${REGISTRY_AUTH_FILENAME}`, + ); + } + + const tokenType = payload.tokenType; + const accessToken = payload.accessToken; + const accessExpiresAt = payload.accessExpiresAt; + const refreshToken = payload.refreshToken; + const refreshExpiresAt = payload.refreshExpiresAt; + + if ( + tokenType !== "Bearer" || + typeof accessToken !== "string" || + typeof accessExpiresAt !== "string" || + typeof refreshToken !== "string" || + typeof refreshExpiresAt !== "string" + ) { + throw new Error( + `Agent "${options.agentName}" has invalid ${REGISTRY_AUTH_FILENAME}`, + ); + } + + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + }; +} + +export function resolveAgentRegistryAuthPath(input: { + homeDir: string; + agentName: string; +}): string { + return join( + input.homeDir, + CLAWDENTITY_DIR, + AGENTS_DIR, + 
input.agentName, + REGISTRY_AUTH_FILENAME, + ); +} + +export async function readAgentRegistryAuth(input: { + homeDir: string; + agentName: string; +}): Promise { + const registryAuthPath = resolveAgentRegistryAuthPath(input); + let rawRegistryAuth: string; + try { + rawRegistryAuth = await readFile(registryAuthPath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw new Error( + `Agent "${input.agentName}" has no ${REGISTRY_AUTH_FILENAME}. Recreate agent identity or re-run auth bootstrap.`, + ); + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawRegistryAuth); + } catch { + throw new Error( + `Agent "${input.agentName}" has invalid ${REGISTRY_AUTH_FILENAME} (must be valid JSON)`, + ); + } + + return parseAgentAuthBundle(parsed, { agentName: input.agentName }); +} + +export async function writeAgentRegistryAuthAtomic(input: { + homeDir: string; + agentName: string; + auth: AgentAuthBundle; +}): Promise { + const registryAuthPath = resolveAgentRegistryAuthPath(input); + const tempPath = `${registryAuthPath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + const content = `${JSON.stringify(input.auth, null, 2)}\n`; + + await writeFile(tempPath, content, "utf8"); + await chmod(tempPath, FILE_MODE); + + try { + await rename(tempPath, registryAuthPath); + await chmod(registryAuthPath, FILE_MODE); + } catch (error) { + try { + await unlink(tempPath); + } catch { + // Best-effort cleanup. 
+ } + throw error; + } +} + +export async function withAgentRegistryAuthLock(input: { + homeDir: string; + agentName: string; + operation: () => Promise; +}): Promise { + const registryAuthPath = resolveAgentRegistryAuthPath(input); + const lockPath = `${registryAuthPath}.lock`; + let lockAcquired = false; + + for (let attempt = 0; attempt < LOCK_MAX_ATTEMPTS; attempt += 1) { + try { + const lockHandle = await open(lockPath, "wx", FILE_MODE); + await lockHandle.writeFile(`${Date.now()}`); + await lockHandle.close(); + lockAcquired = true; + break; + } catch (error) { + if (getErrorCode(error) !== "EEXIST") { + throw error; + } + + try { + const lockStat = await stat(lockPath); + if (Date.now() - lockStat.mtimeMs > STALE_LOCK_AGE_MS) { + await unlink(lockPath); + continue; + } + } catch (statError) { + if (getErrorCode(statError) !== "ENOENT") { + throw statError; + } + } + + await sleep(LOCK_RETRY_DELAY_MS); + } + } + + if (!lockAcquired) { + throw new Error( + `Timed out waiting for ${REGISTRY_AUTH_FILENAME} lock for agent "${input.agentName}"`, + ); + } + + try { + return await input.operation(); + } finally { + try { + await unlink(lockPath); + } catch { + // Best-effort cleanup. 
+ } + } +} diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts index 6834755..a894267 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts @@ -1,4 +1,10 @@ -import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import { + mkdirSync, + mkdtempSync, + readFileSync, + rmSync, + writeFileSync, +} from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; import { encodeBase64url } from "@clawdentity/protocol"; @@ -43,6 +49,35 @@ function createRelaySandbox(agentName: string): RelaySandbox { "utf8", ); writeFileSync(join(agentDirectory, "ait.jwt"), "mock.ait.jwt", "utf8"); + writeFileSync( + join(agentDirectory, "identity.json"), + `${JSON.stringify( + { + did: "did:claw:agent:01ALPHA", + name: agentName, + framework: "openclaw", + registryUrl: "https://registry.example.com", + }, + null, + 2, + )}\n`, + "utf8", + ); + writeFileSync( + join(agentDirectory, "registry-auth.json"), + `${JSON.stringify( + { + tokenType: "Bearer", + accessToken: "clw_agt_access_initial", + accessExpiresAt: "2030-01-01T00:00:00.000Z", + refreshToken: "clw_rft_refresh_initial", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }, + null, + 2, + )}\n`, + "utf8", + ); return { cleanup: () => { @@ -61,6 +96,35 @@ function writeAgentCredentials(homeDir: string, agentName: string): void { "utf8", ); writeFileSync(join(agentDirectory, "ait.jwt"), "mock.ait.jwt", "utf8"); + writeFileSync( + join(agentDirectory, "identity.json"), + `${JSON.stringify( + { + did: "did:claw:agent:01ALPHA", + name: agentName, + framework: "openclaw", + registryUrl: "https://registry.example.com", + }, + null, + 2, + )}\n`, + "utf8", + ); + writeFileSync( + join(agentDirectory, "registry-auth.json"), + `${JSON.stringify( + { + tokenType: "Bearer", + accessToken: "clw_agt_access_initial", + accessExpiresAt: 
"2030-01-01T00:00:00.000Z", + refreshToken: "clw_rft_refresh_initial", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }, + null, + 2, + )}\n`, + "utf8", + ); } describe("relay-to-peer transform", () => { @@ -110,6 +174,7 @@ describe("relay-to-peer transform", () => { const headers = new Headers(requestInit.headers); expect(headers.get("authorization")).toBe("Claw mock.ait.jwt"); expect(headers.get("content-type")).toBe("application/json"); + expect(headers.get("x-claw-agent-access")).toBe("clw_agt_access_initial"); expect(headers.get("x-claw-timestamp")).toBe("1700000000"); expect(headers.get("x-claw-nonce")).toBe("AQIDBAUGBwgJCgsMDQ4PEA"); expect(headers.get("x-claw-body-sha256")).toMatch(/^[A-Za-z0-9_-]+$/); @@ -216,6 +281,80 @@ describe("relay-to-peer transform", () => { } }); + it("refreshes auth and retries once when peer returns 401", async () => { + const sandbox = createRelaySandbox("alpha-agent"); + const fetchMock = vi.fn(async (input: unknown, init?: RequestInit) => { + const url = + typeof input === "string" + ? input + : input instanceof URL + ? 
input.toString() + : ""; + if (url === "https://peer.example.com/hooks/agent?source=skill") { + const headers = new Headers(init?.headers); + const accessToken = headers.get("x-claw-agent-access"); + if (accessToken === "clw_agt_access_initial") { + return new Response("", { status: 401 }); + } + if (accessToken === "clw_agt_access_refreshed") { + return new Response("", { status: 202 }); + } + } + + if (url === "https://registry.example.com/v1/agents/auth/refresh") { + return new Response( + JSON.stringify({ + agentAuth: { + tokenType: "Bearer", + accessToken: "clw_agt_access_refreshed", + accessExpiresAt: "2030-03-01T00:00:00.000Z", + refreshToken: "clw_rft_refresh_refreshed", + refreshExpiresAt: "2030-04-01T00:00:00.000Z", + }, + }), + { status: 200 }, + ); + } + + return new Response("not found", { status: 404 }); + }); + + try { + const result = await relayPayloadToPeer( + { + peer: "beta", + message: "retry me", + }, + { + homeDir: sandbox.homeDir, + agentName: "alpha-agent", + fetchImpl: fetchMock as typeof fetch, + }, + ); + + expect(result).toBeNull(); + expect(fetchMock).toHaveBeenCalledTimes(3); + const registryAuth = JSON.parse( + String( + readFileSync( + join( + sandbox.homeDir, + ".clawdentity", + "agents", + "alpha-agent", + "registry-auth.json", + ), + "utf8", + ), + ), + ) as { accessToken: string; refreshToken: string }; + expect(registryAuth.accessToken).toBe("clw_agt_access_refreshed"); + expect(registryAuth.refreshToken).toBe("clw_rft_refresh_refreshed"); + } finally { + sandbox.cleanup(); + } + }); + it("uses default export with transform context payload", async () => { const payload = { message: "context payload" }; const result = await relayToPeer({ payload }); diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.ts index 5589924..8b69bdd 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.ts @@ -3,19 +3,32 @@ 
import { readdir, readFile } from "node:fs/promises"; import { homedir } from "node:os"; import { join } from "node:path"; import { decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; -import { signHttpRequest } from "@clawdentity/sdk"; +import { + type AgentAuthBundle, + AppError, + executeWithAgentAuthRefreshRetry, + refreshAgentAuthWithClawProof, + signHttpRequest, +} from "@clawdentity/sdk"; import { loadPeersConfig, type PeersConfigPathOptions, } from "./peers-config.js"; +import { + readAgentRegistryAuth, + withAgentRegistryAuthLock, + writeAgentRegistryAuthAtomic, +} from "./registry-auth.js"; const CLAWDENTITY_DIR = ".clawdentity"; const AGENTS_DIR = "agents"; const SECRET_KEY_FILENAME = "secret.key"; const AIT_FILENAME = "ait.jwt"; +const IDENTITY_FILENAME = "identity.json"; const AGENT_NAME_ENV = "CLAWDENTITY_AGENT_NAME"; const OPENCLAW_AGENT_NAME_FILENAME = "openclaw-agent-name"; const NONCE_SIZE = 16; +const AGENT_ACCESS_HEADER = "x-claw-agent-access"; const textEncoder = new TextEncoder(); @@ -55,6 +68,26 @@ function parseRequiredString(value: unknown): string { return trimmed; } +function parseIdentityRegistryUrl( + payload: unknown, + options: { agentName: string }, +): string { + if (!isRecord(payload) || typeof payload.registryUrl !== "string") { + throw new Error( + `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)`, + ); + } + + const registryUrl = payload.registryUrl.trim(); + if (registryUrl.length === 0) { + throw new Error( + `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)`, + ); + } + + return registryUrl; +} + function resolvePathWithQuery(url: URL): string { return `${url.pathname}${url.search}`; } @@ -166,7 +199,7 @@ async function resolveAgentName(input: { async function readAgentCredentials(input: { agentName: string; homeDir: string; -}): Promise<{ ait: string; secretKey: Uint8Array }> { +}): Promise<{ ait: string; secretKey: Uint8Array; registryUrl: 
string }> { const agentDir = join( input.homeDir, CLAWDENTITY_DIR, @@ -175,9 +208,13 @@ async function readAgentCredentials(input: { ); const secretPath = join(agentDir, SECRET_KEY_FILENAME); const aitPath = join(agentDir, AIT_FILENAME); + const identityPath = join(agentDir, IDENTITY_FILENAME); - const encodedSecret = await readTrimmedFile(secretPath, SECRET_KEY_FILENAME); - const ait = await readTrimmedFile(aitPath, AIT_FILENAME); + const [encodedSecret, ait, rawIdentity] = await Promise.all([ + readTrimmedFile(secretPath, SECRET_KEY_FILENAME), + readTrimmedFile(aitPath, AIT_FILENAME), + readTrimmedFile(identityPath, IDENTITY_FILENAME), + ]); let secretKey: Uint8Array; try { @@ -186,9 +223,22 @@ async function readAgentCredentials(input: { throw new Error("Agent secret key is invalid"); } + let parsedIdentity: unknown; + try { + parsedIdentity = JSON.parse(rawIdentity); + } catch { + throw new Error( + `Agent "${input.agentName}" has invalid ${IDENTITY_FILENAME} (must be valid JSON)`, + ); + } + const registryUrl = parseIdentityRegistryUrl(parsedIdentity, { + agentName: input.agentName, + }); + return { ait, secretKey, + registryUrl, }; } @@ -206,6 +256,21 @@ function removePeerField( return outbound; } +function isRetryableRelayAuthError(error: unknown): boolean { + return ( + error instanceof AppError && + error.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && + error.status === 401 + ); +} + +function buildRefreshSingleFlightKey(input: { + homeDir: string; + agentName: string; +}): string { + return `${input.homeDir}:${input.agentName}`; +} + export async function relayPayloadToPeer( payload: unknown, options: RelayToPeerOptions = {}, @@ -235,48 +300,121 @@ export async function relayPayloadToPeer( overrideName: options.agentName, homeDir: home, }); - const { ait, secretKey } = await readAgentCredentials({ + const { ait, secretKey, registryUrl } = await readAgentCredentials({ agentName, homeDir: home, }); const outboundPayload = removePeerField(payload); const 
body = JSON.stringify(outboundPayload); - const peerUrl = new URL(peerEntry.proxyUrl); - const unixSeconds = Math.floor( - (options.clock ?? Date.now)() / 1000, - ).toString(); - const nonce = encodeBase64url( - (options.randomBytesImpl ?? randomBytes)(NONCE_SIZE), - ); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: resolvePathWithQuery(peerUrl), - timestamp: unixSeconds, - nonce, - body: textEncoder.encode(body), - secretKey, + const fetchImpl = resolveRelayFetch(options.fetchImpl); + const refreshSingleFlightKey = buildRefreshSingleFlightKey({ + homeDir: home, + agentName, }); - const response = await resolveRelayFetch(options.fetchImpl)( - peerUrl.toString(), - { + const sendRelayRequest = async (auth: AgentAuthBundle): Promise => { + const unixSeconds = Math.floor( + (options.clock ?? Date.now)() / 1000, + ).toString(); + const nonce = encodeBase64url( + (options.randomBytesImpl ?? randomBytes)(NONCE_SIZE), + ); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: resolvePathWithQuery(peerUrl), + timestamp: unixSeconds, + nonce, + body: textEncoder.encode(body), + secretKey, + }); + + return fetchImpl(peerUrl.toString(), { method: "POST", headers: { Authorization: `Claw ${ait}`, "Content-Type": "application/json", + [AGENT_ACCESS_HEADER]: auth.accessToken, ...signed.headers, }, body, - }, - ); + }); + }; - if (!response.ok) { - throw new Error("Peer relay request failed"); - } + const performRelay = async (auth: AgentAuthBundle): Promise => { + const response = await sendRelayRequest(auth); + if (!response.ok) { + if (response.status === 401) { + throw new AppError({ + code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", + message: "Peer relay rejected agent auth credentials", + status: 401, + expose: true, + }); + } + + throw new Error("Peer relay request failed"); + } + + return null; + }; + + const result = await executeWithAgentAuthRefreshRetry({ + key: refreshSingleFlightKey, + shouldRetry: 
isRetryableRelayAuthError, + getAuth: async () => + readAgentRegistryAuth({ + homeDir: home, + agentName, + }), + persistAuth: async () => {}, + refreshAuth: async (currentAuth) => + withAgentRegistryAuthLock({ + homeDir: home, + agentName, + operation: async () => { + const latestAuth = await readAgentRegistryAuth({ + homeDir: home, + agentName, + }); + if (latestAuth.refreshToken !== currentAuth.refreshToken) { + return latestAuth; + } + + let refreshedAuth: AgentAuthBundle; + try { + refreshedAuth = await refreshAgentAuthWithClawProof({ + registryUrl, + ait, + secretKey, + refreshToken: latestAuth.refreshToken, + fetchImpl, + }); + } catch (error) { + const afterFailureAuth = await readAgentRegistryAuth({ + homeDir: home, + agentName, + }); + if (afterFailureAuth.refreshToken !== latestAuth.refreshToken) { + return afterFailureAuth; + } + + throw error; + } + await writeAgentRegistryAuthAtomic({ + homeDir: home, + agentName, + auth: refreshedAuth, + }); + + return refreshedAuth; + }, + }), + perform: performRelay, + }); - return null; + return result; } export default async function relayToPeer( diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 79bb3e2..900a57a 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -28,11 +28,13 @@ - Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. - Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. +- Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. 
- Keep rate-limit failure semantics stable: verified requests over budget map to `429` with code `PROXY_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.rate_limit.exceeded`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. - Keep `/hooks/agent` input contract strict: require `Content-Type: application/json` and reject malformed JSON with explicit client errors. +- Keep agent-access validation centralized in `auth-middleware.ts` and call registry `POST /v1/agents/auth/validate`; treat non-`204` non-`401` responses as dependency failures (`503`). - Keep `/hooks/agent` upstream failure mapping explicit: timeout errors -> `504`, network errors -> `502`, and never log `openclawHookToken` or request payload. - Keep identity message injection optional and default-off (`INJECT_IDENTITY_INTO_MESSAGE=false`) so forwarding behavior is unchanged unless explicitly enabled. - Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. 
diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index bf79da8..7476a8a 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -1,4 +1,5 @@ import { + AGENT_AUTH_VALIDATE_PATH, generateUlid, makeAgentDid, makeHumanDid, @@ -28,6 +29,7 @@ type AuthHarnessOptions = { allowCurrentAgent?: boolean; allowCurrentOwner?: boolean; revoked?: boolean; + validateStatus?: number; }; type AuthHarness = { @@ -106,6 +108,7 @@ function createFetchMock(input: { fetchCrlFails?: boolean; fetchKeysFails?: boolean; registryPublicKeyX: string; + validateStatus?: number; }) { return vi.fn(async (requestInput: unknown): Promise => { const url = resolveRequestUrl(requestInput); @@ -144,6 +147,11 @@ function createFetchMock(input: { ); } + if (url.endsWith(AGENT_AUTH_VALIDATE_PATH)) { + const status = input.validateStatus ?? 204; + return new Response(status === 204 ? null : "", { status }); + } + return new Response("not found", { status: 404 }); }); } @@ -195,6 +203,7 @@ async function createAuthHarness( fetchCrlFails: options.fetchCrlFails, fetchKeysFails: options.fetchKeysFails, registryPublicKeyX: encodedRegistry.publicKey, + validateStatus: options.validateStatus, }); const allowListAgents = @@ -218,6 +227,22 @@ async function createAuthHarness( fetchImpl: fetchMock as typeof fetch, clock: () => NOW_MS, }, + hooks: { + fetchImpl: vi.fn( + async () => + new Response( + JSON.stringify({ + ok: true, + }), + { + status: 202, + headers: { + "content-type": "application/json", + }, + }, + ), + ) as typeof fetch, + }, registerRoutes: (nextApp) => { nextApp.post("/protected", (c) => { const auth = c.get("auth"); @@ -562,6 +587,65 @@ describe("proxy auth middleware", () => { expect(keyFetchCount).toBe(2); }); + it("requires x-claw-agent-access for /hooks/agent", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + pathWithQuery: "/hooks/agent", + 
nonce: "nonce-hooks-agent-access-required", + }); + const response = await harness.app.request("/hooks/agent", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AGENT_ACCESS_REQUIRED"); + }); + + it("rejects /hooks/agent when registry access-token validation fails", async () => { + const harness = await createAuthHarness({ + validateStatus: 401, + }); + const headers = await harness.createSignedHeaders({ + pathWithQuery: "/hooks/agent", + nonce: "nonce-hooks-agent-access-invalid", + }); + const response = await harness.app.request("/hooks/agent", { + method: "POST", + headers: { + ...headers, + "x-claw-agent-access": "clw_agt_invalid", + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AGENT_ACCESS_INVALID"); + }); + + it("accepts /hooks/agent when x-claw-agent-access validates", async () => { + const harness = await createAuthHarness({ + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + pathWithQuery: "/hooks/agent", + nonce: "nonce-hooks-agent-access-valid", + }); + const response = await harness.app.request("/hooks/agent", { + method: "POST", + headers: { + ...headers, + "x-claw-agent-access": "clw_agt_validtoken", + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(202); + }); + it("rejects non-health route when Authorization scheme is not Claw", async () => { const harness = await createAuthHarness(); const response = await harness.app.request("/protected", { diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index 5b94004..8887416 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -1,4 +1,7 @@ -import { decodeBase64url } from "@clawdentity/protocol"; +import { + 
AGENT_AUTH_VALIDATE_PATH, + decodeBase64url, +} from "@clawdentity/protocol"; import { AitJwtError, AppError, @@ -95,6 +98,17 @@ function toRegistryUrl(registryUrl: string, path: string): string { return new URL(path, normalizedBaseUrl).toString(); } +function parseAgentAccessHeader(value: string | undefined): string { + if (typeof value !== "string" || value.trim().length === 0) { + throw unauthorizedError({ + code: "PROXY_AGENT_ACCESS_REQUIRED", + message: "X-Claw-Agent-Access header is required", + }); + } + + return value.trim(); +} + function unauthorizedError(options: { code: string; message: string; @@ -294,6 +308,10 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { options.registryKeysCacheTtlMs ?? DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS; const registryUrl = normalizeRegistryUrl(options.config.registryUrl); const expectedIssuer = resolveExpectedIssuer(registryUrl); + const agentAuthValidateUrl = toRegistryUrl( + registryUrl, + AGENT_AUTH_VALIDATE_PATH, + ); let registryKeysCache: RegistryKeysCache | undefined; @@ -587,6 +605,50 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { }); } + if (c.req.path === "/hooks/agent") { + const accessToken = parseAgentAccessHeader( + c.req.header("x-claw-agent-access"), + ); + + let validateResponse: Response; + try { + validateResponse = await fetchImpl(agentAuthValidateUrl, { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid: claims.sub, + aitJti: claims.jti, + }), + }); + } catch (error) { + throw dependencyUnavailableError({ + message: "Registry agent auth validation is unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (validateResponse.status === 401) { + throw unauthorizedError({ + code: "PROXY_AGENT_ACCESS_INVALID", + message: "Agent access token is invalid or expired", + }); + } + + if (validateResponse.status !== 204) { + 
throw dependencyUnavailableError({ + message: "Registry agent auth validation is unavailable", + details: { + status: validateResponse.status, + }, + }); + } + } + c.set("auth", { agentDid: claims.sub, ownerDid: claims.ownerDid, diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 0837dc4..4101479 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -137,6 +137,14 @@ - successful refresh rotates both refresh/access credentials with a guarded update - Insert audit events in `agent_auth_events` for `refreshed`, `revoked`, and `refresh_rejected`. +## POST /v1/agents/auth/validate Contract +- Public endpoint used by proxy runtime auth enforcement; request must include `x-claw-agent-access` and JSON payload `{ agentDid, aitJti }`. +- Validate `agentDid` + `aitJti` against active agent state (`agents.status=active`, `agents.current_jti` match). +- Validate access token against active session hash/prefix material with constant-time comparison. +- Expired access credentials must return `401 AGENT_AUTH_VALIDATE_EXPIRED` without rotating refresh credentials. +- Successful validation must update `agent_auth_sessions.access_last_used_at` and return `204`. +- Treat the `access_last_used_at` write as a guarded mutation: if the update matches zero rows, fail closed with `401 AGENT_AUTH_VALIDATE_UNAUTHORIZED` to prevent race-window acceptance after concurrent refresh/revoke. + ## DELETE /v1/agents/:id Contract - Require PAT auth via `createApiKeyAuth`; only the caller-owned agent may be revoked. - Validate `:id` as ULID in `agent-revocation.ts`; path validation errors must be environment-aware via `shouldExposeVerboseErrors`. diff --git a/apps/registry/src/auth/AGENTS.md b/apps/registry/src/auth/AGENTS.md index cbc4db7..248150c 100644 --- a/apps/registry/src/auth/AGENTS.md +++ b/apps/registry/src/auth/AGENTS.md @@ -25,3 +25,8 @@ - Enforce issuer + keyset-based AIT verification against active registry signing keys only. 
- Validate `X-Claw-Timestamp` skew and fail closed on malformed/expired signatures. - Never log or persist plaintext refresh/access tokens server-side; persist only hash/prefix material. + +## Agent Access Validation Rules +- Keep access-token parsing (`clw_agt_`) centralized in `agent-auth-token.ts`; do not duplicate marker/format checks in route handlers. +- `POST /v1/agents/auth/validate` must fail closed with `401` for missing/invalid/expired/revoked credentials. +- Access validation must compare hashed token material with constant-time semantics and update `access_last_used_at` on successful validation. diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index b8670d0..563a26f 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,6 +1,7 @@ import { ADMIN_BOOTSTRAP_PATH, AGENT_AUTH_REFRESH_PATH, + AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, type AitClaims, canonicalizeAgentRegistrationProof, @@ -29,6 +30,7 @@ import { DEFAULT_AGENT_TTL_DAYS, } from "./agent-registration.js"; import { + deriveAccessTokenLookupPrefix, deriveRefreshTokenLookupPrefix, hashAgentToken, } from "./auth/agent-auth-token.js"; @@ -196,6 +198,9 @@ type FakeAgentSelectRow = { type FakeDbOptions = { beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; + beforeFirstAgentAuthSessionUpdate?: ( + sessionRows: FakeAgentAuthSessionRow[], + ) => void; failApiKeyInsertCount?: number; failBeginTransaction?: boolean; inviteRows?: FakeInviteRow[]; @@ -1052,6 +1057,7 @@ function createFakeDb( lastUsedAt: null, })); let beforeFirstAgentUpdateApplied = false; + let beforeFirstAgentAuthSessionUpdateApplied = false; let remainingApiKeyInsertFailures = options.failApiKeyInsertCount ?? 
0; const database: D1Database = { @@ -1695,6 +1701,14 @@ function createFakeDb( normalizedQuery.includes('update "agent_auth_sessions"') || normalizedQuery.includes("update agent_auth_sessions") ) { + if ( + !beforeFirstAgentAuthSessionUpdateApplied && + options.beforeFirstAgentAuthSessionUpdate + ) { + options.beforeFirstAgentAuthSessionUpdate(agentAuthSessionRows); + beforeFirstAgentAuthSessionUpdateApplied = true; + } + const setColumns = parseUpdateSetColumns( query, "agent_auth_sessions", @@ -1729,6 +1743,10 @@ function createFakeDb( typeof equalityParams.values.refresh_key_hash?.[0] === "string" ? String(equalityParams.values.refresh_key_hash[0]) : undefined; + const accessHashFilter = + typeof equalityParams.values.access_key_hash?.[0] === "string" + ? String(equalityParams.values.access_key_hash[0]) + : undefined; let matchedRows = 0; for (const row of agentAuthSessionRows) { @@ -1747,6 +1765,9 @@ function createFakeDb( ) { continue; } + if (accessHashFilter && row.accessKeyHash !== accessHashFilter) { + continue; + } matchedRows += 1; if (typeof nextValues.refresh_key_hash === "string") { @@ -1808,6 +1829,7 @@ function createFakeDb( agent_id: agentIdFilter, status_where: statusFilter, refresh_key_hash_where: refreshHashFilter, + access_key_hash_where: accessHashFilter, matched_rows: matchedRows, }); changes = matchedRows; @@ -6269,6 +6291,253 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { }); }); +describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { + it("validates active access token and updates access_last_used_at", async () => { + const nowIso = new Date().toISOString(); + const accessToken = "clw_agt_fixture_access_token_value_for_registry_tests"; + const accessTokenHash = await hashAgentToken(accessToken); + const agentId = generateUlid(Date.now() + 200); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 201); + const { database, agentAuthSessionRows, agentAuthSessionUpdates } = + createFakeDb( + [], + [ + { 
+ id: agentId, + did: agentDid, + ownerId: "human-1", + name: "agent-access-validate-01", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 202), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_fixture", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: accessTokenHash, + accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid, + aitJti, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(204); + expect(agentAuthSessionUpdates).toHaveLength(1); + expect(agentAuthSessionRows[0]?.accessLastUsedAt).not.toBeNull(); + }); + + it("rejects validation when x-claw-agent-access is missing", async () => { + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + agentDid: makeAgentDid(generateUlid(Date.now() + 203)), + aitJti: generateUlid(Date.now() + 204), + }), + }, + { + DB: {}, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_UNAUTHORIZED"); + }); + + it("rejects validation for expired access token", async () => { + const nowIso = new 
Date().toISOString(); + const accessToken = + "clw_agt_fixture_expired_access_token_for_registry_tests"; + const accessTokenHash = await hashAgentToken(accessToken); + const agentId = generateUlid(Date.now() + 205); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 206); + const { database } = createFakeDb( + [], + [ + { + id: agentId, + did: agentDid, + ownerId: "human-1", + name: "agent-access-validate-expired", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 207), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_fixture", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: accessTokenHash, + accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() - 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid, + aitJti, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_EXPIRED"); + }); + + it("rejects validation when guarded session update matches zero rows", async () => { + const nowIso = new Date().toISOString(); + const accessToken = + "clw_agt_fixture_race_window_access_token_for_registry_tests"; + const accessTokenHash = await hashAgentToken(accessToken); + const agentId = generateUlid(Date.now() 
+ 208); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 209); + const { database, agentAuthSessionUpdates } = createFakeDb( + [], + [ + { + id: agentId, + did: agentDid, + ownerId: "human-1", + name: "agent-access-validate-race", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 210), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_fixture", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: accessTokenHash, + accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + beforeFirstAgentAuthSessionUpdate: (rows) => { + if (rows[0]) { + rows[0].status = "revoked"; + } + }, + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid, + aitJti, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_UNAUTHORIZED"); + expect(agentAuthSessionUpdates).toEqual( + expect.arrayContaining([expect.objectContaining({ matched_rows: 0 })]), + ); + }); +}); + describe("DELETE /v1/agents/:id/auth/revoke", () => { it("revokes active session for owned agent and is idempotent", async () => { const { token, authRow } = await makeValidPatContext(); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 
07d2721..452bad7 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,6 +1,7 @@ import { ADMIN_BOOTSTRAP_PATH, AGENT_AUTH_REFRESH_PATH, + AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, generateUlid, INVITES_PATH, @@ -58,8 +59,10 @@ import { parseApiKeyRevokePath, } from "./api-key-lifecycle.js"; import { + deriveAccessTokenLookupPrefix, deriveRefreshTokenLookupPrefix, hashAgentToken, + parseAccessToken, } from "./auth/agent-auth-token.js"; import { verifyAgentClawRequest } from "./auth/agent-claw-auth.js"; import { @@ -424,6 +427,52 @@ function isIsoExpired(expiresAtIso: string, nowMillis: number): boolean { return parsed <= nowMillis; } +function parseAgentAuthValidatePayload(payload: unknown): { + agentDid: string; + aitJti: string; +} { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_INVALID", + message: "Validation payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + const agentDid = + typeof value.agentDid === "string" ? value.agentDid.trim() : ""; + const aitJti = typeof value.aitJti === "string" ? 
value.aitJti.trim() : ""; + + if (agentDid.length === 0 || aitJti.length === 0) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_INVALID", + message: "Validation payload is invalid", + status: 400, + expose: true, + }); + } + + return { + agentDid, + aitJti, + }; +} + +function parseAgentAccessHeaderToken(token: string | undefined): string { + try { + return parseAccessToken(token); + } catch { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", + message: "Agent access token is invalid", + status: 401, + expose: true, + }); + } +} + async function insertAgentAuthEvent(input: { db: ReturnType; agentId: string; @@ -1663,6 +1712,107 @@ function createRegistryApp() { }); }); + app.post(AGENT_AUTH_VALIDATE_PATH, async (c) => { + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_INVALID", + message: "Validation payload is invalid", + status: 400, + expose: true, + }); + } + + const parsedPayload = parseAgentAuthValidatePayload(payload); + const accessToken = parseAgentAccessHeaderToken( + c.req.header("x-claw-agent-access"), + ); + + const db = createDb(c.env.DB); + const existingAgent = await findOwnedAgentByDid({ + db, + did: parsedPayload.agentDid, + }); + if ( + !existingAgent || + existingAgent.status !== "active" || + existingAgent.current_jti !== parsedPayload.aitJti + ) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", + message: "Agent access token is invalid", + status: 401, + expose: true, + }); + } + + const existingSession = await findAgentAuthSessionByAgentId({ + db, + agentId: existingAgent.id, + }); + if (!existingSession || existingSession.status !== "active") { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", + message: "Agent access token is invalid", + status: 401, + expose: true, + }); + } + + const nowMillis = Date.now(); + if (isIsoExpired(existingSession.access_expires_at, nowMillis)) { + throw new AppError({ + 
code: "AGENT_AUTH_VALIDATE_EXPIRED", + message: "Agent access token is expired", + status: 401, + expose: true, + }); + } + + const accessTokenPrefix = deriveAccessTokenLookupPrefix(accessToken); + const accessTokenHash = await hashAgentToken(accessToken); + const accessTokenMatches = + existingSession.access_key_prefix === accessTokenPrefix && + constantTimeEqual(existingSession.access_key_hash, accessTokenHash); + if (!accessTokenMatches) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", + message: "Agent access token is invalid", + status: 401, + expose: true, + }); + } + + const accessLastUsedAt = nowIso(); + const updateResult = await db + .update(agent_auth_sessions) + .set({ + access_last_used_at: accessLastUsedAt, + updated_at: accessLastUsedAt, + }) + .where( + and( + eq(agent_auth_sessions.id, existingSession.id), + eq(agent_auth_sessions.status, "active"), + eq(agent_auth_sessions.access_key_hash, accessTokenHash), + ), + ); + + const updatedRows = getMutationRowCount(updateResult); + if (updatedRows === 0) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", + message: "Agent access token is invalid", + status: 401, + expose: true, + }); + } + + return c.body(null, 204); + }); + app.delete("/v1/agents/:id/auth/revoke", createApiKeyAuth(), async (c) => { const config = getConfig(c.env); const agentId = parseAgentRevokePath({ diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index 42035d9..dd0b1a1 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -18,7 +18,7 @@ - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). 
- Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. -- Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `AGENT_AUTH_REFRESH_PATH`, `ME_API_KEYS_PATH`) so registry and CLI stay contract-synchronized. +- Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `AGENT_AUTH_REFRESH_PATH`, `AGENT_AUTH_VALIDATE_PATH`, `ME_API_KEYS_PATH`) so registry, proxy, and CLI stay contract-synchronized. - Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. - Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 8b36118..62246dd 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -1,6 +1,7 @@ export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; export const AGENT_REGISTRATION_CHALLENGE_PATH = "/v1/agents/challenge"; export const AGENT_AUTH_REFRESH_PATH = "/v1/agents/auth/refresh"; +export const AGENT_AUTH_VALIDATE_PATH = "/v1/agents/auth/validate"; export const INVITES_PATH = "/v1/invites"; export const INVITES_REDEEM_PATH = "/v1/invites/redeem"; export const ME_API_KEYS_PATH = "/v1/me/api-keys"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index b8a4c5d..eb1a63b 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -2,6 +2,7 @@ import { describe, expect, it } from "vitest"; import { ADMIN_BOOTSTRAP_PATH, AGENT_AUTH_REFRESH_PATH, + AGENT_AUTH_VALIDATE_PATH, AGENT_NAME_REGEX, AGENT_REGISTRATION_CHALLENGE_PATH, 
AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, @@ -39,6 +40,7 @@ describe("protocol", () => { expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); expect(AGENT_REGISTRATION_CHALLENGE_PATH).toBe("/v1/agents/challenge"); expect(AGENT_AUTH_REFRESH_PATH).toBe("/v1/agents/auth/refresh"); + expect(AGENT_AUTH_VALIDATE_PATH).toBe("/v1/agents/auth/validate"); expect(INVITES_PATH).toBe("/v1/invites"); expect(INVITES_REDEEM_PATH).toBe("/v1/invites/redeem"); expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 491afef..2e6c03e 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -23,6 +23,7 @@ export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export { ADMIN_BOOTSTRAP_PATH, AGENT_AUTH_REFRESH_PATH, + AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, INVITES_PATH, INVITES_REDEEM_PATH, diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index b5b17d5..cf43aab 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -16,6 +16,7 @@ - `crl/cache`: in-memory CRL cache with periodic refresh, staleness reporting, and configurable stale behavior. - `http/sign` + `http/verify`: PoP request signing and verification that binds method, path+query, timestamp, nonce, and body hash. - `security/nonce-cache`: in-memory TTL nonce replay protection keyed by `agentDid + nonce`. +- `agent-auth-client`: shared agent auth refresh client + retry orchestration (`executeWithAgentAuthRefreshRetry`) for CLI/runtime integrations. - Tests should prove tamper cases (payload change, header kid swap, signature corruption). ## Design Rules @@ -41,6 +42,9 @@ - Registry config parsing must validate `REGISTRY_SIGNING_KEYS` as JSON before runtime use so keyset endpoints fail fast with `CONFIG_VALIDATION_FAILED` on malformed key documents. 
- Registry keyset validation must reject duplicate `kid` values and malformed `x` key material (non-base64url or non-32-byte Ed25519) so verifier behavior cannot become order-dependent. - Use `RuntimeEnvironment` + `shouldExposeVerboseErrors` from `runtime-environment` for environment-based error-detail behavior; do not duplicate ad-hoc `NODE_ENV`/string checks. +- Keep `agent-auth-client` runtime-portable (no Node-only filesystem APIs); delegate persistence/locking to callers. +- Keep refresh retry policy strict: a single refresh attempt and a single request retry on retryable auth failures. +- Keep per-agent refresh single-flight keyed by explicit caller-provided key to avoid duplicate refresh races. ## Testing Rules - Unit test each shared module. @@ -52,3 +56,4 @@ - Nonce cache tests must include duplicate nonce rejection within TTL and acceptance after TTL expiry. - CRL cache tests must cover revoked lookup, refresh-on-stale, and stale-path behavior in both `fail-open` and `fail-closed` modes. - When new registry routes emit signed AITs (e.g., POST `/v1/agents`), tests should consume those tokens with the published `REGISTRY_SIGNING_KEYS` set (as returned by `/.well-known/claw-keys.json`) and assert that `verifyAIT` succeeds/fails exactly the same way the local `claw verify` workflow will, keeping the offline verification contract fully covered. +- `agent-auth-client` tests must cover: success path, refresh HTTP error mapping, and concurrent retry callers sharing a single refresh. 
diff --git a/packages/sdk/src/agent-auth-client.test.ts b/packages/sdk/src/agent-auth-client.test.ts new file mode 100644 index 0000000..c582867 --- /dev/null +++ b/packages/sdk/src/agent-auth-client.test.ts @@ -0,0 +1,154 @@ +import { describe, expect, it, vi } from "vitest"; +import { + type AgentAuthBundle, + executeWithAgentAuthRefreshRetry, + refreshAgentAuthWithClawProof, +} from "./agent-auth-client.js"; +import { AppError } from "./exceptions.js"; + +const STALE_AUTH: AgentAuthBundle = { + tokenType: "Bearer", + accessToken: "clw_agt_old", + accessExpiresAt: "2030-01-01T00:00:00.000Z", + refreshToken: "clw_rft_old", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", +}; + +const FRESH_AUTH: AgentAuthBundle = { + tokenType: "Bearer", + accessToken: "clw_agt_new", + accessExpiresAt: "2030-03-01T00:00:00.000Z", + refreshToken: "clw_rft_new", + refreshExpiresAt: "2030-04-01T00:00:00.000Z", +}; + +describe("agent auth client", () => { + it("refreshes auth with claw proof", async () => { + const fetchMock = vi.fn(async (input: unknown, _init?: RequestInit) => { + expect(String(input)).toBe( + "https://registry.example.com/v1/agents/auth/refresh", + ); + return new Response( + JSON.stringify({ + agentAuth: FRESH_AUTH, + }), + { status: 200 }, + ); + }); + + const result = await refreshAgentAuthWithClawProof({ + registryUrl: "https://registry.example.com", + ait: "mock.ait.jwt", + secretKey: new Uint8Array(32).fill(1), + refreshToken: STALE_AUTH.refreshToken, + fetchImpl: fetchMock as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + + expect(fetchMock).toHaveBeenCalledTimes(1); + const [, init] = fetchMock.mock.calls[0]; + const headers = new Headers(init?.headers); + expect(headers.get("authorization")).toBe("Claw mock.ait.jwt"); + expect(headers.get("content-type")).toBe("application/json"); + expect(headers.get("x-claw-timestamp")).toBe("1700000000"); + expect(result).toEqual(FRESH_AUTH); + }); + + it("maps refresh 401 responses to unauthorized app errors", 
async () => { + await expect( + refreshAgentAuthWithClawProof({ + registryUrl: "https://registry.example.com", + ait: "mock.ait.jwt", + secretKey: new Uint8Array(32).fill(1), + refreshToken: STALE_AUTH.refreshToken, + fetchImpl: vi.fn( + async () => + new Response( + JSON.stringify({ + error: { + code: "AGENT_AUTH_REFRESH_EXPIRED", + message: "Agent auth refresh token is expired", + }, + }), + { status: 401 }, + ), + ) as typeof fetch, + }), + ).rejects.toMatchObject({ + code: "AGENT_AUTH_REFRESH_UNAUTHORIZED", + status: 401, + }); + }); + + it("retries once after auth failure and returns operation result", async () => { + let persistedAuth = STALE_AUTH; + const persistAuth = vi.fn(async (nextAuth: AgentAuthBundle) => { + persistedAuth = nextAuth; + }); + const refreshAuth = vi.fn(async () => FRESH_AUTH); + const perform = vi.fn(async (auth: AgentAuthBundle) => { + if (auth.accessToken === STALE_AUTH.accessToken) { + throw new AppError({ + code: "AUTH_EXPIRED", + message: "expired", + status: 401, + expose: true, + }); + } + + return "ok"; + }); + + const result = await executeWithAgentAuthRefreshRetry({ + key: "agent-alpha", + getAuth: async () => persistedAuth, + refreshAuth, + persistAuth, + perform, + }); + + expect(result).toBe("ok"); + expect(refreshAuth).toHaveBeenCalledTimes(1); + expect(persistAuth).toHaveBeenCalledWith(FRESH_AUTH); + expect(perform).toHaveBeenCalledTimes(2); + }); + + it("shares one refresh in flight across concurrent retries", async () => { + let persistedAuth = STALE_AUTH; + let refreshCalls = 0; + const refreshAuth = vi.fn(async () => { + refreshCalls += 1; + await new Promise((resolve) => { + setTimeout(resolve, 25); + }); + return FRESH_AUTH; + }); + + const run = () => + executeWithAgentAuthRefreshRetry({ + key: "agent-concurrent", + getAuth: async () => persistedAuth, + refreshAuth, + persistAuth: async (nextAuth) => { + persistedAuth = nextAuth; + }, + perform: async (auth) => { + if (auth.accessToken === STALE_AUTH.accessToken) { 
+ throw new AppError({ + code: "AUTH_EXPIRED", + message: "expired", + status: 401, + expose: true, + }); + } + return auth.accessToken; + }, + }); + + const [first, second] = await Promise.all([run(), run()]); + + expect(first).toBe("clw_agt_new"); + expect(second).toBe("clw_agt_new"); + expect(refreshCalls).toBe(1); + }); +}); diff --git a/packages/sdk/src/agent-auth-client.ts b/packages/sdk/src/agent-auth-client.ts new file mode 100644 index 0000000..a0fcf6e --- /dev/null +++ b/packages/sdk/src/agent-auth-client.ts @@ -0,0 +1,321 @@ +import { + AGENT_AUTH_REFRESH_PATH, + encodeBase64url, +} from "@clawdentity/protocol"; +import { AppError } from "./exceptions.js"; +import { signHttpRequest } from "./http/sign.js"; + +export type AgentAuthBundle = { + tokenType: "Bearer"; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; +}; + +type RegistryErrorEnvelope = { + error?: { + code?: string; + message?: string; + }; +}; + +type RefreshSingleFlightOptions = { + key: string; + run: () => Promise; +}; + +const refreshSingleFlights = new Map>(); + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +const parseNonEmptyString = (value: unknown): string => { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +}; + +const parseJsonResponse = async (response: Response): Promise => { + try { + return await response.json(); + } catch { + return undefined; + } +}; + +const toPathWithQuery = (requestUrl: string): string => { + const parsed = new URL(requestUrl); + return `${parsed.pathname}${parsed.search}`; +}; + +const parseRegistryErrorEnvelope = ( + payload: unknown, +): RegistryErrorEnvelope | undefined => { + if (!isRecord(payload)) { + return undefined; + } + + const errorValue = payload.error; + if (!isRecord(errorValue)) { + return undefined; + } + + return { + error: { + code: parseNonEmptyString(errorValue.code) || undefined, + 
message: parseNonEmptyString(errorValue.message) || undefined, + }, + }; +}; + +const parseAgentAuthBundle = (payload: unknown): AgentAuthBundle => { + if (!isRecord(payload)) { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID_RESPONSE", + message: "Registry returned an invalid refresh response payload", + status: 502, + expose: true, + }); + } + + const source = isRecord(payload.agentAuth) ? payload.agentAuth : payload; + + const tokenType = source.tokenType; + const accessToken = source.accessToken; + const accessExpiresAt = source.accessExpiresAt; + const refreshToken = source.refreshToken; + const refreshExpiresAt = source.refreshExpiresAt; + + if ( + tokenType !== "Bearer" || + typeof accessToken !== "string" || + typeof accessExpiresAt !== "string" || + typeof refreshToken !== "string" || + typeof refreshExpiresAt !== "string" + ) { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID_RESPONSE", + message: "Registry returned an invalid refresh response payload", + status: 502, + expose: true, + }); + } + + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + }; +}; + +const toRefreshHttpError = ( + status: number, + responseBody: unknown, +): AppError => { + const parsedEnvelope = parseRegistryErrorEnvelope(responseBody); + const registryCode = parsedEnvelope?.error?.code; + const registryMessage = parsedEnvelope?.error?.message; + + if (status === 400) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: registryMessage ?? "Refresh request is invalid (400).", + status, + expose: true, + details: { + registryCode, + registryMessage, + }, + }); + } + + if (status === 401) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_UNAUTHORIZED", + message: + registryMessage ?? + "Refresh rejected (401). 
Agent credentials are invalid, revoked, or expired.", + status, + expose: true, + details: { + registryCode, + registryMessage, + }, + }); + } + + if (status === 409) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_CONFLICT", + message: registryMessage ?? "Refresh conflict (409). Retry request.", + status, + expose: true, + details: { + registryCode, + registryMessage, + }, + }); + } + + if (status >= 500) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_SERVER_ERROR", + message: `Registry server error (${status}). Try again later.`, + status: 503, + expose: true, + details: { + status, + }, + }); + } + + return new AppError({ + code: "AGENT_AUTH_REFRESH_FAILED", + message: + registryMessage ?? `Registry request failed during refresh (${status}).`, + status, + expose: true, + details: { + registryCode, + registryMessage, + status, + }, + }); +}; + +const toRegistryAgentAuthRefreshRequestUrl = (registryUrl: string): string => { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + + return new URL( + AGENT_AUTH_REFRESH_PATH.slice(1), + normalizedBaseUrl, + ).toString(); +}; + +async function runRefreshSingleFlight( + options: RefreshSingleFlightOptions, +): Promise { + const existing = refreshSingleFlights.get(options.key); + if (existing) { + return existing as Promise; + } + + const inFlight = options.run().finally(() => { + if (refreshSingleFlights.get(options.key) === inFlight) { + refreshSingleFlights.delete(options.key); + } + }); + refreshSingleFlights.set(options.key, inFlight); + return inFlight; +} + +export async function refreshAgentAuthWithClawProof(input: { + registryUrl: string; + ait: string; + secretKey: Uint8Array; + refreshToken: string; + fetchImpl?: typeof fetch; + nowMs?: () => number; +}): Promise { + const fetchImpl = input.fetchImpl ?? 
globalThis.fetch; + if (typeof fetchImpl !== "function") { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_NETWORK", + message: "fetch implementation is required", + status: 500, + expose: true, + }); + } + + const refreshUrl = toRegistryAgentAuthRefreshRequestUrl(input.registryUrl); + const refreshBody = JSON.stringify({ + refreshToken: input.refreshToken, + }); + const nowMs = input.nowMs?.() ?? Date.now(); + const timestamp = String(Math.floor(nowMs / 1000)); + const nonce = encodeBase64url(crypto.getRandomValues(new Uint8Array(16))); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(refreshUrl), + timestamp, + nonce, + body: new TextEncoder().encode(refreshBody), + secretKey: input.secretKey, + }); + + let response: Response; + try { + response = await fetchImpl(refreshUrl, { + method: "POST", + headers: { + authorization: `Claw ${input.ait}`, + "content-type": "application/json", + ...signed.headers, + }, + body: refreshBody, + }); + } catch { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_NETWORK", + message: + "Unable to connect to the registry. Check network access and registryUrl.", + status: 503, + expose: true, + }); + } + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw toRefreshHttpError(response.status, responseBody); + } + + return parseAgentAuthBundle(responseBody); +} + +export function isRetryableAuthExpiryError(error: unknown): boolean { + if (!(error instanceof AppError)) { + return false; + } + + return error.status === 401; +} + +export async function executeWithAgentAuthRefreshRetry(input: { + key: string; + getAuth: () => Promise; + refreshAuth: (currentAuth: AgentAuthBundle) => Promise; + persistAuth: (refreshedAuth: AgentAuthBundle) => Promise; + perform: (auth: AgentAuthBundle) => Promise; + shouldRetry?: (error: unknown) => boolean; +}): Promise { + const shouldRetry = input.shouldRetry ?? 
isRetryableAuthExpiryError; + const currentAuth = await input.getAuth(); + + try { + return await input.perform(currentAuth); + } catch (error) { + if (!shouldRetry(error)) { + throw error; + } + + const refreshedAuth = await runRefreshSingleFlight({ + key: input.key, + run: async () => { + const latestAuth = await input.getAuth(); + const nextAuth = await input.refreshAuth(latestAuth); + await input.persistAuth(nextAuth); + return nextAuth; + }, + }); + + return input.perform(refreshedAuth); + } +} diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index 1467adb..d0f4ff1 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "vitest"; import { + type AgentAuthBundle, AitJwtError, AppError, addSeconds, @@ -13,6 +14,7 @@ import { decodeEd25519SignatureBase64url, encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, + executeWithAgentAuthRefreshRetry, generateEd25519Keypair, parseRegistryConfig, REQUEST_ID_HEADER, @@ -47,6 +49,46 @@ describe("sdk", () => { expect(AppError).toBeTypeOf("function"); }); + it("exports agent auth refresh retry helpers", async () => { + const stale: AgentAuthBundle = { + tokenType: "Bearer", + accessToken: "clw_agt_old", + accessExpiresAt: "2030-01-01T00:00:00.000Z", + refreshToken: "clw_rft_old", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }; + const fresh: AgentAuthBundle = { + tokenType: "Bearer", + accessToken: "clw_agt_new", + accessExpiresAt: "2030-03-01T00:00:00.000Z", + refreshToken: "clw_rft_new", + refreshExpiresAt: "2030-04-01T00:00:00.000Z", + }; + let current = stale; + + const result = await executeWithAgentAuthRefreshRetry({ + key: "sdk-root-export", + getAuth: async () => current, + refreshAuth: async () => fresh, + persistAuth: async (next) => { + current = next; + }, + perform: async (auth) => { + if (auth.accessToken === stale.accessToken) { + throw new AppError({ + code: "AUTH_EXPIRED", + message: 
"expired", + status: 401, + expose: true, + }); + } + return auth.accessToken; + }, + }); + + expect(result).toBe("clw_agt_new"); + }); + it("exports Ed25519 helpers from package root", async () => { const keypair = await generateEd25519Keypair(); const message = new TextEncoder().encode("root-export-crypto-test"); diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index a170c74..3943178 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -1,5 +1,11 @@ export const SDK_VERSION = "0.0.0"; +export type { AgentAuthBundle } from "./agent-auth-client.js"; +export { + executeWithAgentAuthRefreshRetry, + isRetryableAuthExpiryError, + refreshAgentAuthWithClawProof, +} from "./agent-auth-client.js"; export type { RegistryConfig } from "./config.js"; export { parseRegistryConfig, registryConfigSchema } from "./config.js"; export type { From c0739279115fd4048528b8fe026af7028e913548 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 20:14:36 +0530 Subject: [PATCH 066/190] fix(cli): bundle skill assets for npm --skill install --- AGENTS.md | 1 + PRD.md | 11 + README.md | 308 ++++++++++- apps/cli/AGENTS.md | 3 + apps/cli/package.json | 10 +- apps/cli/postinstall.mjs | 48 ++ apps/cli/scripts/AGENTS.md | 10 + apps/cli/scripts/sync-skill-bundle.mjs | 67 +++ apps/cli/skill-bundle/AGENTS.md | 12 + .../openclaw-skill/skill/SKILL.md | 126 +++++ .../skill/references/clawdentity-protocol.md | 130 +++++ apps/cli/src/AGENTS.md | 7 + apps/cli/src/install-skill-mode.test.ts | 256 +++++++++ apps/cli/src/install-skill-mode.ts | 488 ++++++++++++++++++ apps/cli/src/postinstall.ts | 5 + apps/cli/tsup.config.ts | 2 +- apps/openclaw-skill/AGENTS.md | 5 + apps/openclaw-skill/skill/SKILL.md | 9 + 18 files changed, 1489 insertions(+), 9 deletions(-) create mode 100644 apps/cli/postinstall.mjs create mode 100644 apps/cli/scripts/AGENTS.md create mode 100644 apps/cli/scripts/sync-skill-bundle.mjs create mode 100644 apps/cli/skill-bundle/AGENTS.md create 
mode 100644 apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md create mode 100644 apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md create mode 100644 apps/cli/src/install-skill-mode.test.ts create mode 100644 apps/cli/src/install-skill-mode.ts create mode 100644 apps/cli/src/postinstall.ts diff --git a/AGENTS.md b/AGENTS.md index a180049..5f802cc 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -100,6 +100,7 @@ - Reset for rerun must remove only skill-created artifacts first: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`, and `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/`. - Use a full reset only when required for identity reprovisioning, and then also clear `~/.clawdentity/agents//` before re-onboarding. - Skill-only policy: no direct `clawdentity openclaw setup` execution by humans during E2E validation; the agent must run the skill flow and prompt the human only for missing invite code or confirmations. +- npm-first contract: `npm install clawdentity --skill` must be the default install trigger for skill artifact preparation, and install logs should report deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). ## Scaffold Best Practices - Start by reviewing README, PRD, and the active execution tracker issue so documentation mirrors the execution model. diff --git a/PRD.md b/PRD.md index 8915883..38eed19 100644 --- a/PRD.md +++ b/PRD.md @@ -73,6 +73,7 @@ Because OpenClaw requires `hooks.token` and expects Bearer/token auth for `/hook - Verify token (`claw verify`) - Personal PAT lifecycle (`clawdentity api-key create|list|revoke`) - Share contact card (`claw share`) + - npm-first skill install path (`npm install clawdentity --skill`) that prepares OpenClaw relay skill artifacts automatically - **Proxy** - Verify inbound Clawdentity headers @@ -156,6 +157,16 @@ Verifier must enforce: - Revoked PATs must fail auth with `401 API_KEY_REVOKED`. 
- Unrelated active PATs must continue to authenticate after targeted key revocation. +### 6.8 npm-first OpenClaw skill install +- Installer detects npm skill mode via install-time npm config/environment. +- Installer must prepare these artifacts without manual copy steps: + - `SKILL.md` + - `references/*` + - `relay-to-peer.mjs` in workspace skill path and hooks transform path +- Runtime installs must not depend on sibling workspace packages; required skill assets are bundled with the CLI package. +- Re-running install must be idempotent and safe. +- Missing source artifacts must fail with actionable errors. + --- ## 7) Non-functional requirements diff --git a/README.md b/README.md index 21b04f7..321681c 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,263 @@ OpenClaw Gateway (normal /hooks/agent handling) --- +## Agent-to-Agent Communication: Complete Flow + +This section walks through **every step** from zero to two OpenClaw agents exchanging their first message. Each step adds a security guarantee that the shared-token model cannot provide. 
+ +### Overview + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ CLAWDENTITY REGISTRY │ +│ │ +│ Issues identities (AIT) · Publishes revocation list (CRL) │ +│ Validates agent auth · Manages invite-gated onboarding │ +└───────────────┬─────────────────────────────────┬──────────────────────┘ + │ │ + issues AIT + auth issues AIT + auth + │ │ + ┌───────────▼──────────┐ ┌───────────▼──────────┐ + │ AGENT ALICE │ │ AGENT BOB │ + │ (OpenClaw + keys) │ │ (OpenClaw + keys) │ + │ │ │ │ + │ Ed25519 keypair │ │ Ed25519 keypair │ + │ AIT (signed passport)│ │ AIT (signed passport│ + │ Auth tokens │ │ Auth tokens │ + └───────────┬───────────┘ └──────────┬───────────┘ + │ │ + signs every request signs every request + with private key with private key + │ │ + ┌───────────▼──────────┐ ┌───────────▼──────────┐ + │ ALICE'S PROXY │◄─────────│ Bob sends signed │ + │ (Cloudflare Worker) │ HTTP POST│ request to Alice │ + │ │ │ │ + │ Verifies identity │ └──────────────────────┘ + │ Checks revocation │ + │ Enforces allowlist │ + │ Rejects replays │ + │ Rate limits per agent│ + └───────────┬───────────┘ + │ + only verified requests + reach OpenClaw + │ + ┌───────────▼──────────┐ + │ ALICE'S OPENCLAW │ + │ (localhost, private) │ + │ │ + │ Receives message │ + │ Never exposed to │ + │ public internet │ + └───────────────────────┘ +``` + +### Step 1: Human Onboarding (Invite-Gated) + +An admin creates an invite code. A new operator redeems it to get API access. + +``` +Admin Registry + │ │ + │ clawdentity invite create │ + │──────────────────────────────►│ Generates clw_inv_ + │◄──────────────────────────────│ Stores with optional expiry + │ │ + │ Shares invite code │ + │ out-of-band (email, etc.) 
│ + │ │ + +New Operator Registry + │ │ + │ clawdentity invite redeem │ + │──────────────────────────────►│ Creates human account + │◄──────────────────────────────│ Issues API key (shown once) + │ │ + │ Stores API key locally │ +``` + +**Security:** Invite codes are single-use and time-limited. One agent per invite prevents bulk abuse. + +### Step 2: Agent Identity Creation (Challenge-Response) + +The operator creates an agent identity. The private key **never leaves the machine**. + +``` +CLI (operator's machine) Registry + │ │ + │ 1. Generate Ed25519 keypair │ + │ (secret.key stays local) │ + │ │ + │ 2. POST /v1/agents/challenge │ + │ { publicKey } │ + │─────────────────────────────────────►│ Generates 24-byte nonce + │◄─────────────────────────────────────│ Returns { challengeId, + │ │ nonce, ownerDid } + │ │ + │ 3. Sign canonical proof with │ + │ private key (proves ownership) │ + │ │ + │ 4. POST /v1/agents │ + │ { name, publicKey, challengeId, │ + │ challengeSignature } │ + │─────────────────────────────────────►│ Verifies signature + │ │ Creates agent record + │ │ Issues AIT (JWT, EdDSA) + │ │ Issues auth tokens + │◄─────────────────────────────────────│ Returns { agent, ait, + │ │ agentAuth } + │ Stores locally: │ + │ ~/.clawdentity/agents// │ + │ ├── secret.key (private, 0600) │ + │ ├── public.key │ + │ ├── ait.jwt (signed passport) │ + │ ├── identity.json │ + │ └── registry-auth.json │ +``` + +**Security:** Challenge-response proves the operator holds the private key without ever transmitting it. The 5-minute challenge window prevents delayed replay. Each challenge is single-use. 
+ +**What's in the AIT (Agent Identity Token):** + +| Claim | Purpose | +|-------|---------| +| `sub` | Agent DID (`did:claw:agent:`) — unique identity | +| `ownerDid` | Human DID — who owns this agent | +| `cnf.jwk.x` | Agent's public key — for verifying PoP signatures | +| `jti` | Token ID — for revocation tracking | +| `iss` | Registry URL — who vouches for this identity | +| `exp` | Expiry — credential lifetime (1-90 days) | + +### Step 3: Peer Discovery (Out-of-Band Invite) + +Alice creates an invite code for Bob. No secrets are exchanged — only a DID and endpoint. + +``` +Alice's Operator Bob's Operator + │ │ + │ clawdentity openclaw invite create │ + │ → Encodes: { │ + │ did: "did:claw:agent:...", │ + │ proxyUrl: "https://alice-proxy/ │ + │ hooks/agent", │ + │ alias: "bob", │ + │ name: "Bob Agent" │ + │ } │ + │ → Base64url invite code │ + │ │ + │ Shares code out-of-band ─────────────►│ + │ (email, QR, chat, etc.) │ + │ │ + │ │ clawdentity openclaw setup + │ │ bob --invite-code + │ │ + │ │ Stores peer in peers.json: + │ │ { "alice": { + │ │ "did": "did:claw:agent:...", + │ │ "proxyUrl": "https://..." + │ │ }} + │ │ + │ │ Installs relay transform + │ │ Configures OpenClaw hooks +``` + +**Security:** The invite contains only public information (DID + proxy URL). No keys, tokens, or secrets are exchanged. Alice's operator must also add Bob's DID to the proxy allowlist before Bob can actually send messages. + +### Step 4: First Message (Bob → Alice) + +Bob's OpenClaw triggers the relay. Every request is cryptographically signed. + +``` +Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's OpenClaw + │ │ │ │ + │ Hook trigger: │ │ │ + │ { peer: "alice", │ │ │ + │ message: "Hi!" } │ │ │ + │─────────────────────►│ │ │ + │ │ │ │ + │ 1. Load Bob's credentials │ │ + │ (secret.key, ait.jwt) │ │ + │ 2. Look up "alice" in │ │ + │ peers.json → proxy URL │ │ + │ 3. 
Sign HTTP request: │ │ + │ ┌─────────────────────┐ │ │ + │ │ Canonical string: │ │ │ + │ │ POST /hooks/agent │ │ │ + │ │ timestamp: │ │ │ + │ │ nonce: │ │ │ + │ │ body-sha256: │ │ │ + │ │ │ │ │ + │ │ Ed25519.sign(canon, │ │ │ + │ │ secretKey) → proof │ │ │ + │ └─────────────────────┘ │ │ + │ 4. Send signed request: │ │ + │ POST https://alice-proxy/hooks/agent │ + │ Authorization: Claw │ │ + │ X-Claw-Timestamp: │ │ + │ X-Claw-Nonce: │ │ + │ X-Claw-Body-SHA256: │ │ + │ X-Claw-Proof: │ │ + │ X-Claw-Agent-Access: │ │ + │ │─────────────────────►│ │ + │ │ │ │ + │ │ VERIFICATION PIPELINE │ + │ │ ───────────────────── │ + │ │ ① Verify AIT signature │ + │ │ (registry EdDSA keys) │ + │ │ ② Check timestamp skew │ + │ │ (max ±300 seconds) │ + │ │ ③ Verify PoP signature │ + │ │ (Ed25519 from AIT cnf key) │ + │ │ ④ Reject nonce replay │ + │ │ (per-agent nonce cache) │ + │ │ ⑤ Check CRL revocation │ + │ │ (signed list from registry) │ + │ │ ⑥ Enforce allowlist │ + │ │ (is Bob's DID permitted?) │ + │ │ ⑦ Validate agent access token │ + │ │ (POST to registry) │ + │ │ │ │ + │ │ │ ALL CHECKS PASSED │ + │ │ │ │ + │ │ │ Forward to OpenClaw: │ + │ │ │ POST /hooks/agent │ + │ │ │ x-openclaw-token: │ + │ │ │──────────────────────►│ + │ │ │ │ Message + │ │ │◄──────────────────────│ delivered! 
+ │ │◄─────────────────────│ 202 │ + │◄─────────────────────│ │ │ +``` + +### Why This Beats Shared Tokens + +| Property | Shared Webhook Token | Clawdentity | +|----------|---------------------|-------------| +| **Identity** | All callers look the same | Each agent has a unique DID and signed passport | +| **Accountability** | Cannot trace who sent what | Every request proves exactly which agent sent it | +| **Blast radius** | One leak exposes everything | One compromised key only affects that agent | +| **Revocation** | Rotate shared token = break all integrations | Revoke one agent instantly via CRL, others unaffected | +| **Replay protection** | None | Timestamp + nonce + signature on every request | +| **Tamper detection** | None | Body hash + PoP signature = any modification is detectable | +| **Per-caller policy** | Not possible | Allowlist by agent DID, rate limit per agent | +| **Key exposure** | Token must be shared with every caller | Private key never leaves the agent's machine | + +### What Gets Verified (and When It Fails) + +| Check | Failure | HTTP Status | Meaning | +|-------|---------|-------------|---------| +| AIT signature | `PROXY_AUTH_INVALID_AIT` | 401 | Token is forged or tampered | +| Timestamp skew | `PROXY_AUTH_TIMESTAMP_SKEW` | 401 | Request is too old or clock is wrong | +| PoP signature | `PROXY_AUTH_INVALID_PROOF` | 401 | Sender doesn't hold the private key | +| Nonce replay | `PROXY_AUTH_REPLAY` | 401 | Same request was sent twice | +| CRL revocation | `PROXY_AUTH_REVOKED` | 401 | Agent identity has been revoked | +| Allowlist | `PROXY_AUTH_FORBIDDEN` | 403 | Agent is valid but not authorized here | +| Agent access token | `PROXY_AGENT_ACCESS_INVALID` | 401 | Session token expired or revoked | +| Rate limit | `PROXY_RATE_LIMIT_EXCEEDED` | 429 | Too many requests from this agent | + +--- + ## Operator controls on both ends ### Sender side operator (owner/admin) @@ -145,15 +402,33 @@ OpenClaw Gateway (normal /hooks/agent handling) --- 
-## Repo layout (planned MVP) +## Repo layout -This repo is a monorepo: +Nx monorepo with pnpm workspaces: -- `apps/registry` — issues AITs, serves CRL + public keys (Worker config: `apps/registry/wrangler.jsonc`) -- `apps/proxy` — verifies Clawdentity headers then forwards to OpenClaw hooks (Worker config: `apps/proxy/wrangler.jsonc`) -- `apps/cli` — operator workflow (`claw create`, `claw revoke`, `claw share`) -- `packages/sdk` — TS SDK (sign + verify + CRL cache) -- `packages/protocol` — shared types + canonical signing rules +``` +clawdentity/ +├── apps/ +│ ├── registry/ — Identity registry (Cloudflare Worker) +│ │ Issues AITs, serves CRL + public keys +│ │ Worker config: apps/registry/wrangler.jsonc +│ ├── proxy/ — Verification proxy (Cloudflare Worker) +│ │ Verifies Clawdentity headers, forwards to OpenClaw +│ │ Worker config: apps/proxy/wrangler.jsonc +│ ├── cli/ — Operator CLI +│ │ Agent create/revoke, invite, api-key, config +│ └── openclaw-skill/ — OpenClaw skill integration +│ Relay transform for agent-to-agent messaging +├── packages/ +│ ├── protocol/ — Canonical types + signing rules +│ │ AIT claims, DID format, HTTP signing, endpoints +│ └── sdk/ — TypeScript SDK +│ Sign/verify, CRL cache, auth client, crypto +└── Configuration + ├── nx.json — Monorepo task orchestration + ├── pnpm-workspace.yaml + └── tsconfig.base.json +``` --- @@ -223,6 +498,25 @@ This repo is a monorepo: --- +## OpenClaw skill install (npm-first) + +Expected operator flow starts from npm: + +```bash +npm install clawdentity --skill +``` + +When `--skill` mode is detected, installer logic prepares OpenClaw runtime artifacts automatically: +- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` +- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/references/*` +- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` +- `~/.openclaw/hooks/transforms/relay-to-peer.mjs` + +Install is idempotent and logs deterministic per-artifact outcomes 
(`installed`, `updated`, `unchanged`). +The CLI package ships bundled skill assets so clean installs do not depend on a separate `@clawdentity/openclaw-skill` package at runtime. + +--- + ## MVP goals 1. **Create agent identity** (local keypair + registry-issued AIT) diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 6b2db44..d384996 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -7,6 +7,7 @@ ## Command Architecture - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). +- Keep `src/postinstall.ts` as a thin install entrypoint only; it should detect npm `--skill` mode and call shared installer helpers without mutating runtime CLI command wiring. - Implement command groups under `src/commands/*` and register them from `createProgram()`. - Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. @@ -16,6 +17,8 @@ - Prefer `@clawdentity/sdk` helpers (`decodeAIT`) when surfacing agent metadata instead of parsing JWTs manually. - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. - Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. +- npm `--skill` installer behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. 
+- Keep `skill-bundle/openclaw-skill/` in sync with `apps/openclaw-skill` via `pnpm -F @clawdentity/cli run sync:skill-bundle` before build/pack so `postinstall --skill` works in clean installs. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. diff --git a/apps/cli/package.json b/apps/cli/package.json index 51ac08f..c95e4ef 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -14,10 +14,18 @@ "types": "./dist/index.d.ts" } }, + "files": [ + "dist", + "postinstall.mjs", + "skill-bundle" + ], "scripts": { - "build": "tsup", + "build": "pnpm run sync:skill-bundle && tsup", "format": "biome format .", "lint": "biome lint .", + "prepack": "pnpm run build", + "postinstall": "node ./postinstall.mjs", + "sync:skill-bundle": "node ./scripts/sync-skill-bundle.mjs", "test": "vitest run", "typecheck": "tsc --noEmit" }, diff --git a/apps/cli/postinstall.mjs b/apps/cli/postinstall.mjs new file mode 100644 index 0000000..5f051a6 --- /dev/null +++ b/apps/cli/postinstall.mjs @@ -0,0 +1,48 @@ +import { constants } from "node:fs"; +import { access } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { fileURLToPath, pathToFileURL } from "node:url"; + +function parseBooleanFlag(value) { + if (typeof value !== "string") { + return undefined; + } + + const normalized = value.trim().toLowerCase(); + if ( + normalized === "" || + normalized === "1" || + normalized === "true" || + normalized === "yes" + ) { + return true; + } + + if (normalized === "0" || normalized === "false" || normalized === "no") { + return false; + } + + return undefined; +} + +const packageRoot = dirname(fileURLToPath(import.meta.url)); +const bundledPostinstallPath = join(packageRoot, "dist", "postinstall.js"); +const skillRequested = parseBooleanFlag(process.env.npm_config_skill) === true; + +try { + await access(bundledPostinstallPath, constants.R_OK); + await import(pathToFileURL(bundledPostinstallPath).href); +} catch (error) { + if (error && 
typeof error === "object" && error.code === "ENOENT") { + if (skillRequested) { + process.stderr.write( + `[clawdentity] skill install failed: build artifact not found at ${bundledPostinstallPath}\n`, + ); + process.exitCode = 1; + } + } else { + const message = error instanceof Error ? error.message : String(error); + process.stderr.write(`[clawdentity] postinstall failed: ${message}\n`); + process.exitCode = 1; + } +} diff --git a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md new file mode 100644 index 0000000..5ed7530 --- /dev/null +++ b/apps/cli/scripts/AGENTS.md @@ -0,0 +1,10 @@ +# AGENTS.md (apps/cli/scripts) + +## Purpose +- Keep CLI helper scripts deterministic and safe for release packaging. + +## Rules +- `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. +- Scripts must fail with actionable errors when required source artifacts are missing. +- Keep script output concise and stable for CI/release logs. +- Do not add install-time network fetches to packaging scripts. 
diff --git a/apps/cli/scripts/sync-skill-bundle.mjs b/apps/cli/scripts/sync-skill-bundle.mjs new file mode 100644 index 0000000..72789c1 --- /dev/null +++ b/apps/cli/scripts/sync-skill-bundle.mjs @@ -0,0 +1,67 @@ +import { constants } from "node:fs"; +import { access, cp, mkdir, readFile, rm, writeFile } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; + +const scriptDir = dirname(fileURLToPath(import.meta.url)); +const cliRoot = join(scriptDir, ".."); +const skillRoot = join(cliRoot, "..", "openclaw-skill"); +const sourceSkillDirectory = join(skillRoot, "skill"); +const sourceRelayModule = join(skillRoot, "dist", "relay-to-peer.mjs"); +const targetSkillRoot = join(cliRoot, "skill-bundle", "openclaw-skill"); +const targetRelayModule = join(targetSkillRoot, "dist", "relay-to-peer.mjs"); + +async function assertReadable(path, label) { + try { + await access(path, constants.R_OK); + } catch { + throw new Error( + `[sync-skill-bundle] Missing required ${label} at ${path}. Build @clawdentity/openclaw-skill first.`, + ); + } +} + +async function tryRead(path) { + try { + return await readFile(path); + } catch { + return undefined; + } +} + +async function main() { + await assertReadable(sourceSkillDirectory, "skill directory"); + + const sourceRelayContent = await tryRead(sourceRelayModule); + const bundledRelayContent = await tryRead(targetRelayModule); + const relayModuleContent = sourceRelayContent ?? bundledRelayContent; + + if (relayModuleContent === undefined) { + throw new Error( + `[sync-skill-bundle] Missing required relay module at ${sourceRelayModule}. 
Build @clawdentity/openclaw-skill first.`, + ); + } + + await rm(targetSkillRoot, { recursive: true, force: true }); + await mkdir(join(targetSkillRoot, "dist"), { recursive: true }); + + await cp(sourceSkillDirectory, join(targetSkillRoot, "skill"), { + recursive: true, + }); + await writeFile(targetRelayModule, relayModuleContent); + + process.stdout.write( + `[sync-skill-bundle] Bundled skill assets into ${targetSkillRoot}\n`, + ); + if (sourceRelayContent === undefined) { + process.stdout.write( + "[sync-skill-bundle] Source relay build missing; reused existing bundled relay artifact.\n", + ); + } +} + +main().catch((error) => { + const message = error instanceof Error ? error.message : String(error); + process.stderr.write(`${message}\n`); + process.exitCode = 1; +}); diff --git a/apps/cli/skill-bundle/AGENTS.md b/apps/cli/skill-bundle/AGENTS.md new file mode 100644 index 0000000..34eafac --- /dev/null +++ b/apps/cli/skill-bundle/AGENTS.md @@ -0,0 +1,12 @@ +# AGENTS.md (apps/cli/skill-bundle) + +## Purpose +- Store bundled skill artifacts shipped with the CLI package for npm `--skill` postinstall. + +## Rules +- Treat this folder as generated release input; do not hand-edit bundled files. +- Regenerate by running `pnpm -F @clawdentity/cli run sync:skill-bundle` after changes in `apps/openclaw-skill`. 
+- Required bundled files:
+  - `openclaw-skill/skill/SKILL.md`
+  - `openclaw-skill/skill/references/*`
+  - `openclaw-skill/dist/relay-to-peer.mjs`
diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md
new file mode 100644
index 0000000..8e09b22
--- /dev/null
+++ b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md
@@ -0,0 +1,126 @@
+---
+name: clawdentity_openclaw_relay
+description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication with Clawdentity PoP verification.
+version: 0.1.0
+---
+
+# Clawdentity OpenClaw Relay Skill
+
+This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through Clawdentity identity (`Authorization: Claw <ait>` + PoP headers) using a single invite code.
+
+## Trigger Conditions
+
+Use this skill when any of the following are requested:
+- Install relay support for OpenClaw peer communication.
+- Complete first-time setup from an invite code.
+- Repair broken relay setup after config drift.
+- Verify invite-code onboarding and peer mapping.
+ +## Filesystem Truth (must be used exactly) + +### OpenClaw state files +- OpenClaw state root (default): `~/.openclaw` +- OpenClaw config: `~/.openclaw/openclaw.json` +- Transform target path: `~/.openclaw/hooks/transforms/relay-to-peer.mjs` +- Workspace skill location: `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` +- Default transform source expected by CLI setup: + `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` + +### Clawdentity identity files +- Clawdentity root: `~/.clawdentity` +- Agent config: `~/.clawdentity/config.json` +- Agent identity directory: `~/.clawdentity/agents//` +- Agent private key: `~/.clawdentity/agents//secret.key` +- Agent AIT token: `~/.clawdentity/agents//ait.jwt` +- Peer map: `~/.clawdentity/peers.json` +- Local selected agent marker: `~/.clawdentity/openclaw-agent-name` +- Relay runtime config: `~/.clawdentity/openclaw-relay.json` + +## Operator Split + +### Admin/operator side (only action required) +Create invite code: + +`clawdentity openclaw invite --did --proxy-url --peer-alias ` + +Share the invite code string with the human who owns the target agent. + +## Human + Agent Flow (strict user-style) + +Follow this order. Do not skip steps. + +## Tool Execution Contract (Agent) + +This skill is operational. The agent must execute side effects via tools. +- Use the `exec` tool for npm/CLI/filesystem actions needed by onboarding. +- Do not ask the human to run shell commands that the agent can run itself. +- Ask the human only for missing secrets/inputs (for example API key or invite code). +- Report final status with concrete outputs (local DID, peer alias, written paths). + +1. Confirm prerequisites with the human. +- Confirm `clawdentity` CLI is installed and runnable. +- Confirm API key exists for this agent (if missing, ask the human for it). +- Confirm OpenClaw state directory path if non-default. +- Confirm OpenClaw base URL if local endpoint is non-default. + +2. 
Confirm skill artifact exists in workspace skills directory. +- Ensure `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` exists. +- If missing, install/update skill package contents before setup. + +3. Configure local Clawdentity identity for this OpenClaw agent. +- Run `clawdentity config init`. +- If needed, ask the human for API key and run `clawdentity config set apiKey `. +- Create identity: `clawdentity agent create --framework openclaw`. +- Verify identity: `clawdentity agent inspect `. + +4. Ask the human for invite code. +- Prompt exactly for one invite code string. +- Do not ask for DID/proxy URL when invite code is present. + +5. Run automated setup from invite code. +- Execute: + `clawdentity openclaw setup --invite-code ` +- Use `--openclaw-dir ` when state directory is non-default. +- Use `--openclaw-base-url ` when local OpenClaw HTTP endpoint is non-default. +- Use `--peer-alias ` only when alias override is required. + +6. Verify setup outputs. +- Confirm setup reports: + - peer alias + - peer DID + - updated OpenClaw config path + - installed transform path + - OpenClaw base URL + - relay runtime config path +- Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. + +7. Validate with user-style relay test. +- Human asks Alpha to send a request with `peer: "beta"`. +- Agent relays with Claw + PoP headers. +- Peer proxy verifies and forwards to peer OpenClaw. +- Verify success logs on both sides. + +## Required question policy + +Ask the human only when required inputs are missing: +- Missing Clawdentity API key. +- Unclear OpenClaw state directory. +- Non-default OpenClaw base URL. +- Missing invite code. +- Local registry/proxy network location is unknown or unreachable from agent runtime. + +## Failure Handling + +If setup or relay fails: +- Report precise missing file/path/value. +- Fix only the failing config/input. +- Re-run the same user-style flow from step 5 onward. 
+
+## Bundled Resources
+
+### References
+| File | Purpose |
+|------|---------|
+| `references/clawdentity-protocol.md` | Header format, peer map schema, and relay verification details |
+
+Directive: read the reference file before troubleshooting protocol or signature failures.
diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md
new file mode 100644
index 0000000..a2df2e1
--- /dev/null
+++ b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md
@@ -0,0 +1,130 @@
+# Clawdentity Relay Protocol Reference
+
+## Purpose
+
+Define the exact runtime contract used by `relay-to-peer.mjs`.
+
+## Filesystem Paths
+
+### OpenClaw files
+- `~/.openclaw/openclaw.json`
+- `~/.openclaw/hooks/transforms/relay-to-peer.mjs`
+- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md`
+
+### Clawdentity files
+- `~/.clawdentity/config.json`
+- `~/.clawdentity/agents/<agent-name>/secret.key`
+- `~/.clawdentity/agents/<agent-name>/ait.jwt`
+- `~/.clawdentity/peers.json`
+- `~/.clawdentity/openclaw-agent-name`
+- `~/.clawdentity/openclaw-relay.json`
+
+## Invite Code Contract
+
+Invite codes are prefixed with `clawd1_` and contain base64url JSON:
+
+```json
+{
+  "v": 1,
+  "issuedAt": "2026-02-15T20:00:00.000Z",
+  "did": "did:claw:agent:01H...",
+  "proxyUrl": "https://beta-proxy.example.com/hooks/agent",
+  "alias": "beta",
+  "name": "Beta Agent"
+}
+```
+
+Rules:
+- `v` must be `1`.
+- `issuedAt` is ISO-8601 UTC timestamp.
+- `did` must be an agent DID.
+- `proxyUrl` must be absolute `http` or `https`.
+- `alias` is optional but preferred for zero-question setup.
+ +## Peer Map Schema + +`~/.clawdentity/peers.json` must be valid JSON: + +```json +{ + "peers": { + "beta": { + "did": "did:claw:agent:01H...", + "proxyUrl": "https://beta-proxy.example.com/hooks/agent", + "name": "Beta Agent" + } + } +} +``` + +Rules: +- peer alias key uses `[a-zA-Z0-9._-]` +- `did` required and must begin with `did:` +- `proxyUrl` required and must be a valid absolute URL +- `name` optional + +## Relay Input Contract + +The OpenClaw transform reads `ctx.payload`. + +- If `payload.peer` is absent: + - return payload unchanged + - do not relay +- If `payload.peer` exists: + - resolve peer from `peers.json` + - remove `peer` from forwarded body + - send JSON POST to `peer.proxyUrl` + - return `null` to skip local handling + +## Relay Agent Selection Contract + +Relay resolves local agent name in this order: +1. transform option `agentName` +2. `CLAWDENTITY_AGENT_NAME` +3. `~/.clawdentity/openclaw-agent-name` +4. single local agent fallback from `~/.clawdentity/agents/` + +## Local OpenClaw Base URL Contract + +`~/.clawdentity/openclaw-relay.json` stores the OpenClaw upstream base URL used by local proxy runtime fallback: + +```json +{ + "openclawBaseUrl": "http://127.0.0.1:18789", + "updatedAt": "2026-02-15T20:00:00.000Z" +} +``` + +Rules: +- `openclawBaseUrl` must be absolute `http` or `https`. +- `updatedAt` is ISO-8601 UTC timestamp. +- Proxy runtime precedence is: `OPENCLAW_BASE_URL` env first, then `openclaw-relay.json`, then built-in default. 
+
+## Outbound Auth Contract
+
+Headers sent to peer proxy:
+- `Authorization: Claw <ait>`
+- `Content-Type: application/json`
+- `X-Claw-Timestamp`
+- `X-Claw-Nonce`
+- `X-Claw-Body-SHA256`
+- `X-Claw-Proof`
+
+Signing inputs:
+- HTTP method: `POST`
+- path+query from peer URL
+- unix seconds timestamp
+- random nonce
+- outbound JSON body bytes
+- agent secret key from `secret.key`
+
+## Error Conditions
+
+Relay fails when:
+- no selected local agent can be resolved
+- peer alias missing from config
+- `secret.key` or `ait.jwt` missing/empty/invalid
+- peer returns non-2xx
+- peer network request fails
+
+Error messages should include file/path context but never print secret content.
diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md
index 82cdb5b..b50efe5 100644
--- a/apps/cli/src/AGENTS.md
+++ b/apps/cli/src/AGENTS.md
@@ -17,6 +17,13 @@
 - Agent auth refresh state is stored per-agent at `~/.clawdentity/agents/<agent-name>/registry-auth.json` and must be written with secure file permissions.
 - `agent auth refresh` must use `Authorization: Claw <ait>` + PoP headers from local agent keys and must not require PAT config.
 
+## Skill Install Mode
+- Keep npm skill-install logic in shared helpers (`install-skill-mode.ts`) and invoke it from `postinstall.ts`; do not embed installer logic inside command factories.
+- Detect install mode via npm environment (`npm_config_skill` and npm argv fallback) so non-skill installs remain unaffected.
+- Resolve skill artifacts in this order: explicit override, bundled `skill-bundle/openclaw-skill`, installed `@clawdentity/openclaw-skill`, then workspace fallback.
+- Skill install must copy `SKILL.md`, `references/*`, and `relay-to-peer.mjs` into OpenClaw runtime paths under `~/.openclaw` and must fail with actionable errors when source artifacts are missing.
+- Installer logs must be deterministic and explicit (`installed`, `updated`, `unchanged`) so E2E skill tests can assert outcomes reliably.
+ ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. - Resolve registry material from configured `registryUrl` only (`/.well-known/claw-keys.json`, `/v1/crl`). diff --git a/apps/cli/src/install-skill-mode.test.ts b/apps/cli/src/install-skill-mode.test.ts new file mode 100644 index 0000000..a1b44eb --- /dev/null +++ b/apps/cli/src/install-skill-mode.test.ts @@ -0,0 +1,256 @@ +import { + mkdirSync, + mkdtempSync, + readFileSync, + rmSync, + writeFileSync, +} from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import type { SkillInstallError } from "./install-skill-mode.js"; +import { + installOpenclawSkillArtifacts, + isSkillInstallRequested, + runNpmSkillInstall, +} from "./install-skill-mode.js"; + +type SkillSandbox = { + cleanup: () => void; + homeDir: string; + openclawDir: string; + skillPackageRoot: string; + referencesRoot: string; +}; + +function createSkillSandbox(): SkillSandbox { + const root = mkdtempSync(join(tmpdir(), "clawdentity-skill-install-")); + const homeDir = join(root, "home"); + const openclawDir = join(homeDir, ".openclaw"); + const skillPackageRoot = join(root, "openclaw-skill-package"); + const skillRoot = join(skillPackageRoot, "skill"); + const referencesRoot = join(skillRoot, "references"); + + mkdirSync(referencesRoot, { recursive: true }); + mkdirSync(join(skillPackageRoot, "dist"), { recursive: true }); + mkdirSync(openclawDir, { recursive: true }); + + writeFileSync( + join(skillRoot, "SKILL.md"), + "# Clawdentity OpenClaw Relay Skill\n", + "utf8", + ); + writeFileSync( + join(referencesRoot, "clawdentity-protocol.md"), + "Protocol reference\n", + "utf8", + ); + writeFileSync( + join(skillPackageRoot, "dist", "relay-to-peer.mjs"), + "export default async function relayToPeer(){ return null; }\n", + "utf8", + ); + + return { + cleanup: () => rmSync(root, { recursive: true, 
force: true }), + homeDir, + openclawDir, + skillPackageRoot, + referencesRoot, + }; +} + +describe("install skill mode detection", () => { + it("detects --skill from npm_config_skill env", () => { + expect(isSkillInstallRequested({ npm_config_skill: "true" })).toBe(true); + expect(isSkillInstallRequested({ npm_config_skill: "1" })).toBe(true); + expect(isSkillInstallRequested({ npm_config_skill: "false" })).toBe(false); + }); + + it("detects --skill from npm_config_argv", () => { + expect( + isSkillInstallRequested({ + npm_config_argv: JSON.stringify({ + original: ["install", "clawdentity", "--skill"], + }), + }), + ).toBe(true); + + expect( + isSkillInstallRequested({ + npm_config_argv: JSON.stringify({ + original: ["install", "clawdentity"], + }), + }), + ).toBe(false); + }); +}); + +describe("installOpenclawSkillArtifacts", () => { + it("installs skill artifacts and remains idempotent on rerun", async () => { + const sandbox = createSkillSandbox(); + + try { + const firstRun = await installOpenclawSkillArtifacts({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + skillPackageRoot: sandbox.skillPackageRoot, + }); + + expect( + firstRun.records.some((record) => record.action === "installed"), + ).toBe(true); + + const skillPath = join( + sandbox.openclawDir, + "workspace", + "skills", + "clawdentity-openclaw-relay", + "SKILL.md", + ); + const workspaceRelayPath = join( + sandbox.openclawDir, + "workspace", + "skills", + "clawdentity-openclaw-relay", + "relay-to-peer.mjs", + ); + const hooksRelayPath = join( + sandbox.openclawDir, + "hooks", + "transforms", + "relay-to-peer.mjs", + ); + const referencePath = join( + sandbox.openclawDir, + "workspace", + "skills", + "clawdentity-openclaw-relay", + "references", + "clawdentity-protocol.md", + ); + + expect(readFileSync(skillPath, "utf8")).toContain("Clawdentity"); + expect(readFileSync(workspaceRelayPath, "utf8")).toContain("relayToPeer"); + expect(readFileSync(hooksRelayPath, 
"utf8")).toContain("relayToPeer"); + expect(readFileSync(referencePath, "utf8")).toContain("Protocol"); + + const secondRun = await installOpenclawSkillArtifacts({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + skillPackageRoot: sandbox.skillPackageRoot, + }); + + expect( + secondRun.records.every((record) => record.action === "unchanged"), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("reports updated artifacts when source content changes", async () => { + const sandbox = createSkillSandbox(); + + try { + await installOpenclawSkillArtifacts({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + skillPackageRoot: sandbox.skillPackageRoot, + }); + + writeFileSync( + join(sandbox.referencesRoot, "clawdentity-protocol.md"), + "Protocol reference v2\n", + "utf8", + ); + + const rerun = await installOpenclawSkillArtifacts({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + skillPackageRoot: sandbox.skillPackageRoot, + }); + + expect(rerun.records.some((record) => record.action === "updated")).toBe( + true, + ); + } finally { + sandbox.cleanup(); + } + }); + + it("fails with actionable error when required artifact is missing", async () => { + const sandbox = createSkillSandbox(); + rmSync(join(sandbox.skillPackageRoot, "dist", "relay-to-peer.mjs"), { + force: true, + }); + + try { + await expect( + installOpenclawSkillArtifacts({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + skillPackageRoot: sandbox.skillPackageRoot, + }), + ).rejects.toMatchObject({ + code: "CLI_SKILL_ARTIFACT_MISSING", + } satisfies Partial); + } finally { + sandbox.cleanup(); + } + }); +}); + +describe("runNpmSkillInstall", () => { + it("skips install when --skill is not set", async () => { + const result = await runNpmSkillInstall({ + env: {}, + writeStdout: () => undefined, + writeStderr: () => undefined, + }); + + expect(result.skipped).toBe(true); + }); + + it("installs bundled skill artifacts when --skill 
is set", async () => { + const root = mkdtempSync(join(tmpdir(), "clawdentity-skill-bundle-")); + const openclawDir = join(root, ".openclaw"); + const stdout: string[] = []; + const stderr: string[] = []; + + try { + const result = await runNpmSkillInstall({ + env: { npm_config_skill: "true" }, + homeDir: root, + openclawDir, + writeStdout: (line) => stdout.push(line), + writeStderr: (line) => stderr.push(line), + }); + + expect(result.skipped).toBe(false); + expect(stderr).toHaveLength(0); + expect(stdout.some((line) => line.includes("skill install mode"))).toBe( + true, + ); + + const skillPath = join( + openclawDir, + "workspace", + "skills", + "clawdentity-openclaw-relay", + "SKILL.md", + ); + const hooksRelayPath = join( + openclawDir, + "hooks", + "transforms", + "relay-to-peer.mjs", + ); + + expect(readFileSync(skillPath, "utf8")).toContain("OpenClaw Relay"); + expect(readFileSync(hooksRelayPath, "utf8")).toContain("relay-to-peer"); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/apps/cli/src/install-skill-mode.ts b/apps/cli/src/install-skill-mode.ts new file mode 100644 index 0000000..ea0bf42 --- /dev/null +++ b/apps/cli/src/install-skill-mode.ts @@ -0,0 +1,488 @@ +import { constants, existsSync } from "node:fs"; +import { access, copyFile, mkdir, readdir, readFile } from "node:fs/promises"; +import { createRequire } from "node:module"; +import { homedir } from "node:os"; +import { dirname, join, relative } from "node:path"; +import { fileURLToPath } from "node:url"; +import { writeStderrLine, writeStdoutLine } from "./io.js"; + +const OPENCLAW_DIR_NAME = ".openclaw"; +const SKILL_PACKAGE_NAME = "@clawdentity/openclaw-skill"; +const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; +const RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; + +type InstallAction = "installed" | "updated" | "unchanged"; + +export type SkillInstallRecord = { + action: InstallAction; + sourcePath: string; + targetPath: string; +}; + +export 
type SkillInstallResult = { + homeDir: string; + openclawDir: string; + skillPackageRoot: string; + targetSkillDirectory: string; + records: SkillInstallRecord[]; +}; + +export type RunNpmSkillInstallResult = + | { + skipped: true; + } + | ({ + skipped: false; + } & SkillInstallResult); + +type SkillInstallOptions = { + homeDir?: string; + openclawDir?: string; + skillPackageRoot?: string; + env?: NodeJS.ProcessEnv; +}; + +type RunNpmSkillInstallOptions = SkillInstallOptions & { + env?: NodeJS.ProcessEnv; + writeStdout?: (line: string) => void; + writeStderr?: (line: string) => void; +}; + +type SkillInstallArtifact = { + sourcePath: string; + targetPath: string; +}; + +type SkillInstallErrorCode = + | "CLI_SKILL_PACKAGE_NOT_FOUND" + | "CLI_SKILL_ARTIFACT_MISSING" + | "CLI_SKILL_REFERENCE_DIR_EMPTY"; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export class SkillInstallError extends Error { + public readonly code: SkillInstallErrorCode; + public readonly details: Record; + + public constructor(input: { + code: SkillInstallErrorCode; + message: string; + details?: Record; + }) { + super(input.message); + this.name = "SkillInstallError"; + this.code = input.code; + this.details = input.details ?? {}; + } +} + +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? 
error.code : undefined; +} + +function parseBooleanFlag(value: string | undefined): boolean | undefined { + if (value === undefined) { + return undefined; + } + + const normalized = value.trim().toLowerCase(); + if ( + normalized === "" || + normalized === "1" || + normalized === "true" || + normalized === "yes" + ) { + return true; + } + + if (normalized === "0" || normalized === "false" || normalized === "no") { + return false; + } + + return undefined; +} + +function hasSkillFlagInNpmArgv(rawArgv: string | undefined): boolean { + if (!rawArgv || rawArgv.trim().length === 0) { + return false; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawArgv); + } catch { + return false; + } + + if (!isRecord(parsed)) { + return false; + } + + const original = parsed.original; + if (!Array.isArray(original)) { + return false; + } + + return original.some((entry) => entry === "--skill"); +} + +export function isSkillInstallRequested(env: NodeJS.ProcessEnv = process.env) { + const envFlag = parseBooleanFlag(env.npm_config_skill); + if (envFlag !== undefined) { + return envFlag; + } + + return hasSkillFlagInNpmArgv(env.npm_config_argv); +} + +function resolveHomeDir(inputHomeDir?: string): string { + if (typeof inputHomeDir === "string" && inputHomeDir.trim().length > 0) { + return inputHomeDir.trim(); + } + + return homedir(); +} + +function resolveOpenclawDir( + homeDir: string, + inputOpenclawDir?: string, +): string { + if ( + typeof inputOpenclawDir === "string" && + inputOpenclawDir.trim().length > 0 + ) { + return inputOpenclawDir.trim(); + } + + return join(homeDir, OPENCLAW_DIR_NAME); +} + +function resolveSkillPackageRoot(input: { + skillPackageRoot?: string; + env: NodeJS.ProcessEnv; +}): string { + if ( + typeof input.skillPackageRoot === "string" && + input.skillPackageRoot.trim().length > 0 + ) { + return input.skillPackageRoot.trim(); + } + + const overriddenRoot = input.env.CLAWDENTITY_SKILL_PACKAGE_ROOT; + if (typeof overriddenRoot === "string" && 
overriddenRoot.trim().length > 0) { + return overriddenRoot.trim(); + } + + const bundledSkillRoot = join( + dirname(fileURLToPath(import.meta.url)), + "..", + "skill-bundle", + "openclaw-skill", + ); + if (existsSync(bundledSkillRoot)) { + return bundledSkillRoot; + } + + const require = createRequire(import.meta.url); + + let packageJsonPath: string; + try { + packageJsonPath = require.resolve(`${SKILL_PACKAGE_NAME}/package.json`); + return dirname(packageJsonPath); + } catch { + const workspaceFallbackRoot = join( + dirname(fileURLToPath(import.meta.url)), + "..", + "..", + "openclaw-skill", + ); + if (existsSync(workspaceFallbackRoot)) { + return workspaceFallbackRoot; + } + + throw new SkillInstallError({ + code: "CLI_SKILL_PACKAGE_NOT_FOUND", + message: + "Skill artifacts are unavailable. Set CLAWDENTITY_SKILL_PACKAGE_ROOT or provide bundled skill assets before using --skill mode.", + details: { + packageName: SKILL_PACKAGE_NAME, + bundledSkillRoot, + workspaceFallbackRoot, + }, + }); + } +} + +async function assertReadableFile( + filePath: string, + details: Record, +): Promise { + try { + await access(filePath, constants.R_OK); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw new SkillInstallError({ + code: "CLI_SKILL_ARTIFACT_MISSING", + message: "Required skill artifact is missing", + details: { + ...details, + sourcePath: filePath, + }, + }); + } + + throw error; + } +} + +async function listFilesRecursively(directoryPath: string): Promise { + const entries = await readdir(directoryPath, { withFileTypes: true }); + const files: string[] = []; + + for (const entry of entries.sort((left, right) => + left.name.localeCompare(right.name), + )) { + const entryPath = join(directoryPath, entry.name); + if (entry.isDirectory()) { + files.push(...(await listFilesRecursively(entryPath))); + continue; + } + + if (entry.isFile()) { + files.push(entryPath); + } + } + + return files; +} + +async function resolveArtifacts(input: { + 
skillPackageRoot: string; + openclawDir: string; +}): Promise { + const skillRoot = join(input.skillPackageRoot, "skill"); + const skillDocSource = join(skillRoot, "SKILL.md"); + const referencesRoot = join(skillRoot, "references"); + const relaySource = join( + input.skillPackageRoot, + "dist", + RELAY_MODULE_FILE_NAME, + ); + + await assertReadableFile(skillDocSource, { + artifact: "SKILL.md", + }); + await assertReadableFile(relaySource, { + artifact: RELAY_MODULE_FILE_NAME, + }); + + let referenceFiles: string[]; + try { + referenceFiles = await listFilesRecursively(referencesRoot); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw new SkillInstallError({ + code: "CLI_SKILL_ARTIFACT_MISSING", + message: "Required skill references directory is missing", + details: { + sourcePath: referencesRoot, + artifact: "references", + }, + }); + } + + throw error; + } + + if (referenceFiles.length === 0) { + throw new SkillInstallError({ + code: "CLI_SKILL_REFERENCE_DIR_EMPTY", + message: "Required skill references directory is empty", + details: { + sourcePath: referencesRoot, + }, + }); + } + + const targetSkillRoot = join( + input.openclawDir, + "workspace", + "skills", + SKILL_DIR_NAME, + ); + + const artifacts: SkillInstallArtifact[] = [ + { + sourcePath: skillDocSource, + targetPath: join(targetSkillRoot, "SKILL.md"), + }, + { + sourcePath: relaySource, + targetPath: join(targetSkillRoot, RELAY_MODULE_FILE_NAME), + }, + { + sourcePath: relaySource, + targetPath: join( + input.openclawDir, + "hooks", + "transforms", + RELAY_MODULE_FILE_NAME, + ), + }, + ]; + + for (const referenceFile of referenceFiles) { + const relativePath = relative(referencesRoot, referenceFile); + artifacts.push({ + sourcePath: referenceFile, + targetPath: join(targetSkillRoot, "references", relativePath), + }); + } + + return artifacts.sort((left, right) => + left.targetPath.localeCompare(right.targetPath), + ); +} + +async function copyArtifact(input: { + sourcePath: string; 
+ targetPath: string; +}): Promise { + const sourceContent = await readFile(input.sourcePath); + let existingContent: Buffer | undefined; + + try { + existingContent = await readFile(input.targetPath); + } catch (error) { + if (getErrorCode(error) !== "ENOENT") { + throw error; + } + } + + if (existingContent !== undefined && sourceContent.equals(existingContent)) { + return "unchanged"; + } + + await mkdir(dirname(input.targetPath), { recursive: true }); + await copyFile(input.sourcePath, input.targetPath); + + if (existingContent !== undefined) { + return "updated"; + } + + return "installed"; +} + +export async function installOpenclawSkillArtifacts( + options: SkillInstallOptions = {}, +): Promise { + const env = options.env ?? process.env; + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(homeDir, options.openclawDir); + const skillPackageRoot = resolveSkillPackageRoot({ + skillPackageRoot: options.skillPackageRoot, + env, + }); + const artifacts = await resolveArtifacts({ + skillPackageRoot, + openclawDir, + }); + const records: SkillInstallRecord[] = []; + + for (const artifact of artifacts) { + const action = await copyArtifact(artifact); + records.push({ + action, + sourcePath: artifact.sourcePath, + targetPath: artifact.targetPath, + }); + } + + return { + homeDir, + openclawDir, + skillPackageRoot, + targetSkillDirectory: join( + openclawDir, + "workspace", + "skills", + SKILL_DIR_NAME, + ), + records, + }; +} + +function toSummaryCounts(records: SkillInstallRecord[]): string { + const installed = records.filter((record) => record.action === "installed"); + const updated = records.filter((record) => record.action === "updated"); + const unchanged = records.filter((record) => record.action === "unchanged"); + + return `installed=${installed.length} updated=${updated.length} unchanged=${unchanged.length}`; +} + +export function formatSkillInstallError(error: unknown): string { + if (error instanceof 
SkillInstallError) { + const details = Object.entries(error.details) + .map(([key, value]) => `${key}=${value}`) + .join(" "); + + if (details.length === 0) { + return `${error.code}: ${error.message}`; + } + + return `${error.code}: ${error.message} (${details})`; + } + + if (error instanceof Error) { + return error.message; + } + + return String(error); +} + +export async function runNpmSkillInstall( + options: RunNpmSkillInstallOptions = {}, +): Promise { + const env = options.env ?? process.env; + const writeStdout = options.writeStdout ?? writeStdoutLine; + const writeStderr = options.writeStderr ?? writeStderrLine; + + if (!isSkillInstallRequested(env)) { + return { skipped: true }; + } + + writeStdout("[clawdentity] skill install mode detected (--skill)"); + + try { + const result = await installOpenclawSkillArtifacts({ + env, + homeDir: options.homeDir, + openclawDir: options.openclawDir, + skillPackageRoot: options.skillPackageRoot, + }); + + for (const record of result.records) { + writeStdout( + `[clawdentity] ${record.action}: ${record.targetPath} (source: ${record.sourcePath})`, + ); + } + + writeStdout(`[clawdentity] ${toSummaryCounts(result.records)}`); + + return { + skipped: false, + ...result, + }; + } catch (error) { + writeStderr( + `[clawdentity] skill install failed: ${formatSkillInstallError(error)}`, + ); + throw error; + } +} diff --git a/apps/cli/src/postinstall.ts b/apps/cli/src/postinstall.ts new file mode 100644 index 0000000..57c44c5 --- /dev/null +++ b/apps/cli/src/postinstall.ts @@ -0,0 +1,5 @@ +import { runNpmSkillInstall } from "./install-skill-mode.js"; + +runNpmSkillInstall().catch(() => { + process.exitCode = 1; +}); diff --git a/apps/cli/tsup.config.ts b/apps/cli/tsup.config.ts index 364351c..9d03333 100644 --- a/apps/cli/tsup.config.ts +++ b/apps/cli/tsup.config.ts @@ -1,7 +1,7 @@ import { defineConfig } from "tsup"; export default defineConfig({ - entry: ["src/index.ts", "src/bin.ts"], + entry: ["src/index.ts", "src/bin.ts", 
"src/postinstall.ts"], format: ["esm"], bundle: true, splitting: false, diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 59d8833..42e7ef5 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -46,3 +46,8 @@ - Do not edit relay hooks, peer config, or selected-agent files manually during validation. - After skill setup, verify these artifacts exist and are agent-generated: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.clawdentity/openclaw-relay.json`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`. - For reruns after failures, clear skill-generated artifacts first; only perform full identity reset (`~/.clawdentity/agents//`) when identity reprovisioning is needed. + +## Skill Runtime Behavior +- Keep onboarding prompts input-focused (invite/API key/URLs) and let the skill decide command execution. +- The agent should run required npm/CLI/filesystem operations via tools and only ask the human for missing inputs. +- Report deterministic completion outputs: local DID, peer alias, and generated filesystem paths. diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index 90ffc24..8e09b22 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -49,6 +49,14 @@ Share the invite code string with the human who owns the target agent. Follow this order. Do not skip steps. +## Tool Execution Contract (Agent) + +This skill is operational. The agent must execute side effects via tools. +- Use the `exec` tool for npm/CLI/filesystem actions needed by onboarding. +- Do not ask the human to run shell commands that the agent can run itself. +- Ask the human only for missing secrets/inputs (for example API key or invite code). +- Report final status with concrete outputs (local DID, peer alias, written paths). + 1. Confirm prerequisites with the human. - Confirm `clawdentity` CLI is installed and runnable. 
- Confirm API key exists for this agent (if missing, ask the human for it). @@ -99,6 +107,7 @@ Ask the human only when required inputs are missing: - Unclear OpenClaw state directory. - Non-default OpenClaw base URL. - Missing invite code. +- Local registry/proxy network location is unknown or unreachable from agent runtime. ## Failure Handling From 7ebd637a9242483fbb9d8da8de93182a8e550cd5 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Mon, 16 Feb 2026 22:09:55 +0530 Subject: [PATCH 067/190] fix(cli): track bundled relay artifact for clean CI installs --- .gitignore | 2 + apps/cli/AGENTS.md | 2 + .../openclaw-skill/dist/relay-to-peer.mjs | 16177 ++++++++++++++++ 3 files changed, 16181 insertions(+) create mode 100644 apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs diff --git a/.gitignore b/.gitignore index 5118bd4..6e04fb7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,7 @@ node_modules/ dist/ +!apps/cli/skill-bundle/openclaw-skill/dist/ +!apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs .nx/ nx nx.bat diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index d384996..68762a5 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -19,6 +19,8 @@ - Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. - npm `--skill` installer behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. - Keep `skill-bundle/openclaw-skill/` in sync with `apps/openclaw-skill` via `pnpm -F @clawdentity/cli run sync:skill-bundle` before build/pack so `postinstall --skill` works in clean installs. +- Keep `skill-bundle/openclaw-skill/dist/relay-to-peer.mjs` tracked in git so clean-checkout tests and packaged installs have the required relay artifact before workspace builds run. 
+- When running the `@clawdentity/cli` test suite (`pnpm -F @clawdentity/cli test`), build `@clawdentity/openclaw-skill` and resync the skill bundle first so `relay-to-peer.mjs` exists on clean checkout and tests pass with deterministic artifacts. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. diff --git a/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs b/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs new file mode 100644 index 0000000..b35a138 --- /dev/null +++ b/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs @@ -0,0 +1,16177 @@ +var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; + +// src/transforms/relay-to-peer.ts +import { randomBytes } from "crypto"; +import { readdir, readFile as readFile3 } from "fs/promises"; +import { homedir as homedir2 } from "os"; +import { join as join4 } from "path"; + +// ../../packages/protocol/src/agent-registration-proof.ts +var AGENT_REGISTRATION_PROOF_VERSION = "clawdentity.register.v1"; +var AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE = `${AGENT_REGISTRATION_PROOF_VERSION} +challengeId:{challengeId} +nonce:{nonce} +ownerDid:{ownerDid} +publicKey:{publicKey} +name:{name} +framework:{framework} +ttlDays:{ttlDays}`; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/external.js +var external_exports = {}; +__export(external_exports, { + $brand: () => $brand, + $input: () => $input, + $output: () => $output, + NEVER: () => NEVER, + TimePrecision: () => TimePrecision, + ZodAny: () => ZodAny, + ZodArray: () => ZodArray, + ZodBase64: () => ZodBase64, + ZodBase64URL: () => ZodBase64URL, + ZodBigInt: () => ZodBigInt, + ZodBigIntFormat: () => ZodBigIntFormat, + ZodBoolean: () => ZodBoolean, + ZodCIDRv4: () => ZodCIDRv4, + ZodCIDRv6: () => ZodCIDRv6, + ZodCUID: () => ZodCUID, + ZodCUID2: () => ZodCUID2, + ZodCatch: () => ZodCatch, + 
ZodCodec: () => ZodCodec, + ZodCustom: () => ZodCustom, + ZodCustomStringFormat: () => ZodCustomStringFormat, + ZodDate: () => ZodDate, + ZodDefault: () => ZodDefault, + ZodDiscriminatedUnion: () => ZodDiscriminatedUnion, + ZodE164: () => ZodE164, + ZodEmail: () => ZodEmail, + ZodEmoji: () => ZodEmoji, + ZodEnum: () => ZodEnum, + ZodError: () => ZodError, + ZodExactOptional: () => ZodExactOptional, + ZodFile: () => ZodFile, + ZodFirstPartyTypeKind: () => ZodFirstPartyTypeKind, + ZodFunction: () => ZodFunction, + ZodGUID: () => ZodGUID, + ZodIPv4: () => ZodIPv4, + ZodIPv6: () => ZodIPv6, + ZodISODate: () => ZodISODate, + ZodISODateTime: () => ZodISODateTime, + ZodISODuration: () => ZodISODuration, + ZodISOTime: () => ZodISOTime, + ZodIntersection: () => ZodIntersection, + ZodIssueCode: () => ZodIssueCode, + ZodJWT: () => ZodJWT, + ZodKSUID: () => ZodKSUID, + ZodLazy: () => ZodLazy, + ZodLiteral: () => ZodLiteral, + ZodMAC: () => ZodMAC, + ZodMap: () => ZodMap, + ZodNaN: () => ZodNaN, + ZodNanoID: () => ZodNanoID, + ZodNever: () => ZodNever, + ZodNonOptional: () => ZodNonOptional, + ZodNull: () => ZodNull, + ZodNullable: () => ZodNullable, + ZodNumber: () => ZodNumber, + ZodNumberFormat: () => ZodNumberFormat, + ZodObject: () => ZodObject, + ZodOptional: () => ZodOptional, + ZodPipe: () => ZodPipe, + ZodPrefault: () => ZodPrefault, + ZodPromise: () => ZodPromise, + ZodReadonly: () => ZodReadonly, + ZodRealError: () => ZodRealError, + ZodRecord: () => ZodRecord, + ZodSet: () => ZodSet, + ZodString: () => ZodString, + ZodStringFormat: () => ZodStringFormat, + ZodSuccess: () => ZodSuccess, + ZodSymbol: () => ZodSymbol, + ZodTemplateLiteral: () => ZodTemplateLiteral, + ZodTransform: () => ZodTransform, + ZodTuple: () => ZodTuple, + ZodType: () => ZodType, + ZodULID: () => ZodULID, + ZodURL: () => ZodURL, + ZodUUID: () => ZodUUID, + ZodUndefined: () => ZodUndefined, + ZodUnion: () => ZodUnion, + ZodUnknown: () => ZodUnknown, + ZodVoid: () => ZodVoid, + ZodXID: () => 
ZodXID, + ZodXor: () => ZodXor, + _ZodString: () => _ZodString, + _default: () => _default2, + _function: () => _function, + any: () => any, + array: () => array, + base64: () => base642, + base64url: () => base64url2, + bigint: () => bigint2, + boolean: () => boolean2, + catch: () => _catch2, + check: () => check, + cidrv4: () => cidrv42, + cidrv6: () => cidrv62, + clone: () => clone, + codec: () => codec, + coerce: () => coerce_exports, + config: () => config, + core: () => core_exports2, + cuid: () => cuid3, + cuid2: () => cuid22, + custom: () => custom, + date: () => date3, + decode: () => decode2, + decodeAsync: () => decodeAsync2, + describe: () => describe2, + discriminatedUnion: () => discriminatedUnion, + e164: () => e1642, + email: () => email2, + emoji: () => emoji2, + encode: () => encode2, + encodeAsync: () => encodeAsync2, + endsWith: () => _endsWith, + enum: () => _enum2, + exactOptional: () => exactOptional, + file: () => file, + flattenError: () => flattenError, + float32: () => float32, + float64: () => float64, + formatError: () => formatError, + fromJSONSchema: () => fromJSONSchema, + function: () => _function, + getErrorMap: () => getErrorMap, + globalRegistry: () => globalRegistry, + gt: () => _gt, + gte: () => _gte, + guid: () => guid2, + hash: () => hash, + hex: () => hex2, + hostname: () => hostname2, + httpUrl: () => httpUrl, + includes: () => _includes, + instanceof: () => _instanceof, + int: () => int, + int32: () => int32, + int64: () => int64, + intersection: () => intersection, + ipv4: () => ipv42, + ipv6: () => ipv62, + iso: () => iso_exports, + json: () => json, + jwt: () => jwt, + keyof: () => keyof, + ksuid: () => ksuid2, + lazy: () => lazy, + length: () => _length, + literal: () => literal, + locales: () => locales_exports, + looseObject: () => looseObject, + looseRecord: () => looseRecord, + lowercase: () => _lowercase, + lt: () => _lt, + lte: () => _lte, + mac: () => mac2, + map: () => map, + maxLength: () => _maxLength, + 
maxSize: () => _maxSize, + meta: () => meta2, + mime: () => _mime, + minLength: () => _minLength, + minSize: () => _minSize, + multipleOf: () => _multipleOf, + nan: () => nan, + nanoid: () => nanoid2, + nativeEnum: () => nativeEnum, + negative: () => _negative, + never: () => never, + nonnegative: () => _nonnegative, + nonoptional: () => nonoptional, + nonpositive: () => _nonpositive, + normalize: () => _normalize, + null: () => _null3, + nullable: () => nullable, + nullish: () => nullish2, + number: () => number2, + object: () => object, + optional: () => optional, + overwrite: () => _overwrite, + parse: () => parse2, + parseAsync: () => parseAsync2, + partialRecord: () => partialRecord, + pipe: () => pipe, + positive: () => _positive, + prefault: () => prefault, + preprocess: () => preprocess, + prettifyError: () => prettifyError, + promise: () => promise, + property: () => _property, + readonly: () => readonly, + record: () => record, + refine: () => refine, + regex: () => _regex, + regexes: () => regexes_exports, + registry: () => registry, + safeDecode: () => safeDecode2, + safeDecodeAsync: () => safeDecodeAsync2, + safeEncode: () => safeEncode2, + safeEncodeAsync: () => safeEncodeAsync2, + safeParse: () => safeParse2, + safeParseAsync: () => safeParseAsync2, + set: () => set, + setErrorMap: () => setErrorMap, + size: () => _size, + slugify: () => _slugify, + startsWith: () => _startsWith, + strictObject: () => strictObject, + string: () => string2, + stringFormat: () => stringFormat, + stringbool: () => stringbool, + success: () => success, + superRefine: () => superRefine, + symbol: () => symbol, + templateLiteral: () => templateLiteral, + toJSONSchema: () => toJSONSchema, + toLowerCase: () => _toLowerCase, + toUpperCase: () => _toUpperCase, + transform: () => transform, + treeifyError: () => treeifyError, + trim: () => _trim, + tuple: () => tuple, + uint32: () => uint32, + uint64: () => uint64, + ulid: () => ulid2, + undefined: () => _undefined3, + union: 
() => union, + unknown: () => unknown, + uppercase: () => _uppercase, + url: () => url, + util: () => util_exports, + uuid: () => uuid2, + uuidv4: () => uuidv4, + uuidv6: () => uuidv6, + uuidv7: () => uuidv7, + void: () => _void2, + xid: () => xid2, + xor: () => xor +}); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/index.js +var core_exports2 = {}; +__export(core_exports2, { + $ZodAny: () => $ZodAny, + $ZodArray: () => $ZodArray, + $ZodAsyncError: () => $ZodAsyncError, + $ZodBase64: () => $ZodBase64, + $ZodBase64URL: () => $ZodBase64URL, + $ZodBigInt: () => $ZodBigInt, + $ZodBigIntFormat: () => $ZodBigIntFormat, + $ZodBoolean: () => $ZodBoolean, + $ZodCIDRv4: () => $ZodCIDRv4, + $ZodCIDRv6: () => $ZodCIDRv6, + $ZodCUID: () => $ZodCUID, + $ZodCUID2: () => $ZodCUID2, + $ZodCatch: () => $ZodCatch, + $ZodCheck: () => $ZodCheck, + $ZodCheckBigIntFormat: () => $ZodCheckBigIntFormat, + $ZodCheckEndsWith: () => $ZodCheckEndsWith, + $ZodCheckGreaterThan: () => $ZodCheckGreaterThan, + $ZodCheckIncludes: () => $ZodCheckIncludes, + $ZodCheckLengthEquals: () => $ZodCheckLengthEquals, + $ZodCheckLessThan: () => $ZodCheckLessThan, + $ZodCheckLowerCase: () => $ZodCheckLowerCase, + $ZodCheckMaxLength: () => $ZodCheckMaxLength, + $ZodCheckMaxSize: () => $ZodCheckMaxSize, + $ZodCheckMimeType: () => $ZodCheckMimeType, + $ZodCheckMinLength: () => $ZodCheckMinLength, + $ZodCheckMinSize: () => $ZodCheckMinSize, + $ZodCheckMultipleOf: () => $ZodCheckMultipleOf, + $ZodCheckNumberFormat: () => $ZodCheckNumberFormat, + $ZodCheckOverwrite: () => $ZodCheckOverwrite, + $ZodCheckProperty: () => $ZodCheckProperty, + $ZodCheckRegex: () => $ZodCheckRegex, + $ZodCheckSizeEquals: () => $ZodCheckSizeEquals, + $ZodCheckStartsWith: () => $ZodCheckStartsWith, + $ZodCheckStringFormat: () => $ZodCheckStringFormat, + $ZodCheckUpperCase: () => $ZodCheckUpperCase, + $ZodCodec: () => $ZodCodec, + $ZodCustom: () => $ZodCustom, + $ZodCustomStringFormat: () => $ZodCustomStringFormat, + 
$ZodDate: () => $ZodDate, + $ZodDefault: () => $ZodDefault, + $ZodDiscriminatedUnion: () => $ZodDiscriminatedUnion, + $ZodE164: () => $ZodE164, + $ZodEmail: () => $ZodEmail, + $ZodEmoji: () => $ZodEmoji, + $ZodEncodeError: () => $ZodEncodeError, + $ZodEnum: () => $ZodEnum, + $ZodError: () => $ZodError, + $ZodExactOptional: () => $ZodExactOptional, + $ZodFile: () => $ZodFile, + $ZodFunction: () => $ZodFunction, + $ZodGUID: () => $ZodGUID, + $ZodIPv4: () => $ZodIPv4, + $ZodIPv6: () => $ZodIPv6, + $ZodISODate: () => $ZodISODate, + $ZodISODateTime: () => $ZodISODateTime, + $ZodISODuration: () => $ZodISODuration, + $ZodISOTime: () => $ZodISOTime, + $ZodIntersection: () => $ZodIntersection, + $ZodJWT: () => $ZodJWT, + $ZodKSUID: () => $ZodKSUID, + $ZodLazy: () => $ZodLazy, + $ZodLiteral: () => $ZodLiteral, + $ZodMAC: () => $ZodMAC, + $ZodMap: () => $ZodMap, + $ZodNaN: () => $ZodNaN, + $ZodNanoID: () => $ZodNanoID, + $ZodNever: () => $ZodNever, + $ZodNonOptional: () => $ZodNonOptional, + $ZodNull: () => $ZodNull, + $ZodNullable: () => $ZodNullable, + $ZodNumber: () => $ZodNumber, + $ZodNumberFormat: () => $ZodNumberFormat, + $ZodObject: () => $ZodObject, + $ZodObjectJIT: () => $ZodObjectJIT, + $ZodOptional: () => $ZodOptional, + $ZodPipe: () => $ZodPipe, + $ZodPrefault: () => $ZodPrefault, + $ZodPromise: () => $ZodPromise, + $ZodReadonly: () => $ZodReadonly, + $ZodRealError: () => $ZodRealError, + $ZodRecord: () => $ZodRecord, + $ZodRegistry: () => $ZodRegistry, + $ZodSet: () => $ZodSet, + $ZodString: () => $ZodString, + $ZodStringFormat: () => $ZodStringFormat, + $ZodSuccess: () => $ZodSuccess, + $ZodSymbol: () => $ZodSymbol, + $ZodTemplateLiteral: () => $ZodTemplateLiteral, + $ZodTransform: () => $ZodTransform, + $ZodTuple: () => $ZodTuple, + $ZodType: () => $ZodType, + $ZodULID: () => $ZodULID, + $ZodURL: () => $ZodURL, + $ZodUUID: () => $ZodUUID, + $ZodUndefined: () => $ZodUndefined, + $ZodUnion: () => $ZodUnion, + $ZodUnknown: () => $ZodUnknown, + $ZodVoid: () => 
$ZodVoid, + $ZodXID: () => $ZodXID, + $ZodXor: () => $ZodXor, + $brand: () => $brand, + $constructor: () => $constructor, + $input: () => $input, + $output: () => $output, + Doc: () => Doc, + JSONSchema: () => json_schema_exports, + JSONSchemaGenerator: () => JSONSchemaGenerator, + NEVER: () => NEVER, + TimePrecision: () => TimePrecision, + _any: () => _any, + _array: () => _array, + _base64: () => _base64, + _base64url: () => _base64url, + _bigint: () => _bigint, + _boolean: () => _boolean, + _catch: () => _catch, + _check: () => _check, + _cidrv4: () => _cidrv4, + _cidrv6: () => _cidrv6, + _coercedBigint: () => _coercedBigint, + _coercedBoolean: () => _coercedBoolean, + _coercedDate: () => _coercedDate, + _coercedNumber: () => _coercedNumber, + _coercedString: () => _coercedString, + _cuid: () => _cuid, + _cuid2: () => _cuid2, + _custom: () => _custom, + _date: () => _date, + _decode: () => _decode, + _decodeAsync: () => _decodeAsync, + _default: () => _default, + _discriminatedUnion: () => _discriminatedUnion, + _e164: () => _e164, + _email: () => _email, + _emoji: () => _emoji2, + _encode: () => _encode, + _encodeAsync: () => _encodeAsync, + _endsWith: () => _endsWith, + _enum: () => _enum, + _file: () => _file, + _float32: () => _float32, + _float64: () => _float64, + _gt: () => _gt, + _gte: () => _gte, + _guid: () => _guid, + _includes: () => _includes, + _int: () => _int, + _int32: () => _int32, + _int64: () => _int64, + _intersection: () => _intersection, + _ipv4: () => _ipv4, + _ipv6: () => _ipv6, + _isoDate: () => _isoDate, + _isoDateTime: () => _isoDateTime, + _isoDuration: () => _isoDuration, + _isoTime: () => _isoTime, + _jwt: () => _jwt, + _ksuid: () => _ksuid, + _lazy: () => _lazy, + _length: () => _length, + _literal: () => _literal, + _lowercase: () => _lowercase, + _lt: () => _lt, + _lte: () => _lte, + _mac: () => _mac, + _map: () => _map, + _max: () => _lte, + _maxLength: () => _maxLength, + _maxSize: () => _maxSize, + _mime: () => _mime, + _min: 
() => _gte, + _minLength: () => _minLength, + _minSize: () => _minSize, + _multipleOf: () => _multipleOf, + _nan: () => _nan, + _nanoid: () => _nanoid, + _nativeEnum: () => _nativeEnum, + _negative: () => _negative, + _never: () => _never, + _nonnegative: () => _nonnegative, + _nonoptional: () => _nonoptional, + _nonpositive: () => _nonpositive, + _normalize: () => _normalize, + _null: () => _null2, + _nullable: () => _nullable, + _number: () => _number, + _optional: () => _optional, + _overwrite: () => _overwrite, + _parse: () => _parse, + _parseAsync: () => _parseAsync, + _pipe: () => _pipe, + _positive: () => _positive, + _promise: () => _promise, + _property: () => _property, + _readonly: () => _readonly, + _record: () => _record, + _refine: () => _refine, + _regex: () => _regex, + _safeDecode: () => _safeDecode, + _safeDecodeAsync: () => _safeDecodeAsync, + _safeEncode: () => _safeEncode, + _safeEncodeAsync: () => _safeEncodeAsync, + _safeParse: () => _safeParse, + _safeParseAsync: () => _safeParseAsync, + _set: () => _set, + _size: () => _size, + _slugify: () => _slugify, + _startsWith: () => _startsWith, + _string: () => _string, + _stringFormat: () => _stringFormat, + _stringbool: () => _stringbool, + _success: () => _success, + _superRefine: () => _superRefine, + _symbol: () => _symbol, + _templateLiteral: () => _templateLiteral, + _toLowerCase: () => _toLowerCase, + _toUpperCase: () => _toUpperCase, + _transform: () => _transform, + _trim: () => _trim, + _tuple: () => _tuple, + _uint32: () => _uint32, + _uint64: () => _uint64, + _ulid: () => _ulid, + _undefined: () => _undefined2, + _union: () => _union, + _unknown: () => _unknown, + _uppercase: () => _uppercase, + _url: () => _url, + _uuid: () => _uuid, + _uuidv4: () => _uuidv4, + _uuidv6: () => _uuidv6, + _uuidv7: () => _uuidv7, + _void: () => _void, + _xid: () => _xid, + _xor: () => _xor, + clone: () => clone, + config: () => config, + createStandardJSONSchemaMethod: () => 
createStandardJSONSchemaMethod, + createToJSONSchemaMethod: () => createToJSONSchemaMethod, + decode: () => decode, + decodeAsync: () => decodeAsync, + describe: () => describe, + encode: () => encode, + encodeAsync: () => encodeAsync, + extractDefs: () => extractDefs, + finalize: () => finalize, + flattenError: () => flattenError, + formatError: () => formatError, + globalConfig: () => globalConfig, + globalRegistry: () => globalRegistry, + initializeContext: () => initializeContext, + isValidBase64: () => isValidBase64, + isValidBase64URL: () => isValidBase64URL, + isValidJWT: () => isValidJWT, + locales: () => locales_exports, + meta: () => meta, + parse: () => parse, + parseAsync: () => parseAsync, + prettifyError: () => prettifyError, + process: () => process2, + regexes: () => regexes_exports, + registry: () => registry, + safeDecode: () => safeDecode, + safeDecodeAsync: () => safeDecodeAsync, + safeEncode: () => safeEncode, + safeEncodeAsync: () => safeEncodeAsync, + safeParse: () => safeParse, + safeParseAsync: () => safeParseAsync, + toDotPath: () => toDotPath, + toJSONSchema: () => toJSONSchema, + treeifyError: () => treeifyError, + util: () => util_exports, + version: () => version +}); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/core.js +var NEVER = Object.freeze({ + status: "aborted" +}); +// @__NO_SIDE_EFFECTS__ +function $constructor(name, initializer3, params) { + function init(inst, def) { + if (!inst._zod) { + Object.defineProperty(inst, "_zod", { + value: { + def, + constr: _, + traits: /* @__PURE__ */ new Set() + }, + enumerable: false + }); + } + if (inst._zod.traits.has(name)) { + return; + } + inst._zod.traits.add(name); + initializer3(inst, def); + const proto = _.prototype; + const keys = Object.keys(proto); + for (let i = 0; i < keys.length; i++) { + const k = keys[i]; + if (!(k in inst)) { + inst[k] = proto[k].bind(inst); + } + } + } + const Parent = params?.Parent ?? 
Object; + class Definition extends Parent { + } + Object.defineProperty(Definition, "name", { value: name }); + function _(def) { + var _a3; + const inst = params?.Parent ? new Definition() : this; + init(inst, def); + (_a3 = inst._zod).deferred ?? (_a3.deferred = []); + for (const fn of inst._zod.deferred) { + fn(); + } + return inst; + } + Object.defineProperty(_, "init", { value: init }); + Object.defineProperty(_, Symbol.hasInstance, { + value: (inst) => { + if (params?.Parent && inst instanceof params.Parent) + return true; + return inst?._zod?.traits?.has(name); + } + }); + Object.defineProperty(_, "name", { value: name }); + return _; +} +var $brand = /* @__PURE__ */ Symbol("zod_brand"); +var $ZodAsyncError = class extends Error { + constructor() { + super(`Encountered Promise during synchronous parse. Use .parseAsync() instead.`); + } +}; +var $ZodEncodeError = class extends Error { + constructor(name) { + super(`Encountered unidirectional transform during encode: ${name}`); + this.name = "ZodEncodeError"; + } +}; +var globalConfig = {}; +function config(newConfig) { + if (newConfig) + Object.assign(globalConfig, newConfig); + return globalConfig; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/util.js +var util_exports = {}; +__export(util_exports, { + BIGINT_FORMAT_RANGES: () => BIGINT_FORMAT_RANGES, + Class: () => Class, + NUMBER_FORMAT_RANGES: () => NUMBER_FORMAT_RANGES, + aborted: () => aborted, + allowsEval: () => allowsEval, + assert: () => assert, + assertEqual: () => assertEqual, + assertIs: () => assertIs, + assertNever: () => assertNever, + assertNotEqual: () => assertNotEqual, + assignProp: () => assignProp, + base64ToUint8Array: () => base64ToUint8Array, + base64urlToUint8Array: () => base64urlToUint8Array, + cached: () => cached, + captureStackTrace: () => captureStackTrace, + cleanEnum: () => cleanEnum, + cleanRegex: () => cleanRegex, + clone: () => clone, + cloneDef: () => cloneDef, + createTransparentProxy: () => 
createTransparentProxy, + defineLazy: () => defineLazy, + esc: () => esc, + escapeRegex: () => escapeRegex, + extend: () => extend, + finalizeIssue: () => finalizeIssue, + floatSafeRemainder: () => floatSafeRemainder, + getElementAtPath: () => getElementAtPath, + getEnumValues: () => getEnumValues, + getLengthableOrigin: () => getLengthableOrigin, + getParsedType: () => getParsedType, + getSizableOrigin: () => getSizableOrigin, + hexToUint8Array: () => hexToUint8Array, + isObject: () => isObject, + isPlainObject: () => isPlainObject, + issue: () => issue, + joinValues: () => joinValues, + jsonStringifyReplacer: () => jsonStringifyReplacer, + merge: () => merge, + mergeDefs: () => mergeDefs, + normalizeParams: () => normalizeParams, + nullish: () => nullish, + numKeys: () => numKeys, + objectClone: () => objectClone, + omit: () => omit, + optionalKeys: () => optionalKeys, + parsedType: () => parsedType, + partial: () => partial, + pick: () => pick, + prefixIssues: () => prefixIssues, + primitiveTypes: () => primitiveTypes, + promiseAllObject: () => promiseAllObject, + propertyKeyTypes: () => propertyKeyTypes, + randomString: () => randomString, + required: () => required, + safeExtend: () => safeExtend, + shallowClone: () => shallowClone, + slugify: () => slugify, + stringifyPrimitive: () => stringifyPrimitive, + uint8ArrayToBase64: () => uint8ArrayToBase64, + uint8ArrayToBase64url: () => uint8ArrayToBase64url, + uint8ArrayToHex: () => uint8ArrayToHex, + unwrapMessage: () => unwrapMessage +}); +function assertEqual(val) { + return val; +} +function assertNotEqual(val) { + return val; +} +function assertIs(_arg) { +} +function assertNever(_x) { + throw new Error("Unexpected value in exhaustive check"); +} +function assert(_) { +} +function getEnumValues(entries) { + const numericValues = Object.values(entries).filter((v) => typeof v === "number"); + const values = Object.entries(entries).filter(([k, _]) => numericValues.indexOf(+k) === -1).map(([_, v]) => v); + 
return values; +} +function joinValues(array2, separator = "|") { + return array2.map((val) => stringifyPrimitive(val)).join(separator); +} +function jsonStringifyReplacer(_, value) { + if (typeof value === "bigint") + return value.toString(); + return value; +} +function cached(getter) { + const set2 = false; + return { + get value() { + if (!set2) { + const value = getter(); + Object.defineProperty(this, "value", { value }); + return value; + } + throw new Error("cached value already set"); + } + }; +} +function nullish(input) { + return input === null || input === void 0; +} +function cleanRegex(source) { + const start = source.startsWith("^") ? 1 : 0; + const end = source.endsWith("$") ? source.length - 1 : source.length; + return source.slice(start, end); +} +function floatSafeRemainder(val, step) { + const valDecCount = (val.toString().split(".")[1] || "").length; + const stepString = step.toString(); + let stepDecCount = (stepString.split(".")[1] || "").length; + if (stepDecCount === 0 && /\d?e-\d?/.test(stepString)) { + const match2 = stepString.match(/\d?e-(\d?)/); + if (match2?.[1]) { + stepDecCount = Number.parseInt(match2[1]); + } + } + const decCount = valDecCount > stepDecCount ? 
valDecCount : stepDecCount; + const valInt = Number.parseInt(val.toFixed(decCount).replace(".", "")); + const stepInt = Number.parseInt(step.toFixed(decCount).replace(".", "")); + return valInt % stepInt / 10 ** decCount; +} +var EVALUATING = /* @__PURE__ */ Symbol("evaluating"); +function defineLazy(object2, key, getter) { + let value = void 0; + Object.defineProperty(object2, key, { + get() { + if (value === EVALUATING) { + return void 0; + } + if (value === void 0) { + value = EVALUATING; + value = getter(); + } + return value; + }, + set(v) { + Object.defineProperty(object2, key, { + value: v + // configurable: true, + }); + }, + configurable: true + }); +} +function objectClone(obj) { + return Object.create(Object.getPrototypeOf(obj), Object.getOwnPropertyDescriptors(obj)); +} +function assignProp(target, prop, value) { + Object.defineProperty(target, prop, { + value, + writable: true, + enumerable: true, + configurable: true + }); +} +function mergeDefs(...defs) { + const mergedDescriptors = {}; + for (const def of defs) { + const descriptors = Object.getOwnPropertyDescriptors(def); + Object.assign(mergedDescriptors, descriptors); + } + return Object.defineProperties({}, mergedDescriptors); +} +function cloneDef(schema) { + return mergeDefs(schema._zod.def); +} +function getElementAtPath(obj, path) { + if (!path) + return obj; + return path.reduce((acc, key) => acc?.[key], obj); +} +function promiseAllObject(promisesObj) { + const keys = Object.keys(promisesObj); + const promises = keys.map((key) => promisesObj[key]); + return Promise.all(promises).then((results) => { + const resolvedObj = {}; + for (let i = 0; i < keys.length; i++) { + resolvedObj[keys[i]] = results[i]; + } + return resolvedObj; + }); +} +function randomString(length = 10) { + const chars = "abcdefghijklmnopqrstuvwxyz"; + let str = ""; + for (let i = 0; i < length; i++) { + str += chars[Math.floor(Math.random() * chars.length)]; + } + return str; +} +function esc(str) { + return 
JSON.stringify(str); +} +function slugify(input) { + return input.toLowerCase().trim().replace(/[^\w\s-]/g, "").replace(/[\s_-]+/g, "-").replace(/^-+|-+$/g, ""); +} +var captureStackTrace = "captureStackTrace" in Error ? Error.captureStackTrace : (..._args) => { +}; +function isObject(data) { + return typeof data === "object" && data !== null && !Array.isArray(data); +} +var allowsEval = cached(() => { + if (typeof navigator !== "undefined" && navigator?.userAgent?.includes("Cloudflare")) { + return false; + } + try { + const F = Function; + new F(""); + return true; + } catch (_) { + return false; + } +}); +function isPlainObject(o) { + if (isObject(o) === false) + return false; + const ctor = o.constructor; + if (ctor === void 0) + return true; + if (typeof ctor !== "function") + return true; + const prot = ctor.prototype; + if (isObject(prot) === false) + return false; + if (Object.prototype.hasOwnProperty.call(prot, "isPrototypeOf") === false) { + return false; + } + return true; +} +function shallowClone(o) { + if (isPlainObject(o)) + return { ...o }; + if (Array.isArray(o)) + return [...o]; + return o; +} +function numKeys(data) { + let keyCount = 0; + for (const key in data) { + if (Object.prototype.hasOwnProperty.call(data, key)) { + keyCount++; + } + } + return keyCount; +} +var getParsedType = (data) => { + const t = typeof data; + switch (t) { + case "undefined": + return "undefined"; + case "string": + return "string"; + case "number": + return Number.isNaN(data) ? 
"nan" : "number"; + case "boolean": + return "boolean"; + case "function": + return "function"; + case "bigint": + return "bigint"; + case "symbol": + return "symbol"; + case "object": + if (Array.isArray(data)) { + return "array"; + } + if (data === null) { + return "null"; + } + if (data.then && typeof data.then === "function" && data.catch && typeof data.catch === "function") { + return "promise"; + } + if (typeof Map !== "undefined" && data instanceof Map) { + return "map"; + } + if (typeof Set !== "undefined" && data instanceof Set) { + return "set"; + } + if (typeof Date !== "undefined" && data instanceof Date) { + return "date"; + } + if (typeof File !== "undefined" && data instanceof File) { + return "file"; + } + return "object"; + default: + throw new Error(`Unknown data type: ${t}`); + } +}; +var propertyKeyTypes = /* @__PURE__ */ new Set(["string", "number", "symbol"]); +var primitiveTypes = /* @__PURE__ */ new Set(["string", "number", "bigint", "boolean", "symbol", "undefined"]); +function escapeRegex(str) { + return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); +} +function clone(inst, def, params) { + const cl = new inst._zod.constr(def ?? inst._zod.def); + if (!def || params?.parent) + cl._zod.parent = inst; + return cl; +} +function normalizeParams(_params) { + const params = _params; + if (!params) + return {}; + if (typeof params === "string") + return { error: () => params }; + if (params?.message !== void 0) { + if (params?.error !== void 0) + throw new Error("Cannot specify both `message` and `error` params"); + params.error = params.message; + } + delete params.message; + if (typeof params.error === "string") + return { ...params, error: () => params.error }; + return params; +} +function createTransparentProxy(getter) { + let target; + return new Proxy({}, { + get(_, prop, receiver) { + target ?? (target = getter()); + return Reflect.get(target, prop, receiver); + }, + set(_, prop, value, receiver) { + target ?? 
(target = getter()); + return Reflect.set(target, prop, value, receiver); + }, + has(_, prop) { + target ?? (target = getter()); + return Reflect.has(target, prop); + }, + deleteProperty(_, prop) { + target ?? (target = getter()); + return Reflect.deleteProperty(target, prop); + }, + ownKeys(_) { + target ?? (target = getter()); + return Reflect.ownKeys(target); + }, + getOwnPropertyDescriptor(_, prop) { + target ?? (target = getter()); + return Reflect.getOwnPropertyDescriptor(target, prop); + }, + defineProperty(_, prop, descriptor) { + target ?? (target = getter()); + return Reflect.defineProperty(target, prop, descriptor); + } + }); +} +function stringifyPrimitive(value) { + if (typeof value === "bigint") + return value.toString() + "n"; + if (typeof value === "string") + return `"${value}"`; + return `${value}`; +} +function optionalKeys(shape) { + return Object.keys(shape).filter((k) => { + return shape[k]._zod.optin === "optional" && shape[k]._zod.optout === "optional"; + }); +} +var NUMBER_FORMAT_RANGES = { + safeint: [Number.MIN_SAFE_INTEGER, Number.MAX_SAFE_INTEGER], + int32: [-2147483648, 2147483647], + uint32: [0, 4294967295], + float32: [-34028234663852886e22, 34028234663852886e22], + float64: [-Number.MAX_VALUE, Number.MAX_VALUE] +}; +var BIGINT_FORMAT_RANGES = { + int64: [/* @__PURE__ */ BigInt("-9223372036854775808"), /* @__PURE__ */ BigInt("9223372036854775807")], + uint64: [/* @__PURE__ */ BigInt(0), /* @__PURE__ */ BigInt("18446744073709551615")] +}; +function pick(schema, mask) { + const currDef = schema._zod.def; + const checks = currDef.checks; + const hasChecks = checks && checks.length > 0; + if (hasChecks) { + throw new Error(".pick() cannot be used on object schemas containing refinements"); + } + const def = mergeDefs(schema._zod.def, { + get shape() { + const newShape = {}; + for (const key in mask) { + if (!(key in currDef.shape)) { + throw new Error(`Unrecognized key: "${key}"`); + } + if (!mask[key]) + continue; + newShape[key] = 
currDef.shape[key]; + } + assignProp(this, "shape", newShape); + return newShape; + }, + checks: [] + }); + return clone(schema, def); +} +function omit(schema, mask) { + const currDef = schema._zod.def; + const checks = currDef.checks; + const hasChecks = checks && checks.length > 0; + if (hasChecks) { + throw new Error(".omit() cannot be used on object schemas containing refinements"); + } + const def = mergeDefs(schema._zod.def, { + get shape() { + const newShape = { ...schema._zod.def.shape }; + for (const key in mask) { + if (!(key in currDef.shape)) { + throw new Error(`Unrecognized key: "${key}"`); + } + if (!mask[key]) + continue; + delete newShape[key]; + } + assignProp(this, "shape", newShape); + return newShape; + }, + checks: [] + }); + return clone(schema, def); +} +function extend(schema, shape) { + if (!isPlainObject(shape)) { + throw new Error("Invalid input to extend: expected a plain object"); + } + const checks = schema._zod.def.checks; + const hasChecks = checks && checks.length > 0; + if (hasChecks) { + const existingShape = schema._zod.def.shape; + for (const key in shape) { + if (Object.getOwnPropertyDescriptor(existingShape, key) !== void 0) { + throw new Error("Cannot overwrite keys on object schemas containing refinements. 
Use `.safeExtend()` instead."); + } + } + } + const def = mergeDefs(schema._zod.def, { + get shape() { + const _shape = { ...schema._zod.def.shape, ...shape }; + assignProp(this, "shape", _shape); + return _shape; + } + }); + return clone(schema, def); +} +function safeExtend(schema, shape) { + if (!isPlainObject(shape)) { + throw new Error("Invalid input to safeExtend: expected a plain object"); + } + const def = mergeDefs(schema._zod.def, { + get shape() { + const _shape = { ...schema._zod.def.shape, ...shape }; + assignProp(this, "shape", _shape); + return _shape; + } + }); + return clone(schema, def); +} +function merge(a, b) { + const def = mergeDefs(a._zod.def, { + get shape() { + const _shape = { ...a._zod.def.shape, ...b._zod.def.shape }; + assignProp(this, "shape", _shape); + return _shape; + }, + get catchall() { + return b._zod.def.catchall; + }, + checks: [] + // delete existing checks + }); + return clone(a, def); +} +function partial(Class2, schema, mask) { + const currDef = schema._zod.def; + const checks = currDef.checks; + const hasChecks = checks && checks.length > 0; + if (hasChecks) { + throw new Error(".partial() cannot be used on object schemas containing refinements"); + } + const def = mergeDefs(schema._zod.def, { + get shape() { + const oldShape = schema._zod.def.shape; + const shape = { ...oldShape }; + if (mask) { + for (const key in mask) { + if (!(key in oldShape)) { + throw new Error(`Unrecognized key: "${key}"`); + } + if (!mask[key]) + continue; + shape[key] = Class2 ? new Class2({ + type: "optional", + innerType: oldShape[key] + }) : oldShape[key]; + } + } else { + for (const key in oldShape) { + shape[key] = Class2 ? 
new Class2({ + type: "optional", + innerType: oldShape[key] + }) : oldShape[key]; + } + } + assignProp(this, "shape", shape); + return shape; + }, + checks: [] + }); + return clone(schema, def); +} +function required(Class2, schema, mask) { + const def = mergeDefs(schema._zod.def, { + get shape() { + const oldShape = schema._zod.def.shape; + const shape = { ...oldShape }; + if (mask) { + for (const key in mask) { + if (!(key in shape)) { + throw new Error(`Unrecognized key: "${key}"`); + } + if (!mask[key]) + continue; + shape[key] = new Class2({ + type: "nonoptional", + innerType: oldShape[key] + }); + } + } else { + for (const key in oldShape) { + shape[key] = new Class2({ + type: "nonoptional", + innerType: oldShape[key] + }); + } + } + assignProp(this, "shape", shape); + return shape; + } + }); + return clone(schema, def); +} +function aborted(x, startIndex = 0) { + if (x.aborted === true) + return true; + for (let i = startIndex; i < x.issues.length; i++) { + if (x.issues[i]?.continue !== true) { + return true; + } + } + return false; +} +function prefixIssues(path, issues) { + return issues.map((iss) => { + var _a3; + (_a3 = iss).path ?? (_a3.path = []); + iss.path.unshift(path); + return iss; + }); +} +function unwrapMessage(message) { + return typeof message === "string" ? message : message?.message; +} +function finalizeIssue(iss, ctx, config2) { + const full = { ...iss, path: iss.path ?? [] }; + if (!iss.message) { + const message = unwrapMessage(iss.inst?._zod.def?.error?.(iss)) ?? unwrapMessage(ctx?.error?.(iss)) ?? unwrapMessage(config2.customError?.(iss)) ?? unwrapMessage(config2.localeError?.(iss)) ?? 
"Invalid input"; + full.message = message; + } + delete full.inst; + delete full.continue; + if (!ctx?.reportInput) { + delete full.input; + } + return full; +} +function getSizableOrigin(input) { + if (input instanceof Set) + return "set"; + if (input instanceof Map) + return "map"; + if (input instanceof File) + return "file"; + return "unknown"; +} +function getLengthableOrigin(input) { + if (Array.isArray(input)) + return "array"; + if (typeof input === "string") + return "string"; + return "unknown"; +} +function parsedType(data) { + const t = typeof data; + switch (t) { + case "number": { + return Number.isNaN(data) ? "nan" : "number"; + } + case "object": { + if (data === null) { + return "null"; + } + if (Array.isArray(data)) { + return "array"; + } + const obj = data; + if (obj && Object.getPrototypeOf(obj) !== Object.prototype && "constructor" in obj && obj.constructor) { + return obj.constructor.name; + } + } + } + return t; +} +function issue(...args) { + const [iss, input, inst] = args; + if (typeof iss === "string") { + return { + message: iss, + code: "custom", + input, + inst + }; + } + return { ...iss }; +} +function cleanEnum(obj) { + return Object.entries(obj).filter(([k, _]) => { + return Number.isNaN(Number.parseInt(k, 10)); + }).map((el) => el[1]); +} +function base64ToUint8Array(base643) { + const binaryString = atob(base643); + const bytes = new Uint8Array(binaryString.length); + for (let i = 0; i < binaryString.length; i++) { + bytes[i] = binaryString.charCodeAt(i); + } + return bytes; +} +function uint8ArrayToBase64(bytes) { + let binaryString = ""; + for (let i = 0; i < bytes.length; i++) { + binaryString += String.fromCharCode(bytes[i]); + } + return btoa(binaryString); +} +function base64urlToUint8Array(base64url3) { + const base643 = base64url3.replace(/-/g, "+").replace(/_/g, "/"); + const padding = "=".repeat((4 - base643.length % 4) % 4); + return base64ToUint8Array(base643 + padding); +} +function uint8ArrayToBase64url(bytes) { + 
return uint8ArrayToBase64(bytes).replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, ""); +} +function hexToUint8Array(hex3) { + const cleanHex = hex3.replace(/^0x/, ""); + if (cleanHex.length % 2 !== 0) { + throw new Error("Invalid hex string length"); + } + const bytes = new Uint8Array(cleanHex.length / 2); + for (let i = 0; i < cleanHex.length; i += 2) { + bytes[i / 2] = Number.parseInt(cleanHex.slice(i, i + 2), 16); + } + return bytes; +} +function uint8ArrayToHex(bytes) { + return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); +} +var Class = class { + constructor(..._args) { + } +}; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/errors.js +var initializer = (inst, def) => { + inst.name = "$ZodError"; + Object.defineProperty(inst, "_zod", { + value: inst._zod, + enumerable: false + }); + Object.defineProperty(inst, "issues", { + value: def, + enumerable: false + }); + inst.message = JSON.stringify(def, jsonStringifyReplacer, 2); + Object.defineProperty(inst, "toString", { + value: () => inst.message, + enumerable: false + }); +}; +var $ZodError = $constructor("$ZodError", initializer); +var $ZodRealError = $constructor("$ZodError", initializer, { Parent: Error }); +function flattenError(error48, mapper = (issue2) => issue2.message) { + const fieldErrors = {}; + const formErrors = []; + for (const sub of error48.issues) { + if (sub.path.length > 0) { + fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || []; + fieldErrors[sub.path[0]].push(mapper(sub)); + } else { + formErrors.push(mapper(sub)); + } + } + return { formErrors, fieldErrors }; +} +function formatError(error48, mapper = (issue2) => issue2.message) { + const fieldErrors = { _errors: [] }; + const processError = (error49) => { + for (const issue2 of error49.issues) { + if (issue2.code === "invalid_union" && issue2.errors.length) { + issue2.errors.map((issues) => processError({ issues })); + } else if (issue2.code === "invalid_key") { + processError({ 
issues: issue2.issues }); + } else if (issue2.code === "invalid_element") { + processError({ issues: issue2.issues }); + } else if (issue2.path.length === 0) { + fieldErrors._errors.push(mapper(issue2)); + } else { + let curr = fieldErrors; + let i = 0; + while (i < issue2.path.length) { + const el = issue2.path[i]; + const terminal = i === issue2.path.length - 1; + if (!terminal) { + curr[el] = curr[el] || { _errors: [] }; + } else { + curr[el] = curr[el] || { _errors: [] }; + curr[el]._errors.push(mapper(issue2)); + } + curr = curr[el]; + i++; + } + } + } + }; + processError(error48); + return fieldErrors; +} +function treeifyError(error48, mapper = (issue2) => issue2.message) { + const result = { errors: [] }; + const processError = (error49, path = []) => { + var _a3, _b; + for (const issue2 of error49.issues) { + if (issue2.code === "invalid_union" && issue2.errors.length) { + issue2.errors.map((issues) => processError({ issues }, issue2.path)); + } else if (issue2.code === "invalid_key") { + processError({ issues: issue2.issues }, issue2.path); + } else if (issue2.code === "invalid_element") { + processError({ issues: issue2.issues }, issue2.path); + } else { + const fullpath = [...path, ...issue2.path]; + if (fullpath.length === 0) { + result.errors.push(mapper(issue2)); + continue; + } + let curr = result; + let i = 0; + while (i < fullpath.length) { + const el = fullpath[i]; + const terminal = i === fullpath.length - 1; + if (typeof el === "string") { + curr.properties ?? (curr.properties = {}); + (_a3 = curr.properties)[el] ?? (_a3[el] = { errors: [] }); + curr = curr.properties[el]; + } else { + curr.items ?? (curr.items = []); + (_b = curr.items)[el] ?? (_b[el] = { errors: [] }); + curr = curr.items[el]; + } + if (terminal) { + curr.errors.push(mapper(issue2)); + } + i++; + } + } + } + }; + processError(error48); + return result; +} +function toDotPath(_path) { + const segs = []; + const path = _path.map((seg) => typeof seg === "object" ? 
seg.key : seg); + for (const seg of path) { + if (typeof seg === "number") + segs.push(`[${seg}]`); + else if (typeof seg === "symbol") + segs.push(`[${JSON.stringify(String(seg))}]`); + else if (/[^\w$]/.test(seg)) + segs.push(`[${JSON.stringify(seg)}]`); + else { + if (segs.length) + segs.push("."); + segs.push(seg); + } + } + return segs.join(""); +} +function prettifyError(error48) { + const lines = []; + const issues = [...error48.issues].sort((a, b) => (a.path ?? []).length - (b.path ?? []).length); + for (const issue2 of issues) { + lines.push(`\u2716 ${issue2.message}`); + if (issue2.path?.length) + lines.push(` \u2192 at ${toDotPath(issue2.path)}`); + } + return lines.join("\n"); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/parse.js +var _parse = (_Err) => (schema, value, _ctx, _params) => { + const ctx = _ctx ? Object.assign(_ctx, { async: false }) : { async: false }; + const result = schema._zod.run({ value, issues: [] }, ctx); + if (result instanceof Promise) { + throw new $ZodAsyncError(); + } + if (result.issues.length) { + const e = new (_params?.Err ?? _Err)(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))); + captureStackTrace(e, _params?.callee); + throw e; + } + return result.value; +}; +var parse = /* @__PURE__ */ _parse($ZodRealError); +var _parseAsync = (_Err) => async (schema, value, _ctx, params) => { + const ctx = _ctx ? Object.assign(_ctx, { async: true }) : { async: true }; + let result = schema._zod.run({ value, issues: [] }, ctx); + if (result instanceof Promise) + result = await result; + if (result.issues.length) { + const e = new (params?.Err ?? _Err)(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))); + captureStackTrace(e, params?.callee); + throw e; + } + return result.value; +}; +var parseAsync = /* @__PURE__ */ _parseAsync($ZodRealError); +var _safeParse = (_Err) => (schema, value, _ctx) => { + const ctx = _ctx ? 
{ ..._ctx, async: false } : { async: false }; + const result = schema._zod.run({ value, issues: [] }, ctx); + if (result instanceof Promise) { + throw new $ZodAsyncError(); + } + return result.issues.length ? { + success: false, + error: new (_Err ?? $ZodError)(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) + } : { success: true, data: result.value }; +}; +var safeParse = /* @__PURE__ */ _safeParse($ZodRealError); +var _safeParseAsync = (_Err) => async (schema, value, _ctx) => { + const ctx = _ctx ? Object.assign(_ctx, { async: true }) : { async: true }; + let result = schema._zod.run({ value, issues: [] }, ctx); + if (result instanceof Promise) + result = await result; + return result.issues.length ? { + success: false, + error: new _Err(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) + } : { success: true, data: result.value }; +}; +var safeParseAsync = /* @__PURE__ */ _safeParseAsync($ZodRealError); +var _encode = (_Err) => (schema, value, _ctx) => { + const ctx = _ctx ? Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; + return _parse(_Err)(schema, value, ctx); +}; +var encode = /* @__PURE__ */ _encode($ZodRealError); +var _decode = (_Err) => (schema, value, _ctx) => { + return _parse(_Err)(schema, value, _ctx); +}; +var decode = /* @__PURE__ */ _decode($ZodRealError); +var _encodeAsync = (_Err) => async (schema, value, _ctx) => { + const ctx = _ctx ? Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; + return _parseAsync(_Err)(schema, value, ctx); +}; +var encodeAsync = /* @__PURE__ */ _encodeAsync($ZodRealError); +var _decodeAsync = (_Err) => async (schema, value, _ctx) => { + return _parseAsync(_Err)(schema, value, _ctx); +}; +var decodeAsync = /* @__PURE__ */ _decodeAsync($ZodRealError); +var _safeEncode = (_Err) => (schema, value, _ctx) => { + const ctx = _ctx ? 
Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; + return _safeParse(_Err)(schema, value, ctx); +}; +var safeEncode = /* @__PURE__ */ _safeEncode($ZodRealError); +var _safeDecode = (_Err) => (schema, value, _ctx) => { + return _safeParse(_Err)(schema, value, _ctx); +}; +var safeDecode = /* @__PURE__ */ _safeDecode($ZodRealError); +var _safeEncodeAsync = (_Err) => async (schema, value, _ctx) => { + const ctx = _ctx ? Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; + return _safeParseAsync(_Err)(schema, value, ctx); +}; +var safeEncodeAsync = /* @__PURE__ */ _safeEncodeAsync($ZodRealError); +var _safeDecodeAsync = (_Err) => async (schema, value, _ctx) => { + return _safeParseAsync(_Err)(schema, value, _ctx); +}; +var safeDecodeAsync = /* @__PURE__ */ _safeDecodeAsync($ZodRealError); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/regexes.js +var regexes_exports = {}; +__export(regexes_exports, { + base64: () => base64, + base64url: () => base64url, + bigint: () => bigint, + boolean: () => boolean, + browserEmail: () => browserEmail, + cidrv4: () => cidrv4, + cidrv6: () => cidrv6, + cuid: () => cuid, + cuid2: () => cuid2, + date: () => date, + datetime: () => datetime, + domain: () => domain, + duration: () => duration, + e164: () => e164, + email: () => email, + emoji: () => emoji, + extendedDuration: () => extendedDuration, + guid: () => guid, + hex: () => hex, + hostname: () => hostname, + html5Email: () => html5Email, + idnEmail: () => idnEmail, + integer: () => integer, + ipv4: () => ipv4, + ipv6: () => ipv6, + ksuid: () => ksuid, + lowercase: () => lowercase, + mac: () => mac, + md5_base64: () => md5_base64, + md5_base64url: () => md5_base64url, + md5_hex: () => md5_hex, + nanoid: () => nanoid, + null: () => _null, + number: () => number, + rfc5322Email: () => rfc5322Email, + sha1_base64: () => sha1_base64, + sha1_base64url: () => sha1_base64url, + sha1_hex: () => sha1_hex, + sha256_base64: 
() => sha256_base64, + sha256_base64url: () => sha256_base64url, + sha256_hex: () => sha256_hex, + sha384_base64: () => sha384_base64, + sha384_base64url: () => sha384_base64url, + sha384_hex: () => sha384_hex, + sha512_base64: () => sha512_base64, + sha512_base64url: () => sha512_base64url, + sha512_hex: () => sha512_hex, + string: () => string, + time: () => time, + ulid: () => ulid, + undefined: () => _undefined, + unicodeEmail: () => unicodeEmail, + uppercase: () => uppercase, + uuid: () => uuid, + uuid4: () => uuid4, + uuid6: () => uuid6, + uuid7: () => uuid7, + xid: () => xid +}); +var cuid = /^[cC][^\s-]{8,}$/; +var cuid2 = /^[0-9a-z]+$/; +var ulid = /^[0-9A-HJKMNP-TV-Za-hjkmnp-tv-z]{26}$/; +var xid = /^[0-9a-vA-V]{20}$/; +var ksuid = /^[A-Za-z0-9]{27}$/; +var nanoid = /^[a-zA-Z0-9_-]{21}$/; +var duration = /^P(?:(\d+W)|(?!.*W)(?=\d|T\d)(\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+([.,]\d+)?S)?)?)$/; +var extendedDuration = /^[-+]?P(?!$)(?:(?:[-+]?\d+Y)|(?:[-+]?\d+[.,]\d+Y$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:(?:[-+]?\d+W)|(?:[-+]?\d+[.,]\d+W$))?(?:(?:[-+]?\d+D)|(?:[-+]?\d+[.,]\d+D$))?(?:T(?=[\d+-])(?:(?:[-+]?\d+H)|(?:[-+]?\d+[.,]\d+H$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:[-+]?\d+(?:[.,]\d+)?S)?)??$/; +var guid = /^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$/; +var uuid = (version2) => { + if (!version2) + return /^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/; + return new RegExp(`^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-${version2}[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$`); +}; +var uuid4 = /* @__PURE__ */ uuid(4); +var uuid6 = /* @__PURE__ */ uuid(6); +var uuid7 = /* @__PURE__ */ uuid(7); +var email = /^(?!\.)(?!.*\.\.)([A-Za-z0-9_'+\-\.]*)[A-Za-z0-9_+-]@([A-Za-z0-9][A-Za-z0-9\-]*\.)+[A-Za-z]{2,}$/; +var html5Email = 
/^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; +var rfc5322Email = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; +var unicodeEmail = /^[^\s@"]{1,64}@[^\s@]{1,255}$/u; +var idnEmail = unicodeEmail; +var browserEmail = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; +var _emoji = `^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$`; +function emoji() { + return new RegExp(_emoji, "u"); +} +var ipv4 = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/; +var ipv6 = /^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:))$/; +var mac = (delimiter) => { + const escapedDelim = escapeRegex(delimiter ?? 
":"); + return new RegExp(`^(?:[0-9A-F]{2}${escapedDelim}){5}[0-9A-F]{2}$|^(?:[0-9a-f]{2}${escapedDelim}){5}[0-9a-f]{2}$`); +}; +var cidrv4 = /^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\/([0-9]|[1-2][0-9]|3[0-2])$/; +var cidrv6 = /^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|::|([0-9a-fA-F]{1,4})?::([0-9a-fA-F]{1,4}:?){0,6})\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])$/; +var base64 = /^$|^(?:[0-9a-zA-Z+/]{4})*(?:(?:[0-9a-zA-Z+/]{2}==)|(?:[0-9a-zA-Z+/]{3}=))?$/; +var base64url = /^[A-Za-z0-9_-]*$/; +var hostname = /^(?=.{1,253}\.?$)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[-0-9a-zA-Z]{0,61}[0-9a-zA-Z])?)*\.?$/; +var domain = /^([a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$/; +var e164 = /^\+[1-9]\d{6,14}$/; +var dateSource = `(?:(?:\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-(?:(?:0[13578]|1[02])-(?:0[1-9]|[12]\\d|3[01])|(?:0[469]|11)-(?:0[1-9]|[12]\\d|30)|(?:02)-(?:0[1-9]|1\\d|2[0-8])))`; +var date = /* @__PURE__ */ new RegExp(`^${dateSource}$`); +function timeSource(args) { + const hhmm = `(?:[01]\\d|2[0-3]):[0-5]\\d`; + const regex = typeof args.precision === "number" ? args.precision === -1 ? `${hhmm}` : args.precision === 0 ? `${hhmm}:[0-5]\\d` : `${hhmm}:[0-5]\\d\\.\\d{${args.precision}}` : `${hhmm}(?::[0-5]\\d(?:\\.\\d+)?)?`; + return regex; +} +function time(args) { + return new RegExp(`^${timeSource(args)}$`); +} +function datetime(args) { + const time3 = timeSource({ precision: args.precision }); + const opts = ["Z"]; + if (args.local) + opts.push(""); + if (args.offset) + opts.push(`([+-](?:[01]\\d|2[0-3]):[0-5]\\d)`); + const timeRegex = `${time3}(?:${opts.join("|")})`; + return new RegExp(`^${dateSource}T(?:${timeRegex})$`); +} +var string = (params) => { + const regex = params ? `[\\s\\S]{${params?.minimum ?? 0},${params?.maximum ?? 
""}}` : `[\\s\\S]*`; + return new RegExp(`^${regex}$`); +}; +var bigint = /^-?\d+n?$/; +var integer = /^-?\d+$/; +var number = /^-?\d+(?:\.\d+)?$/; +var boolean = /^(?:true|false)$/i; +var _null = /^null$/i; +var _undefined = /^undefined$/i; +var lowercase = /^[^A-Z]*$/; +var uppercase = /^[^a-z]*$/; +var hex = /^[0-9a-fA-F]*$/; +function fixedBase64(bodyLength, padding) { + return new RegExp(`^[A-Za-z0-9+/]{${bodyLength}}${padding}$`); +} +function fixedBase64url(length) { + return new RegExp(`^[A-Za-z0-9_-]{${length}}$`); +} +var md5_hex = /^[0-9a-fA-F]{32}$/; +var md5_base64 = /* @__PURE__ */ fixedBase64(22, "=="); +var md5_base64url = /* @__PURE__ */ fixedBase64url(22); +var sha1_hex = /^[0-9a-fA-F]{40}$/; +var sha1_base64 = /* @__PURE__ */ fixedBase64(27, "="); +var sha1_base64url = /* @__PURE__ */ fixedBase64url(27); +var sha256_hex = /^[0-9a-fA-F]{64}$/; +var sha256_base64 = /* @__PURE__ */ fixedBase64(43, "="); +var sha256_base64url = /* @__PURE__ */ fixedBase64url(43); +var sha384_hex = /^[0-9a-fA-F]{96}$/; +var sha384_base64 = /* @__PURE__ */ fixedBase64(64, ""); +var sha384_base64url = /* @__PURE__ */ fixedBase64url(64); +var sha512_hex = /^[0-9a-fA-F]{128}$/; +var sha512_base64 = /* @__PURE__ */ fixedBase64(86, "=="); +var sha512_base64url = /* @__PURE__ */ fixedBase64url(86); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/checks.js +var $ZodCheck = /* @__PURE__ */ $constructor("$ZodCheck", (inst, def) => { + var _a3; + inst._zod ?? (inst._zod = {}); + inst._zod.def = def; + (_a3 = inst._zod).onattach ?? (_a3.onattach = []); +}); +var numericOriginMap = { + number: "number", + bigint: "bigint", + object: "date" +}; +var $ZodCheckLessThan = /* @__PURE__ */ $constructor("$ZodCheckLessThan", (inst, def) => { + $ZodCheck.init(inst, def); + const origin = numericOriginMap[typeof def.value]; + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + const curr = (def.inclusive ? bag.maximum : bag.exclusiveMaximum) ?? 
Number.POSITIVE_INFINITY; + if (def.value < curr) { + if (def.inclusive) + bag.maximum = def.value; + else + bag.exclusiveMaximum = def.value; + } + }); + inst._zod.check = (payload) => { + if (def.inclusive ? payload.value <= def.value : payload.value < def.value) { + return; + } + payload.issues.push({ + origin, + code: "too_big", + maximum: typeof def.value === "object" ? def.value.getTime() : def.value, + input: payload.value, + inclusive: def.inclusive, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckGreaterThan = /* @__PURE__ */ $constructor("$ZodCheckGreaterThan", (inst, def) => { + $ZodCheck.init(inst, def); + const origin = numericOriginMap[typeof def.value]; + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + const curr = (def.inclusive ? bag.minimum : bag.exclusiveMinimum) ?? Number.NEGATIVE_INFINITY; + if (def.value > curr) { + if (def.inclusive) + bag.minimum = def.value; + else + bag.exclusiveMinimum = def.value; + } + }); + inst._zod.check = (payload) => { + if (def.inclusive ? payload.value >= def.value : payload.value > def.value) { + return; + } + payload.issues.push({ + origin, + code: "too_small", + minimum: typeof def.value === "object" ? def.value.getTime() : def.value, + input: payload.value, + inclusive: def.inclusive, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckMultipleOf = /* @__PURE__ */ $constructor("$ZodCheckMultipleOf", (inst, def) => { + $ZodCheck.init(inst, def); + inst._zod.onattach.push((inst2) => { + var _a3; + (_a3 = inst2._zod.bag).multipleOf ?? (_a3.multipleOf = def.value); + }); + inst._zod.check = (payload) => { + if (typeof payload.value !== typeof def.value) + throw new Error("Cannot mix number and bigint in multiple_of check."); + const isMultiple = typeof payload.value === "bigint" ? 
payload.value % def.value === BigInt(0) : floatSafeRemainder(payload.value, def.value) === 0; + if (isMultiple) + return; + payload.issues.push({ + origin: typeof payload.value, + code: "not_multiple_of", + divisor: def.value, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckNumberFormat = /* @__PURE__ */ $constructor("$ZodCheckNumberFormat", (inst, def) => { + $ZodCheck.init(inst, def); + def.format = def.format || "float64"; + const isInt = def.format?.includes("int"); + const origin = isInt ? "int" : "number"; + const [minimum, maximum] = NUMBER_FORMAT_RANGES[def.format]; + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.format = def.format; + bag.minimum = minimum; + bag.maximum = maximum; + if (isInt) + bag.pattern = integer; + }); + inst._zod.check = (payload) => { + const input = payload.value; + if (isInt) { + if (!Number.isInteger(input)) { + payload.issues.push({ + expected: origin, + format: def.format, + code: "invalid_type", + continue: false, + input, + inst + }); + return; + } + if (!Number.isSafeInteger(input)) { + if (input > 0) { + payload.issues.push({ + input, + code: "too_big", + maximum: Number.MAX_SAFE_INTEGER, + note: "Integers must be within the safe integer range.", + inst, + origin, + inclusive: true, + continue: !def.abort + }); + } else { + payload.issues.push({ + input, + code: "too_small", + minimum: Number.MIN_SAFE_INTEGER, + note: "Integers must be within the safe integer range.", + inst, + origin, + inclusive: true, + continue: !def.abort + }); + } + return; + } + } + if (input < minimum) { + payload.issues.push({ + origin: "number", + input, + code: "too_small", + minimum, + inclusive: true, + inst, + continue: !def.abort + }); + } + if (input > maximum) { + payload.issues.push({ + origin: "number", + input, + code: "too_big", + maximum, + inclusive: true, + inst, + continue: !def.abort + }); + } + }; +}); +var $ZodCheckBigIntFormat = /* @__PURE__ */ 
$constructor("$ZodCheckBigIntFormat", (inst, def) => { + $ZodCheck.init(inst, def); + const [minimum, maximum] = BIGINT_FORMAT_RANGES[def.format]; + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.format = def.format; + bag.minimum = minimum; + bag.maximum = maximum; + }); + inst._zod.check = (payload) => { + const input = payload.value; + if (input < minimum) { + payload.issues.push({ + origin: "bigint", + input, + code: "too_small", + minimum, + inclusive: true, + inst, + continue: !def.abort + }); + } + if (input > maximum) { + payload.issues.push({ + origin: "bigint", + input, + code: "too_big", + maximum, + inclusive: true, + inst, + continue: !def.abort + }); + } + }; +}); +var $ZodCheckMaxSize = /* @__PURE__ */ $constructor("$ZodCheckMaxSize", (inst, def) => { + var _a3; + $ZodCheck.init(inst, def); + (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { + const val = payload.value; + return !nullish(val) && val.size !== void 0; + }); + inst._zod.onattach.push((inst2) => { + const curr = inst2._zod.bag.maximum ?? Number.POSITIVE_INFINITY; + if (def.maximum < curr) + inst2._zod.bag.maximum = def.maximum; + }); + inst._zod.check = (payload) => { + const input = payload.value; + const size = input.size; + if (size <= def.maximum) + return; + payload.issues.push({ + origin: getSizableOrigin(input), + code: "too_big", + maximum: def.maximum, + inclusive: true, + input, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckMinSize = /* @__PURE__ */ $constructor("$ZodCheckMinSize", (inst, def) => { + var _a3; + $ZodCheck.init(inst, def); + (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { + const val = payload.value; + return !nullish(val) && val.size !== void 0; + }); + inst._zod.onattach.push((inst2) => { + const curr = inst2._zod.bag.minimum ?? 
Number.NEGATIVE_INFINITY; + if (def.minimum > curr) + inst2._zod.bag.minimum = def.minimum; + }); + inst._zod.check = (payload) => { + const input = payload.value; + const size = input.size; + if (size >= def.minimum) + return; + payload.issues.push({ + origin: getSizableOrigin(input), + code: "too_small", + minimum: def.minimum, + inclusive: true, + input, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckSizeEquals = /* @__PURE__ */ $constructor("$ZodCheckSizeEquals", (inst, def) => { + var _a3; + $ZodCheck.init(inst, def); + (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { + const val = payload.value; + return !nullish(val) && val.size !== void 0; + }); + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.minimum = def.size; + bag.maximum = def.size; + bag.size = def.size; + }); + inst._zod.check = (payload) => { + const input = payload.value; + const size = input.size; + if (size === def.size) + return; + const tooBig = size > def.size; + payload.issues.push({ + origin: getSizableOrigin(input), + ...tooBig ? { code: "too_big", maximum: def.size } : { code: "too_small", minimum: def.size }, + inclusive: true, + exact: true, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckMaxLength = /* @__PURE__ */ $constructor("$ZodCheckMaxLength", (inst, def) => { + var _a3; + $ZodCheck.init(inst, def); + (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { + const val = payload.value; + return !nullish(val) && val.length !== void 0; + }); + inst._zod.onattach.push((inst2) => { + const curr = inst2._zod.bag.maximum ?? 
Number.POSITIVE_INFINITY; + if (def.maximum < curr) + inst2._zod.bag.maximum = def.maximum; + }); + inst._zod.check = (payload) => { + const input = payload.value; + const length = input.length; + if (length <= def.maximum) + return; + const origin = getLengthableOrigin(input); + payload.issues.push({ + origin, + code: "too_big", + maximum: def.maximum, + inclusive: true, + input, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckMinLength = /* @__PURE__ */ $constructor("$ZodCheckMinLength", (inst, def) => { + var _a3; + $ZodCheck.init(inst, def); + (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { + const val = payload.value; + return !nullish(val) && val.length !== void 0; + }); + inst._zod.onattach.push((inst2) => { + const curr = inst2._zod.bag.minimum ?? Number.NEGATIVE_INFINITY; + if (def.minimum > curr) + inst2._zod.bag.minimum = def.minimum; + }); + inst._zod.check = (payload) => { + const input = payload.value; + const length = input.length; + if (length >= def.minimum) + return; + const origin = getLengthableOrigin(input); + payload.issues.push({ + origin, + code: "too_small", + minimum: def.minimum, + inclusive: true, + input, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckLengthEquals = /* @__PURE__ */ $constructor("$ZodCheckLengthEquals", (inst, def) => { + var _a3; + $ZodCheck.init(inst, def); + (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { + const val = payload.value; + return !nullish(val) && val.length !== void 0; + }); + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.minimum = def.length; + bag.maximum = def.length; + bag.length = def.length; + }); + inst._zod.check = (payload) => { + const input = payload.value; + const length = input.length; + if (length === def.length) + return; + const origin = getLengthableOrigin(input); + const tooBig = length > def.length; + payload.issues.push({ + origin, + ...tooBig ? 
{ code: "too_big", maximum: def.length } : { code: "too_small", minimum: def.length }, + inclusive: true, + exact: true, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckStringFormat = /* @__PURE__ */ $constructor("$ZodCheckStringFormat", (inst, def) => { + var _a3, _b; + $ZodCheck.init(inst, def); + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.format = def.format; + if (def.pattern) { + bag.patterns ?? (bag.patterns = /* @__PURE__ */ new Set()); + bag.patterns.add(def.pattern); + } + }); + if (def.pattern) + (_a3 = inst._zod).check ?? (_a3.check = (payload) => { + def.pattern.lastIndex = 0; + if (def.pattern.test(payload.value)) + return; + payload.issues.push({ + origin: "string", + code: "invalid_format", + format: def.format, + input: payload.value, + ...def.pattern ? { pattern: def.pattern.toString() } : {}, + inst, + continue: !def.abort + }); + }); + else + (_b = inst._zod).check ?? (_b.check = () => { + }); +}); +var $ZodCheckRegex = /* @__PURE__ */ $constructor("$ZodCheckRegex", (inst, def) => { + $ZodCheckStringFormat.init(inst, def); + inst._zod.check = (payload) => { + def.pattern.lastIndex = 0; + if (def.pattern.test(payload.value)) + return; + payload.issues.push({ + origin: "string", + code: "invalid_format", + format: "regex", + input: payload.value, + pattern: def.pattern.toString(), + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckLowerCase = /* @__PURE__ */ $constructor("$ZodCheckLowerCase", (inst, def) => { + def.pattern ?? (def.pattern = lowercase); + $ZodCheckStringFormat.init(inst, def); +}); +var $ZodCheckUpperCase = /* @__PURE__ */ $constructor("$ZodCheckUpperCase", (inst, def) => { + def.pattern ?? 
(def.pattern = uppercase); + $ZodCheckStringFormat.init(inst, def); +}); +var $ZodCheckIncludes = /* @__PURE__ */ $constructor("$ZodCheckIncludes", (inst, def) => { + $ZodCheck.init(inst, def); + const escapedRegex = escapeRegex(def.includes); + const pattern = new RegExp(typeof def.position === "number" ? `^.{${def.position}}${escapedRegex}` : escapedRegex); + def.pattern = pattern; + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.patterns ?? (bag.patterns = /* @__PURE__ */ new Set()); + bag.patterns.add(pattern); + }); + inst._zod.check = (payload) => { + if (payload.value.includes(def.includes, def.position)) + return; + payload.issues.push({ + origin: "string", + code: "invalid_format", + format: "includes", + includes: def.includes, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckStartsWith = /* @__PURE__ */ $constructor("$ZodCheckStartsWith", (inst, def) => { + $ZodCheck.init(inst, def); + const pattern = new RegExp(`^${escapeRegex(def.prefix)}.*`); + def.pattern ?? (def.pattern = pattern); + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.patterns ?? (bag.patterns = /* @__PURE__ */ new Set()); + bag.patterns.add(pattern); + }); + inst._zod.check = (payload) => { + if (payload.value.startsWith(def.prefix)) + return; + payload.issues.push({ + origin: "string", + code: "invalid_format", + format: "starts_with", + prefix: def.prefix, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckEndsWith = /* @__PURE__ */ $constructor("$ZodCheckEndsWith", (inst, def) => { + $ZodCheck.init(inst, def); + const pattern = new RegExp(`.*${escapeRegex(def.suffix)}$`); + def.pattern ?? (def.pattern = pattern); + inst._zod.onattach.push((inst2) => { + const bag = inst2._zod.bag; + bag.patterns ?? 
(bag.patterns = /* @__PURE__ */ new Set()); + bag.patterns.add(pattern); + }); + inst._zod.check = (payload) => { + if (payload.value.endsWith(def.suffix)) + return; + payload.issues.push({ + origin: "string", + code: "invalid_format", + format: "ends_with", + suffix: def.suffix, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +function handleCheckPropertyResult(result, payload, property) { + if (result.issues.length) { + payload.issues.push(...prefixIssues(property, result.issues)); + } +} +var $ZodCheckProperty = /* @__PURE__ */ $constructor("$ZodCheckProperty", (inst, def) => { + $ZodCheck.init(inst, def); + inst._zod.check = (payload) => { + const result = def.schema._zod.run({ + value: payload.value[def.property], + issues: [] + }, {}); + if (result instanceof Promise) { + return result.then((result2) => handleCheckPropertyResult(result2, payload, def.property)); + } + handleCheckPropertyResult(result, payload, def.property); + return; + }; +}); +var $ZodCheckMimeType = /* @__PURE__ */ $constructor("$ZodCheckMimeType", (inst, def) => { + $ZodCheck.init(inst, def); + const mimeSet = new Set(def.mime); + inst._zod.onattach.push((inst2) => { + inst2._zod.bag.mime = def.mime; + }); + inst._zod.check = (payload) => { + if (mimeSet.has(payload.value.type)) + return; + payload.issues.push({ + code: "invalid_value", + values: def.mime, + input: payload.value.type, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCheckOverwrite = /* @__PURE__ */ $constructor("$ZodCheckOverwrite", (inst, def) => { + $ZodCheck.init(inst, def); + inst._zod.check = (payload) => { + payload.value = def.tx(payload.value); + }; +}); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/doc.js +var Doc = class { + constructor(args = []) { + this.content = []; + this.indent = 0; + if (this) + this.args = args; + } + indented(fn) { + this.indent += 1; + fn(this); + this.indent -= 1; + } + write(arg) { + if (typeof arg === "function") { + arg(this, { 
execution: "sync" }); + arg(this, { execution: "async" }); + return; + } + const content = arg; + const lines = content.split("\n").filter((x) => x); + const minIndent = Math.min(...lines.map((x) => x.length - x.trimStart().length)); + const dedented = lines.map((x) => x.slice(minIndent)).map((x) => " ".repeat(this.indent * 2) + x); + for (const line of dedented) { + this.content.push(line); + } + } + compile() { + const F = Function; + const args = this?.args; + const content = this?.content ?? [``]; + const lines = [...content.map((x) => ` ${x}`)]; + return new F(...args, lines.join("\n")); + } +}; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/versions.js +var version = { + major: 4, + minor: 3, + patch: 6 +}; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/schemas.js +var $ZodType = /* @__PURE__ */ $constructor("$ZodType", (inst, def) => { + var _a3; + inst ?? (inst = {}); + inst._zod.def = def; + inst._zod.bag = inst._zod.bag || {}; + inst._zod.version = version; + const checks = [...inst._zod.def.checks ?? []]; + if (inst._zod.traits.has("$ZodCheck")) { + checks.unshift(inst); + } + for (const ch of checks) { + for (const fn of ch._zod.onattach) { + fn(inst); + } + } + if (checks.length === 0) { + (_a3 = inst._zod).deferred ?? (_a3.deferred = []); + inst._zod.deferred?.push(() => { + inst._zod.run = inst._zod.parse; + }); + } else { + const runChecks = (payload, checks2, ctx) => { + let isAborted = aborted(payload); + let asyncResult; + for (const ch of checks2) { + if (ch._zod.def.when) { + const shouldRun = ch._zod.def.when(payload); + if (!shouldRun) + continue; + } else if (isAborted) { + continue; + } + const currLen = payload.issues.length; + const _ = ch._zod.check(payload); + if (_ instanceof Promise && ctx?.async === false) { + throw new $ZodAsyncError(); + } + if (asyncResult || _ instanceof Promise) { + asyncResult = (asyncResult ?? 
Promise.resolve()).then(async () => { + await _; + const nextLen = payload.issues.length; + if (nextLen === currLen) + return; + if (!isAborted) + isAborted = aborted(payload, currLen); + }); + } else { + const nextLen = payload.issues.length; + if (nextLen === currLen) + continue; + if (!isAborted) + isAborted = aborted(payload, currLen); + } + } + if (asyncResult) { + return asyncResult.then(() => { + return payload; + }); + } + return payload; + }; + const handleCanaryResult = (canary, payload, ctx) => { + if (aborted(canary)) { + canary.aborted = true; + return canary; + } + const checkResult = runChecks(payload, checks, ctx); + if (checkResult instanceof Promise) { + if (ctx.async === false) + throw new $ZodAsyncError(); + return checkResult.then((checkResult2) => inst._zod.parse(checkResult2, ctx)); + } + return inst._zod.parse(checkResult, ctx); + }; + inst._zod.run = (payload, ctx) => { + if (ctx.skipChecks) { + return inst._zod.parse(payload, ctx); + } + if (ctx.direction === "backward") { + const canary = inst._zod.parse({ value: payload.value, issues: [] }, { ...ctx, skipChecks: true }); + if (canary instanceof Promise) { + return canary.then((canary2) => { + return handleCanaryResult(canary2, payload, ctx); + }); + } + return handleCanaryResult(canary, payload, ctx); + } + const result = inst._zod.parse(payload, ctx); + if (result instanceof Promise) { + if (ctx.async === false) + throw new $ZodAsyncError(); + return result.then((result2) => runChecks(result2, checks, ctx)); + } + return runChecks(result, checks, ctx); + }; + } + defineLazy(inst, "~standard", () => ({ + validate: (value) => { + try { + const r = safeParse(inst, value); + return r.success ? { value: r.data } : { issues: r.error?.issues }; + } catch (_) { + return safeParseAsync(inst, value).then((r) => r.success ? 
{ value: r.data } : { issues: r.error?.issues }); + } + }, + vendor: "zod", + version: 1 + })); +}); +var $ZodString = /* @__PURE__ */ $constructor("$ZodString", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.pattern = [...inst?._zod.bag?.patterns ?? []].pop() ?? string(inst._zod.bag); + inst._zod.parse = (payload, _) => { + if (def.coerce) + try { + payload.value = String(payload.value); + } catch (_2) { + } + if (typeof payload.value === "string") + return payload; + payload.issues.push({ + expected: "string", + code: "invalid_type", + input: payload.value, + inst + }); + return payload; + }; +}); +var $ZodStringFormat = /* @__PURE__ */ $constructor("$ZodStringFormat", (inst, def) => { + $ZodCheckStringFormat.init(inst, def); + $ZodString.init(inst, def); +}); +var $ZodGUID = /* @__PURE__ */ $constructor("$ZodGUID", (inst, def) => { + def.pattern ?? (def.pattern = guid); + $ZodStringFormat.init(inst, def); +}); +var $ZodUUID = /* @__PURE__ */ $constructor("$ZodUUID", (inst, def) => { + if (def.version) { + const versionMap = { + v1: 1, + v2: 2, + v3: 3, + v4: 4, + v5: 5, + v6: 6, + v7: 7, + v8: 8 + }; + const v = versionMap[def.version]; + if (v === void 0) + throw new Error(`Invalid UUID version: "${def.version}"`); + def.pattern ?? (def.pattern = uuid(v)); + } else + def.pattern ?? (def.pattern = uuid()); + $ZodStringFormat.init(inst, def); +}); +var $ZodEmail = /* @__PURE__ */ $constructor("$ZodEmail", (inst, def) => { + def.pattern ?? 
(def.pattern = email); + $ZodStringFormat.init(inst, def); +}); +var $ZodURL = /* @__PURE__ */ $constructor("$ZodURL", (inst, def) => { + $ZodStringFormat.init(inst, def); + inst._zod.check = (payload) => { + try { + const trimmed = payload.value.trim(); + const url2 = new URL(trimmed); + if (def.hostname) { + def.hostname.lastIndex = 0; + if (!def.hostname.test(url2.hostname)) { + payload.issues.push({ + code: "invalid_format", + format: "url", + note: "Invalid hostname", + pattern: def.hostname.source, + input: payload.value, + inst, + continue: !def.abort + }); + } + } + if (def.protocol) { + def.protocol.lastIndex = 0; + if (!def.protocol.test(url2.protocol.endsWith(":") ? url2.protocol.slice(0, -1) : url2.protocol)) { + payload.issues.push({ + code: "invalid_format", + format: "url", + note: "Invalid protocol", + pattern: def.protocol.source, + input: payload.value, + inst, + continue: !def.abort + }); + } + } + if (def.normalize) { + payload.value = url2.href; + } else { + payload.value = trimmed; + } + return; + } catch (_) { + payload.issues.push({ + code: "invalid_format", + format: "url", + input: payload.value, + inst, + continue: !def.abort + }); + } + }; +}); +var $ZodEmoji = /* @__PURE__ */ $constructor("$ZodEmoji", (inst, def) => { + def.pattern ?? (def.pattern = emoji()); + $ZodStringFormat.init(inst, def); +}); +var $ZodNanoID = /* @__PURE__ */ $constructor("$ZodNanoID", (inst, def) => { + def.pattern ?? (def.pattern = nanoid); + $ZodStringFormat.init(inst, def); +}); +var $ZodCUID = /* @__PURE__ */ $constructor("$ZodCUID", (inst, def) => { + def.pattern ?? (def.pattern = cuid); + $ZodStringFormat.init(inst, def); +}); +var $ZodCUID2 = /* @__PURE__ */ $constructor("$ZodCUID2", (inst, def) => { + def.pattern ?? (def.pattern = cuid2); + $ZodStringFormat.init(inst, def); +}); +var $ZodULID = /* @__PURE__ */ $constructor("$ZodULID", (inst, def) => { + def.pattern ?? 
(def.pattern = ulid); + $ZodStringFormat.init(inst, def); +}); +var $ZodXID = /* @__PURE__ */ $constructor("$ZodXID", (inst, def) => { + def.pattern ?? (def.pattern = xid); + $ZodStringFormat.init(inst, def); +}); +var $ZodKSUID = /* @__PURE__ */ $constructor("$ZodKSUID", (inst, def) => { + def.pattern ?? (def.pattern = ksuid); + $ZodStringFormat.init(inst, def); +}); +var $ZodISODateTime = /* @__PURE__ */ $constructor("$ZodISODateTime", (inst, def) => { + def.pattern ?? (def.pattern = datetime(def)); + $ZodStringFormat.init(inst, def); +}); +var $ZodISODate = /* @__PURE__ */ $constructor("$ZodISODate", (inst, def) => { + def.pattern ?? (def.pattern = date); + $ZodStringFormat.init(inst, def); +}); +var $ZodISOTime = /* @__PURE__ */ $constructor("$ZodISOTime", (inst, def) => { + def.pattern ?? (def.pattern = time(def)); + $ZodStringFormat.init(inst, def); +}); +var $ZodISODuration = /* @__PURE__ */ $constructor("$ZodISODuration", (inst, def) => { + def.pattern ?? (def.pattern = duration); + $ZodStringFormat.init(inst, def); +}); +var $ZodIPv4 = /* @__PURE__ */ $constructor("$ZodIPv4", (inst, def) => { + def.pattern ?? (def.pattern = ipv4); + $ZodStringFormat.init(inst, def); + inst._zod.bag.format = `ipv4`; +}); +var $ZodIPv6 = /* @__PURE__ */ $constructor("$ZodIPv6", (inst, def) => { + def.pattern ?? (def.pattern = ipv6); + $ZodStringFormat.init(inst, def); + inst._zod.bag.format = `ipv6`; + inst._zod.check = (payload) => { + try { + new URL(`http://[${payload.value}]`); + } catch { + payload.issues.push({ + code: "invalid_format", + format: "ipv6", + input: payload.value, + inst, + continue: !def.abort + }); + } + }; +}); +var $ZodMAC = /* @__PURE__ */ $constructor("$ZodMAC", (inst, def) => { + def.pattern ?? (def.pattern = mac(def.delimiter)); + $ZodStringFormat.init(inst, def); + inst._zod.bag.format = `mac`; +}); +var $ZodCIDRv4 = /* @__PURE__ */ $constructor("$ZodCIDRv4", (inst, def) => { + def.pattern ?? 
(def.pattern = cidrv4); + $ZodStringFormat.init(inst, def); +}); +var $ZodCIDRv6 = /* @__PURE__ */ $constructor("$ZodCIDRv6", (inst, def) => { + def.pattern ?? (def.pattern = cidrv6); + $ZodStringFormat.init(inst, def); + inst._zod.check = (payload) => { + const parts = payload.value.split("/"); + try { + if (parts.length !== 2) + throw new Error(); + const [address, prefix] = parts; + if (!prefix) + throw new Error(); + const prefixNum = Number(prefix); + if (`${prefixNum}` !== prefix) + throw new Error(); + if (prefixNum < 0 || prefixNum > 128) + throw new Error(); + new URL(`http://[${address}]`); + } catch { + payload.issues.push({ + code: "invalid_format", + format: "cidrv6", + input: payload.value, + inst, + continue: !def.abort + }); + } + }; +}); +function isValidBase64(data) { + if (data === "") + return true; + if (data.length % 4 !== 0) + return false; + try { + atob(data); + return true; + } catch { + return false; + } +} +var $ZodBase64 = /* @__PURE__ */ $constructor("$ZodBase64", (inst, def) => { + def.pattern ?? (def.pattern = base64); + $ZodStringFormat.init(inst, def); + inst._zod.bag.contentEncoding = "base64"; + inst._zod.check = (payload) => { + if (isValidBase64(payload.value)) + return; + payload.issues.push({ + code: "invalid_format", + format: "base64", + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +function isValidBase64URL(data) { + if (!base64url.test(data)) + return false; + const base643 = data.replace(/[-_]/g, (c) => c === "-" ? "+" : "/"); + const padded = base643.padEnd(Math.ceil(base643.length / 4) * 4, "="); + return isValidBase64(padded); +} +var $ZodBase64URL = /* @__PURE__ */ $constructor("$ZodBase64URL", (inst, def) => { + def.pattern ?? 
(def.pattern = base64url); + $ZodStringFormat.init(inst, def); + inst._zod.bag.contentEncoding = "base64url"; + inst._zod.check = (payload) => { + if (isValidBase64URL(payload.value)) + return; + payload.issues.push({ + code: "invalid_format", + format: "base64url", + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodE164 = /* @__PURE__ */ $constructor("$ZodE164", (inst, def) => { + def.pattern ?? (def.pattern = e164); + $ZodStringFormat.init(inst, def); +}); +function isValidJWT(token, algorithm = null) { + try { + const tokensParts = token.split("."); + if (tokensParts.length !== 3) + return false; + const [header] = tokensParts; + if (!header) + return false; + const parsedHeader = JSON.parse(atob(header)); + if ("typ" in parsedHeader && parsedHeader?.typ !== "JWT") + return false; + if (!parsedHeader.alg) + return false; + if (algorithm && (!("alg" in parsedHeader) || parsedHeader.alg !== algorithm)) + return false; + return true; + } catch { + return false; + } +} +var $ZodJWT = /* @__PURE__ */ $constructor("$ZodJWT", (inst, def) => { + $ZodStringFormat.init(inst, def); + inst._zod.check = (payload) => { + if (isValidJWT(payload.value, def.alg)) + return; + payload.issues.push({ + code: "invalid_format", + format: "jwt", + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodCustomStringFormat = /* @__PURE__ */ $constructor("$ZodCustomStringFormat", (inst, def) => { + $ZodStringFormat.init(inst, def); + inst._zod.check = (payload) => { + if (def.fn(payload.value)) + return; + payload.issues.push({ + code: "invalid_format", + format: def.format, + input: payload.value, + inst, + continue: !def.abort + }); + }; +}); +var $ZodNumber = /* @__PURE__ */ $constructor("$ZodNumber", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.pattern = inst._zod.bag.pattern ?? 
number; + inst._zod.parse = (payload, _ctx) => { + if (def.coerce) + try { + payload.value = Number(payload.value); + } catch (_) { + } + const input = payload.value; + if (typeof input === "number" && !Number.isNaN(input) && Number.isFinite(input)) { + return payload; + } + const received = typeof input === "number" ? Number.isNaN(input) ? "NaN" : !Number.isFinite(input) ? "Infinity" : void 0 : void 0; + payload.issues.push({ + expected: "number", + code: "invalid_type", + input, + inst, + ...received ? { received } : {} + }); + return payload; + }; +}); +var $ZodNumberFormat = /* @__PURE__ */ $constructor("$ZodNumberFormat", (inst, def) => { + $ZodCheckNumberFormat.init(inst, def); + $ZodNumber.init(inst, def); +}); +var $ZodBoolean = /* @__PURE__ */ $constructor("$ZodBoolean", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.pattern = boolean; + inst._zod.parse = (payload, _ctx) => { + if (def.coerce) + try { + payload.value = Boolean(payload.value); + } catch (_) { + } + const input = payload.value; + if (typeof input === "boolean") + return payload; + payload.issues.push({ + expected: "boolean", + code: "invalid_type", + input, + inst + }); + return payload; + }; +}); +var $ZodBigInt = /* @__PURE__ */ $constructor("$ZodBigInt", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.pattern = bigint; + inst._zod.parse = (payload, _ctx) => { + if (def.coerce) + try { + payload.value = BigInt(payload.value); + } catch (_) { + } + if (typeof payload.value === "bigint") + return payload; + payload.issues.push({ + expected: "bigint", + code: "invalid_type", + input: payload.value, + inst + }); + return payload; + }; +}); +var $ZodBigIntFormat = /* @__PURE__ */ $constructor("$ZodBigIntFormat", (inst, def) => { + $ZodCheckBigIntFormat.init(inst, def); + $ZodBigInt.init(inst, def); +}); +var $ZodSymbol = /* @__PURE__ */ $constructor("$ZodSymbol", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, _ctx) => { + const input = 
payload.value; + if (typeof input === "symbol") + return payload; + payload.issues.push({ + expected: "symbol", + code: "invalid_type", + input, + inst + }); + return payload; + }; +}); +var $ZodUndefined = /* @__PURE__ */ $constructor("$ZodUndefined", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.pattern = _undefined; + inst._zod.values = /* @__PURE__ */ new Set([void 0]); + inst._zod.optin = "optional"; + inst._zod.optout = "optional"; + inst._zod.parse = (payload, _ctx) => { + const input = payload.value; + if (typeof input === "undefined") + return payload; + payload.issues.push({ + expected: "undefined", + code: "invalid_type", + input, + inst + }); + return payload; + }; +}); +var $ZodNull = /* @__PURE__ */ $constructor("$ZodNull", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.pattern = _null; + inst._zod.values = /* @__PURE__ */ new Set([null]); + inst._zod.parse = (payload, _ctx) => { + const input = payload.value; + if (input === null) + return payload; + payload.issues.push({ + expected: "null", + code: "invalid_type", + input, + inst + }); + return payload; + }; +}); +var $ZodAny = /* @__PURE__ */ $constructor("$ZodAny", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload) => payload; +}); +var $ZodUnknown = /* @__PURE__ */ $constructor("$ZodUnknown", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload) => payload; +}); +var $ZodNever = /* @__PURE__ */ $constructor("$ZodNever", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, _ctx) => { + payload.issues.push({ + expected: "never", + code: "invalid_type", + input: payload.value, + inst + }); + return payload; + }; +}); +var $ZodVoid = /* @__PURE__ */ $constructor("$ZodVoid", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, _ctx) => { + const input = payload.value; + if (typeof input === "undefined") + return payload; + payload.issues.push({ + expected: "void", + code: "invalid_type", + 
input, + inst + }); + return payload; + }; +}); +var $ZodDate = /* @__PURE__ */ $constructor("$ZodDate", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, _ctx) => { + if (def.coerce) { + try { + payload.value = new Date(payload.value); + } catch (_err) { + } + } + const input = payload.value; + const isDate = input instanceof Date; + const isValidDate = isDate && !Number.isNaN(input.getTime()); + if (isValidDate) + return payload; + payload.issues.push({ + expected: "date", + code: "invalid_type", + input, + ...isDate ? { received: "Invalid Date" } : {}, + inst + }); + return payload; + }; +}); +function handleArrayResult(result, final, index) { + if (result.issues.length) { + final.issues.push(...prefixIssues(index, result.issues)); + } + final.value[index] = result.value; +} +var $ZodArray = /* @__PURE__ */ $constructor("$ZodArray", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + if (!Array.isArray(input)) { + payload.issues.push({ + expected: "array", + code: "invalid_type", + input, + inst + }); + return payload; + } + payload.value = Array(input.length); + const proms = []; + for (let i = 0; i < input.length; i++) { + const item = input[i]; + const result = def.element._zod.run({ + value: item, + issues: [] + }, ctx); + if (result instanceof Promise) { + proms.push(result.then((result2) => handleArrayResult(result2, payload, i))); + } else { + handleArrayResult(result, payload, i); + } + } + if (proms.length) { + return Promise.all(proms).then(() => payload); + } + return payload; + }; +}); +function handlePropertyResult(result, final, key, input, isOptionalOut) { + if (result.issues.length) { + if (isOptionalOut && !(key in input)) { + return; + } + final.issues.push(...prefixIssues(key, result.issues)); + } + if (result.value === void 0) { + if (key in input) { + final.value[key] = void 0; + } + } else { + final.value[key] = result.value; + } +} +function 
normalizeDef(def) { + const keys = Object.keys(def.shape); + for (const k of keys) { + if (!def.shape?.[k]?._zod?.traits?.has("$ZodType")) { + throw new Error(`Invalid element at key "${k}": expected a Zod schema`); + } + } + const okeys = optionalKeys(def.shape); + return { + ...def, + keys, + keySet: new Set(keys), + numKeys: keys.length, + optionalKeys: new Set(okeys) + }; +} +function handleCatchall(proms, input, payload, ctx, def, inst) { + const unrecognized = []; + const keySet = def.keySet; + const _catchall = def.catchall._zod; + const t = _catchall.def.type; + const isOptionalOut = _catchall.optout === "optional"; + for (const key in input) { + if (keySet.has(key)) + continue; + if (t === "never") { + unrecognized.push(key); + continue; + } + const r = _catchall.run({ value: input[key], issues: [] }, ctx); + if (r instanceof Promise) { + proms.push(r.then((r2) => handlePropertyResult(r2, payload, key, input, isOptionalOut))); + } else { + handlePropertyResult(r, payload, key, input, isOptionalOut); + } + } + if (unrecognized.length) { + payload.issues.push({ + code: "unrecognized_keys", + keys: unrecognized, + input, + inst + }); + } + if (!proms.length) + return payload; + return Promise.all(proms).then(() => { + return payload; + }); +} +var $ZodObject = /* @__PURE__ */ $constructor("$ZodObject", (inst, def) => { + $ZodType.init(inst, def); + const desc = Object.getOwnPropertyDescriptor(def, "shape"); + if (!desc?.get) { + const sh = def.shape; + Object.defineProperty(def, "shape", { + get: () => { + const newSh = { ...sh }; + Object.defineProperty(def, "shape", { + value: newSh + }); + return newSh; + } + }); + } + const _normalized = cached(() => normalizeDef(def)); + defineLazy(inst._zod, "propValues", () => { + const shape = def.shape; + const propValues = {}; + for (const key in shape) { + const field = shape[key]._zod; + if (field.values) { + propValues[key] ?? 
(propValues[key] = /* @__PURE__ */ new Set()); + for (const v of field.values) + propValues[key].add(v); + } + } + return propValues; + }); + const isObject2 = isObject; + const catchall = def.catchall; + let value; + inst._zod.parse = (payload, ctx) => { + value ?? (value = _normalized.value); + const input = payload.value; + if (!isObject2(input)) { + payload.issues.push({ + expected: "object", + code: "invalid_type", + input, + inst + }); + return payload; + } + payload.value = {}; + const proms = []; + const shape = value.shape; + for (const key of value.keys) { + const el = shape[key]; + const isOptionalOut = el._zod.optout === "optional"; + const r = el._zod.run({ value: input[key], issues: [] }, ctx); + if (r instanceof Promise) { + proms.push(r.then((r2) => handlePropertyResult(r2, payload, key, input, isOptionalOut))); + } else { + handlePropertyResult(r, payload, key, input, isOptionalOut); + } + } + if (!catchall) { + return proms.length ? Promise.all(proms).then(() => payload) : payload; + } + return handleCatchall(proms, input, payload, ctx, _normalized.value, inst); + }; +}); +var $ZodObjectJIT = /* @__PURE__ */ $constructor("$ZodObjectJIT", (inst, def) => { + $ZodObject.init(inst, def); + const superParse = inst._zod.parse; + const _normalized = cached(() => normalizeDef(def)); + const generateFastpass = (shape) => { + const doc = new Doc(["shape", "payload", "ctx"]); + const normalized = _normalized.value; + const parseStr = (key) => { + const k = esc(key); + return `shape[${k}]._zod.run({ value: input[${k}], issues: [] }, ctx)`; + }; + doc.write(`const input = payload.value;`); + const ids = /* @__PURE__ */ Object.create(null); + let counter = 0; + for (const key of normalized.keys) { + ids[key] = `key_${counter++}`; + } + doc.write(`const newResult = {};`); + for (const key of normalized.keys) { + const id = ids[key]; + const k = esc(key); + const schema = shape[key]; + const isOptionalOut = schema?._zod?.optout === "optional"; + doc.write(`const 
${id} = ${parseStr(key)};`); + if (isOptionalOut) { + doc.write(` + if (${id}.issues.length) { + if (${k} in input) { + payload.issues = payload.issues.concat(${id}.issues.map(iss => ({ + ...iss, + path: iss.path ? [${k}, ...iss.path] : [${k}] + }))); + } + } + + if (${id}.value === undefined) { + if (${k} in input) { + newResult[${k}] = undefined; + } + } else { + newResult[${k}] = ${id}.value; + } + + `); + } else { + doc.write(` + if (${id}.issues.length) { + payload.issues = payload.issues.concat(${id}.issues.map(iss => ({ + ...iss, + path: iss.path ? [${k}, ...iss.path] : [${k}] + }))); + } + + if (${id}.value === undefined) { + if (${k} in input) { + newResult[${k}] = undefined; + } + } else { + newResult[${k}] = ${id}.value; + } + + `); + } + } + doc.write(`payload.value = newResult;`); + doc.write(`return payload;`); + const fn = doc.compile(); + return (payload, ctx) => fn(shape, payload, ctx); + }; + let fastpass; + const isObject2 = isObject; + const jit = !globalConfig.jitless; + const allowsEval2 = allowsEval; + const fastEnabled = jit && allowsEval2.value; + const catchall = def.catchall; + let value; + inst._zod.parse = (payload, ctx) => { + value ?? 
(value = _normalized.value); + const input = payload.value; + if (!isObject2(input)) { + payload.issues.push({ + expected: "object", + code: "invalid_type", + input, + inst + }); + return payload; + } + if (jit && fastEnabled && ctx?.async === false && ctx.jitless !== true) { + if (!fastpass) + fastpass = generateFastpass(def.shape); + payload = fastpass(payload, ctx); + if (!catchall) + return payload; + return handleCatchall([], input, payload, ctx, value, inst); + } + return superParse(payload, ctx); + }; +}); +function handleUnionResults(results, final, inst, ctx) { + for (const result of results) { + if (result.issues.length === 0) { + final.value = result.value; + return final; + } + } + const nonaborted = results.filter((r) => !aborted(r)); + if (nonaborted.length === 1) { + final.value = nonaborted[0].value; + return nonaborted[0]; + } + final.issues.push({ + code: "invalid_union", + input: final.value, + inst, + errors: results.map((result) => result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) + }); + return final; +} +var $ZodUnion = /* @__PURE__ */ $constructor("$ZodUnion", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "optin", () => def.options.some((o) => o._zod.optin === "optional") ? "optional" : void 0); + defineLazy(inst._zod, "optout", () => def.options.some((o) => o._zod.optout === "optional") ? 
"optional" : void 0); + defineLazy(inst._zod, "values", () => { + if (def.options.every((o) => o._zod.values)) { + return new Set(def.options.flatMap((option) => Array.from(option._zod.values))); + } + return void 0; + }); + defineLazy(inst._zod, "pattern", () => { + if (def.options.every((o) => o._zod.pattern)) { + const patterns = def.options.map((o) => o._zod.pattern); + return new RegExp(`^(${patterns.map((p) => cleanRegex(p.source)).join("|")})$`); + } + return void 0; + }); + const single = def.options.length === 1; + const first = def.options[0]._zod.run; + inst._zod.parse = (payload, ctx) => { + if (single) { + return first(payload, ctx); + } + let async = false; + const results = []; + for (const option of def.options) { + const result = option._zod.run({ + value: payload.value, + issues: [] + }, ctx); + if (result instanceof Promise) { + results.push(result); + async = true; + } else { + if (result.issues.length === 0) + return result; + results.push(result); + } + } + if (!async) + return handleUnionResults(results, payload, inst, ctx); + return Promise.all(results).then((results2) => { + return handleUnionResults(results2, payload, inst, ctx); + }); + }; +}); +function handleExclusiveUnionResults(results, final, inst, ctx) { + const successes = results.filter((r) => r.issues.length === 0); + if (successes.length === 1) { + final.value = successes[0].value; + return final; + } + if (successes.length === 0) { + final.issues.push({ + code: "invalid_union", + input: final.value, + inst, + errors: results.map((result) => result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) + }); + } else { + final.issues.push({ + code: "invalid_union", + input: final.value, + inst, + errors: [], + inclusive: false + }); + } + return final; +} +var $ZodXor = /* @__PURE__ */ $constructor("$ZodXor", (inst, def) => { + $ZodUnion.init(inst, def); + def.inclusive = false; + const single = def.options.length === 1; + const first = def.options[0]._zod.run; + 
inst._zod.parse = (payload, ctx) => { + if (single) { + return first(payload, ctx); + } + let async = false; + const results = []; + for (const option of def.options) { + const result = option._zod.run({ + value: payload.value, + issues: [] + }, ctx); + if (result instanceof Promise) { + results.push(result); + async = true; + } else { + results.push(result); + } + } + if (!async) + return handleExclusiveUnionResults(results, payload, inst, ctx); + return Promise.all(results).then((results2) => { + return handleExclusiveUnionResults(results2, payload, inst, ctx); + }); + }; +}); +var $ZodDiscriminatedUnion = /* @__PURE__ */ $constructor("$ZodDiscriminatedUnion", (inst, def) => { + def.inclusive = false; + $ZodUnion.init(inst, def); + const _super = inst._zod.parse; + defineLazy(inst._zod, "propValues", () => { + const propValues = {}; + for (const option of def.options) { + const pv = option._zod.propValues; + if (!pv || Object.keys(pv).length === 0) + throw new Error(`Invalid discriminated union option at index "${def.options.indexOf(option)}"`); + for (const [k, v] of Object.entries(pv)) { + if (!propValues[k]) + propValues[k] = /* @__PURE__ */ new Set(); + for (const val of v) { + propValues[k].add(val); + } + } + } + return propValues; + }); + const disc = cached(() => { + const opts = def.options; + const map2 = /* @__PURE__ */ new Map(); + for (const o of opts) { + const values = o._zod.propValues?.[def.discriminator]; + if (!values || values.size === 0) + throw new Error(`Invalid discriminated union option at index "${def.options.indexOf(o)}"`); + for (const v of values) { + if (map2.has(v)) { + throw new Error(`Duplicate discriminator value "${String(v)}"`); + } + map2.set(v, o); + } + } + return map2; + }); + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + if (!isObject(input)) { + payload.issues.push({ + code: "invalid_type", + expected: "object", + input, + inst + }); + return payload; + } + const opt = 
disc.value.get(input?.[def.discriminator]); + if (opt) { + return opt._zod.run(payload, ctx); + } + if (def.unionFallback) { + return _super(payload, ctx); + } + payload.issues.push({ + code: "invalid_union", + errors: [], + note: "No matching discriminator", + discriminator: def.discriminator, + input, + path: [def.discriminator], + inst + }); + return payload; + }; +}); +var $ZodIntersection = /* @__PURE__ */ $constructor("$ZodIntersection", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + const left = def.left._zod.run({ value: input, issues: [] }, ctx); + const right = def.right._zod.run({ value: input, issues: [] }, ctx); + const async = left instanceof Promise || right instanceof Promise; + if (async) { + return Promise.all([left, right]).then(([left2, right2]) => { + return handleIntersectionResults(payload, left2, right2); + }); + } + return handleIntersectionResults(payload, left, right); + }; +}); +function mergeValues(a, b) { + if (a === b) { + return { valid: true, data: a }; + } + if (a instanceof Date && b instanceof Date && +a === +b) { + return { valid: true, data: a }; + } + if (isPlainObject(a) && isPlainObject(b)) { + const bKeys = Object.keys(b); + const sharedKeys = Object.keys(a).filter((key) => bKeys.indexOf(key) !== -1); + const newObj = { ...a, ...b }; + for (const key of sharedKeys) { + const sharedValue = mergeValues(a[key], b[key]); + if (!sharedValue.valid) { + return { + valid: false, + mergeErrorPath: [key, ...sharedValue.mergeErrorPath] + }; + } + newObj[key] = sharedValue.data; + } + return { valid: true, data: newObj }; + } + if (Array.isArray(a) && Array.isArray(b)) { + if (a.length !== b.length) { + return { valid: false, mergeErrorPath: [] }; + } + const newArray = []; + for (let index = 0; index < a.length; index++) { + const itemA = a[index]; + const itemB = b[index]; + const sharedValue = mergeValues(itemA, itemB); + if (!sharedValue.valid) { + return { + 
valid: false, + mergeErrorPath: [index, ...sharedValue.mergeErrorPath] + }; + } + newArray.push(sharedValue.data); + } + return { valid: true, data: newArray }; + } + return { valid: false, mergeErrorPath: [] }; +} +function handleIntersectionResults(result, left, right) { + const unrecKeys = /* @__PURE__ */ new Map(); + let unrecIssue; + for (const iss of left.issues) { + if (iss.code === "unrecognized_keys") { + unrecIssue ?? (unrecIssue = iss); + for (const k of iss.keys) { + if (!unrecKeys.has(k)) + unrecKeys.set(k, {}); + unrecKeys.get(k).l = true; + } + } else { + result.issues.push(iss); + } + } + for (const iss of right.issues) { + if (iss.code === "unrecognized_keys") { + for (const k of iss.keys) { + if (!unrecKeys.has(k)) + unrecKeys.set(k, {}); + unrecKeys.get(k).r = true; + } + } else { + result.issues.push(iss); + } + } + const bothKeys = [...unrecKeys].filter(([, f]) => f.l && f.r).map(([k]) => k); + if (bothKeys.length && unrecIssue) { + result.issues.push({ ...unrecIssue, keys: bothKeys }); + } + if (aborted(result)) + return result; + const merged = mergeValues(left.value, right.value); + if (!merged.valid) { + throw new Error(`Unmergable intersection. Error path: ${JSON.stringify(merged.mergeErrorPath)}`); + } + result.value = merged.data; + return result; +} +var $ZodTuple = /* @__PURE__ */ $constructor("$ZodTuple", (inst, def) => { + $ZodType.init(inst, def); + const items = def.items; + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + if (!Array.isArray(input)) { + payload.issues.push({ + input, + inst, + expected: "tuple", + code: "invalid_type" + }); + return payload; + } + payload.value = []; + const proms = []; + const reversedIndex = [...items].reverse().findIndex((item) => item._zod.optin !== "optional"); + const optStart = reversedIndex === -1 ? 
0 : items.length - reversedIndex; + if (!def.rest) { + const tooBig = input.length > items.length; + const tooSmall = input.length < optStart - 1; + if (tooBig || tooSmall) { + payload.issues.push({ + ...tooBig ? { code: "too_big", maximum: items.length, inclusive: true } : { code: "too_small", minimum: items.length }, + input, + inst, + origin: "array" + }); + return payload; + } + } + let i = -1; + for (const item of items) { + i++; + if (i >= input.length) { + if (i >= optStart) + continue; + } + const result = item._zod.run({ + value: input[i], + issues: [] + }, ctx); + if (result instanceof Promise) { + proms.push(result.then((result2) => handleTupleResult(result2, payload, i))); + } else { + handleTupleResult(result, payload, i); + } + } + if (def.rest) { + const rest = input.slice(items.length); + for (const el of rest) { + i++; + const result = def.rest._zod.run({ + value: el, + issues: [] + }, ctx); + if (result instanceof Promise) { + proms.push(result.then((result2) => handleTupleResult(result2, payload, i))); + } else { + handleTupleResult(result, payload, i); + } + } + } + if (proms.length) + return Promise.all(proms).then(() => payload); + return payload; + }; +}); +function handleTupleResult(result, final, index) { + if (result.issues.length) { + final.issues.push(...prefixIssues(index, result.issues)); + } + final.value[index] = result.value; +} +var $ZodRecord = /* @__PURE__ */ $constructor("$ZodRecord", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + if (!isPlainObject(input)) { + payload.issues.push({ + expected: "record", + code: "invalid_type", + input, + inst + }); + return payload; + } + const proms = []; + const values = def.keyType._zod.values; + if (values) { + payload.value = {}; + const recordKeys = /* @__PURE__ */ new Set(); + for (const key of values) { + if (typeof key === "string" || typeof key === "number" || typeof key === "symbol") { + recordKeys.add(typeof key 
=== "number" ? key.toString() : key); + const result = def.valueType._zod.run({ value: input[key], issues: [] }, ctx); + if (result instanceof Promise) { + proms.push(result.then((result2) => { + if (result2.issues.length) { + payload.issues.push(...prefixIssues(key, result2.issues)); + } + payload.value[key] = result2.value; + })); + } else { + if (result.issues.length) { + payload.issues.push(...prefixIssues(key, result.issues)); + } + payload.value[key] = result.value; + } + } + } + let unrecognized; + for (const key in input) { + if (!recordKeys.has(key)) { + unrecognized = unrecognized ?? []; + unrecognized.push(key); + } + } + if (unrecognized && unrecognized.length > 0) { + payload.issues.push({ + code: "unrecognized_keys", + input, + inst, + keys: unrecognized + }); + } + } else { + payload.value = {}; + for (const key of Reflect.ownKeys(input)) { + if (key === "__proto__") + continue; + let keyResult = def.keyType._zod.run({ value: key, issues: [] }, ctx); + if (keyResult instanceof Promise) { + throw new Error("Async schemas not supported in object keys currently"); + } + const checkNumericKey = typeof key === "string" && number.test(key) && keyResult.issues.length; + if (checkNumericKey) { + const retryResult = def.keyType._zod.run({ value: Number(key), issues: [] }, ctx); + if (retryResult instanceof Promise) { + throw new Error("Async schemas not supported in object keys currently"); + } + if (retryResult.issues.length === 0) { + keyResult = retryResult; + } + } + if (keyResult.issues.length) { + if (def.mode === "loose") { + payload.value[key] = input[key]; + } else { + payload.issues.push({ + code: "invalid_key", + origin: "record", + issues: keyResult.issues.map((iss) => finalizeIssue(iss, ctx, config())), + input: key, + path: [key], + inst + }); + } + continue; + } + const result = def.valueType._zod.run({ value: input[key], issues: [] }, ctx); + if (result instanceof Promise) { + proms.push(result.then((result2) => { + if (result2.issues.length) 
{ + payload.issues.push(...prefixIssues(key, result2.issues)); + } + payload.value[keyResult.value] = result2.value; + })); + } else { + if (result.issues.length) { + payload.issues.push(...prefixIssues(key, result.issues)); + } + payload.value[keyResult.value] = result.value; + } + } + } + if (proms.length) { + return Promise.all(proms).then(() => payload); + } + return payload; + }; +}); +var $ZodMap = /* @__PURE__ */ $constructor("$ZodMap", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + if (!(input instanceof Map)) { + payload.issues.push({ + expected: "map", + code: "invalid_type", + input, + inst + }); + return payload; + } + const proms = []; + payload.value = /* @__PURE__ */ new Map(); + for (const [key, value] of input) { + const keyResult = def.keyType._zod.run({ value: key, issues: [] }, ctx); + const valueResult = def.valueType._zod.run({ value, issues: [] }, ctx); + if (keyResult instanceof Promise || valueResult instanceof Promise) { + proms.push(Promise.all([keyResult, valueResult]).then(([keyResult2, valueResult2]) => { + handleMapResult(keyResult2, valueResult2, payload, key, input, inst, ctx); + })); + } else { + handleMapResult(keyResult, valueResult, payload, key, input, inst, ctx); + } + } + if (proms.length) + return Promise.all(proms).then(() => payload); + return payload; + }; +}); +function handleMapResult(keyResult, valueResult, final, key, input, inst, ctx) { + if (keyResult.issues.length) { + if (propertyKeyTypes.has(typeof key)) { + final.issues.push(...prefixIssues(key, keyResult.issues)); + } else { + final.issues.push({ + code: "invalid_key", + origin: "map", + input, + inst, + issues: keyResult.issues.map((iss) => finalizeIssue(iss, ctx, config())) + }); + } + } + if (valueResult.issues.length) { + if (propertyKeyTypes.has(typeof key)) { + final.issues.push(...prefixIssues(key, valueResult.issues)); + } else { + final.issues.push({ + origin: "map", + code: 
"invalid_element", + input, + inst, + key, + issues: valueResult.issues.map((iss) => finalizeIssue(iss, ctx, config())) + }); + } + } + final.value.set(keyResult.value, valueResult.value); +} +var $ZodSet = /* @__PURE__ */ $constructor("$ZodSet", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + const input = payload.value; + if (!(input instanceof Set)) { + payload.issues.push({ + input, + inst, + expected: "set", + code: "invalid_type" + }); + return payload; + } + const proms = []; + payload.value = /* @__PURE__ */ new Set(); + for (const item of input) { + const result = def.valueType._zod.run({ value: item, issues: [] }, ctx); + if (result instanceof Promise) { + proms.push(result.then((result2) => handleSetResult(result2, payload))); + } else + handleSetResult(result, payload); + } + if (proms.length) + return Promise.all(proms).then(() => payload); + return payload; + }; +}); +function handleSetResult(result, final) { + if (result.issues.length) { + final.issues.push(...result.issues); + } + final.value.add(result.value); +} +var $ZodEnum = /* @__PURE__ */ $constructor("$ZodEnum", (inst, def) => { + $ZodType.init(inst, def); + const values = getEnumValues(def.entries); + const valuesSet = new Set(values); + inst._zod.values = valuesSet; + inst._zod.pattern = new RegExp(`^(${values.filter((k) => propertyKeyTypes.has(typeof k)).map((o) => typeof o === "string" ? 
escapeRegex(o) : o.toString()).join("|")})$`); + inst._zod.parse = (payload, _ctx) => { + const input = payload.value; + if (valuesSet.has(input)) { + return payload; + } + payload.issues.push({ + code: "invalid_value", + values, + input, + inst + }); + return payload; + }; +}); +var $ZodLiteral = /* @__PURE__ */ $constructor("$ZodLiteral", (inst, def) => { + $ZodType.init(inst, def); + if (def.values.length === 0) { + throw new Error("Cannot create literal schema with no valid values"); + } + const values = new Set(def.values); + inst._zod.values = values; + inst._zod.pattern = new RegExp(`^(${def.values.map((o) => typeof o === "string" ? escapeRegex(o) : o ? escapeRegex(o.toString()) : String(o)).join("|")})$`); + inst._zod.parse = (payload, _ctx) => { + const input = payload.value; + if (values.has(input)) { + return payload; + } + payload.issues.push({ + code: "invalid_value", + values: def.values, + input, + inst + }); + return payload; + }; +}); +var $ZodFile = /* @__PURE__ */ $constructor("$ZodFile", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, _ctx) => { + const input = payload.value; + if (input instanceof File) + return payload; + payload.issues.push({ + expected: "file", + code: "invalid_type", + input, + inst + }); + return payload; + }; +}); +var $ZodTransform = /* @__PURE__ */ $constructor("$ZodTransform", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + throw new $ZodEncodeError(inst.constructor.name); + } + const _out = def.transform(payload.value, payload); + if (ctx.async) { + const output = _out instanceof Promise ? 
_out : Promise.resolve(_out); + return output.then((output2) => { + payload.value = output2; + return payload; + }); + } + if (_out instanceof Promise) { + throw new $ZodAsyncError(); + } + payload.value = _out; + return payload; + }; +}); +function handleOptionalResult(result, input) { + if (result.issues.length && input === void 0) { + return { issues: [], value: void 0 }; + } + return result; +} +var $ZodOptional = /* @__PURE__ */ $constructor("$ZodOptional", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.optin = "optional"; + inst._zod.optout = "optional"; + defineLazy(inst._zod, "values", () => { + return def.innerType._zod.values ? /* @__PURE__ */ new Set([...def.innerType._zod.values, void 0]) : void 0; + }); + defineLazy(inst._zod, "pattern", () => { + const pattern = def.innerType._zod.pattern; + return pattern ? new RegExp(`^(${cleanRegex(pattern.source)})?$`) : void 0; + }); + inst._zod.parse = (payload, ctx) => { + if (def.innerType._zod.optin === "optional") { + const result = def.innerType._zod.run(payload, ctx); + if (result instanceof Promise) + return result.then((r) => handleOptionalResult(r, payload.value)); + return handleOptionalResult(result, payload.value); + } + if (payload.value === void 0) { + return payload; + } + return def.innerType._zod.run(payload, ctx); + }; +}); +var $ZodExactOptional = /* @__PURE__ */ $constructor("$ZodExactOptional", (inst, def) => { + $ZodOptional.init(inst, def); + defineLazy(inst._zod, "values", () => def.innerType._zod.values); + defineLazy(inst._zod, "pattern", () => def.innerType._zod.pattern); + inst._zod.parse = (payload, ctx) => { + return def.innerType._zod.run(payload, ctx); + }; +}); +var $ZodNullable = /* @__PURE__ */ $constructor("$ZodNullable", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "optin", () => def.innerType._zod.optin); + defineLazy(inst._zod, "optout", () => def.innerType._zod.optout); + defineLazy(inst._zod, "pattern", () => { + const pattern = 
def.innerType._zod.pattern; + return pattern ? new RegExp(`^(${cleanRegex(pattern.source)}|null)$`) : void 0; + }); + defineLazy(inst._zod, "values", () => { + return def.innerType._zod.values ? /* @__PURE__ */ new Set([...def.innerType._zod.values, null]) : void 0; + }); + inst._zod.parse = (payload, ctx) => { + if (payload.value === null) + return payload; + return def.innerType._zod.run(payload, ctx); + }; +}); +var $ZodDefault = /* @__PURE__ */ $constructor("$ZodDefault", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.optin = "optional"; + defineLazy(inst._zod, "values", () => def.innerType._zod.values); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + return def.innerType._zod.run(payload, ctx); + } + if (payload.value === void 0) { + payload.value = def.defaultValue; + return payload; + } + const result = def.innerType._zod.run(payload, ctx); + if (result instanceof Promise) { + return result.then((result2) => handleDefaultResult(result2, def)); + } + return handleDefaultResult(result, def); + }; +}); +function handleDefaultResult(payload, def) { + if (payload.value === void 0) { + payload.value = def.defaultValue; + } + return payload; +} +var $ZodPrefault = /* @__PURE__ */ $constructor("$ZodPrefault", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.optin = "optional"; + defineLazy(inst._zod, "values", () => def.innerType._zod.values); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + return def.innerType._zod.run(payload, ctx); + } + if (payload.value === void 0) { + payload.value = def.defaultValue; + } + return def.innerType._zod.run(payload, ctx); + }; +}); +var $ZodNonOptional = /* @__PURE__ */ $constructor("$ZodNonOptional", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "values", () => { + const v = def.innerType._zod.values; + return v ? 
new Set([...v].filter((x) => x !== void 0)) : void 0; + }); + inst._zod.parse = (payload, ctx) => { + const result = def.innerType._zod.run(payload, ctx); + if (result instanceof Promise) { + return result.then((result2) => handleNonOptionalResult(result2, inst)); + } + return handleNonOptionalResult(result, inst); + }; +}); +function handleNonOptionalResult(payload, inst) { + if (!payload.issues.length && payload.value === void 0) { + payload.issues.push({ + code: "invalid_type", + expected: "nonoptional", + input: payload.value, + inst + }); + } + return payload; +} +var $ZodSuccess = /* @__PURE__ */ $constructor("$ZodSuccess", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + throw new $ZodEncodeError("ZodSuccess"); + } + const result = def.innerType._zod.run(payload, ctx); + if (result instanceof Promise) { + return result.then((result2) => { + payload.value = result2.issues.length === 0; + return payload; + }); + } + payload.value = result.issues.length === 0; + return payload; + }; +}); +var $ZodCatch = /* @__PURE__ */ $constructor("$ZodCatch", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "optin", () => def.innerType._zod.optin); + defineLazy(inst._zod, "optout", () => def.innerType._zod.optout); + defineLazy(inst._zod, "values", () => def.innerType._zod.values); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + return def.innerType._zod.run(payload, ctx); + } + const result = def.innerType._zod.run(payload, ctx); + if (result instanceof Promise) { + return result.then((result2) => { + payload.value = result2.value; + if (result2.issues.length) { + payload.value = def.catchValue({ + ...payload, + error: { + issues: result2.issues.map((iss) => finalizeIssue(iss, ctx, config())) + }, + input: payload.value + }); + payload.issues = []; + } + return payload; + }); + } + payload.value = result.value; + if (result.issues.length) { + 
payload.value = def.catchValue({ + ...payload, + error: { + issues: result.issues.map((iss) => finalizeIssue(iss, ctx, config())) + }, + input: payload.value + }); + payload.issues = []; + } + return payload; + }; +}); +var $ZodNaN = /* @__PURE__ */ $constructor("$ZodNaN", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, _ctx) => { + if (typeof payload.value !== "number" || !Number.isNaN(payload.value)) { + payload.issues.push({ + input: payload.value, + inst, + expected: "nan", + code: "invalid_type" + }); + return payload; + } + return payload; + }; +}); +var $ZodPipe = /* @__PURE__ */ $constructor("$ZodPipe", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "values", () => def.in._zod.values); + defineLazy(inst._zod, "optin", () => def.in._zod.optin); + defineLazy(inst._zod, "optout", () => def.out._zod.optout); + defineLazy(inst._zod, "propValues", () => def.in._zod.propValues); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + const right = def.out._zod.run(payload, ctx); + if (right instanceof Promise) { + return right.then((right2) => handlePipeResult(right2, def.in, ctx)); + } + return handlePipeResult(right, def.in, ctx); + } + const left = def.in._zod.run(payload, ctx); + if (left instanceof Promise) { + return left.then((left2) => handlePipeResult(left2, def.out, ctx)); + } + return handlePipeResult(left, def.out, ctx); + }; +}); +function handlePipeResult(left, next, ctx) { + if (left.issues.length) { + left.aborted = true; + return left; + } + return next._zod.run({ value: left.value, issues: left.issues }, ctx); +} +var $ZodCodec = /* @__PURE__ */ $constructor("$ZodCodec", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "values", () => def.in._zod.values); + defineLazy(inst._zod, "optin", () => def.in._zod.optin); + defineLazy(inst._zod, "optout", () => def.out._zod.optout); + defineLazy(inst._zod, "propValues", () => def.in._zod.propValues); + 
inst._zod.parse = (payload, ctx) => { + const direction = ctx.direction || "forward"; + if (direction === "forward") { + const left = def.in._zod.run(payload, ctx); + if (left instanceof Promise) { + return left.then((left2) => handleCodecAResult(left2, def, ctx)); + } + return handleCodecAResult(left, def, ctx); + } else { + const right = def.out._zod.run(payload, ctx); + if (right instanceof Promise) { + return right.then((right2) => handleCodecAResult(right2, def, ctx)); + } + return handleCodecAResult(right, def, ctx); + } + }; +}); +function handleCodecAResult(result, def, ctx) { + if (result.issues.length) { + result.aborted = true; + return result; + } + const direction = ctx.direction || "forward"; + if (direction === "forward") { + const transformed = def.transform(result.value, result); + if (transformed instanceof Promise) { + return transformed.then((value) => handleCodecTxResult(result, value, def.out, ctx)); + } + return handleCodecTxResult(result, transformed, def.out, ctx); + } else { + const transformed = def.reverseTransform(result.value, result); + if (transformed instanceof Promise) { + return transformed.then((value) => handleCodecTxResult(result, value, def.in, ctx)); + } + return handleCodecTxResult(result, transformed, def.in, ctx); + } +} +function handleCodecTxResult(left, value, nextSchema, ctx) { + if (left.issues.length) { + left.aborted = true; + return left; + } + return nextSchema._zod.run({ value, issues: left.issues }, ctx); +} +var $ZodReadonly = /* @__PURE__ */ $constructor("$ZodReadonly", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "propValues", () => def.innerType._zod.propValues); + defineLazy(inst._zod, "values", () => def.innerType._zod.values); + defineLazy(inst._zod, "optin", () => def.innerType?._zod?.optin); + defineLazy(inst._zod, "optout", () => def.innerType?._zod?.optout); + inst._zod.parse = (payload, ctx) => { + if (ctx.direction === "backward") { + return def.innerType._zod.run(payload, 
ctx); + } + const result = def.innerType._zod.run(payload, ctx); + if (result instanceof Promise) { + return result.then(handleReadonlyResult); + } + return handleReadonlyResult(result); + }; +}); +function handleReadonlyResult(payload) { + payload.value = Object.freeze(payload.value); + return payload; +} +var $ZodTemplateLiteral = /* @__PURE__ */ $constructor("$ZodTemplateLiteral", (inst, def) => { + $ZodType.init(inst, def); + const regexParts = []; + for (const part of def.parts) { + if (typeof part === "object" && part !== null) { + if (!part._zod.pattern) { + throw new Error(`Invalid template literal part, no pattern found: ${[...part._zod.traits].shift()}`); + } + const source = part._zod.pattern instanceof RegExp ? part._zod.pattern.source : part._zod.pattern; + if (!source) + throw new Error(`Invalid template literal part: ${part._zod.traits}`); + const start = source.startsWith("^") ? 1 : 0; + const end = source.endsWith("$") ? source.length - 1 : source.length; + regexParts.push(source.slice(start, end)); + } else if (part === null || primitiveTypes.has(typeof part)) { + regexParts.push(escapeRegex(`${part}`)); + } else { + throw new Error(`Invalid template literal part: ${part}`); + } + } + inst._zod.pattern = new RegExp(`^${regexParts.join("")}$`); + inst._zod.parse = (payload, _ctx) => { + if (typeof payload.value !== "string") { + payload.issues.push({ + input: payload.value, + inst, + expected: "string", + code: "invalid_type" + }); + return payload; + } + inst._zod.pattern.lastIndex = 0; + if (!inst._zod.pattern.test(payload.value)) { + payload.issues.push({ + input: payload.value, + inst, + code: "invalid_format", + format: def.format ?? 
"template_literal", + pattern: inst._zod.pattern.source + }); + return payload; + } + return payload; + }; +}); +var $ZodFunction = /* @__PURE__ */ $constructor("$ZodFunction", (inst, def) => { + $ZodType.init(inst, def); + inst._def = def; + inst._zod.def = def; + inst.implement = (func) => { + if (typeof func !== "function") { + throw new Error("implement() must be called with a function"); + } + return function(...args) { + const parsedArgs = inst._def.input ? parse(inst._def.input, args) : args; + const result = Reflect.apply(func, this, parsedArgs); + if (inst._def.output) { + return parse(inst._def.output, result); + } + return result; + }; + }; + inst.implementAsync = (func) => { + if (typeof func !== "function") { + throw new Error("implementAsync() must be called with a function"); + } + return async function(...args) { + const parsedArgs = inst._def.input ? await parseAsync(inst._def.input, args) : args; + const result = await Reflect.apply(func, this, parsedArgs); + if (inst._def.output) { + return await parseAsync(inst._def.output, result); + } + return result; + }; + }; + inst._zod.parse = (payload, _ctx) => { + if (typeof payload.value !== "function") { + payload.issues.push({ + code: "invalid_type", + expected: "function", + input: payload.value, + inst + }); + return payload; + } + const hasPromiseOutput = inst._def.output && inst._def.output._zod.def.type === "promise"; + if (hasPromiseOutput) { + payload.value = inst.implementAsync(payload.value); + } else { + payload.value = inst.implement(payload.value); + } + return payload; + }; + inst.input = (...args) => { + const F = inst.constructor; + if (Array.isArray(args[0])) { + return new F({ + type: "function", + input: new $ZodTuple({ + type: "tuple", + items: args[0], + rest: args[1] + }), + output: inst._def.output + }); + } + return new F({ + type: "function", + input: args[0], + output: inst._def.output + }); + }; + inst.output = (output) => { + const F = inst.constructor; + return new F({ + 
type: "function", + input: inst._def.input, + output + }); + }; + return inst; +}); +var $ZodPromise = /* @__PURE__ */ $constructor("$ZodPromise", (inst, def) => { + $ZodType.init(inst, def); + inst._zod.parse = (payload, ctx) => { + return Promise.resolve(payload.value).then((inner) => def.innerType._zod.run({ value: inner, issues: [] }, ctx)); + }; +}); +var $ZodLazy = /* @__PURE__ */ $constructor("$ZodLazy", (inst, def) => { + $ZodType.init(inst, def); + defineLazy(inst._zod, "innerType", () => def.getter()); + defineLazy(inst._zod, "pattern", () => inst._zod.innerType?._zod?.pattern); + defineLazy(inst._zod, "propValues", () => inst._zod.innerType?._zod?.propValues); + defineLazy(inst._zod, "optin", () => inst._zod.innerType?._zod?.optin ?? void 0); + defineLazy(inst._zod, "optout", () => inst._zod.innerType?._zod?.optout ?? void 0); + inst._zod.parse = (payload, ctx) => { + const inner = inst._zod.innerType; + return inner._zod.run(payload, ctx); + }; +}); +var $ZodCustom = /* @__PURE__ */ $constructor("$ZodCustom", (inst, def) => { + $ZodCheck.init(inst, def); + $ZodType.init(inst, def); + inst._zod.parse = (payload, _) => { + return payload; + }; + inst._zod.check = (payload) => { + const input = payload.value; + const r = def.fn(input); + if (r instanceof Promise) { + return r.then((r2) => handleRefineResult(r2, payload, input, inst)); + } + handleRefineResult(r, payload, input, inst); + return; + }; +}); +function handleRefineResult(result, payload, input, inst) { + if (!result) { + const _iss = { + code: "custom", + input, + inst, + // incorporates params.error into issue reporting + path: [...inst._zod.def.path ?? 
[]], + // incorporates params.error into issue reporting + continue: !inst._zod.def.abort + // params: inst._zod.def.params, + }; + if (inst._zod.def.params) + _iss.params = inst._zod.def.params; + payload.issues.push(issue(_iss)); + } +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/index.js +var locales_exports = {}; +__export(locales_exports, { + ar: () => ar_default, + az: () => az_default, + be: () => be_default, + bg: () => bg_default, + ca: () => ca_default, + cs: () => cs_default, + da: () => da_default, + de: () => de_default, + en: () => en_default, + eo: () => eo_default, + es: () => es_default, + fa: () => fa_default, + fi: () => fi_default, + fr: () => fr_default, + frCA: () => fr_CA_default, + he: () => he_default, + hu: () => hu_default, + hy: () => hy_default, + id: () => id_default, + is: () => is_default, + it: () => it_default, + ja: () => ja_default, + ka: () => ka_default, + kh: () => kh_default, + km: () => km_default, + ko: () => ko_default, + lt: () => lt_default, + mk: () => mk_default, + ms: () => ms_default, + nl: () => nl_default, + no: () => no_default, + ota: () => ota_default, + pl: () => pl_default, + ps: () => ps_default, + pt: () => pt_default, + ru: () => ru_default, + sl: () => sl_default, + sv: () => sv_default, + ta: () => ta_default, + th: () => th_default, + tr: () => tr_default, + ua: () => ua_default, + uk: () => uk_default, + ur: () => ur_default, + uz: () => uz_default, + vi: () => vi_default, + yo: () => yo_default, + zhCN: () => zh_CN_default, + zhTW: () => zh_TW_default +}); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ar.js +var error = () => { + const Sizable = { + string: { unit: "\u062D\u0631\u0641", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" }, + file: { unit: "\u0628\u0627\u064A\u062A", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" }, + array: { unit: "\u0639\u0646\u0635\u0631", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" }, + set: { unit: 
"\u0639\u0646\u0635\u0631", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u0645\u062F\u062E\u0644", + email: "\u0628\u0631\u064A\u062F \u0625\u0644\u0643\u062A\u0631\u0648\u0646\u064A", + url: "\u0631\u0627\u0628\u0637", + emoji: "\u0625\u064A\u0645\u0648\u062C\u064A", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u062A\u0627\u0631\u064A\u062E \u0648\u0648\u0642\u062A \u0628\u0645\u0639\u064A\u0627\u0631 ISO", + date: "\u062A\u0627\u0631\u064A\u062E \u0628\u0645\u0639\u064A\u0627\u0631 ISO", + time: "\u0648\u0642\u062A \u0628\u0645\u0639\u064A\u0627\u0631 ISO", + duration: "\u0645\u062F\u0629 \u0628\u0645\u0639\u064A\u0627\u0631 ISO", + ipv4: "\u0639\u0646\u0648\u0627\u0646 IPv4", + ipv6: "\u0639\u0646\u0648\u0627\u0646 IPv6", + cidrv4: "\u0645\u062F\u0649 \u0639\u0646\u0627\u0648\u064A\u0646 \u0628\u0635\u064A\u063A\u0629 IPv4", + cidrv6: "\u0645\u062F\u0649 \u0639\u0646\u0627\u0648\u064A\u0646 \u0628\u0635\u064A\u063A\u0629 IPv6", + base64: "\u0646\u064E\u0635 \u0628\u062A\u0631\u0645\u064A\u0632 base64-encoded", + base64url: "\u0646\u064E\u0635 \u0628\u062A\u0631\u0645\u064A\u0632 base64url-encoded", + json_string: "\u0646\u064E\u0635 \u0639\u0644\u0649 \u0647\u064A\u0626\u0629 JSON", + e164: "\u0631\u0642\u0645 \u0647\u0627\u062A\u0641 \u0628\u0645\u0639\u064A\u0627\u0631 E.164", + jwt: "JWT", + template_literal: "\u0645\u062F\u062E\u0644" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0645\u062F\u062E\u0644\u0627\u062A \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644\u0629: \u064A\u0641\u062A\u0631\u0636 \u0625\u062F\u062E\u0627\u0644 instanceof ${issue2.expected}\u060C \u0648\u0644\u0643\u0646 \u062A\u0645 \u0625\u062F\u062E\u0627\u0644 ${received}`; + } + return `\u0645\u062F\u062E\u0644\u0627\u062A \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644\u0629: \u064A\u0641\u062A\u0631\u0636 \u0625\u062F\u062E\u0627\u0644 ${expected}\u060C \u0648\u0644\u0643\u0646 \u062A\u0645 \u0625\u062F\u062E\u0627\u0644 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u0645\u062F\u062E\u0644\u0627\u062A \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644\u0629: \u064A\u0641\u062A\u0631\u0636 \u0625\u062F\u062E\u0627\u0644 ${stringifyPrimitive(issue2.values[0])}`; + return `\u0627\u062E\u062A\u064A\u0627\u0631 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062A\u0648\u0642\u0639 \u0627\u0646\u062A\u0642\u0627\u0621 \u0623\u062D\u062F \u0647\u0630\u0647 \u0627\u0644\u062E\u064A\u0627\u0631\u0627\u062A: ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return ` \u0623\u0643\u0628\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0623\u0646 \u062A\u0643\u0648\u0646 ${issue2.origin ?? "\u0627\u0644\u0642\u064A\u0645\u0629"} ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0635\u0631"}`; + return `\u0623\u0643\u0628\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0623\u0646 \u062A\u0643\u0648\u0646 ${issue2.origin ?? "\u0627\u0644\u0642\u064A\u0645\u0629"} ${adj} ${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0623\u0635\u063A\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0644\u0640 ${issue2.origin} \u0623\u0646 \u064A\u0643\u0648\u0646 ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u0623\u0635\u063A\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0644\u0640 ${issue2.origin} \u0623\u0646 \u064A\u0643\u0648\u0646 ${adj} ${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0628\u062F\u0623 \u0628\u0640 "${issue2.prefix}"`; + if (_issue.format === "ends_with") + return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0646\u062A\u0647\u064A \u0628\u0640 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u062A\u0636\u0645\u0651\u064E\u0646 "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0637\u0627\u0628\u0642 \u0627\u0644\u0646\u0645\u0637 ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? issue2.format} \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644`; + } + case "not_multiple_of": + return `\u0631\u0642\u0645 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0643\u0648\u0646 \u0645\u0646 \u0645\u0636\u0627\u0639\u0641\u0627\u062A ${issue2.divisor}`; + case "unrecognized_keys": + return `\u0645\u0639\u0631\u0641${issue2.keys.length > 1 ? "\u0627\u062A" : ""} \u063A\u0631\u064A\u0628${issue2.keys.length > 1 ? 
"\u0629" : ""}: ${joinValues(issue2.keys, "\u060C ")}`; + case "invalid_key": + return `\u0645\u0639\u0631\u0641 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644 \u0641\u064A ${issue2.origin}`; + case "invalid_union": + return "\u0645\u062F\u062E\u0644 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644"; + case "invalid_element": + return `\u0645\u062F\u062E\u0644 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644 \u0641\u064A ${issue2.origin}`; + default: + return "\u0645\u062F\u062E\u0644 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644"; + } + }; +}; +function ar_default() { + return { + localeError: error() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/az.js +var error2 = () => { + const Sizable = { + string: { unit: "simvol", verb: "olmal\u0131d\u0131r" }, + file: { unit: "bayt", verb: "olmal\u0131d\u0131r" }, + array: { unit: "element", verb: "olmal\u0131d\u0131r" }, + set: { unit: "element", verb: "olmal\u0131d\u0131r" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "input", + email: "email address", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO datetime", + date: "ISO date", + time: "ISO time", + duration: "ISO duration", + ipv4: "IPv4 address", + ipv6: "IPv6 address", + cidrv4: "IPv4 range", + cidrv6: "IPv6 range", + base64: "base64-encoded string", + base64url: "base64url-encoded string", + json_string: "JSON string", + e164: "E.164 number", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Yanl\u0131\u015F d\u0259y\u0259r: g\xF6zl\u0259nil\u0259n instanceof ${issue2.expected}, daxil olan ${received}`; + } + return `Yanl\u0131\u015F d\u0259y\u0259r: g\xF6zl\u0259nil\u0259n ${expected}, daxil olan ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Yanl\u0131\u015F d\u0259y\u0259r: g\xF6zl\u0259nil\u0259n ${stringifyPrimitive(issue2.values[0])}`; + return `Yanl\u0131\u015F se\xE7im: a\u015Fa\u011F\u0131dak\u0131lardan biri olmal\u0131d\u0131r: ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\xC7ox b\xF6y\xFCk: g\xF6zl\u0259nil\u0259n ${issue2.origin ?? "d\u0259y\u0259r"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "element"}`; + return `\xC7ox b\xF6y\xFCk: g\xF6zl\u0259nil\u0259n ${issue2.origin ?? "d\u0259y\u0259r"} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\xC7ox ki\xE7ik: g\xF6zl\u0259nil\u0259n ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + return `\xC7ox ki\xE7ik: g\xF6zl\u0259nil\u0259n ${issue2.origin} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Yanl\u0131\u015F m\u0259tn: "${_issue.prefix}" il\u0259 ba\u015Flamal\u0131d\u0131r`; + if (_issue.format === "ends_with") + return `Yanl\u0131\u015F m\u0259tn: "${_issue.suffix}" il\u0259 bitm\u0259lidir`; + if (_issue.format === "includes") + return `Yanl\u0131\u015F m\u0259tn: "${_issue.includes}" daxil olmal\u0131d\u0131r`; + if (_issue.format === "regex") + return `Yanl\u0131\u015F m\u0259tn: ${_issue.pattern} \u015Fablonuna uy\u011Fun olmal\u0131d\u0131r`; + return `Yanl\u0131\u015F ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `Yanl\u0131\u015F \u0259d\u0259d: ${issue2.divisor} il\u0259 b\xF6l\xFCn\u0259 bil\u0259n olmal\u0131d\u0131r`; + case "unrecognized_keys": + return `Tan\u0131nmayan a\xE7ar${issue2.keys.length > 1 ? "lar" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `${issue2.origin} daxilind\u0259 yanl\u0131\u015F a\xE7ar`; + case "invalid_union": + return "Yanl\u0131\u015F d\u0259y\u0259r"; + case "invalid_element": + return `${issue2.origin} daxilind\u0259 yanl\u0131\u015F d\u0259y\u0259r`; + default: + return `Yanl\u0131\u015F d\u0259y\u0259r`; + } + }; +}; +function az_default() { + return { + localeError: error2() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/be.js +function getBelarusianPlural(count, one, few, many) { + const absCount = Math.abs(count); + const lastDigit = absCount % 10; + const lastTwoDigits = absCount % 100; + if (lastTwoDigits >= 11 && lastTwoDigits <= 19) { + return many; + } + if (lastDigit === 1) { + return one; + } + if (lastDigit >= 2 && lastDigit <= 4) { + return few; + } + return many; +} +var error3 = () => { + const Sizable = { + string: { + unit: { + one: "\u0441\u0456\u043C\u0432\u0430\u043B", + few: "\u0441\u0456\u043C\u0432\u0430\u043B\u044B", + many: "\u0441\u0456\u043C\u0432\u0430\u043B\u0430\u045E" + }, + verb: "\u043C\u0435\u0446\u044C" + }, + array: { + unit: { + one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", + few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u044B", + many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430\u045E" + }, + verb: "\u043C\u0435\u0446\u044C" + }, + set: { + unit: { + one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", + few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u044B", + many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430\u045E" + }, + verb: "\u043C\u0435\u0446\u044C" + }, + file: { + unit: { + one: "\u0431\u0430\u0439\u0442", + few: "\u0431\u0430\u0439\u0442\u044B", + many: 
"\u0431\u0430\u0439\u0442\u0430\u045E" + }, + verb: "\u043C\u0435\u0446\u044C" + } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u0443\u0432\u043E\u0434", + email: "email \u0430\u0434\u0440\u0430\u0441", + url: "URL", + emoji: "\u044D\u043C\u043E\u0434\u0437\u0456", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u0434\u0430\u0442\u0430 \u0456 \u0447\u0430\u0441", + date: "ISO \u0434\u0430\u0442\u0430", + time: "ISO \u0447\u0430\u0441", + duration: "ISO \u043F\u0440\u0430\u0446\u044F\u0433\u043B\u0430\u0441\u0446\u044C", + ipv4: "IPv4 \u0430\u0434\u0440\u0430\u0441", + ipv6: "IPv6 \u0430\u0434\u0440\u0430\u0441", + cidrv4: "IPv4 \u0434\u044B\u044F\u043F\u0430\u0437\u043E\u043D", + cidrv6: "IPv6 \u0434\u044B\u044F\u043F\u0430\u0437\u043E\u043D", + base64: "\u0440\u0430\u0434\u043E\u043A \u0443 \u0444\u0430\u0440\u043C\u0430\u0446\u0435 base64", + base64url: "\u0440\u0430\u0434\u043E\u043A \u0443 \u0444\u0430\u0440\u043C\u0430\u0446\u0435 base64url", + json_string: "JSON \u0440\u0430\u0434\u043E\u043A", + e164: "\u043D\u0443\u043C\u0430\u0440 E.164", + jwt: "JWT", + template_literal: "\u0443\u0432\u043E\u0434" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u043B\u0456\u043A", + array: "\u043C\u0430\u0441\u0456\u045E" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434: \u0447\u0430\u043A\u0430\u045E\u0441\u044F instanceof ${issue2.expected}, \u0430\u0442\u0440\u044B\u043C\u0430\u043D\u0430 ${received}`; + } + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434: \u0447\u0430\u043A\u0430\u045E\u0441\u044F ${expected}, \u0430\u0442\u0440\u044B\u043C\u0430\u043D\u0430 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F ${stringifyPrimitive(issue2.values[0])}`; + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0432\u0430\u0440\u044B\u044F\u043D\u0442: \u0447\u0430\u043A\u0430\u045E\u0441\u044F \u0430\u0434\u0437\u0456\u043D \u0437 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + const maxValue = Number(issue2.maximum); + const unit = getBelarusianPlural(maxValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); + return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u0432\u044F\u043B\u0456\u043A\u0456: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin ?? "\u0437\u043D\u0430\u0447\u044D\u043D\u043D\u0435"} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 ${sizing.verb} ${adj}${issue2.maximum.toString()} ${unit}`; + } + return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u0432\u044F\u043B\u0456\u043A\u0456: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin ?? 
"\u0437\u043D\u0430\u0447\u044D\u043D\u043D\u0435"} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 \u0431\u044B\u0446\u044C ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + const minValue = Number(issue2.minimum); + const unit = getBelarusianPlural(minValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); + return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u043C\u0430\u043B\u044B: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 ${sizing.verb} ${adj}${issue2.minimum.toString()} ${unit}`; + } + return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u043C\u0430\u043B\u044B: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 \u0431\u044B\u0446\u044C ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u043F\u0430\u0447\u044B\u043D\u0430\u0446\u0446\u0430 \u0437 "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0437\u0430\u043A\u0430\u043D\u0447\u0432\u0430\u0446\u0446\u0430 \u043D\u0430 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0437\u043C\u044F\u0448\u0447\u0430\u0446\u044C "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: 
\u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0430\u0434\u043F\u0430\u0432\u044F\u0434\u0430\u0446\u044C \u0448\u0430\u0431\u043B\u043E\u043D\u0443 ${_issue.pattern}`; + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u043B\u0456\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0431\u044B\u0446\u044C \u043A\u0440\u0430\u0442\u043D\u044B\u043C ${issue2.divisor}`; + case "unrecognized_keys": + return `\u041D\u0435\u0440\u0430\u0441\u043F\u0430\u0437\u043D\u0430\u043D\u044B ${issue2.keys.length > 1 ? "\u043A\u043B\u044E\u0447\u044B" : "\u043A\u043B\u044E\u0447"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u043A\u043B\u044E\u0447 \u0443 ${issue2.origin}`; + case "invalid_union": + return "\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434"; + case "invalid_element": + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u0430\u0435 \u0437\u043D\u0430\u0447\u044D\u043D\u043D\u0435 \u045E ${issue2.origin}`; + default: + return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434`; + } + }; +}; +function be_default() { + return { + localeError: error3() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/bg.js +var error4 = () => { + const Sizable = { + string: { unit: "\u0441\u0438\u043C\u0432\u043E\u043B\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" }, + file: { unit: "\u0431\u0430\u0439\u0442\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" }, + array: { unit: "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" }, + set: { unit: 
"\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u0432\u0445\u043E\u0434", + email: "\u0438\u043C\u0435\u0439\u043B \u0430\u0434\u0440\u0435\u0441", + url: "URL", + emoji: "\u0435\u043C\u043E\u0434\u0436\u0438", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u0432\u0440\u0435\u043C\u0435", + date: "ISO \u0434\u0430\u0442\u0430", + time: "ISO \u0432\u0440\u0435\u043C\u0435", + duration: "ISO \u043F\u0440\u043E\u0434\u044A\u043B\u0436\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442", + ipv4: "IPv4 \u0430\u0434\u0440\u0435\u0441", + ipv6: "IPv6 \u0430\u0434\u0440\u0435\u0441", + cidrv4: "IPv4 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", + cidrv6: "IPv6 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", + base64: "base64-\u043A\u043E\u0434\u0438\u0440\u0430\u043D \u043D\u0438\u0437", + base64url: "base64url-\u043A\u043E\u0434\u0438\u0440\u0430\u043D \u043D\u0438\u0437", + json_string: "JSON \u043D\u0438\u0437", + e164: "E.164 \u043D\u043E\u043C\u0435\u0440", + jwt: "JWT", + template_literal: "\u0432\u0445\u043E\u0434" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0447\u0438\u0441\u043B\u043E", + array: "\u043C\u0430\u0441\u0438\u0432" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434: \u043E\u0447\u0430\u043A\u0432\u0430\u043D instanceof ${issue2.expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D ${received}`; + } + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434: \u043E\u0447\u0430\u043A\u0432\u0430\u043D ${expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434: \u043E\u0447\u0430\u043A\u0432\u0430\u043D ${stringifyPrimitive(issue2.values[0])}`; + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430 \u043E\u043F\u0446\u0438\u044F: \u043E\u0447\u0430\u043A\u0432\u0430\u043D\u043E \u0435\u0434\u043D\u043E \u043E\u0442 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u0422\u0432\u044A\u0440\u0434\u0435 \u0433\u043E\u043B\u044F\u043C\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin ?? "\u0441\u0442\u043E\u0439\u043D\u043E\u0441\u0442"} \u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430"}`; + return `\u0422\u0432\u044A\u0440\u0434\u0435 \u0433\u043E\u043B\u044F\u043C\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin ?? "\u0441\u0442\u043E\u0439\u043D\u043E\u0441\u0442"} \u0434\u0430 \u0431\u044A\u0434\u0435 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0422\u0432\u044A\u0440\u0434\u0435 \u043C\u0430\u043B\u043A\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin} \u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430 ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u0422\u0432\u044A\u0440\u0434\u0435 \u043C\u0430\u043B\u043A\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin} \u0434\u0430 \u0431\u044A\u0434\u0435 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0437\u0430\u043F\u043E\u0447\u0432\u0430 \u0441 "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0437\u0430\u0432\u044A\u0440\u0448\u0432\u0430 \u0441 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0432\u043A\u043B\u044E\u0447\u0432\u0430 "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0441\u044A\u0432\u043F\u0430\u0434\u0430 \u0441 ${_issue.pattern}`; + let invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D"; + if (_issue.format === "emoji") + invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E"; + if (_issue.format === "datetime") + invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E"; + if (_issue.format === "date") + invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430"; + if 
(_issue.format === "time") + invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E"; + if (_issue.format === "duration") + invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430"; + return `${invalid_adj} ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E \u0447\u0438\u0441\u043B\u043E: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0431\u044A\u0434\u0435 \u043A\u0440\u0430\u0442\u043D\u043E \u043D\u0430 ${issue2.divisor}`; + case "unrecognized_keys": + return `\u041D\u0435\u0440\u0430\u0437\u043F\u043E\u0437\u043D\u0430\u0442${issue2.keys.length > 1 ? "\u0438" : ""} \u043A\u043B\u044E\u0447${issue2.keys.length > 1 ? "\u043E\u0432\u0435" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043A\u043B\u044E\u0447 \u0432 ${issue2.origin}`; + case "invalid_union": + return "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434"; + case "invalid_element": + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430 \u0441\u0442\u043E\u0439\u043D\u043E\u0441\u0442 \u0432 ${issue2.origin}`; + default: + return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434`; + } + }; +}; +function bg_default() { + return { + localeError: error4() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ca.js +var error5 = () => { + const Sizable = { + string: { unit: "car\xE0cters", verb: "contenir" }, + file: { unit: "bytes", verb: "contenir" }, + array: { unit: "elements", verb: "contenir" }, + set: { unit: "elements", verb: "contenir" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "entrada", + email: "adre\xE7a electr\xF2nica", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "data i hora ISO", + date: "data ISO", + time: "hora ISO", + duration: "durada ISO", + ipv4: "adre\xE7a IPv4", + ipv6: "adre\xE7a IPv6", + cidrv4: "rang IPv4", + cidrv6: "rang IPv6", + base64: "cadena codificada en base64", + base64url: "cadena codificada en base64url", + json_string: "cadena JSON", + e164: "n\xFAmero E.164", + jwt: "JWT", + template_literal: "entrada" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Tipus inv\xE0lid: s'esperava instanceof ${issue2.expected}, s'ha rebut ${received}`; + } + return `Tipus inv\xE0lid: s'esperava ${expected}, s'ha rebut ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Valor inv\xE0lid: s'esperava ${stringifyPrimitive(issue2.values[0])}`; + return `Opci\xF3 inv\xE0lida: s'esperava una de ${joinValues(issue2.values, " o ")}`; + case "too_big": { + const adj = issue2.inclusive ? "com a m\xE0xim" : "menys de"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Massa gran: s'esperava que ${issue2.origin ?? "el valor"} contingu\xE9s ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "elements"}`; + return `Massa gran: s'esperava que ${issue2.origin ?? "el valor"} fos ${adj} ${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
"com a m\xEDnim" : "m\xE9s de"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Massa petit: s'esperava que ${issue2.origin} contingu\xE9s ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Massa petit: s'esperava que ${issue2.origin} fos ${adj} ${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Format inv\xE0lid: ha de comen\xE7ar amb "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `Format inv\xE0lid: ha d'acabar amb "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Format inv\xE0lid: ha d'incloure "${_issue.includes}"`; + if (_issue.format === "regex") + return `Format inv\xE0lid: ha de coincidir amb el patr\xF3 ${_issue.pattern}`; + return `Format inv\xE0lid per a ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `N\xFAmero inv\xE0lid: ha de ser m\xFAltiple de ${issue2.divisor}`; + case "unrecognized_keys": + return `Clau${issue2.keys.length > 1 ? "s" : ""} no reconeguda${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Clau inv\xE0lida a ${issue2.origin}`; + case "invalid_union": + return "Entrada inv\xE0lida"; + // Could also be "Tipus d'unió invàlid" but "Entrada invàlida" is more general + case "invalid_element": + return `Element inv\xE0lid a ${issue2.origin}`; + default: + return `Entrada inv\xE0lida`; + } + }; +}; +function ca_default() { + return { + localeError: error5() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/cs.js +var error6 = () => { + const Sizable = { + string: { unit: "znak\u016F", verb: "m\xEDt" }, + file: { unit: "bajt\u016F", verb: "m\xEDt" }, + array: { unit: "prvk\u016F", verb: "m\xEDt" }, + set: { unit: "prvk\u016F", verb: "m\xEDt" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "regul\xE1rn\xED v\xFDraz", + email: "e-mailov\xE1 adresa", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "datum a \u010Das ve form\xE1tu ISO", + date: "datum ve form\xE1tu ISO", + time: "\u010Das ve form\xE1tu ISO", + duration: "doba trv\xE1n\xED ISO", + ipv4: "IPv4 adresa", + ipv6: "IPv6 adresa", + cidrv4: "rozsah IPv4", + cidrv6: "rozsah IPv6", + base64: "\u0159et\u011Bzec zak\xF3dovan\xFD ve form\xE1tu base64", + base64url: "\u0159et\u011Bzec zak\xF3dovan\xFD ve form\xE1tu base64url", + json_string: "\u0159et\u011Bzec ve form\xE1tu JSON", + e164: "\u010D\xEDslo E.164", + jwt: "JWT", + template_literal: "vstup" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u010D\xEDslo", + string: "\u0159et\u011Bzec", + function: "funkce", + array: "pole" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Neplatn\xFD vstup: o\u010Dek\xE1v\xE1no instanceof ${issue2.expected}, obdr\u017Eeno ${received}`; + } + return `Neplatn\xFD vstup: o\u010Dek\xE1v\xE1no ${expected}, obdr\u017Eeno ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Neplatn\xFD vstup: o\u010Dek\xE1v\xE1no ${stringifyPrimitive(issue2.values[0])}`; + return `Neplatn\xE1 mo\u017Enost: o\u010Dek\xE1v\xE1na jedna z hodnot ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Hodnota je p\u0159\xEDli\u0161 velk\xE1: ${issue2.origin ?? 
"hodnota"} mus\xED m\xEDt ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "prvk\u016F"}`; + } + return `Hodnota je p\u0159\xEDli\u0161 velk\xE1: ${issue2.origin ?? "hodnota"} mus\xED b\xFDt ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Hodnota je p\u0159\xEDli\u0161 mal\xE1: ${issue2.origin ?? "hodnota"} mus\xED m\xEDt ${adj}${issue2.minimum.toString()} ${sizing.unit ?? "prvk\u016F"}`; + } + return `Hodnota je p\u0159\xEDli\u0161 mal\xE1: ${issue2.origin ?? "hodnota"} mus\xED b\xFDt ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Neplatn\xFD \u0159et\u011Bzec: mus\xED za\u010D\xEDnat na "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Neplatn\xFD \u0159et\u011Bzec: mus\xED kon\u010Dit na "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Neplatn\xFD \u0159et\u011Bzec: mus\xED obsahovat "${_issue.includes}"`; + if (_issue.format === "regex") + return `Neplatn\xFD \u0159et\u011Bzec: mus\xED odpov\xEDdat vzoru ${_issue.pattern}`; + return `Neplatn\xFD form\xE1t ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `Neplatn\xE9 \u010D\xEDslo: mus\xED b\xFDt n\xE1sobkem ${issue2.divisor}`; + case "unrecognized_keys": + return `Nezn\xE1m\xE9 kl\xED\u010De: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Neplatn\xFD kl\xED\u010D v ${issue2.origin}`; + case "invalid_union": + return "Neplatn\xFD vstup"; + case "invalid_element": + return `Neplatn\xE1 hodnota v ${issue2.origin}`; + default: + return `Neplatn\xFD vstup`; + } + }; +}; +function cs_default() { + return { + localeError: error6() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/da.js +var error7 = () => { + const Sizable = { + string: { unit: "tegn", verb: "havde" }, + file: { unit: "bytes", verb: "havde" }, + array: { unit: "elementer", verb: "indeholdt" }, + set: { unit: "elementer", verb: "indeholdt" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "input", + email: "e-mailadresse", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO dato- og klokkesl\xE6t", + date: "ISO-dato", + time: "ISO-klokkesl\xE6t", + duration: "ISO-varighed", + ipv4: "IPv4-omr\xE5de", + ipv6: "IPv6-omr\xE5de", + cidrv4: "IPv4-spektrum", + cidrv6: "IPv6-spektrum", + base64: "base64-kodet streng", + base64url: "base64url-kodet streng", + json_string: "JSON-streng", + e164: "E.164-nummer", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN", + string: "streng", + number: "tal", + boolean: "boolean", + array: "liste", + object: "objekt", + set: "s\xE6t", + file: "fil" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? 
issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Ugyldigt input: forventede instanceof ${issue2.expected}, fik ${received}`; + } + return `Ugyldigt input: forventede ${expected}, fik ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Ugyldig v\xE6rdi: forventede ${stringifyPrimitive(issue2.values[0])}`; + return `Ugyldigt valg: forventede en af f\xF8lgende ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + const origin = TypeDictionary[issue2.origin] ?? issue2.origin; + if (sizing) + return `For stor: forventede ${origin ?? "value"} ${sizing.verb} ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "elementer"}`; + return `For stor: forventede ${origin ?? "value"} havde ${adj} ${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + const origin = TypeDictionary[issue2.origin] ?? issue2.origin; + if (sizing) { + return `For lille: forventede ${origin} ${sizing.verb} ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; + } + return `For lille: forventede ${origin} havde ${adj} ${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Ugyldig streng: skal starte med "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Ugyldig streng: skal ende med "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Ugyldig streng: skal indeholde "${_issue.includes}"`; + if (_issue.format === "regex") + return `Ugyldig streng: skal matche m\xF8nsteret ${_issue.pattern}`; + return `Ugyldig ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `Ugyldigt tal: skal v\xE6re deleligt med ${issue2.divisor}`; + case "unrecognized_keys": + return `${issue2.keys.length > 1 ? "Ukendte n\xF8gler" : "Ukendt n\xF8gle"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Ugyldig n\xF8gle i ${issue2.origin}`; + case "invalid_union": + return "Ugyldigt input: matcher ingen af de tilladte typer"; + case "invalid_element": + return `Ugyldig v\xE6rdi i ${issue2.origin}`; + default: + return `Ugyldigt input`; + } + }; +}; +function da_default() { + return { + localeError: error7() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/de.js +var error8 = () => { + const Sizable = { + string: { unit: "Zeichen", verb: "zu haben" }, + file: { unit: "Bytes", verb: "zu haben" }, + array: { unit: "Elemente", verb: "zu haben" }, + set: { unit: "Elemente", verb: "zu haben" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "Eingabe", + email: "E-Mail-Adresse", + url: "URL", + emoji: "Emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO-Datum und -Uhrzeit", + date: "ISO-Datum", + time: "ISO-Uhrzeit", + duration: "ISO-Dauer", + ipv4: "IPv4-Adresse", + ipv6: "IPv6-Adresse", + cidrv4: "IPv4-Bereich", + cidrv6: "IPv6-Bereich", + base64: "Base64-codierter String", + base64url: "Base64-URL-codierter String", + json_string: "JSON-String", + e164: "E.164-Nummer", + jwt: "JWT", + template_literal: "Eingabe" + }; + const TypeDictionary = { + nan: "NaN", + number: "Zahl", + array: "Array" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Ung\xFCltige Eingabe: erwartet instanceof ${issue2.expected}, erhalten ${received}`; + } + return `Ung\xFCltige Eingabe: erwartet ${expected}, erhalten ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Ung\xFCltige Eingabe: erwartet ${stringifyPrimitive(issue2.values[0])}`; + return `Ung\xFCltige Option: erwartet eine von ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Zu gro\xDF: erwartet, dass ${issue2.origin ?? "Wert"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "Elemente"} hat`; + return `Zu gro\xDF: erwartet, dass ${issue2.origin ?? "Wert"} ${adj}${issue2.maximum.toString()} ist`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Zu klein: erwartet, dass ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} hat`; + } + return `Zu klein: erwartet, dass ${issue2.origin} ${adj}${issue2.minimum.toString()} ist`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Ung\xFCltiger String: muss mit "${_issue.prefix}" beginnen`; + if (_issue.format === "ends_with") + return `Ung\xFCltiger String: muss mit "${_issue.suffix}" enden`; + if (_issue.format === "includes") + return `Ung\xFCltiger String: muss "${_issue.includes}" enthalten`; + if (_issue.format === "regex") + return `Ung\xFCltiger String: muss dem Muster ${_issue.pattern} entsprechen`; + return `Ung\xFCltig: ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Ung\xFCltige Zahl: muss ein Vielfaches von ${issue2.divisor} sein`; + case "unrecognized_keys": + return `${issue2.keys.length > 1 ? 
"Unbekannte Schl\xFCssel" : "Unbekannter Schl\xFCssel"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Ung\xFCltiger Schl\xFCssel in ${issue2.origin}`; + case "invalid_union": + return "Ung\xFCltige Eingabe"; + case "invalid_element": + return `Ung\xFCltiger Wert in ${issue2.origin}`; + default: + return `Ung\xFCltige Eingabe`; + } + }; +}; +function de_default() { + return { + localeError: error8() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/en.js +var error9 = () => { + const Sizable = { + string: { unit: "characters", verb: "to have" }, + file: { unit: "bytes", verb: "to have" }, + array: { unit: "items", verb: "to have" }, + set: { unit: "items", verb: "to have" }, + map: { unit: "entries", verb: "to have" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "input", + email: "email address", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO datetime", + date: "ISO date", + time: "ISO time", + duration: "ISO duration", + ipv4: "IPv4 address", + ipv6: "IPv6 address", + mac: "MAC address", + cidrv4: "IPv4 range", + cidrv6: "IPv6 range", + base64: "base64-encoded string", + base64url: "base64url-encoded string", + json_string: "JSON string", + e164: "E.164 number", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + // Compatibility: "nan" -> "NaN" for display + nan: "NaN" + // All other type names omitted - they fall back to raw values via ?? operator + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + return `Invalid input: expected ${expected}, received ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Invalid input: expected ${stringifyPrimitive(issue2.values[0])}`; + return `Invalid option: expected one of ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Too big: expected ${issue2.origin ?? "value"} to have ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elements"}`; + return `Too big: expected ${issue2.origin ?? "value"} to be ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Too small: expected ${issue2.origin} to have ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Too small: expected ${issue2.origin} to be ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Invalid string: must start with "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `Invalid string: must end with "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Invalid string: must include "${_issue.includes}"`; + if (_issue.format === "regex") + return `Invalid string: must match pattern ${_issue.pattern}`; + return `Invalid ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Invalid number: must be a multiple of ${issue2.divisor}`; + case "unrecognized_keys": + return `Unrecognized key${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Invalid key in ${issue2.origin}`; + case "invalid_union": + return "Invalid input"; + case "invalid_element": + return `Invalid value in ${issue2.origin}`; + default: + return `Invalid input`; + } + }; +}; +function en_default() { + return { + localeError: error9() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/eo.js +var error10 = () => { + const Sizable = { + string: { unit: "karaktrojn", verb: "havi" }, + file: { unit: "bajtojn", verb: "havi" }, + array: { unit: "elementojn", verb: "havi" }, + set: { unit: "elementojn", verb: "havi" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "enigo", + email: "retadreso", + url: "URL", + emoji: "emo\u011Dio", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO-datotempo", + date: "ISO-dato", + time: "ISO-tempo", + duration: "ISO-da\u016Dro", + ipv4: "IPv4-adreso", + ipv6: "IPv6-adreso", + cidrv4: "IPv4-rango", + cidrv6: "IPv6-rango", + base64: "64-ume kodita karaktraro", + base64url: "URL-64-ume kodita karaktraro", + json_string: "JSON-karaktraro", + e164: "E.164-nombro", + jwt: "JWT", + template_literal: "enigo" + }; + const TypeDictionary = { + nan: "NaN", + number: "nombro", + array: "tabelo", + null: "senvalora" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Nevalida enigo: atendi\u011Dis instanceof ${issue2.expected}, ricevi\u011Dis ${received}`; + } + return `Nevalida enigo: atendi\u011Dis ${expected}, ricevi\u011Dis ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Nevalida enigo: atendi\u011Dis ${stringifyPrimitive(issue2.values[0])}`; + return `Nevalida opcio: atendi\u011Dis unu el ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Tro granda: atendi\u011Dis ke ${issue2.origin ?? "valoro"} havu ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementojn"}`; + return `Tro granda: atendi\u011Dis ke ${issue2.origin ?? "valoro"} havu ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Tro malgranda: atendi\u011Dis ke ${issue2.origin} havu ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Tro malgranda: atendi\u011Dis ke ${issue2.origin} estu ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Nevalida karaktraro: devas komenci\u011Di per "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Nevalida karaktraro: devas fini\u011Di per "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Nevalida karaktraro: devas inkluzivi "${_issue.includes}"`; + if (_issue.format === "regex") + return `Nevalida karaktraro: devas kongrui kun la modelo ${_issue.pattern}`; + return `Nevalida ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Nevalida nombro: devas esti oblo de ${issue2.divisor}`; + case "unrecognized_keys": + return `Nekonata${issue2.keys.length > 1 ? "j" : ""} \u015Dlosilo${issue2.keys.length > 1 ? 
"j" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Nevalida \u015Dlosilo en ${issue2.origin}`; + case "invalid_union": + return "Nevalida enigo"; + case "invalid_element": + return `Nevalida valoro en ${issue2.origin}`; + default: + return `Nevalida enigo`; + } + }; +}; +function eo_default() { + return { + localeError: error10() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/es.js +var error11 = () => { + const Sizable = { + string: { unit: "caracteres", verb: "tener" }, + file: { unit: "bytes", verb: "tener" }, + array: { unit: "elementos", verb: "tener" }, + set: { unit: "elementos", verb: "tener" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "entrada", + email: "direcci\xF3n de correo electr\xF3nico", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "fecha y hora ISO", + date: "fecha ISO", + time: "hora ISO", + duration: "duraci\xF3n ISO", + ipv4: "direcci\xF3n IPv4", + ipv6: "direcci\xF3n IPv6", + cidrv4: "rango IPv4", + cidrv6: "rango IPv6", + base64: "cadena codificada en base64", + base64url: "URL codificada en base64", + json_string: "cadena JSON", + e164: "n\xFAmero E.164", + jwt: "JWT", + template_literal: "entrada" + }; + const TypeDictionary = { + nan: "NaN", + string: "texto", + number: "n\xFAmero", + boolean: "booleano", + array: "arreglo", + object: "objeto", + set: "conjunto", + file: "archivo", + date: "fecha", + bigint: "n\xFAmero grande", + symbol: "s\xEDmbolo", + undefined: "indefinido", + null: "nulo", + function: "funci\xF3n", + map: "mapa", + record: "registro", + tuple: "tupla", + enum: "enumeraci\xF3n", + union: "uni\xF3n", + literal: "literal", + promise: "promesa", + void: "vac\xEDo", + never: "nunca", + unknown: "desconocido", + any: "cualquiera" + 
}; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Entrada inv\xE1lida: se esperaba instanceof ${issue2.expected}, recibido ${received}`; + } + return `Entrada inv\xE1lida: se esperaba ${expected}, recibido ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Entrada inv\xE1lida: se esperaba ${stringifyPrimitive(issue2.values[0])}`; + return `Opci\xF3n inv\xE1lida: se esperaba una de ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + const origin = TypeDictionary[issue2.origin] ?? issue2.origin; + if (sizing) + return `Demasiado grande: se esperaba que ${origin ?? "valor"} tuviera ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementos"}`; + return `Demasiado grande: se esperaba que ${origin ?? "valor"} fuera ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + const origin = TypeDictionary[issue2.origin] ?? 
issue2.origin; + if (sizing) { + return `Demasiado peque\xF1o: se esperaba que ${origin} tuviera ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Demasiado peque\xF1o: se esperaba que ${origin} fuera ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Cadena inv\xE1lida: debe comenzar con "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Cadena inv\xE1lida: debe terminar en "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Cadena inv\xE1lida: debe incluir "${_issue.includes}"`; + if (_issue.format === "regex") + return `Cadena inv\xE1lida: debe coincidir con el patr\xF3n ${_issue.pattern}`; + return `Inv\xE1lido ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `N\xFAmero inv\xE1lido: debe ser m\xFAltiplo de ${issue2.divisor}`; + case "unrecognized_keys": + return `Llave${issue2.keys.length > 1 ? "s" : ""} desconocida${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Llave inv\xE1lida en ${TypeDictionary[issue2.origin] ?? issue2.origin}`; + case "invalid_union": + return "Entrada inv\xE1lida"; + case "invalid_element": + return `Valor inv\xE1lido en ${TypeDictionary[issue2.origin] ?? 
issue2.origin}`; + default: + return `Entrada inv\xE1lida`; + } + }; +}; +function es_default() { + return { + localeError: error11() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fa.js +var error12 = () => { + const Sizable = { + string: { unit: "\u06A9\u0627\u0631\u0627\u06A9\u062A\u0631", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" }, + file: { unit: "\u0628\u0627\u06CC\u062A", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" }, + array: { unit: "\u0622\u06CC\u062A\u0645", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" }, + set: { unit: "\u0622\u06CC\u062A\u0645", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u0648\u0631\u0648\u062F\u06CC", + email: "\u0622\u062F\u0631\u0633 \u0627\u06CC\u0645\u06CC\u0644", + url: "URL", + emoji: "\u0627\u06CC\u0645\u0648\u062C\u06CC", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u062A\u0627\u0631\u06CC\u062E \u0648 \u0632\u0645\u0627\u0646 \u0627\u06CC\u0632\u0648", + date: "\u062A\u0627\u0631\u06CC\u062E \u0627\u06CC\u0632\u0648", + time: "\u0632\u0645\u0627\u0646 \u0627\u06CC\u0632\u0648", + duration: "\u0645\u062F\u062A \u0632\u0645\u0627\u0646 \u0627\u06CC\u0632\u0648", + ipv4: "IPv4 \u0622\u062F\u0631\u0633", + ipv6: "IPv6 \u0622\u062F\u0631\u0633", + cidrv4: "IPv4 \u062F\u0627\u0645\u0646\u0647", + cidrv6: "IPv6 \u062F\u0627\u0645\u0646\u0647", + base64: "base64-encoded \u0631\u0634\u062A\u0647", + base64url: "base64url-encoded \u0631\u0634\u062A\u0647", + json_string: "JSON \u0631\u0634\u062A\u0647", + e164: "E.164 \u0639\u062F\u062F", + jwt: "JWT", + template_literal: "\u0648\u0631\u0648\u062F\u06CC" + }; + const TypeDictionary = { + nan: "NaN", + number: 
"\u0639\u062F\u062F", + array: "\u0622\u0631\u0627\u06CC\u0647" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A instanceof ${issue2.expected} \u0645\u06CC\u200C\u0628\u0648\u062F\u060C ${received} \u062F\u0631\u06CC\u0627\u0641\u062A \u0634\u062F`; + } + return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A ${expected} \u0645\u06CC\u200C\u0628\u0648\u062F\u060C ${received} \u062F\u0631\u06CC\u0627\u0641\u062A \u0634\u062F`; + } + case "invalid_value": + if (issue2.values.length === 1) { + return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A ${stringifyPrimitive(issue2.values[0])} \u0645\u06CC\u200C\u0628\u0648\u062F`; + } + return `\u06AF\u0632\u06CC\u0646\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A \u06CC\u06A9\u06CC \u0627\u0632 ${joinValues(issue2.values, "|")} \u0645\u06CC\u200C\u0628\u0648\u062F`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u062E\u06CC\u0644\u06CC \u0628\u0632\u0631\u06AF: ${issue2.origin ?? "\u0645\u0642\u062F\u0627\u0631"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0635\u0631"} \u0628\u0627\u0634\u062F`; + } + return `\u062E\u06CC\u0644\u06CC \u0628\u0632\u0631\u06AF: ${issue2.origin ?? 
"\u0645\u0642\u062F\u0627\u0631"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} \u0628\u0627\u0634\u062F`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u062E\u06CC\u0644\u06CC \u06A9\u0648\u0686\u06A9: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} ${sizing.unit} \u0628\u0627\u0634\u062F`; + } + return `\u062E\u06CC\u0644\u06CC \u06A9\u0648\u0686\u06A9: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} \u0628\u0627\u0634\u062F`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0628\u0627 "${_issue.prefix}" \u0634\u0631\u0648\u0639 \u0634\u0648\u062F`; + } + if (_issue.format === "ends_with") { + return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0628\u0627 "${_issue.suffix}" \u062A\u0645\u0627\u0645 \u0634\u0648\u062F`; + } + if (_issue.format === "includes") { + return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0634\u0627\u0645\u0644 "${_issue.includes}" \u0628\u0627\u0634\u062F`; + } + if (_issue.format === "regex") { + return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0628\u0627 \u0627\u0644\u06AF\u0648\u06CC ${_issue.pattern} \u0645\u0637\u0627\u0628\u0642\u062A \u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F`; + } + return `${FormatDictionary[_issue.format] ?? 
issue2.format} \u0646\u0627\u0645\u0639\u062A\u0628\u0631`; + } + case "not_multiple_of": + return `\u0639\u062F\u062F \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0645\u0636\u0631\u0628 ${issue2.divisor} \u0628\u0627\u0634\u062F`; + case "unrecognized_keys": + return `\u06A9\u0644\u06CC\u062F${issue2.keys.length > 1 ? "\u0647\u0627\u06CC" : ""} \u0646\u0627\u0634\u0646\u0627\u0633: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u06A9\u0644\u06CC\u062F \u0646\u0627\u0634\u0646\u0627\u0633 \u062F\u0631 ${issue2.origin}`; + case "invalid_union": + return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631`; + case "invalid_element": + return `\u0645\u0642\u062F\u0627\u0631 \u0646\u0627\u0645\u0639\u062A\u0628\u0631 \u062F\u0631 ${issue2.origin}`; + default: + return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631`; + } + }; +}; +function fa_default() { + return { + localeError: error12() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fi.js +var error13 = () => { + const Sizable = { + string: { unit: "merkki\xE4", subject: "merkkijonon" }, + file: { unit: "tavua", subject: "tiedoston" }, + array: { unit: "alkiota", subject: "listan" }, + set: { unit: "alkiota", subject: "joukon" }, + number: { unit: "", subject: "luvun" }, + bigint: { unit: "", subject: "suuren kokonaisluvun" }, + int: { unit: "", subject: "kokonaisluvun" }, + date: { unit: "", subject: "p\xE4iv\xE4m\xE4\xE4r\xE4n" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "s\xE4\xE4nn\xF6llinen lauseke", + email: "s\xE4hk\xF6postiosoite", + url: "URL-osoite", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO-aikaleima", + date: "ISO-p\xE4iv\xE4m\xE4\xE4r\xE4", + time: "ISO-aika", + duration: "ISO-kesto", + ipv4: "IPv4-osoite", + ipv6: "IPv6-osoite", + cidrv4: "IPv4-alue", + cidrv6: "IPv6-alue", + base64: "base64-koodattu merkkijono", + base64url: "base64url-koodattu merkkijono", + json_string: "JSON-merkkijono", + e164: "E.164-luku", + jwt: "JWT", + template_literal: "templaattimerkkijono" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Virheellinen tyyppi: odotettiin instanceof ${issue2.expected}, oli ${received}`; + } + return `Virheellinen tyyppi: odotettiin ${expected}, oli ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Virheellinen sy\xF6te: t\xE4ytyy olla ${stringifyPrimitive(issue2.values[0])}`; + return `Virheellinen valinta: t\xE4ytyy olla yksi seuraavista: ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Liian suuri: ${sizing.subject} t\xE4ytyy olla ${adj}${issue2.maximum.toString()} ${sizing.unit}`.trim(); + } + return `Liian suuri: arvon t\xE4ytyy olla ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Liian pieni: ${sizing.subject} t\xE4ytyy olla ${adj}${issue2.minimum.toString()} ${sizing.unit}`.trim(); + } + return `Liian pieni: arvon t\xE4ytyy olla ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Virheellinen sy\xF6te: t\xE4ytyy alkaa "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Virheellinen sy\xF6te: t\xE4ytyy loppua "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Virheellinen sy\xF6te: t\xE4ytyy sis\xE4lt\xE4\xE4 "${_issue.includes}"`; + if (_issue.format === "regex") { + return `Virheellinen sy\xF6te: t\xE4ytyy vastata s\xE4\xE4nn\xF6llist\xE4 lauseketta ${_issue.pattern}`; + } + return `Virheellinen ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Virheellinen luku: t\xE4ytyy olla luvun ${issue2.divisor} monikerta`; + case "unrecognized_keys": + return `${issue2.keys.length > 1 ? "Tuntemattomat avaimet" : "Tuntematon avain"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return "Virheellinen avain tietueessa"; + case "invalid_union": + return "Virheellinen unioni"; + case "invalid_element": + return "Virheellinen arvo joukossa"; + default: + return `Virheellinen sy\xF6te`; + } + }; +}; +function fi_default() { + return { + localeError: error13() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fr.js +var error14 = () => { + const Sizable = { + string: { unit: "caract\xE8res", verb: "avoir" }, + file: { unit: "octets", verb: "avoir" }, + array: { unit: "\xE9l\xE9ments", verb: "avoir" }, + set: { unit: "\xE9l\xE9ments", verb: "avoir" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "entr\xE9e", + email: "adresse e-mail", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "date et heure ISO", + date: "date ISO", + time: "heure ISO", + duration: "dur\xE9e ISO", + ipv4: "adresse IPv4", + ipv6: "adresse IPv6", + cidrv4: "plage IPv4", + cidrv6: "plage IPv6", + base64: "cha\xEEne encod\xE9e en base64", + base64url: "cha\xEEne encod\xE9e en base64url", + json_string: "cha\xEEne JSON", + e164: "num\xE9ro E.164", + jwt: "JWT", + template_literal: "entr\xE9e" + }; + const TypeDictionary = { + nan: "NaN", + number: "nombre", + array: "tableau" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Entr\xE9e invalide : instanceof ${issue2.expected} attendu, ${received} re\xE7u`; + } + return `Entr\xE9e invalide : ${expected} attendu, ${received} re\xE7u`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Entr\xE9e invalide : ${stringifyPrimitive(issue2.values[0])} attendu`; + return `Option invalide : une valeur parmi ${joinValues(issue2.values, "|")} attendue`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Trop grand : ${issue2.origin ?? "valeur"} doit ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\xE9l\xE9ment(s)"}`; + return `Trop grand : ${issue2.origin ?? "valeur"} doit \xEAtre ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Trop petit : ${issue2.origin} doit ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Trop petit : ${issue2.origin} doit \xEAtre ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Cha\xEEne invalide : doit commencer par "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Cha\xEEne invalide : doit se terminer par "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Cha\xEEne invalide : doit inclure "${_issue.includes}"`; + if (_issue.format === "regex") + return `Cha\xEEne invalide : doit correspondre au mod\xE8le ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? issue2.format} invalide`; + } + case "not_multiple_of": + return `Nombre invalide : doit \xEAtre un multiple de ${issue2.divisor}`; + case "unrecognized_keys": + return `Cl\xE9${issue2.keys.length > 1 ? "s" : ""} non reconnue${issue2.keys.length > 1 ? "s" : ""} : ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Cl\xE9 invalide dans ${issue2.origin}`; + case "invalid_union": + return "Entr\xE9e invalide"; + case "invalid_element": + return `Valeur invalide dans ${issue2.origin}`; + default: + return `Entr\xE9e invalide`; + } + }; +}; +function fr_default() { + return { + localeError: error14() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fr-CA.js +var error15 = () => { + const Sizable = { + string: { unit: "caract\xE8res", verb: "avoir" }, + file: { unit: "octets", verb: "avoir" }, + array: { unit: "\xE9l\xE9ments", verb: "avoir" }, + set: { unit: "\xE9l\xE9ments", verb: "avoir" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "entr\xE9e", + email: "adresse courriel", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "date-heure ISO", + date: "date ISO", + time: "heure ISO", + duration: "dur\xE9e ISO", + ipv4: "adresse IPv4", + ipv6: "adresse IPv6", + cidrv4: "plage IPv4", + cidrv6: "plage IPv6", + base64: "cha\xEEne encod\xE9e en base64", + base64url: "cha\xEEne encod\xE9e en base64url", + json_string: "cha\xEEne JSON", + e164: "num\xE9ro E.164", + jwt: "JWT", + template_literal: "entr\xE9e" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Entr\xE9e invalide : attendu instanceof ${issue2.expected}, re\xE7u ${received}`; + } + return `Entr\xE9e invalide : attendu ${expected}, re\xE7u ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Entr\xE9e invalide : attendu ${stringifyPrimitive(issue2.values[0])}`; + return `Option invalide : attendu l'une des valeurs suivantes ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "\u2264" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Trop grand : attendu que ${issue2.origin ?? "la valeur"} ait ${adj}${issue2.maximum.toString()} ${sizing.unit}`; + return `Trop grand : attendu que ${issue2.origin ?? "la valeur"} soit ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
"\u2265" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Trop petit : attendu que ${issue2.origin} ait ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Trop petit : attendu que ${issue2.origin} soit ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Cha\xEEne invalide : doit commencer par "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `Cha\xEEne invalide : doit se terminer par "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Cha\xEEne invalide : doit inclure "${_issue.includes}"`; + if (_issue.format === "regex") + return `Cha\xEEne invalide : doit correspondre au motif ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? issue2.format} invalide`; + } + case "not_multiple_of": + return `Nombre invalide : doit \xEAtre un multiple de ${issue2.divisor}`; + case "unrecognized_keys": + return `Cl\xE9${issue2.keys.length > 1 ? "s" : ""} non reconnue${issue2.keys.length > 1 ? 
"s" : ""} : ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Cl\xE9 invalide dans ${issue2.origin}`; + case "invalid_union": + return "Entr\xE9e invalide"; + case "invalid_element": + return `Valeur invalide dans ${issue2.origin}`; + default: + return `Entr\xE9e invalide`; + } + }; +}; +function fr_CA_default() { + return { + localeError: error15() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/he.js +var error16 = () => { + const TypeNames = { + string: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA", gender: "f" }, + number: { label: "\u05DE\u05E1\u05E4\u05E8", gender: "m" }, + boolean: { label: "\u05E2\u05E8\u05DA \u05D1\u05D5\u05DC\u05D9\u05D0\u05E0\u05D9", gender: "m" }, + bigint: { label: "BigInt", gender: "m" }, + date: { label: "\u05EA\u05D0\u05E8\u05D9\u05DA", gender: "m" }, + array: { label: "\u05DE\u05E2\u05E8\u05DA", gender: "m" }, + object: { label: "\u05D0\u05D5\u05D1\u05D9\u05D9\u05E7\u05D8", gender: "m" }, + null: { label: "\u05E2\u05E8\u05DA \u05E8\u05D9\u05E7 (null)", gender: "m" }, + undefined: { label: "\u05E2\u05E8\u05DA \u05DC\u05D0 \u05DE\u05D5\u05D2\u05D3\u05E8 (undefined)", gender: "m" }, + symbol: { label: "\u05E1\u05D9\u05DE\u05D1\u05D5\u05DC (Symbol)", gender: "m" }, + function: { label: "\u05E4\u05D5\u05E0\u05E7\u05E6\u05D9\u05D4", gender: "f" }, + map: { label: "\u05DE\u05E4\u05D4 (Map)", gender: "f" }, + set: { label: "\u05E7\u05D1\u05D5\u05E6\u05D4 (Set)", gender: "f" }, + file: { label: "\u05E7\u05D5\u05D1\u05E5", gender: "m" }, + promise: { label: "Promise", gender: "m" }, + NaN: { label: "NaN", gender: "m" }, + unknown: { label: "\u05E2\u05E8\u05DA \u05DC\u05D0 \u05D9\u05D3\u05D5\u05E2", gender: "m" }, + value: { label: "\u05E2\u05E8\u05DA", gender: "m" } + }; + const Sizable = { + string: { unit: "\u05EA\u05D5\u05D5\u05D9\u05DD", shortLabel: "\u05E7\u05E6\u05E8", longLabel: "\u05D0\u05E8\u05D5\u05DA" }, + file: { unit: "\u05D1\u05D9\u05D9\u05D8\u05D9\u05DD", shortLabel: 
"\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" }, + array: { unit: "\u05E4\u05E8\u05D9\u05D8\u05D9\u05DD", shortLabel: "\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" }, + set: { unit: "\u05E4\u05E8\u05D9\u05D8\u05D9\u05DD", shortLabel: "\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" }, + number: { unit: "", shortLabel: "\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" } + // no unit + }; + const typeEntry = (t) => t ? TypeNames[t] : void 0; + const typeLabel = (t) => { + const e = typeEntry(t); + if (e) + return e.label; + return t ?? TypeNames.unknown.label; + }; + const withDefinite = (t) => `\u05D4${typeLabel(t)}`; + const verbFor = (t) => { + const e = typeEntry(t); + const gender = e?.gender ?? "m"; + return gender === "f" ? "\u05E6\u05E8\u05D9\u05DB\u05D4 \u05DC\u05D4\u05D9\u05D5\u05EA" : "\u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA"; + }; + const getSizing = (origin) => { + if (!origin) + return null; + return Sizable[origin] ?? 
null; + }; + const FormatDictionary = { + regex: { label: "\u05E7\u05DC\u05D8", gender: "m" }, + email: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA \u05D0\u05D9\u05DE\u05D9\u05D9\u05DC", gender: "f" }, + url: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA \u05E8\u05E9\u05EA", gender: "f" }, + emoji: { label: "\u05D0\u05D9\u05DE\u05D5\u05D2'\u05D9", gender: "m" }, + uuid: { label: "UUID", gender: "m" }, + nanoid: { label: "nanoid", gender: "m" }, + guid: { label: "GUID", gender: "m" }, + cuid: { label: "cuid", gender: "m" }, + cuid2: { label: "cuid2", gender: "m" }, + ulid: { label: "ULID", gender: "m" }, + xid: { label: "XID", gender: "m" }, + ksuid: { label: "KSUID", gender: "m" }, + datetime: { label: "\u05EA\u05D0\u05E8\u05D9\u05DA \u05D5\u05D6\u05DE\u05DF ISO", gender: "m" }, + date: { label: "\u05EA\u05D0\u05E8\u05D9\u05DA ISO", gender: "m" }, + time: { label: "\u05D6\u05DE\u05DF ISO", gender: "m" }, + duration: { label: "\u05DE\u05E9\u05DA \u05D6\u05DE\u05DF ISO", gender: "m" }, + ipv4: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA IPv4", gender: "f" }, + ipv6: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA IPv6", gender: "f" }, + cidrv4: { label: "\u05D8\u05D5\u05D5\u05D7 IPv4", gender: "m" }, + cidrv6: { label: "\u05D8\u05D5\u05D5\u05D7 IPv6", gender: "m" }, + base64: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D1\u05D1\u05E1\u05D9\u05E1 64", gender: "f" }, + base64url: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D1\u05D1\u05E1\u05D9\u05E1 64 \u05DC\u05DB\u05EA\u05D5\u05D1\u05D5\u05EA \u05E8\u05E9\u05EA", gender: "f" }, + json_string: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA JSON", gender: "f" }, + e164: { label: "\u05DE\u05E1\u05E4\u05E8 E.164", gender: "m" }, + jwt: { label: "JWT", gender: "m" }, + ends_with: { label: "\u05E7\u05DC\u05D8", gender: "m" }, + includes: { label: "\u05E7\u05DC\u05D8", gender: "m" }, + lowercase: { label: "\u05E7\u05DC\u05D8", gender: "m" }, + starts_with: { label: "\u05E7\u05DC\u05D8", gender: "m" }, + uppercase: { label: 
"\u05E7\u05DC\u05D8", gender: "m" } + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expectedKey = issue2.expected; + const expected = TypeDictionary[expectedKey ?? ""] ?? typeLabel(expectedKey); + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? TypeNames[receivedType]?.label ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA instanceof ${issue2.expected}, \u05D4\u05EA\u05E7\u05D1\u05DC ${received}`; + } + return `\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA ${expected}, \u05D4\u05EA\u05E7\u05D1\u05DC ${received}`; + } + case "invalid_value": { + if (issue2.values.length === 1) { + return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D4\u05E2\u05E8\u05DA \u05D7\u05D9\u05D9\u05D1 \u05DC\u05D4\u05D9\u05D5\u05EA ${stringifyPrimitive(issue2.values[0])}`; + } + const stringified = issue2.values.map((v) => stringifyPrimitive(v)); + if (issue2.values.length === 2) { + return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D4\u05D0\u05E4\u05E9\u05E8\u05D5\u05D9\u05D5\u05EA \u05D4\u05DE\u05EA\u05D0\u05D9\u05DE\u05D5\u05EA \u05D4\u05DF ${stringified[0]} \u05D0\u05D5 ${stringified[1]}`; + } + const lastValue = stringified[stringified.length - 1]; + const restValues = stringified.slice(0, -1).join(", "); + return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D4\u05D0\u05E4\u05E9\u05E8\u05D5\u05D9\u05D5\u05EA \u05D4\u05DE\u05EA\u05D0\u05D9\u05DE\u05D5\u05EA \u05D4\u05DF ${restValues} \u05D0\u05D5 ${lastValue}`; + } + case "too_big": { + const sizing = getSizing(issue2.origin); + const subject = withDefinite(issue2.origin ?? "value"); + if (issue2.origin === "string") { + return `${sizing?.longLabel ?? 
"\u05D0\u05E8\u05D5\u05DA"} \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DB\u05D4 \u05DC\u05D4\u05DB\u05D9\u05DC ${issue2.maximum.toString()} ${sizing?.unit ?? ""} ${issue2.inclusive ? "\u05D0\u05D5 \u05E4\u05D7\u05D5\u05EA" : "\u05DC\u05DB\u05DC \u05D4\u05D9\u05D5\u05EA\u05E8"}`.trim(); + } + if (issue2.origin === "number") { + const comparison = issue2.inclusive ? `\u05E7\u05D8\u05DF \u05D0\u05D5 \u05E9\u05D5\u05D5\u05D4 \u05DC-${issue2.maximum}` : `\u05E7\u05D8\u05DF \u05DE-${issue2.maximum}`; + return `\u05D2\u05D3\u05D5\u05DC \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA ${comparison}`; + } + if (issue2.origin === "array" || issue2.origin === "set") { + const verb = issue2.origin === "set" ? "\u05E6\u05E8\u05D9\u05DB\u05D4" : "\u05E6\u05E8\u05D9\u05DA"; + const comparison = issue2.inclusive ? `${issue2.maximum} ${sizing?.unit ?? ""} \u05D0\u05D5 \u05E4\u05D7\u05D5\u05EA` : `\u05E4\u05D7\u05D5\u05EA \u05DE-${issue2.maximum} ${sizing?.unit ?? ""}`; + return `\u05D2\u05D3\u05D5\u05DC \u05DE\u05D3\u05D9: ${subject} ${verb} \u05DC\u05D4\u05DB\u05D9\u05DC ${comparison}`.trim(); + } + const adj = issue2.inclusive ? "<=" : "<"; + const be = verbFor(issue2.origin ?? "value"); + if (sizing?.unit) { + return `${sizing.longLabel} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.maximum.toString()} ${sizing.unit}`; + } + return `${sizing?.longLabel ?? "\u05D2\u05D3\u05D5\u05DC"} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const sizing = getSizing(issue2.origin); + const subject = withDefinite(issue2.origin ?? "value"); + if (issue2.origin === "string") { + return `${sizing?.shortLabel ?? "\u05E7\u05E6\u05E8"} \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DB\u05D4 \u05DC\u05D4\u05DB\u05D9\u05DC ${issue2.minimum.toString()} ${sizing?.unit ?? ""} ${issue2.inclusive ? 
"\u05D0\u05D5 \u05D9\u05D5\u05EA\u05E8" : "\u05DC\u05E4\u05D7\u05D5\u05EA"}`.trim(); + } + if (issue2.origin === "number") { + const comparison = issue2.inclusive ? `\u05D2\u05D3\u05D5\u05DC \u05D0\u05D5 \u05E9\u05D5\u05D5\u05D4 \u05DC-${issue2.minimum}` : `\u05D2\u05D3\u05D5\u05DC \u05DE-${issue2.minimum}`; + return `\u05E7\u05D8\u05DF \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA ${comparison}`; + } + if (issue2.origin === "array" || issue2.origin === "set") { + const verb = issue2.origin === "set" ? "\u05E6\u05E8\u05D9\u05DB\u05D4" : "\u05E6\u05E8\u05D9\u05DA"; + if (issue2.minimum === 1 && issue2.inclusive) { + const singularPhrase = issue2.origin === "set" ? "\u05DC\u05E4\u05D7\u05D5\u05EA \u05E4\u05E8\u05D9\u05D8 \u05D0\u05D7\u05D3" : "\u05DC\u05E4\u05D7\u05D5\u05EA \u05E4\u05E8\u05D9\u05D8 \u05D0\u05D7\u05D3"; + return `\u05E7\u05D8\u05DF \u05DE\u05D3\u05D9: ${subject} ${verb} \u05DC\u05D4\u05DB\u05D9\u05DC ${singularPhrase}`; + } + const comparison = issue2.inclusive ? `${issue2.minimum} ${sizing?.unit ?? ""} \u05D0\u05D5 \u05D9\u05D5\u05EA\u05E8` : `\u05D9\u05D5\u05EA\u05E8 \u05DE-${issue2.minimum} ${sizing?.unit ?? ""}`; + return `\u05E7\u05D8\u05DF \u05DE\u05D3\u05D9: ${subject} ${verb} \u05DC\u05D4\u05DB\u05D9\u05DC ${comparison}`.trim(); + } + const adj = issue2.inclusive ? ">=" : ">"; + const be = verbFor(issue2.origin ?? "value"); + if (sizing?.unit) { + return `${sizing.shortLabel} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `${sizing?.shortLabel ?? 
"\u05E7\u05D8\u05DF"} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05D4\u05EA\u05D7\u05D9\u05DC \u05D1 "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05D4\u05E1\u05EA\u05D9\u05D9\u05DD \u05D1 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05DB\u05DC\u05D5\u05DC "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05D4\u05EA\u05D0\u05D9\u05DD \u05DC\u05EA\u05D1\u05E0\u05D9\u05EA ${_issue.pattern}`; + const nounEntry = FormatDictionary[_issue.format]; + const noun = nounEntry?.label ?? _issue.format; + const gender = nounEntry?.gender ?? "m"; + const adjective = gender === "f" ? "\u05EA\u05E7\u05D9\u05E0\u05D4" : "\u05EA\u05E7\u05D9\u05DF"; + return `${noun} \u05DC\u05D0 ${adjective}`; + } + case "not_multiple_of": + return `\u05DE\u05E1\u05E4\u05E8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D7\u05D9\u05D9\u05D1 \u05DC\u05D4\u05D9\u05D5\u05EA \u05DE\u05DB\u05E4\u05DC\u05D4 \u05E9\u05DC ${issue2.divisor}`; + case "unrecognized_keys": + return `\u05DE\u05E4\u05EA\u05D7${issue2.keys.length > 1 ? "\u05D5\u05EA" : ""} \u05DC\u05D0 \u05DE\u05D6\u05D5\u05D4${issue2.keys.length > 1 ? "\u05D9\u05DD" : "\u05D4"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": { + return `\u05E9\u05D3\u05D4 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF \u05D1\u05D0\u05D5\u05D1\u05D9\u05D9\u05E7\u05D8`; + } + case "invalid_union": + return "\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF"; + case "invalid_element": { + const place = withDefinite(issue2.origin ?? 
"array"); + return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF \u05D1${place}`; + } + default: + return `\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF`; + } + }; +}; +function he_default() { + return { + localeError: error16() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/hu.js +var error17 = () => { + const Sizable = { + string: { unit: "karakter", verb: "legyen" }, + file: { unit: "byte", verb: "legyen" }, + array: { unit: "elem", verb: "legyen" }, + set: { unit: "elem", verb: "legyen" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "bemenet", + email: "email c\xEDm", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO id\u0151b\xE9lyeg", + date: "ISO d\xE1tum", + time: "ISO id\u0151", + duration: "ISO id\u0151intervallum", + ipv4: "IPv4 c\xEDm", + ipv6: "IPv6 c\xEDm", + cidrv4: "IPv4 tartom\xE1ny", + cidrv6: "IPv6 tartom\xE1ny", + base64: "base64-k\xF3dolt string", + base64url: "base64url-k\xF3dolt string", + json_string: "JSON string", + e164: "E.164 sz\xE1m", + jwt: "JWT", + template_literal: "bemenet" + }; + const TypeDictionary = { + nan: "NaN", + number: "sz\xE1m", + array: "t\xF6mb" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\xC9rv\xE9nytelen bemenet: a v\xE1rt \xE9rt\xE9k instanceof ${issue2.expected}, a kapott \xE9rt\xE9k ${received}`; + } + return `\xC9rv\xE9nytelen bemenet: a v\xE1rt \xE9rt\xE9k ${expected}, a kapott \xE9rt\xE9k ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\xC9rv\xE9nytelen bemenet: a v\xE1rt \xE9rt\xE9k ${stringifyPrimitive(issue2.values[0])}`; + return `\xC9rv\xE9nytelen opci\xF3: valamelyik \xE9rt\xE9k v\xE1rt ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `T\xFAl nagy: ${issue2.origin ?? "\xE9rt\xE9k"} m\xE9rete t\xFAl nagy ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elem"}`; + return `T\xFAl nagy: a bemeneti \xE9rt\xE9k ${issue2.origin ?? "\xE9rt\xE9k"} t\xFAl nagy: ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `T\xFAl kicsi: a bemeneti \xE9rt\xE9k ${issue2.origin} m\xE9rete t\xFAl kicsi ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `T\xFAl kicsi: a bemeneti \xE9rt\xE9k ${issue2.origin} t\xFAl kicsi ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\xC9rv\xE9nytelen string: "${_issue.prefix}" \xE9rt\xE9kkel kell kezd\u0151dnie`; + if (_issue.format === "ends_with") + return `\xC9rv\xE9nytelen string: "${_issue.suffix}" \xE9rt\xE9kkel kell v\xE9gz\u0151dnie`; + if (_issue.format === "includes") + return `\xC9rv\xE9nytelen string: "${_issue.includes}" \xE9rt\xE9ket kell tartalmaznia`; + if (_issue.format === "regex") + return `\xC9rv\xE9nytelen string: ${_issue.pattern} mint\xE1nak kell megfelelnie`; + return `\xC9rv\xE9nytelen ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\xC9rv\xE9nytelen sz\xE1m: ${issue2.divisor} t\xF6bbsz\xF6r\xF6s\xE9nek kell lennie`; + case "unrecognized_keys": + return `Ismeretlen kulcs${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\xC9rv\xE9nytelen kulcs ${issue2.origin}`; + case "invalid_union": + return "\xC9rv\xE9nytelen bemenet"; + case "invalid_element": + return `\xC9rv\xE9nytelen \xE9rt\xE9k: ${issue2.origin}`; + default: + return `\xC9rv\xE9nytelen bemenet`; + } + }; +}; +function hu_default() { + return { + localeError: error17() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/hy.js +function getArmenianPlural(count, one, many) { + return Math.abs(count) === 1 ? one : many; +} +function withDefiniteArticle(word) { + if (!word) + return ""; + const vowels = ["\u0561", "\u0565", "\u0568", "\u056B", "\u0578", "\u0578\u0582", "\u0585"]; + const lastChar = word[word.length - 1]; + return word + (vowels.includes(lastChar) ? "\u0576" : "\u0568"); +} +var error18 = () => { + const Sizable = { + string: { + unit: { + one: "\u0576\u0577\u0561\u0576", + many: "\u0576\u0577\u0561\u0576\u0576\u0565\u0580" + }, + verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" + }, + file: { + unit: { + one: "\u0562\u0561\u0575\u0569", + many: "\u0562\u0561\u0575\u0569\u0565\u0580" + }, + verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" + }, + array: { + unit: { + one: "\u057F\u0561\u0580\u0580", + many: "\u057F\u0561\u0580\u0580\u0565\u0580" + }, + verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" + }, + set: { + unit: { + one: "\u057F\u0561\u0580\u0580", + many: "\u057F\u0561\u0580\u0580\u0565\u0580" + }, + verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" + } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u0574\u0578\u0582\u057F\u0584", + email: "\u0567\u056C. 
\u0570\u0561\u057D\u0581\u0565", + url: "URL", + emoji: "\u0567\u0574\u0578\u057B\u056B", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u0561\u0574\u057D\u0561\u0569\u056B\u057E \u0587 \u056A\u0561\u0574", + date: "ISO \u0561\u0574\u057D\u0561\u0569\u056B\u057E", + time: "ISO \u056A\u0561\u0574", + duration: "ISO \u057F\u0587\u0578\u0572\u0578\u0582\u0569\u0575\u0578\u0582\u0576", + ipv4: "IPv4 \u0570\u0561\u057D\u0581\u0565", + ipv6: "IPv6 \u0570\u0561\u057D\u0581\u0565", + cidrv4: "IPv4 \u0574\u056B\u057B\u0561\u056F\u0561\u0575\u0584", + cidrv6: "IPv6 \u0574\u056B\u057B\u0561\u056F\u0561\u0575\u0584", + base64: "base64 \u0571\u0587\u0561\u0579\u0561\u0583\u0578\u057E \u057F\u0578\u0572", + base64url: "base64url \u0571\u0587\u0561\u0579\u0561\u0583\u0578\u057E \u057F\u0578\u0572", + json_string: "JSON \u057F\u0578\u0572", + e164: "E.164 \u0570\u0561\u0574\u0561\u0580", + jwt: "JWT", + template_literal: "\u0574\u0578\u0582\u057F\u0584" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0569\u056B\u057E", + array: "\u0566\u0561\u0576\u0563\u057E\u0561\u056E" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 instanceof ${issue2.expected}, \u057D\u057F\u0561\u0581\u057E\u0565\u056C \u0567 ${received}`; + } + return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 ${expected}, \u057D\u057F\u0561\u0581\u057E\u0565\u056C \u0567 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 ${stringifyPrimitive(issue2.values[1])}`; + return `\u054D\u056D\u0561\u056C \u057F\u0561\u0580\u0562\u0565\u0580\u0561\u056F\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 \u0570\u0565\u057F\u0587\u0575\u0561\u056C\u0576\u0565\u0580\u056B\u0581 \u0574\u0565\u056F\u0568\u055D ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + const maxValue = Number(issue2.maximum); + const unit = getArmenianPlural(maxValue, sizing.unit.one, sizing.unit.many); + return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0574\u0565\u056E \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin ?? "\u0561\u0580\u056A\u0565\u0584")} \u056F\u0578\u0582\u0576\u0565\u0576\u0561 ${adj}${issue2.maximum.toString()} ${unit}`; + } + return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0574\u0565\u056E \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin ?? 
"\u0561\u0580\u056A\u0565\u0584")} \u056C\u056B\u0576\u056B ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + const minValue = Number(issue2.minimum); + const unit = getArmenianPlural(minValue, sizing.unit.one, sizing.unit.many); + return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0583\u0578\u0584\u0580 \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin)} \u056F\u0578\u0582\u0576\u0565\u0576\u0561 ${adj}${issue2.minimum.toString()} ${unit}`; + } + return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0583\u0578\u0584\u0580 \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin)} \u056C\u056B\u0576\u056B ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u057D\u056F\u057D\u057E\u056B "${_issue.prefix}"-\u0578\u057E`; + if (_issue.format === "ends_with") + return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u0561\u057E\u0561\u0580\u057F\u057E\u056B "${_issue.suffix}"-\u0578\u057E`; + if (_issue.format === "includes") + return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u057A\u0561\u0580\u0578\u0582\u0576\u0561\u056F\u056B "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u0570\u0561\u0574\u0561\u057A\u0561\u057F\u0561\u057D\u056D\u0561\u0576\u056B ${_issue.pattern} \u0571\u0587\u0561\u0579\u0561\u0583\u056B\u0576`; + return `\u054D\u056D\u0561\u056C ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\u054D\u056D\u0561\u056C \u0569\u056B\u057E\u2024 \u057A\u0565\u057F\u0584 \u0567 \u0562\u0561\u0566\u0574\u0561\u057A\u0561\u057F\u056B\u056F \u056C\u056B\u0576\u056B ${issue2.divisor}-\u056B`; + case "unrecognized_keys": + return `\u0549\u0573\u0561\u0576\u0561\u0579\u057E\u0561\u056E \u0562\u0561\u0576\u0561\u056C\u056B${issue2.keys.length > 1 ? "\u0576\u0565\u0580" : ""}. ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u054D\u056D\u0561\u056C \u0562\u0561\u0576\u0561\u056C\u056B ${withDefiniteArticle(issue2.origin)}-\u0578\u0582\u0574`; + case "invalid_union": + return "\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574"; + case "invalid_element": + return `\u054D\u056D\u0561\u056C \u0561\u0580\u056A\u0565\u0584 ${withDefiniteArticle(issue2.origin)}-\u0578\u0582\u0574`; + default: + return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574`; + } + }; +}; +function hy_default() { + return { + localeError: error18() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/id.js +var error19 = () => { + const Sizable = { + string: { unit: "karakter", verb: "memiliki" }, + file: { unit: "byte", verb: "memiliki" }, + array: { unit: "item", verb: "memiliki" }, + set: { unit: "item", verb: "memiliki" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "input", + email: "alamat email", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "tanggal dan waktu format ISO", + date: "tanggal format ISO", + time: "jam format ISO", + duration: "durasi format ISO", + ipv4: "alamat IPv4", + ipv6: "alamat IPv6", + cidrv4: "rentang alamat IPv4", + cidrv6: "rentang alamat IPv6", + base64: "string dengan enkode base64", + base64url: "string dengan enkode base64url", + json_string: "string JSON", + e164: "angka E.164", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Input tidak valid: diharapkan instanceof ${issue2.expected}, diterima ${received}`; + } + return `Input tidak valid: diharapkan ${expected}, diterima ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Input tidak valid: diharapkan ${stringifyPrimitive(issue2.values[0])}`; + return `Pilihan tidak valid: diharapkan salah satu dari ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Terlalu besar: diharapkan ${issue2.origin ?? "value"} memiliki ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elemen"}`; + return `Terlalu besar: diharapkan ${issue2.origin ?? "value"} menjadi ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Terlalu kecil: diharapkan ${issue2.origin} memiliki ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Terlalu kecil: diharapkan ${issue2.origin} menjadi ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `String tidak valid: harus dimulai dengan "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `String tidak valid: harus berakhir dengan "${_issue.suffix}"`; + if (_issue.format === "includes") + return `String tidak valid: harus menyertakan "${_issue.includes}"`; + if (_issue.format === "regex") + return `String tidak valid: harus sesuai pola ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? issue2.format} tidak valid`; + } + case "not_multiple_of": + return `Angka tidak valid: harus kelipatan dari ${issue2.divisor}`; + case "unrecognized_keys": + return `Kunci tidak dikenali ${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Kunci tidak valid di ${issue2.origin}`; + case "invalid_union": + return "Input tidak valid"; + case "invalid_element": + return `Nilai tidak valid di ${issue2.origin}`; + default: + return `Input tidak valid`; + } + }; +}; +function id_default() { + return { + localeError: error19() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/is.js +var error20 = () => { + const Sizable = { + string: { unit: "stafi", verb: "a\xF0 hafa" }, + file: { unit: "b\xE6ti", verb: "a\xF0 hafa" }, + array: { unit: "hluti", verb: "a\xF0 hafa" }, + set: { unit: "hluti", verb: "a\xF0 hafa" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "gildi", + email: "netfang", + url: "vefsl\xF3\xF0", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO dagsetning og t\xEDmi", + date: "ISO dagsetning", + time: "ISO t\xEDmi", + duration: "ISO t\xEDmalengd", + ipv4: "IPv4 address", + ipv6: "IPv6 address", + cidrv4: "IPv4 range", + cidrv6: "IPv6 range", + base64: "base64-encoded strengur", + base64url: "base64url-encoded strengur", + json_string: "JSON strengur", + e164: "E.164 t\xF6lugildi", + jwt: "JWT", + template_literal: "gildi" + }; + const TypeDictionary = { + nan: "NaN", + number: "n\xFAmer", + array: "fylki" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Rangt gildi: \xDE\xFA sl\xF3st inn ${received} \xFEar sem \xE1 a\xF0 vera instanceof ${issue2.expected}`; + } + return `Rangt gildi: \xDE\xFA sl\xF3st inn ${received} \xFEar sem \xE1 a\xF0 vera ${expected}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Rangt gildi: gert r\xE1\xF0 fyrir ${stringifyPrimitive(issue2.values[0])}`; + return `\xD3gilt val: m\xE1 vera eitt af eftirfarandi ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Of st\xF3rt: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin ?? "gildi"} hafi ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "hluti"}`; + return `Of st\xF3rt: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin ?? "gildi"} s\xE9 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Of l\xEDti\xF0: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin} hafi ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Of l\xEDti\xF0: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin} s\xE9 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\xD3gildur strengur: ver\xF0ur a\xF0 byrja \xE1 "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `\xD3gildur strengur: ver\xF0ur a\xF0 enda \xE1 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\xD3gildur strengur: ver\xF0ur a\xF0 innihalda "${_issue.includes}"`; + if (_issue.format === "regex") + return `\xD3gildur strengur: ver\xF0ur a\xF0 fylgja mynstri ${_issue.pattern}`; + return `Rangt ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `R\xF6ng tala: ver\xF0ur a\xF0 vera margfeldi af ${issue2.divisor}`; + case "unrecognized_keys": + return `\xD3\xFEekkt ${issue2.keys.length > 1 ? "ir lyklar" : "ur lykill"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Rangur lykill \xED ${issue2.origin}`; + case "invalid_union": + return "Rangt gildi"; + case "invalid_element": + return `Rangt gildi \xED ${issue2.origin}`; + default: + return `Rangt gildi`; + } + }; +}; +function is_default() { + return { + localeError: error20() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/it.js +var error21 = () => { + const Sizable = { + string: { unit: "caratteri", verb: "avere" }, + file: { unit: "byte", verb: "avere" }, + array: { unit: "elementi", verb: "avere" }, + set: { unit: "elementi", verb: "avere" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "input", + email: "indirizzo email", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "data e ora ISO", + date: "data ISO", + time: "ora ISO", + duration: "durata ISO", + ipv4: "indirizzo IPv4", + ipv6: "indirizzo IPv6", + cidrv4: "intervallo IPv4", + cidrv6: "intervallo IPv6", + base64: "stringa codificata in base64", + base64url: "URL codificata in base64", + json_string: "stringa JSON", + e164: "numero E.164", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN", + number: "numero", + array: "vettore" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Input non valido: atteso instanceof ${issue2.expected}, ricevuto ${received}`; + } + return `Input non valido: atteso ${expected}, ricevuto ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Input non valido: atteso ${stringifyPrimitive(issue2.values[0])}`; + return `Opzione non valida: atteso uno tra ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Troppo grande: ${issue2.origin ?? "valore"} deve avere ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementi"}`; + return `Troppo grande: ${issue2.origin ?? "valore"} deve essere ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Troppo piccolo: ${issue2.origin} deve avere ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Troppo piccolo: ${issue2.origin} deve essere ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Stringa non valida: deve iniziare con "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Stringa non valida: deve terminare con "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Stringa non valida: deve includere "${_issue.includes}"`; + if (_issue.format === "regex") + return `Stringa non valida: deve corrispondere al pattern ${_issue.pattern}`; + return `Invalid ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Numero non valido: deve essere un multiplo di ${issue2.divisor}`; + case "unrecognized_keys": + return `Chiav${issue2.keys.length > 1 ? "i" : "e"} non riconosciut${issue2.keys.length > 1 ? "e" : "a"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Chiave non valida in ${issue2.origin}`; + case "invalid_union": + return "Input non valido"; + case "invalid_element": + return `Valore non valido in ${issue2.origin}`; + default: + return `Input non valido`; + } + }; +}; +function it_default() { + return { + localeError: error21() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ja.js +var error22 = () => { + const Sizable = { + string: { unit: "\u6587\u5B57", verb: "\u3067\u3042\u308B" }, + file: { unit: "\u30D0\u30A4\u30C8", verb: "\u3067\u3042\u308B" }, + array: { unit: "\u8981\u7D20", verb: "\u3067\u3042\u308B" }, + set: { unit: "\u8981\u7D20", verb: "\u3067\u3042\u308B" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u5165\u529B\u5024", + email: "\u30E1\u30FC\u30EB\u30A2\u30C9\u30EC\u30B9", + url: "URL", + emoji: "\u7D75\u6587\u5B57", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO\u65E5\u6642", + date: "ISO\u65E5\u4ED8", + time: "ISO\u6642\u523B", + duration: "ISO\u671F\u9593", + ipv4: "IPv4\u30A2\u30C9\u30EC\u30B9", + ipv6: "IPv6\u30A2\u30C9\u30EC\u30B9", + cidrv4: "IPv4\u7BC4\u56F2", + cidrv6: "IPv6\u7BC4\u56F2", + base64: "base64\u30A8\u30F3\u30B3\u30FC\u30C9\u6587\u5B57\u5217", + base64url: "base64url\u30A8\u30F3\u30B3\u30FC\u30C9\u6587\u5B57\u5217", + json_string: "JSON\u6587\u5B57\u5217", + e164: "E.164\u756A\u53F7", + jwt: "JWT", + template_literal: "\u5165\u529B\u5024" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u6570\u5024", + array: "\u914D\u5217" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u7121\u52B9\u306A\u5165\u529B: instanceof ${issue2.expected}\u304C\u671F\u5F85\u3055\u308C\u307E\u3057\u305F\u304C\u3001${received}\u304C\u5165\u529B\u3055\u308C\u307E\u3057\u305F`; + } + return `\u7121\u52B9\u306A\u5165\u529B: ${expected}\u304C\u671F\u5F85\u3055\u308C\u307E\u3057\u305F\u304C\u3001${received}\u304C\u5165\u529B\u3055\u308C\u307E\u3057\u305F`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u7121\u52B9\u306A\u5165\u529B: ${stringifyPrimitive(issue2.values[0])}\u304C\u671F\u5F85\u3055\u308C\u307E\u3057\u305F`; + return `\u7121\u52B9\u306A\u9078\u629E: ${joinValues(issue2.values, "\u3001")}\u306E\u3044\u305A\u308C\u304B\u3067\u3042\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + case "too_big": { + const adj = issue2.inclusive ? "\u4EE5\u4E0B\u3067\u3042\u308B" : "\u3088\u308A\u5C0F\u3055\u3044"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u5927\u304D\u3059\u304E\u308B\u5024: ${issue2.origin ?? "\u5024"}\u306F${issue2.maximum.toString()}${sizing.unit ?? "\u8981\u7D20"}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + return `\u5927\u304D\u3059\u304E\u308B\u5024: ${issue2.origin ?? "\u5024"}\u306F${issue2.maximum.toString()}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + } + case "too_small": { + const adj = issue2.inclusive ? 
"\u4EE5\u4E0A\u3067\u3042\u308B" : "\u3088\u308A\u5927\u304D\u3044"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u5C0F\u3055\u3059\u304E\u308B\u5024: ${issue2.origin}\u306F${issue2.minimum.toString()}${sizing.unit}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + return `\u5C0F\u3055\u3059\u304E\u308B\u5024: ${issue2.origin}\u306F${issue2.minimum.toString()}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u7121\u52B9\u306A\u6587\u5B57\u5217: "${_issue.prefix}"\u3067\u59CB\u307E\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + if (_issue.format === "ends_with") + return `\u7121\u52B9\u306A\u6587\u5B57\u5217: "${_issue.suffix}"\u3067\u7D42\u308F\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + if (_issue.format === "includes") + return `\u7121\u52B9\u306A\u6587\u5B57\u5217: "${_issue.includes}"\u3092\u542B\u3080\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + if (_issue.format === "regex") + return `\u7121\u52B9\u306A\u6587\u5B57\u5217: \u30D1\u30BF\u30FC\u30F3${_issue.pattern}\u306B\u4E00\u81F4\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + return `\u7121\u52B9\u306A${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u7121\u52B9\u306A\u6570\u5024: ${issue2.divisor}\u306E\u500D\u6570\u3067\u3042\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; + case "unrecognized_keys": + return `\u8A8D\u8B58\u3055\u308C\u3066\u3044\u306A\u3044\u30AD\u30FC${issue2.keys.length > 1 ? 
"\u7FA4" : ""}: ${joinValues(issue2.keys, "\u3001")}`; + case "invalid_key": + return `${issue2.origin}\u5185\u306E\u7121\u52B9\u306A\u30AD\u30FC`; + case "invalid_union": + return "\u7121\u52B9\u306A\u5165\u529B"; + case "invalid_element": + return `${issue2.origin}\u5185\u306E\u7121\u52B9\u306A\u5024`; + default: + return `\u7121\u52B9\u306A\u5165\u529B`; + } + }; +}; +function ja_default() { + return { + localeError: error22() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ka.js +var error23 = () => { + const Sizable = { + string: { unit: "\u10E1\u10D8\u10DB\u10D1\u10DD\u10DA\u10DD", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" }, + file: { unit: "\u10D1\u10D0\u10D8\u10E2\u10D8", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" }, + array: { unit: "\u10D4\u10DA\u10D4\u10DB\u10D4\u10DC\u10E2\u10D8", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" }, + set: { unit: "\u10D4\u10DA\u10D4\u10DB\u10D4\u10DC\u10E2\u10D8", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0", + email: "\u10D4\u10DA-\u10E4\u10DD\u10E1\u10E2\u10D8\u10E1 \u10DB\u10D8\u10E1\u10D0\u10DB\u10D0\u10E0\u10D7\u10D8", + url: "URL", + emoji: "\u10D4\u10DB\u10DD\u10EF\u10D8", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u10D7\u10D0\u10E0\u10D8\u10E6\u10D8-\u10D3\u10E0\u10DD", + date: "\u10D7\u10D0\u10E0\u10D8\u10E6\u10D8", + time: "\u10D3\u10E0\u10DD", + duration: "\u10EE\u10D0\u10DC\u10D2\u10E0\u10EB\u10DA\u10D8\u10D5\u10DD\u10D1\u10D0", + ipv4: "IPv4 \u10DB\u10D8\u10E1\u10D0\u10DB\u10D0\u10E0\u10D7\u10D8", + ipv6: "IPv6 \u10DB\u10D8\u10E1\u10D0\u10DB\u10D0\u10E0\u10D7\u10D8", + cidrv4: "IPv4 \u10D3\u10D8\u10D0\u10DE\u10D0\u10D6\u10DD\u10DC\u10D8", + cidrv6: "IPv6 \u10D3\u10D8\u10D0\u10DE\u10D0\u10D6\u10DD\u10DC\u10D8", + base64: "base64-\u10D9\u10DD\u10D3\u10D8\u10E0\u10D4\u10D1\u10E3\u10DA\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", + base64url: "base64url-\u10D9\u10DD\u10D3\u10D8\u10E0\u10D4\u10D1\u10E3\u10DA\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", + json_string: "JSON \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", + e164: "E.164 \u10DC\u10DD\u10DB\u10D4\u10E0\u10D8", + jwt: "JWT", + template_literal: "\u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u10E0\u10D8\u10EA\u10EE\u10D5\u10D8", + string: "\u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", + boolean: "\u10D1\u10E3\u10DA\u10D4\u10D0\u10DC\u10D8", + function: "\u10E4\u10E3\u10DC\u10E5\u10EA\u10D8\u10D0", + array: "\u10DB\u10D0\u10E1\u10D8\u10D5\u10D8" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 instanceof ${issue2.expected}, \u10DB\u10D8\u10E6\u10D4\u10D1\u10E3\u10DA\u10D8 ${received}`; + } + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${expected}, \u10DB\u10D8\u10E6\u10D4\u10D1\u10E3\u10DA\u10D8 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${stringifyPrimitive(issue2.values[0])}`; + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10D5\u10D0\u10E0\u10D8\u10D0\u10DC\u10E2\u10D8: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8\u10D0 \u10D4\u10E0\u10D7-\u10D4\u10E0\u10D7\u10D8 ${joinValues(issue2.values, "|")}-\u10D3\u10D0\u10DC`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10D3\u10D8\u10D3\u10D8: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin ?? "\u10DB\u10DC\u10D8\u10E8\u10D5\u10DC\u10D4\u10DA\u10DD\u10D1\u10D0"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit}`; + return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10D3\u10D8\u10D3\u10D8: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin ?? "\u10DB\u10DC\u10D8\u10E8\u10D5\u10DC\u10D4\u10DA\u10DD\u10D1\u10D0"} \u10D8\u10E7\u10DD\u10E1 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10DE\u10D0\u10E2\u10D0\u10E0\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10DE\u10D0\u10E2\u10D0\u10E0\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin} \u10D8\u10E7\u10DD\u10E1 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10D8\u10EC\u10E7\u10D4\u10D1\u10DD\u10D3\u10D4\u10E1 "${_issue.prefix}"-\u10D8\u10D7`; + } + if (_issue.format === "ends_with") + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10DB\u10D7\u10D0\u10D5\u10E0\u10D3\u10D4\u10D1\u10DD\u10D3\u10D4\u10E1 "${_issue.suffix}"-\u10D8\u10D7`; + if (_issue.format === "includes") + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1 "${_issue.includes}"-\u10E1`; + if (_issue.format === "regex") + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D4\u10E1\u10D0\u10D1\u10D0\u10DB\u10D4\u10D1\u10DD\u10D3\u10D4\u10E1 \u10E8\u10D0\u10D1\u10DA\u10DD\u10DC\u10E1 ${_issue.pattern}`; + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E0\u10D8\u10EA\u10EE\u10D5\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10D8\u10E7\u10DD\u10E1 ${issue2.divisor}-\u10D8\u10E1 \u10EF\u10D4\u10E0\u10D0\u10D3\u10D8`; + case "unrecognized_keys": + return `\u10E3\u10EA\u10DC\u10DD\u10D1\u10D8 \u10D2\u10D0\u10E1\u10D0\u10E6\u10D4\u10D1${issue2.keys.length > 1 ? "\u10D4\u10D1\u10D8" : "\u10D8"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10D2\u10D0\u10E1\u10D0\u10E6\u10D4\u10D1\u10D8 ${issue2.origin}-\u10E8\u10D8`; + case "invalid_union": + return "\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0"; + case "invalid_element": + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10DB\u10DC\u10D8\u10E8\u10D5\u10DC\u10D4\u10DA\u10DD\u10D1\u10D0 ${issue2.origin}-\u10E8\u10D8`; + default: + return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0`; + } + }; +}; +function ka_default() { + return { + localeError: error23() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/km.js +var error24 = () => { + const Sizable = { + string: { unit: "\u178F\u17BD\u17A2\u1780\u17D2\u179F\u179A", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" }, + file: { unit: "\u1794\u17C3", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" }, + array: { unit: "\u1792\u17B6\u178F\u17BB", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" }, + set: { unit: "\u1792\u17B6\u178F\u17BB", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B", + email: "\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793\u17A2\u17CA\u17B8\u1798\u17C2\u179B", + url: "URL", + emoji: "\u179F\u1789\u17D2\u1789\u17B6\u17A2\u17B6\u179A\u1798\u17D2\u1798\u178E\u17CD", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u1780\u17B6\u179B\u1794\u179A\u17B7\u1785\u17D2\u1786\u17C1\u1791 \u1793\u17B7\u1784\u1798\u17C9\u17C4\u1784 ISO", + date: "\u1780\u17B6\u179B\u1794\u179A\u17B7\u1785\u17D2\u1786\u17C1\u1791 ISO", + time: "\u1798\u17C9\u17C4\u1784 ISO", + duration: "\u179A\u1799\u17C8\u1796\u17C1\u179B ISO", + ipv4: "\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv4", + ipv6: "\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv6", + cidrv4: "\u178A\u17C2\u1793\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv4", + cidrv6: "\u178A\u17C2\u1793\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv6", + base64: "\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u17A2\u17CA\u17B7\u1780\u17BC\u178A base64", + base64url: "\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u17A2\u17CA\u17B7\u1780\u17BC\u178A base64url", + json_string: "\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A JSON", + e164: "\u179B\u17C1\u1781 E.164", + jwt: "JWT", + template_literal: "\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u179B\u17C1\u1781", + array: "\u17A2\u17B6\u179A\u17C1 (Array)", + null: "\u1782\u17D2\u1798\u17B6\u1793\u178F\u1798\u17D2\u179B\u17C3 (null)" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? 
issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A instanceof ${issue2.expected} \u1794\u17C9\u17BB\u1793\u17D2\u178F\u17C2\u1791\u1791\u17BD\u179B\u1794\u17B6\u1793 ${received}`; + } + return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${expected} \u1794\u17C9\u17BB\u1793\u17D2\u178F\u17C2\u1791\u1791\u17BD\u179B\u1794\u17B6\u1793 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${stringifyPrimitive(issue2.values[0])}`; + return `\u1787\u1798\u17D2\u179A\u17BE\u179F\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1787\u17B6\u1798\u17BD\u1799\u1780\u17D2\u1793\u17BB\u1784\u1785\u17C6\u178E\u17C4\u1798 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u1792\u17C6\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin ?? "\u178F\u1798\u17D2\u179B\u17C3"} ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "\u1792\u17B6\u178F\u17BB"}`; + return `\u1792\u17C6\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin ?? 
"\u178F\u1798\u17D2\u179B\u17C3"} ${adj} ${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u178F\u17BC\u1785\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin} ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u178F\u17BC\u1785\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin} ${adj} ${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1785\u17B6\u1794\u17CB\u1795\u17D2\u178F\u17BE\u1798\u178A\u17C4\u1799 "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1794\u1789\u17D2\u1785\u1794\u17CB\u178A\u17C4\u1799 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1798\u17B6\u1793 "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u178F\u17C2\u1795\u17D2\u1782\u17BC\u1795\u17D2\u1782\u1784\u1793\u17B9\u1784\u1791\u1798\u17D2\u179A\u1784\u17CB\u178A\u17C2\u179B\u1794\u17B6\u1793\u1780\u17C6\u178E\u178F\u17CB ${_issue.pattern}`; + return `\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 ${FormatDictionary[_issue.format] 
?? issue2.format}`; + } + case "not_multiple_of": + return `\u179B\u17C1\u1781\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u178F\u17C2\u1787\u17B6\u1796\u17A0\u17BB\u1782\u17BB\u178E\u1793\u17C3 ${issue2.divisor}`; + case "unrecognized_keys": + return `\u179A\u1780\u1783\u17BE\u1789\u179F\u17C4\u1798\u17B7\u1793\u179F\u17D2\u1782\u17B6\u179B\u17CB\u17D6 ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u179F\u17C4\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u1793\u17C5\u1780\u17D2\u1793\u17BB\u1784 ${issue2.origin}`; + case "invalid_union": + return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C`; + case "invalid_element": + return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u1793\u17C5\u1780\u17D2\u1793\u17BB\u1784 ${issue2.origin}`; + default: + return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C`; + } + }; +}; +function km_default() { + return { + localeError: error24() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/kh.js +function kh_default() { + return km_default(); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ko.js +var error25 = () => { + const Sizable = { + string: { unit: "\uBB38\uC790", verb: "to have" }, + file: { unit: "\uBC14\uC774\uD2B8", verb: "to have" }, + array: { unit: "\uAC1C", verb: "to have" }, + set: { unit: "\uAC1C", verb: "to have" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\uC785\uB825", + email: "\uC774\uBA54\uC77C \uC8FC\uC18C", + url: "URL", + emoji: "\uC774\uBAA8\uC9C0", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \uB0A0\uC9DC\uC2DC\uAC04", + date: "ISO \uB0A0\uC9DC", + time: "ISO \uC2DC\uAC04", + duration: "ISO \uAE30\uAC04", + ipv4: "IPv4 \uC8FC\uC18C", + ipv6: "IPv6 \uC8FC\uC18C", + cidrv4: "IPv4 \uBC94\uC704", + cidrv6: "IPv6 \uBC94\uC704", + base64: "base64 \uC778\uCF54\uB529 \uBB38\uC790\uC5F4", + base64url: "base64url \uC778\uCF54\uB529 \uBB38\uC790\uC5F4", + json_string: "JSON \uBB38\uC790\uC5F4", + e164: "E.164 \uBC88\uD638", + jwt: "JWT", + template_literal: "\uC785\uB825" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\uC798\uBABB\uB41C \uC785\uB825: \uC608\uC0C1 \uD0C0\uC785\uC740 instanceof ${issue2.expected}, \uBC1B\uC740 \uD0C0\uC785\uC740 ${received}\uC785\uB2C8\uB2E4`; + } + return `\uC798\uBABB\uB41C \uC785\uB825: \uC608\uC0C1 \uD0C0\uC785\uC740 ${expected}, \uBC1B\uC740 \uD0C0\uC785\uC740 ${received}\uC785\uB2C8\uB2E4`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\uC798\uBABB\uB41C \uC785\uB825: \uAC12\uC740 ${stringifyPrimitive(issue2.values[0])} \uC774\uC5B4\uC57C \uD569\uB2C8\uB2E4`; + return `\uC798\uBABB\uB41C \uC635\uC158: ${joinValues(issue2.values, "\uB610\uB294 ")} \uC911 \uD558\uB098\uC5EC\uC57C \uD569\uB2C8\uB2E4`; + case "too_big": { + const adj = issue2.inclusive ? "\uC774\uD558" : "\uBBF8\uB9CC"; + const suffix = adj === "\uBBF8\uB9CC" ? 
"\uC774\uC5B4\uC57C \uD569\uB2C8\uB2E4" : "\uC5EC\uC57C \uD569\uB2C8\uB2E4"; + const sizing = getSizing(issue2.origin); + const unit = sizing?.unit ?? "\uC694\uC18C"; + if (sizing) + return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uD07D\uB2C8\uB2E4: ${issue2.maximum.toString()}${unit} ${adj}${suffix}`; + return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uD07D\uB2C8\uB2E4: ${issue2.maximum.toString()} ${adj}${suffix}`; + } + case "too_small": { + const adj = issue2.inclusive ? "\uC774\uC0C1" : "\uCD08\uACFC"; + const suffix = adj === "\uC774\uC0C1" ? "\uC774\uC5B4\uC57C \uD569\uB2C8\uB2E4" : "\uC5EC\uC57C \uD569\uB2C8\uB2E4"; + const sizing = getSizing(issue2.origin); + const unit = sizing?.unit ?? "\uC694\uC18C"; + if (sizing) { + return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uC791\uC2B5\uB2C8\uB2E4: ${issue2.minimum.toString()}${unit} ${adj}${suffix}`; + } + return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uC791\uC2B5\uB2C8\uB2E4: ${issue2.minimum.toString()} ${adj}${suffix}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: "${_issue.prefix}"(\uC73C)\uB85C \uC2DC\uC791\uD574\uC57C \uD569\uB2C8\uB2E4`; + } + if (_issue.format === "ends_with") + return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: "${_issue.suffix}"(\uC73C)\uB85C \uB05D\uB098\uC57C \uD569\uB2C8\uB2E4`; + if (_issue.format === "includes") + return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: "${_issue.includes}"\uC744(\uB97C) \uD3EC\uD568\uD574\uC57C \uD569\uB2C8\uB2E4`; + if (_issue.format === "regex") + return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: \uC815\uADDC\uC2DD ${_issue.pattern} \uD328\uD134\uACFC \uC77C\uCE58\uD574\uC57C \uD569\uB2C8\uB2E4`; + return `\uC798\uBABB\uB41C ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\uC798\uBABB\uB41C \uC22B\uC790: ${issue2.divisor}\uC758 \uBC30\uC218\uC5EC\uC57C \uD569\uB2C8\uB2E4`; + case "unrecognized_keys": + return `\uC778\uC2DD\uD560 \uC218 \uC5C6\uB294 \uD0A4: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\uC798\uBABB\uB41C \uD0A4: ${issue2.origin}`; + case "invalid_union": + return `\uC798\uBABB\uB41C \uC785\uB825`; + case "invalid_element": + return `\uC798\uBABB\uB41C \uAC12: ${issue2.origin}`; + default: + return `\uC798\uBABB\uB41C \uC785\uB825`; + } + }; +}; +function ko_default() { + return { + localeError: error25() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/lt.js +var capitalizeFirstCharacter = (text) => { + return text.charAt(0).toUpperCase() + text.slice(1); +}; +function getUnitTypeFromNumber(number4) { + const abs = Math.abs(number4); + const last = abs % 10; + const last2 = abs % 100; + if (last2 >= 11 && last2 <= 19 || last === 0) + return "many"; + if (last === 1) + return "one"; + return "few"; +} +var error26 = () => { + const Sizable = { + string: { + unit: { + one: "simbolis", + few: "simboliai", + many: "simboli\u0173" + }, + verb: { + smaller: { + inclusive: "turi b\u016Bti ne ilgesn\u0117 kaip", + notInclusive: "turi b\u016Bti trumpesn\u0117 kaip" + }, + bigger: { + inclusive: "turi b\u016Bti ne trumpesn\u0117 kaip", + notInclusive: "turi b\u016Bti ilgesn\u0117 kaip" + } + } + }, + file: { + unit: { + one: "baitas", + few: "baitai", + many: "bait\u0173" + }, + verb: { + smaller: { + inclusive: "turi b\u016Bti ne didesnis kaip", + notInclusive: "turi b\u016Bti ma\u017Eesnis kaip" + }, + bigger: { + inclusive: "turi b\u016Bti ne ma\u017Eesnis kaip", + notInclusive: "turi b\u016Bti didesnis kaip" + } + } + }, + array: { + unit: { + one: "element\u0105", + few: "elementus", + many: "element\u0173" + }, + verb: { + smaller: { + inclusive: "turi tur\u0117ti ne daugiau kaip", + notInclusive: "turi tur\u0117ti 
ma\u017Eiau kaip" + }, + bigger: { + inclusive: "turi tur\u0117ti ne ma\u017Eiau kaip", + notInclusive: "turi tur\u0117ti daugiau kaip" + } + } + }, + set: { + unit: { + one: "element\u0105", + few: "elementus", + many: "element\u0173" + }, + verb: { + smaller: { + inclusive: "turi tur\u0117ti ne daugiau kaip", + notInclusive: "turi tur\u0117ti ma\u017Eiau kaip" + }, + bigger: { + inclusive: "turi tur\u0117ti ne ma\u017Eiau kaip", + notInclusive: "turi tur\u0117ti daugiau kaip" + } + } + } + }; + function getSizing(origin, unitType, inclusive, targetShouldBe) { + const result = Sizable[origin] ?? null; + if (result === null) + return result; + return { + unit: result.unit[unitType], + verb: result.verb[targetShouldBe][inclusive ? "inclusive" : "notInclusive"] + }; + } + const FormatDictionary = { + regex: "\u012Fvestis", + email: "el. pa\u0161to adresas", + url: "URL", + emoji: "jaustukas", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO data ir laikas", + date: "ISO data", + time: "ISO laikas", + duration: "ISO trukm\u0117", + ipv4: "IPv4 adresas", + ipv6: "IPv6 adresas", + cidrv4: "IPv4 tinklo prefiksas (CIDR)", + cidrv6: "IPv6 tinklo prefiksas (CIDR)", + base64: "base64 u\u017Ekoduota eilut\u0117", + base64url: "base64url u\u017Ekoduota eilut\u0117", + json_string: "JSON eilut\u0117", + e164: "E.164 numeris", + jwt: "JWT", + template_literal: "\u012Fvestis" + }; + const TypeDictionary = { + nan: "NaN", + number: "skai\u010Dius", + bigint: "sveikasis skai\u010Dius", + string: "eilut\u0117", + boolean: "login\u0117 reik\u0161m\u0117", + undefined: "neapibr\u0117\u017Eta reik\u0161m\u0117", + function: "funkcija", + symbol: "simbolis", + array: "masyvas", + object: "objektas", + null: "nulin\u0117 reik\u0161m\u0117" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = 
TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Gautas tipas ${received}, o tik\u0117tasi - instanceof ${issue2.expected}`; + } + return `Gautas tipas ${received}, o tik\u0117tasi - ${expected}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Privalo b\u016Bti ${stringifyPrimitive(issue2.values[0])}`; + return `Privalo b\u016Bti vienas i\u0161 ${joinValues(issue2.values, "|")} pasirinkim\u0173`; + case "too_big": { + const origin = TypeDictionary[issue2.origin] ?? issue2.origin; + const sizing = getSizing(issue2.origin, getUnitTypeFromNumber(Number(issue2.maximum)), issue2.inclusive ?? false, "smaller"); + if (sizing?.verb) + return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} ${sizing.verb} ${issue2.maximum.toString()} ${sizing.unit ?? "element\u0173"}`; + const adj = issue2.inclusive ? "ne didesnis kaip" : "ma\u017Eesnis kaip"; + return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} turi b\u016Bti ${adj} ${issue2.maximum.toString()} ${sizing?.unit}`; + } + case "too_small": { + const origin = TypeDictionary[issue2.origin] ?? issue2.origin; + const sizing = getSizing(issue2.origin, getUnitTypeFromNumber(Number(issue2.minimum)), issue2.inclusive ?? false, "bigger"); + if (sizing?.verb) + return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} ${sizing.verb} ${issue2.minimum.toString()} ${sizing.unit ?? "element\u0173"}`; + const adj = issue2.inclusive ? "ne ma\u017Eesnis kaip" : "didesnis kaip"; + return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? 
"reik\u0161m\u0117")} turi b\u016Bti ${adj} ${issue2.minimum.toString()} ${sizing?.unit}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Eilut\u0117 privalo prasid\u0117ti "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `Eilut\u0117 privalo pasibaigti "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Eilut\u0117 privalo \u012Ftraukti "${_issue.includes}"`; + if (_issue.format === "regex") + return `Eilut\u0117 privalo atitikti ${_issue.pattern}`; + return `Neteisingas ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Skai\u010Dius privalo b\u016Bti ${issue2.divisor} kartotinis.`; + case "unrecognized_keys": + return `Neatpa\u017Eint${issue2.keys.length > 1 ? "i" : "as"} rakt${issue2.keys.length > 1 ? "ai" : "as"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return "Rastas klaidingas raktas"; + case "invalid_union": + return "Klaidinga \u012Fvestis"; + case "invalid_element": { + const origin = TypeDictionary[issue2.origin] ?? issue2.origin; + return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} turi klaiding\u0105 \u012Fvest\u012F`; + } + default: + return "Klaidinga \u012Fvestis"; + } + }; +}; +function lt_default() { + return { + localeError: error26() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/mk.js +var error27 = () => { + const Sizable = { + string: { unit: "\u0437\u043D\u0430\u0446\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" }, + file: { unit: "\u0431\u0430\u0458\u0442\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" }, + array: { unit: "\u0441\u0442\u0430\u0432\u043A\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" }, + set: { unit: "\u0441\u0442\u0430\u0432\u043A\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0432\u043D\u0435\u0441", + email: "\u0430\u0434\u0440\u0435\u0441\u0430 \u043D\u0430 \u0435-\u043F\u043E\u0448\u0442\u0430", + url: "URL", + emoji: "\u0435\u043C\u043E\u045F\u0438", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u0434\u0430\u0442\u0443\u043C \u0438 \u0432\u0440\u0435\u043C\u0435", + date: "ISO \u0434\u0430\u0442\u0443\u043C", + time: "ISO \u0432\u0440\u0435\u043C\u0435", + duration: "ISO \u0432\u0440\u0435\u043C\u0435\u0442\u0440\u0430\u0435\u045A\u0435", + ipv4: "IPv4 \u0430\u0434\u0440\u0435\u0441\u0430", + ipv6: "IPv6 \u0430\u0434\u0440\u0435\u0441\u0430", + cidrv4: "IPv4 \u043E\u043F\u0441\u0435\u0433", + cidrv6: "IPv6 \u043E\u043F\u0441\u0435\u0433", + base64: "base64-\u0435\u043D\u043A\u043E\u0434\u0438\u0440\u0430\u043D\u0430 \u043D\u0438\u0437\u0430", + base64url: "base64url-\u0435\u043D\u043A\u043E\u0434\u0438\u0440\u0430\u043D\u0430 \u043D\u0438\u0437\u0430", + json_string: "JSON \u043D\u0438\u0437\u0430", + e164: "E.164 \u0431\u0440\u043E\u0458", + jwt: "JWT", + template_literal: "\u0432\u043D\u0435\u0441" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0431\u0440\u043E\u0458", + array: "\u043D\u0438\u0437\u0430" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 instanceof ${issue2.expected}, \u043F\u0440\u0438\u043C\u0435\u043D\u043E ${received}`; + } + return `\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${expected}, \u043F\u0440\u0438\u043C\u0435\u043D\u043E ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Invalid input: expected ${stringifyPrimitive(issue2.values[0])}`; + return `\u0413\u0440\u0435\u0448\u0430\u043D\u0430 \u043E\u043F\u0446\u0438\u0458\u0430: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 \u0435\u0434\u043D\u0430 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u0433\u043E\u043B\u0435\u043C: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin ?? "\u0432\u0440\u0435\u0434\u043D\u043E\u0441\u0442\u0430"} \u0434\u0430 \u0438\u043C\u0430 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0438"}`; + return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u0433\u043E\u043B\u0435\u043C: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin ?? "\u0432\u0440\u0435\u0434\u043D\u043E\u0441\u0442\u0430"} \u0434\u0430 \u0431\u0438\u0434\u0435 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u043C\u0430\u043B: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin} \u0434\u0430 \u0438\u043C\u0430 ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u043C\u0430\u043B: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin} \u0434\u0430 \u0431\u0438\u0434\u0435 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0437\u0430\u043F\u043E\u0447\u043D\u0443\u0432\u0430 \u0441\u043E "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0437\u0430\u0432\u0440\u0448\u0443\u0432\u0430 \u0441\u043E "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0432\u043A\u043B\u0443\u0447\u0443\u0432\u0430 "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u043E\u0434\u0433\u043E\u0430\u0440\u0430 \u043D\u0430 \u043F\u0430\u0442\u0435\u0440\u043D\u043E\u0442 ${_issue.pattern}`; + return `Invalid ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u0413\u0440\u0435\u0448\u0435\u043D \u0431\u0440\u043E\u0458: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0431\u0438\u0434\u0435 \u0434\u0435\u043B\u0438\u0432 \u0441\u043E ${issue2.divisor}`; + case "unrecognized_keys": + return `${issue2.keys.length > 1 ? 
"\u041D\u0435\u043F\u0440\u0435\u043F\u043E\u0437\u043D\u0430\u0435\u043D\u0438 \u043A\u043B\u0443\u0447\u0435\u0432\u0438" : "\u041D\u0435\u043F\u0440\u0435\u043F\u043E\u0437\u043D\u0430\u0435\u043D \u043A\u043B\u0443\u0447"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u0413\u0440\u0435\u0448\u0435\u043D \u043A\u043B\u0443\u0447 \u0432\u043E ${issue2.origin}`; + case "invalid_union": + return "\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441"; + case "invalid_element": + return `\u0413\u0440\u0435\u0448\u043D\u0430 \u0432\u0440\u0435\u0434\u043D\u043E\u0441\u0442 \u0432\u043E ${issue2.origin}`; + default: + return `\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441`; + } + }; +}; +function mk_default() { + return { + localeError: error27() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ms.js +var error28 = () => { + const Sizable = { + string: { unit: "aksara", verb: "mempunyai" }, + file: { unit: "bait", verb: "mempunyai" }, + array: { unit: "elemen", verb: "mempunyai" }, + set: { unit: "elemen", verb: "mempunyai" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "input", + email: "alamat e-mel", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "tarikh masa ISO", + date: "tarikh ISO", + time: "masa ISO", + duration: "tempoh ISO", + ipv4: "alamat IPv4", + ipv6: "alamat IPv6", + cidrv4: "julat IPv4", + cidrv6: "julat IPv6", + base64: "string dikodkan base64", + base64url: "string dikodkan base64url", + json_string: "string JSON", + e164: "nombor E.164", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN", + number: "nombor" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Input tidak sah: dijangka instanceof ${issue2.expected}, diterima ${received}`; + } + return `Input tidak sah: dijangka ${expected}, diterima ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Input tidak sah: dijangka ${stringifyPrimitive(issue2.values[0])}`; + return `Pilihan tidak sah: dijangka salah satu daripada ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Terlalu besar: dijangka ${issue2.origin ?? "nilai"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elemen"}`; + return `Terlalu besar: dijangka ${issue2.origin ?? "nilai"} adalah ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Terlalu kecil: dijangka ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Terlalu kecil: dijangka ${issue2.origin} adalah ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `String tidak sah: mesti bermula dengan "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `String tidak sah: mesti berakhir dengan "${_issue.suffix}"`; + if (_issue.format === "includes") + return `String tidak sah: mesti mengandungi "${_issue.includes}"`; + if (_issue.format === "regex") + return `String tidak sah: mesti sepadan dengan corak ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? issue2.format} tidak sah`; + } + case "not_multiple_of": + return `Nombor tidak sah: perlu gandaan ${issue2.divisor}`; + case "unrecognized_keys": + return `Kunci tidak dikenali: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Kunci tidak sah dalam ${issue2.origin}`; + case "invalid_union": + return "Input tidak sah"; + case "invalid_element": + return `Nilai tidak sah dalam ${issue2.origin}`; + default: + return `Input tidak sah`; + } + }; +}; +function ms_default() { + return { + localeError: error28() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/nl.js +var error29 = () => { + const Sizable = { + string: { unit: "tekens", verb: "heeft" }, + file: { unit: "bytes", verb: "heeft" }, + array: { unit: "elementen", verb: "heeft" }, + set: { unit: "elementen", verb: "heeft" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "invoer", + email: "emailadres", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO datum en tijd", + date: "ISO datum", + time: "ISO tijd", + duration: "ISO duur", + ipv4: "IPv4-adres", + ipv6: "IPv6-adres", + cidrv4: "IPv4-bereik", + cidrv6: "IPv6-bereik", + base64: "base64-gecodeerde tekst", + base64url: "base64 URL-gecodeerde tekst", + json_string: "JSON string", + e164: "E.164-nummer", + jwt: "JWT", + template_literal: "invoer" + }; + const TypeDictionary = { + nan: "NaN", + number: "getal" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Ongeldige invoer: verwacht instanceof ${issue2.expected}, ontving ${received}`; + } + return `Ongeldige invoer: verwacht ${expected}, ontving ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Ongeldige invoer: verwacht ${stringifyPrimitive(issue2.values[0])}`; + return `Ongeldige optie: verwacht \xE9\xE9n van ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + const longName = issue2.origin === "date" ? "laat" : issue2.origin === "string" ? "lang" : "groot"; + if (sizing) + return `Te ${longName}: verwacht dat ${issue2.origin ?? "waarde"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementen"} ${sizing.verb}`; + return `Te ${longName}: verwacht dat ${issue2.origin ?? "waarde"} ${adj}${issue2.maximum.toString()} is`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + const shortName = issue2.origin === "date" ? "vroeg" : issue2.origin === "string" ? "kort" : "klein"; + if (sizing) { + return `Te ${shortName}: verwacht dat ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} ${sizing.verb}`; + } + return `Te ${shortName}: verwacht dat ${issue2.origin} ${adj}${issue2.minimum.toString()} is`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Ongeldige tekst: moet met "${_issue.prefix}" beginnen`; + } + if (_issue.format === "ends_with") + return `Ongeldige tekst: moet op "${_issue.suffix}" eindigen`; + if (_issue.format === "includes") + return `Ongeldige tekst: moet "${_issue.includes}" bevatten`; + if (_issue.format === "regex") + return `Ongeldige tekst: moet overeenkomen met patroon ${_issue.pattern}`; + return `Ongeldig: ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Ongeldig getal: moet een veelvoud van ${issue2.divisor} zijn`; + case "unrecognized_keys": + return `Onbekende key${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Ongeldige key in ${issue2.origin}`; + case "invalid_union": + return "Ongeldige invoer"; + case "invalid_element": + return `Ongeldige waarde in ${issue2.origin}`; + default: + return `Ongeldige invoer`; + } + }; +}; +function nl_default() { + return { + localeError: error29() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/no.js +var error30 = () => { + const Sizable = { + string: { unit: "tegn", verb: "\xE5 ha" }, + file: { unit: "bytes", verb: "\xE5 ha" }, + array: { unit: "elementer", verb: "\xE5 inneholde" }, + set: { unit: "elementer", verb: "\xE5 inneholde" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "input", + email: "e-postadresse", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO dato- og klokkeslett", + date: "ISO-dato", + time: "ISO-klokkeslett", + duration: "ISO-varighet", + ipv4: "IPv4-omr\xE5de", + ipv6: "IPv6-omr\xE5de", + cidrv4: "IPv4-spekter", + cidrv6: "IPv6-spekter", + base64: "base64-enkodet streng", + base64url: "base64url-enkodet streng", + json_string: "JSON-streng", + e164: "E.164-nummer", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN", + number: "tall", + array: "liste" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Ugyldig input: forventet instanceof ${issue2.expected}, fikk ${received}`; + } + return `Ugyldig input: forventet ${expected}, fikk ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Ugyldig verdi: forventet ${stringifyPrimitive(issue2.values[0])}`; + return `Ugyldig valg: forventet en av ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `For stor(t): forventet ${issue2.origin ?? "value"} til \xE5 ha ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementer"}`; + return `For stor(t): forventet ${issue2.origin ?? "value"} til \xE5 ha ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `For lite(n): forventet ${issue2.origin} til \xE5 ha ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `For lite(n): forventet ${issue2.origin} til \xE5 ha ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Ugyldig streng: m\xE5 starte med "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Ugyldig streng: m\xE5 ende med "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Ugyldig streng: m\xE5 inneholde "${_issue.includes}"`; + if (_issue.format === "regex") + return `Ugyldig streng: m\xE5 matche m\xF8nsteret ${_issue.pattern}`; + return `Ugyldig ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Ugyldig tall: m\xE5 v\xE6re et multiplum av ${issue2.divisor}`; + case "unrecognized_keys": + return `${issue2.keys.length > 1 ? "Ukjente n\xF8kler" : "Ukjent n\xF8kkel"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Ugyldig n\xF8kkel i ${issue2.origin}`; + case "invalid_union": + return "Ugyldig input"; + case "invalid_element": + return `Ugyldig verdi i ${issue2.origin}`; + default: + return `Ugyldig input`; + } + }; +}; +function no_default() { + return { + localeError: error30() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ota.js +var error31 = () => { + const Sizable = { + string: { unit: "harf", verb: "olmal\u0131d\u0131r" }, + file: { unit: "bayt", verb: "olmal\u0131d\u0131r" }, + array: { unit: "unsur", verb: "olmal\u0131d\u0131r" }, + set: { unit: "unsur", verb: "olmal\u0131d\u0131r" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "giren", + email: "epostag\xE2h", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO heng\xE2m\u0131", + date: "ISO tarihi", + time: "ISO zaman\u0131", + duration: "ISO m\xFCddeti", + ipv4: "IPv4 ni\u015F\xE2n\u0131", + ipv6: "IPv6 ni\u015F\xE2n\u0131", + cidrv4: "IPv4 menzili", + cidrv6: "IPv6 menzili", + base64: "base64-\u015Fifreli metin", + base64url: "base64url-\u015Fifreli metin", + json_string: "JSON metin", + e164: "E.164 say\u0131s\u0131", + jwt: "JWT", + template_literal: "giren" + }; + const TypeDictionary = { + nan: "NaN", + number: "numara", + array: "saf", + null: "gayb" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `F\xE2sit giren: umulan instanceof ${issue2.expected}, al\u0131nan ${received}`; + } + return `F\xE2sit giren: umulan ${expected}, al\u0131nan ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `F\xE2sit giren: umulan ${stringifyPrimitive(issue2.values[0])}`; + return `F\xE2sit tercih: m\xFBteberler ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Fazla b\xFCy\xFCk: ${issue2.origin ?? "value"}, ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elements"} sahip olmal\u0131yd\u0131.`; + return `Fazla b\xFCy\xFCk: ${issue2.origin ?? "value"}, ${adj}${issue2.maximum.toString()} olmal\u0131yd\u0131.`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Fazla k\xFC\xE7\xFCk: ${issue2.origin}, ${adj}${issue2.minimum.toString()} ${sizing.unit} sahip olmal\u0131yd\u0131.`; + } + return `Fazla k\xFC\xE7\xFCk: ${issue2.origin}, ${adj}${issue2.minimum.toString()} olmal\u0131yd\u0131.`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `F\xE2sit metin: "${_issue.prefix}" ile ba\u015Flamal\u0131.`; + if (_issue.format === "ends_with") + return `F\xE2sit metin: "${_issue.suffix}" ile bitmeli.`; + if (_issue.format === "includes") + return `F\xE2sit metin: "${_issue.includes}" ihtiv\xE2 etmeli.`; + if (_issue.format === "regex") + return `F\xE2sit metin: ${_issue.pattern} nak\u015F\u0131na uymal\u0131.`; + return `F\xE2sit ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `F\xE2sit say\u0131: ${issue2.divisor} kat\u0131 olmal\u0131yd\u0131.`; + case "unrecognized_keys": + return `Tan\u0131nmayan anahtar ${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `${issue2.origin} i\xE7in tan\u0131nmayan anahtar var.`; + case "invalid_union": + return "Giren tan\u0131namad\u0131."; + case "invalid_element": + return `${issue2.origin} i\xE7in tan\u0131nmayan k\u0131ymet var.`; + default: + return `K\u0131ymet tan\u0131namad\u0131.`; + } + }; +}; +function ota_default() { + return { + localeError: error31() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ps.js +var error32 = () => { + const Sizable = { + string: { unit: "\u062A\u0648\u06A9\u064A", verb: "\u0648\u0644\u0631\u064A" }, + file: { unit: "\u0628\u0627\u06CC\u067C\u0633", verb: "\u0648\u0644\u0631\u064A" }, + array: { unit: "\u062A\u0648\u06A9\u064A", verb: "\u0648\u0644\u0631\u064A" }, + set: { unit: "\u062A\u0648\u06A9\u064A", verb: "\u0648\u0644\u0631\u064A" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u0648\u0631\u0648\u062F\u064A", + email: "\u0628\u0631\u06CC\u069A\u0646\u0627\u0644\u06CC\u06A9", + url: "\u06CC\u0648 \u0622\u0631 \u0627\u0644", + emoji: "\u0627\u06CC\u0645\u0648\u062C\u064A", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u0646\u06CC\u067C\u0647 \u0627\u0648 \u0648\u062E\u062A", + date: "\u0646\u06D0\u067C\u0647", + time: "\u0648\u062E\u062A", + duration: "\u0645\u0648\u062F\u0647", + ipv4: "\u062F IPv4 \u067E\u062A\u0647", + ipv6: "\u062F IPv6 \u067E\u062A\u0647", + cidrv4: "\u062F IPv4 \u0633\u0627\u062D\u0647", + cidrv6: "\u062F IPv6 \u0633\u0627\u062D\u0647", + base64: "base64-encoded \u0645\u062A\u0646", + base64url: "base64url-encoded \u0645\u062A\u0646", + json_string: "JSON \u0645\u062A\u0646", + e164: "\u062F E.164 \u0634\u0645\u06D0\u0631\u0647", + jwt: "JWT", + template_literal: "\u0648\u0631\u0648\u062F\u064A" + }; + 
const TypeDictionary = { + nan: "NaN", + number: "\u0639\u062F\u062F", + array: "\u0627\u0631\u06D0" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0646\u0627\u0633\u0645 \u0648\u0631\u0648\u062F\u064A: \u0628\u0627\u06CC\u062F instanceof ${issue2.expected} \u0648\u0627\u06CC, \u0645\u06AB\u0631 ${received} \u062A\u0631\u0644\u0627\u0633\u0647 \u0634\u0648`; + } + return `\u0646\u0627\u0633\u0645 \u0648\u0631\u0648\u062F\u064A: \u0628\u0627\u06CC\u062F ${expected} \u0648\u0627\u06CC, \u0645\u06AB\u0631 ${received} \u062A\u0631\u0644\u0627\u0633\u0647 \u0634\u0648`; + } + case "invalid_value": + if (issue2.values.length === 1) { + return `\u0646\u0627\u0633\u0645 \u0648\u0631\u0648\u062F\u064A: \u0628\u0627\u06CC\u062F ${stringifyPrimitive(issue2.values[0])} \u0648\u0627\u06CC`; + } + return `\u0646\u0627\u0633\u0645 \u0627\u0646\u062A\u062E\u0627\u0628: \u0628\u0627\u06CC\u062F \u06CC\u0648 \u0644\u0647 ${joinValues(issue2.values, "|")} \u0685\u062E\u0647 \u0648\u0627\u06CC`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0689\u06CC\u0631 \u0644\u0648\u06CC: ${issue2.origin ?? "\u0627\u0631\u0632\u069A\u062A"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0635\u0631\u0648\u0646\u0647"} \u0648\u0644\u0631\u064A`; + } + return `\u0689\u06CC\u0631 \u0644\u0648\u06CC: ${issue2.origin ?? "\u0627\u0631\u0632\u069A\u062A"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} \u0648\u064A`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0689\u06CC\u0631 \u06A9\u0648\u0686\u0646\u06CC: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} ${sizing.unit} \u0648\u0644\u0631\u064A`; + } + return `\u0689\u06CC\u0631 \u06A9\u0648\u0686\u0646\u06CC: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} \u0648\u064A`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F \u062F "${_issue.prefix}" \u0633\u0631\u0647 \u067E\u06CC\u0644 \u0634\u064A`; + } + if (_issue.format === "ends_with") { + return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F \u062F "${_issue.suffix}" \u0633\u0631\u0647 \u067E\u0627\u06CC \u062A\u0647 \u0648\u0631\u0633\u064A\u0696\u064A`; + } + if (_issue.format === "includes") { + return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F "${_issue.includes}" \u0648\u0644\u0631\u064A`; + } + if (_issue.format === "regex") { + return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F \u062F ${_issue.pattern} \u0633\u0631\u0647 \u0645\u0637\u0627\u0628\u0642\u062A \u0648\u0644\u0631\u064A`; + } + return `${FormatDictionary[_issue.format] ?? issue2.format} \u0646\u0627\u0633\u0645 \u062F\u06CC`; + } + case "not_multiple_of": + return `\u0646\u0627\u0633\u0645 \u0639\u062F\u062F: \u0628\u0627\u06CC\u062F \u062F ${issue2.divisor} \u0645\u0636\u0631\u0628 \u0648\u064A`; + case "unrecognized_keys": + return `\u0646\u0627\u0633\u0645 ${issue2.keys.length > 1 ? 
"\u06A9\u0644\u06CC\u0689\u0648\u0646\u0647" : "\u06A9\u0644\u06CC\u0689"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u0646\u0627\u0633\u0645 \u06A9\u0644\u06CC\u0689 \u067E\u0647 ${issue2.origin} \u06A9\u06D0`; + case "invalid_union": + return `\u0646\u0627\u0633\u0645\u0647 \u0648\u0631\u0648\u062F\u064A`; + case "invalid_element": + return `\u0646\u0627\u0633\u0645 \u0639\u0646\u0635\u0631 \u067E\u0647 ${issue2.origin} \u06A9\u06D0`; + default: + return `\u0646\u0627\u0633\u0645\u0647 \u0648\u0631\u0648\u062F\u064A`; + } + }; +}; +function ps_default() { + return { + localeError: error32() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/pl.js +var error33 = () => { + const Sizable = { + string: { unit: "znak\xF3w", verb: "mie\u0107" }, + file: { unit: "bajt\xF3w", verb: "mie\u0107" }, + array: { unit: "element\xF3w", verb: "mie\u0107" }, + set: { unit: "element\xF3w", verb: "mie\u0107" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "wyra\u017Cenie", + email: "adres email", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "data i godzina w formacie ISO", + date: "data w formacie ISO", + time: "godzina w formacie ISO", + duration: "czas trwania ISO", + ipv4: "adres IPv4", + ipv6: "adres IPv6", + cidrv4: "zakres IPv4", + cidrv6: "zakres IPv6", + base64: "ci\u0105g znak\xF3w zakodowany w formacie base64", + base64url: "ci\u0105g znak\xF3w zakodowany w formacie base64url", + json_string: "ci\u0105g znak\xF3w w formacie JSON", + e164: "liczba E.164", + jwt: "JWT", + template_literal: "wej\u015Bcie" + }; + const TypeDictionary = { + nan: "NaN", + number: "liczba", + array: "tablica" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Nieprawid\u0142owe dane wej\u015Bciowe: oczekiwano instanceof ${issue2.expected}, otrzymano ${received}`; + } + return `Nieprawid\u0142owe dane wej\u015Bciowe: oczekiwano ${expected}, otrzymano ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Nieprawid\u0142owe dane wej\u015Bciowe: oczekiwano ${stringifyPrimitive(issue2.values[0])}`; + return `Nieprawid\u0142owa opcja: oczekiwano jednej z warto\u015Bci ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Za du\u017Ca warto\u015B\u0107: oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie mie\u0107 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"element\xF3w"}`; + } + return `Zbyt du\u017C(y/a/e): oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie wynosi\u0107 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Za ma\u0142a warto\u015B\u0107: oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie mie\u0107 ${adj}${issue2.minimum.toString()} ${sizing.unit ?? "element\xF3w"}`; + } + return `Zbyt ma\u0142(y/a/e): oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie wynosi\u0107 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi zaczyna\u0107 si\u0119 od "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi ko\u0144czy\u0107 si\u0119 na "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi zawiera\u0107 "${_issue.includes}"`; + if (_issue.format === "regex") + return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi odpowiada\u0107 wzorcowi ${_issue.pattern}`; + return `Nieprawid\u0142ow(y/a/e) ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Nieprawid\u0142owa liczba: musi by\u0107 wielokrotno\u015Bci\u0105 ${issue2.divisor}`; + case "unrecognized_keys": + return `Nierozpoznane klucze${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Nieprawid\u0142owy klucz w ${issue2.origin}`; + case "invalid_union": + return "Nieprawid\u0142owe dane wej\u015Bciowe"; + case "invalid_element": + return `Nieprawid\u0142owa warto\u015B\u0107 w ${issue2.origin}`; + default: + return `Nieprawid\u0142owe dane wej\u015Bciowe`; + } + }; +}; +function pl_default() { + return { + localeError: error33() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/pt.js +var error34 = () => { + const Sizable = { + string: { unit: "caracteres", verb: "ter" }, + file: { unit: "bytes", verb: "ter" }, + array: { unit: "itens", verb: "ter" }, + set: { unit: "itens", verb: "ter" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "padr\xE3o", + email: "endere\xE7o de e-mail", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "data e hora ISO", + date: "data ISO", + time: "hora ISO", + duration: "dura\xE7\xE3o ISO", + ipv4: "endere\xE7o IPv4", + ipv6: "endere\xE7o IPv6", + cidrv4: "faixa de IPv4", + cidrv6: "faixa de IPv6", + base64: "texto codificado em base64", + base64url: "URL codificada em base64", + json_string: "texto JSON", + e164: "n\xFAmero E.164", + jwt: "JWT", + template_literal: "entrada" + }; + const TypeDictionary = { + nan: "NaN", + number: "n\xFAmero", + null: "nulo" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Tipo inv\xE1lido: esperado instanceof ${issue2.expected}, recebido ${received}`; + } + return `Tipo inv\xE1lido: esperado ${expected}, recebido ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Entrada inv\xE1lida: esperado ${stringifyPrimitive(issue2.values[0])}`; + return `Op\xE7\xE3o inv\xE1lida: esperada uma das ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Muito grande: esperado que ${issue2.origin ?? "valor"} tivesse ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementos"}`; + return `Muito grande: esperado que ${issue2.origin ?? "valor"} fosse ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Muito pequeno: esperado que ${issue2.origin} tivesse ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Muito pequeno: esperado que ${issue2.origin} fosse ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Texto inv\xE1lido: deve come\xE7ar com "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Texto inv\xE1lido: deve terminar com "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Texto inv\xE1lido: deve incluir "${_issue.includes}"`; + if (_issue.format === "regex") + return `Texto inv\xE1lido: deve corresponder ao padr\xE3o ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? issue2.format} inv\xE1lido`; + } + case "not_multiple_of": + return `N\xFAmero inv\xE1lido: deve ser m\xFAltiplo de ${issue2.divisor}`; + case "unrecognized_keys": + return `Chave${issue2.keys.length > 1 ? "s" : ""} desconhecida${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Chave inv\xE1lida em ${issue2.origin}`; + case "invalid_union": + return "Entrada inv\xE1lida"; + case "invalid_element": + return `Valor inv\xE1lido em ${issue2.origin}`; + default: + return `Campo inv\xE1lido`; + } + }; +}; +function pt_default() { + return { + localeError: error34() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ru.js +function getRussianPlural(count, one, few, many) { + const absCount = Math.abs(count); + const lastDigit = absCount % 10; + const lastTwoDigits = absCount % 100; + if (lastTwoDigits >= 11 && lastTwoDigits <= 19) { + return many; + } + if (lastDigit === 1) { + return one; + } + if (lastDigit >= 2 && lastDigit <= 4) { + return few; + } + return many; +} +var error35 = () => { + const Sizable = { + string: { + unit: { + one: "\u0441\u0438\u043C\u0432\u043E\u043B", + few: "\u0441\u0438\u043C\u0432\u043E\u043B\u0430", + many: "\u0441\u0438\u043C\u0432\u043E\u043B\u043E\u0432" + }, + verb: "\u0438\u043C\u0435\u0442\u044C" + }, + file: { + unit: { + one: "\u0431\u0430\u0439\u0442", + few: "\u0431\u0430\u0439\u0442\u0430", + many: "\u0431\u0430\u0439\u0442" + }, + verb: "\u0438\u043C\u0435\u0442\u044C" + }, + array: { + unit: { + one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", + few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430", + many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u043E\u0432" + }, + verb: "\u0438\u043C\u0435\u0442\u044C" + }, + set: { + unit: { + one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", + few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430", + many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u043E\u0432" + }, + verb: "\u0438\u043C\u0435\u0442\u044C" + } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0432\u0432\u043E\u0434", + email: "email \u0430\u0434\u0440\u0435\u0441", + url: "URL", + emoji: "\u044D\u043C\u043E\u0434\u0437\u0438", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u0434\u0430\u0442\u0430 \u0438 \u0432\u0440\u0435\u043C\u044F", + date: "ISO \u0434\u0430\u0442\u0430", + time: "ISO \u0432\u0440\u0435\u043C\u044F", + duration: "ISO \u0434\u043B\u0438\u0442\u0435\u043B\u044C\u043D\u043E\u0441\u0442\u044C", + ipv4: "IPv4 \u0430\u0434\u0440\u0435\u0441", + ipv6: "IPv6 \u0430\u0434\u0440\u0435\u0441", + cidrv4: "IPv4 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", + cidrv6: "IPv6 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", + base64: "\u0441\u0442\u0440\u043E\u043A\u0430 \u0432 \u0444\u043E\u0440\u043C\u0430\u0442\u0435 base64", + base64url: "\u0441\u0442\u0440\u043E\u043A\u0430 \u0432 \u0444\u043E\u0440\u043C\u0430\u0442\u0435 base64url", + json_string: "JSON \u0441\u0442\u0440\u043E\u043A\u0430", + e164: "\u043D\u043E\u043C\u0435\u0440 E.164", + jwt: "JWT", + template_literal: "\u0432\u0432\u043E\u0434" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0447\u0438\u0441\u043B\u043E", + array: "\u043C\u0430\u0441\u0441\u0438\u0432" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0432\u043E\u0434: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C instanceof ${issue2.expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D\u043E ${received}`; + } + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0432\u043E\u0434: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C ${expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D\u043E ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0432\u043E\u0434: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C ${stringifyPrimitive(issue2.values[0])}`; + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0430\u0440\u0438\u0430\u043D\u0442: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C \u043E\u0434\u043D\u043E \u0438\u0437 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + const maxValue = Number(issue2.maximum); + const unit = getRussianPlural(maxValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); + return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u0431\u043E\u043B\u044C\u0448\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin ?? "\u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435"} \u0431\u0443\u0434\u0435\u0442 \u0438\u043C\u0435\u0442\u044C ${adj}${issue2.maximum.toString()} ${unit}`; + } + return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u0431\u043E\u043B\u044C\u0448\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin ?? 
"\u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435"} \u0431\u0443\u0434\u0435\u0442 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + const minValue = Number(issue2.minimum); + const unit = getRussianPlural(minValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); + return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u043C\u0430\u043B\u0435\u043D\u044C\u043A\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin} \u0431\u0443\u0434\u0435\u0442 \u0438\u043C\u0435\u0442\u044C ${adj}${issue2.minimum.toString()} ${unit}`; + } + return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u043C\u0430\u043B\u0435\u043D\u044C\u043A\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin} \u0431\u0443\u0434\u0435\u0442 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u043D\u0430\u0447\u0438\u043D\u0430\u0442\u044C\u0441\u044F \u0441 "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u0437\u0430\u043A\u0430\u043D\u0447\u0438\u0432\u0430\u0442\u044C\u0441\u044F \u043D\u0430 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u0441\u043E\u0434\u0435\u0440\u0436\u0430\u0442\u044C "${_issue.includes}"`; + if (_issue.format === "regex") + return 
`\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u0441\u043E\u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u043E\u0432\u0430\u0442\u044C \u0448\u0430\u0431\u043B\u043E\u043D\u0443 ${_issue.pattern}`; + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u041D\u0435\u0432\u0435\u0440\u043D\u043E\u0435 \u0447\u0438\u0441\u043B\u043E: \u0434\u043E\u043B\u0436\u043D\u043E \u0431\u044B\u0442\u044C \u043A\u0440\u0430\u0442\u043D\u044B\u043C ${issue2.divisor}`; + case "unrecognized_keys": + return `\u041D\u0435\u0440\u0430\u0441\u043F\u043E\u0437\u043D\u0430\u043D\u043D${issue2.keys.length > 1 ? "\u044B\u0435" : "\u044B\u0439"} \u043A\u043B\u044E\u0447${issue2.keys.length > 1 ? "\u0438" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u043A\u043B\u044E\u0447 \u0432 ${issue2.origin}`; + case "invalid_union": + return "\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0435 \u0432\u0445\u043E\u0434\u043D\u044B\u0435 \u0434\u0430\u043D\u043D\u044B\u0435"; + case "invalid_element": + return `\u041D\u0435\u0432\u0435\u0440\u043D\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435 \u0432 ${issue2.origin}`; + default: + return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0435 \u0432\u0445\u043E\u0434\u043D\u044B\u0435 \u0434\u0430\u043D\u043D\u044B\u0435`; + } + }; +}; +function ru_default() { + return { + localeError: error35() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/sl.js +var error36 = () => { + const Sizable = { + string: { unit: "znakov", verb: "imeti" }, + file: { unit: "bajtov", verb: "imeti" }, + array: { unit: "elementov", verb: "imeti" }, + set: { unit: "elementov", verb: "imeti" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "vnos", + email: "e-po\u0161tni naslov", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO datum in \u010Das", + date: "ISO datum", + time: "ISO \u010Das", + duration: "ISO trajanje", + ipv4: "IPv4 naslov", + ipv6: "IPv6 naslov", + cidrv4: "obseg IPv4", + cidrv6: "obseg IPv6", + base64: "base64 kodiran niz", + base64url: "base64url kodiran niz", + json_string: "JSON niz", + e164: "E.164 \u0161tevilka", + jwt: "JWT", + template_literal: "vnos" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0161tevilo", + array: "tabela" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Neveljaven vnos: pri\u010Dakovano instanceof ${issue2.expected}, prejeto ${received}`; + } + return `Neveljaven vnos: pri\u010Dakovano ${expected}, prejeto ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Neveljaven vnos: pri\u010Dakovano ${stringifyPrimitive(issue2.values[0])}`; + return `Neveljavna mo\u017Enost: pri\u010Dakovano eno izmed ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Preveliko: pri\u010Dakovano, da bo ${issue2.origin ?? "vrednost"} imelo ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementov"}`; + return `Preveliko: pri\u010Dakovano, da bo ${issue2.origin ?? "vrednost"} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Premajhno: pri\u010Dakovano, da bo ${issue2.origin} imelo ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Premajhno: pri\u010Dakovano, da bo ${issue2.origin} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Neveljaven niz: mora se za\u010Deti z "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `Neveljaven niz: mora se kon\u010Dati z "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Neveljaven niz: mora vsebovati "${_issue.includes}"`; + if (_issue.format === "regex") + return `Neveljaven niz: mora ustrezati vzorcu ${_issue.pattern}`; + return `Neveljaven ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Neveljavno \u0161tevilo: mora biti ve\u010Dkratnik ${issue2.divisor}`; + case "unrecognized_keys": + return `Neprepoznan${issue2.keys.length > 1 ? "i klju\u010Di" : " klju\u010D"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Neveljaven klju\u010D v ${issue2.origin}`; + case "invalid_union": + return "Neveljaven vnos"; + case "invalid_element": + return `Neveljavna vrednost v ${issue2.origin}`; + default: + return "Neveljaven vnos"; + } + }; +}; +function sl_default() { + return { + localeError: error36() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/sv.js +var error37 = () => { + const Sizable = { + string: { unit: "tecken", verb: "att ha" }, + file: { unit: "bytes", verb: "att ha" }, + array: { unit: "objekt", verb: "att inneh\xE5lla" }, + set: { unit: "objekt", verb: "att inneh\xE5lla" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "regulj\xE4rt uttryck", + email: "e-postadress", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO-datum och tid", + date: "ISO-datum", + time: "ISO-tid", + duration: "ISO-varaktighet", + ipv4: "IPv4-intervall", + ipv6: "IPv6-intervall", + cidrv4: "IPv4-spektrum", + cidrv6: "IPv6-spektrum", + base64: "base64-kodad str\xE4ng", + base64url: "base64url-kodad str\xE4ng", + json_string: "JSON-str\xE4ng", + e164: "E.164-nummer", + jwt: "JWT", + template_literal: "mall-literal" + }; + const TypeDictionary = { + nan: "NaN", + number: "antal", + array: "lista" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Ogiltig inmatning: f\xF6rv\xE4ntat instanceof ${issue2.expected}, fick ${received}`; + } + return `Ogiltig inmatning: f\xF6rv\xE4ntat ${expected}, fick ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Ogiltig inmatning: f\xF6rv\xE4ntat ${stringifyPrimitive(issue2.values[0])}`; + return `Ogiltigt val: f\xF6rv\xE4ntade en av ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `F\xF6r stor(t): f\xF6rv\xE4ntade ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "element"}`; + } + return `F\xF6r stor(t): f\xF6rv\xE4ntat ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `F\xF6r lite(t): f\xF6rv\xE4ntade ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `F\xF6r lite(t): f\xF6rv\xE4ntade ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `Ogiltig str\xE4ng: m\xE5ste b\xF6rja med "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `Ogiltig str\xE4ng: m\xE5ste sluta med "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Ogiltig str\xE4ng: m\xE5ste inneh\xE5lla "${_issue.includes}"`; + if (_issue.format === "regex") + return `Ogiltig str\xE4ng: m\xE5ste matcha m\xF6nstret "${_issue.pattern}"`; + return `Ogiltig(t) ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Ogiltigt tal: m\xE5ste vara en multipel av ${issue2.divisor}`; + case "unrecognized_keys": + return `${issue2.keys.length > 1 ? "Ok\xE4nda nycklar" : "Ok\xE4nd nyckel"}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Ogiltig nyckel i ${issue2.origin ?? "v\xE4rdet"}`; + case "invalid_union": + return "Ogiltig input"; + case "invalid_element": + return `Ogiltigt v\xE4rde i ${issue2.origin ?? 
"v\xE4rdet"}`; + default: + return `Ogiltig input`; + } + }; +}; +function sv_default() { + return { + localeError: error37() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ta.js +var error38 = () => { + const Sizable = { + string: { unit: "\u0B8E\u0BB4\u0BC1\u0BA4\u0BCD\u0BA4\u0BC1\u0B95\u0BCD\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" }, + file: { unit: "\u0BAA\u0BC8\u0B9F\u0BCD\u0B9F\u0BC1\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" }, + array: { unit: "\u0B89\u0BB1\u0BC1\u0BAA\u0BCD\u0BAA\u0BC1\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" }, + set: { unit: "\u0B89\u0BB1\u0BC1\u0BAA\u0BCD\u0BAA\u0BC1\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1", + email: "\u0BAE\u0BBF\u0BA9\u0BCD\u0BA9\u0B9E\u0BCD\u0B9A\u0BB2\u0BCD \u0BAE\u0BC1\u0B95\u0BB5\u0BB0\u0BBF", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u0BA4\u0BC7\u0BA4\u0BBF \u0BA8\u0BC7\u0BB0\u0BAE\u0BCD", + date: "ISO \u0BA4\u0BC7\u0BA4\u0BBF", + time: "ISO \u0BA8\u0BC7\u0BB0\u0BAE\u0BCD", + duration: "ISO \u0B95\u0BBE\u0BB2 \u0B85\u0BB3\u0BB5\u0BC1", + ipv4: "IPv4 \u0BAE\u0BC1\u0B95\u0BB5\u0BB0\u0BBF", + ipv6: "IPv6 \u0BAE\u0BC1\u0B95\u0BB5\u0BB0\u0BBF", + cidrv4: "IPv4 \u0BB5\u0BB0\u0BAE\u0BCD\u0BAA\u0BC1", + cidrv6: "IPv6 \u0BB5\u0BB0\u0BAE\u0BCD\u0BAA\u0BC1", + base64: "base64-encoded \u0B9A\u0BB0\u0BAE\u0BCD", + base64url: "base64url-encoded \u0B9A\u0BB0\u0BAE\u0BCD", + json_string: "JSON \u0B9A\u0BB0\u0BAE\u0BCD", + e164: "E.164 \u0B8E\u0BA3\u0BCD", + jwt: "JWT", + template_literal: "input" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0B8E\u0BA3\u0BCD", + array: "\u0B85\u0BA3\u0BBF", + null: "\u0BB5\u0BC6\u0BB1\u0BC1\u0BAE\u0BC8" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 instanceof ${issue2.expected}, \u0BAA\u0BC6\u0BB1\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${received}`; + } + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${expected}, \u0BAA\u0BC6\u0BB1\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${stringifyPrimitive(issue2.values[0])}`; + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0BB5\u0BBF\u0BB0\u0BC1\u0BAA\u0BCD\u0BAA\u0BAE\u0BCD: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${joinValues(issue2.values, "|")} \u0B87\u0BB2\u0BCD \u0B92\u0BA9\u0BCD\u0BB1\u0BC1`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0BAE\u0BBF\u0B95 \u0BAA\u0BC6\u0BB0\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin ?? "\u0BAE\u0BA4\u0BBF\u0BAA\u0BCD\u0BAA\u0BC1"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"\u0B89\u0BB1\u0BC1\u0BAA\u0BCD\u0BAA\u0BC1\u0B95\u0BB3\u0BCD"} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + } + return `\u0BAE\u0BBF\u0B95 \u0BAA\u0BC6\u0BB0\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin ?? "\u0BAE\u0BA4\u0BBF\u0BAA\u0BCD\u0BAA\u0BC1"} ${adj}${issue2.maximum.toString()} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0BAE\u0BBF\u0B95\u0B9A\u0BCD \u0B9A\u0BBF\u0BB1\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + } + return `\u0BAE\u0BBF\u0B95\u0B9A\u0BCD \u0B9A\u0BBF\u0BB1\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin} ${adj}${issue2.minimum.toString()} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: "${_issue.prefix}" \u0B87\u0BB2\u0BCD \u0BA4\u0BCA\u0B9F\u0B99\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + if (_issue.format === "ends_with") + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: "${_issue.suffix}" \u0B87\u0BB2\u0BCD \u0BAE\u0BC1\u0B9F\u0BBF\u0BB5\u0B9F\u0BC8\u0BAF \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + if (_issue.format === "includes") + 
return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: "${_issue.includes}" \u0B90 \u0B89\u0BB3\u0BCD\u0BB3\u0B9F\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + if (_issue.format === "regex") + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: ${_issue.pattern} \u0BAE\u0BC1\u0BB1\u0BC8\u0BAA\u0BBE\u0B9F\u0BCD\u0B9F\u0BC1\u0B9F\u0BA9\u0BCD \u0BAA\u0BCA\u0BB0\u0BC1\u0BA8\u0BCD\u0BA4 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B8E\u0BA3\u0BCD: ${issue2.divisor} \u0B87\u0BA9\u0BCD \u0BAA\u0BB2\u0BAE\u0BBE\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; + case "unrecognized_keys": + return `\u0B85\u0B9F\u0BC8\u0BAF\u0BBE\u0BB3\u0BAE\u0BCD \u0BA4\u0BC6\u0BB0\u0BBF\u0BAF\u0BBE\u0BA4 \u0BB5\u0BBF\u0B9A\u0BC8${issue2.keys.length > 1 ? 
"\u0B95\u0BB3\u0BCD" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `${issue2.origin} \u0B87\u0BB2\u0BCD \u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0BB5\u0BBF\u0B9A\u0BC8`; + case "invalid_union": + return "\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1"; + case "invalid_element": + return `${issue2.origin} \u0B87\u0BB2\u0BCD \u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0BAE\u0BA4\u0BBF\u0BAA\u0BCD\u0BAA\u0BC1`; + default: + return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1`; + } + }; +}; +function ta_default() { + return { + localeError: error38() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/th.js +var error39 = () => { + const Sizable = { + string: { unit: "\u0E15\u0E31\u0E27\u0E2D\u0E31\u0E01\u0E29\u0E23", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" }, + file: { unit: "\u0E44\u0E1A\u0E15\u0E4C", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" }, + array: { unit: "\u0E23\u0E32\u0E22\u0E01\u0E32\u0E23", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" }, + set: { unit: "\u0E23\u0E32\u0E22\u0E01\u0E32\u0E23", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E17\u0E35\u0E48\u0E1B\u0E49\u0E2D\u0E19", + email: "\u0E17\u0E35\u0E48\u0E2D\u0E22\u0E39\u0E48\u0E2D\u0E35\u0E40\u0E21\u0E25", + url: "URL", + emoji: "\u0E2D\u0E34\u0E42\u0E21\u0E08\u0E34", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u0E27\u0E31\u0E19\u0E17\u0E35\u0E48\u0E40\u0E27\u0E25\u0E32\u0E41\u0E1A\u0E1A ISO", + date: "\u0E27\u0E31\u0E19\u0E17\u0E35\u0E48\u0E41\u0E1A\u0E1A ISO", + time: "\u0E40\u0E27\u0E25\u0E32\u0E41\u0E1A\u0E1A ISO", + duration: "\u0E0A\u0E48\u0E27\u0E07\u0E40\u0E27\u0E25\u0E32\u0E41\u0E1A\u0E1A ISO", + ipv4: "\u0E17\u0E35\u0E48\u0E2D\u0E22\u0E39\u0E48 IPv4", + ipv6: "\u0E17\u0E35\u0E48\u0E2D\u0E22\u0E39\u0E48 IPv6", + cidrv4: "\u0E0A\u0E48\u0E27\u0E07 IP \u0E41\u0E1A\u0E1A IPv4", + cidrv6: "\u0E0A\u0E48\u0E27\u0E07 IP \u0E41\u0E1A\u0E1A IPv6", + base64: "\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E41\u0E1A\u0E1A Base64", + base64url: "\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E41\u0E1A\u0E1A Base64 \u0E2A\u0E33\u0E2B\u0E23\u0E31\u0E1A URL", + json_string: "\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E41\u0E1A\u0E1A JSON", + e164: "\u0E40\u0E1A\u0E2D\u0E23\u0E4C\u0E42\u0E17\u0E23\u0E28\u0E31\u0E1E\u0E17\u0E4C\u0E23\u0E30\u0E2B\u0E27\u0E48\u0E32\u0E07\u0E1B\u0E23\u0E30\u0E40\u0E17\u0E28 (E.164)", + jwt: "\u0E42\u0E17\u0E40\u0E04\u0E19 JWT", + template_literal: "\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E17\u0E35\u0E48\u0E1B\u0E49\u0E2D\u0E19" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0E15\u0E31\u0E27\u0E40\u0E25\u0E02", + array: "\u0E2D\u0E32\u0E23\u0E4C\u0E40\u0E23\u0E22\u0E4C (Array)", + null: "\u0E44\u0E21\u0E48\u0E21\u0E35\u0E04\u0E48\u0E32 (null)" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? 
issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0E1B\u0E23\u0E30\u0E40\u0E20\u0E17\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19 instanceof ${issue2.expected} \u0E41\u0E15\u0E48\u0E44\u0E14\u0E49\u0E23\u0E31\u0E1A ${received}`; + } + return `\u0E1B\u0E23\u0E30\u0E40\u0E20\u0E17\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19 ${expected} \u0E41\u0E15\u0E48\u0E44\u0E14\u0E49\u0E23\u0E31\u0E1A ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u0E04\u0E48\u0E32\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19 ${stringifyPrimitive(issue2.values[0])}`; + return `\u0E15\u0E31\u0E27\u0E40\u0E25\u0E37\u0E2D\u0E01\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19\u0E2B\u0E19\u0E36\u0E48\u0E07\u0E43\u0E19 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "\u0E44\u0E21\u0E48\u0E40\u0E01\u0E34\u0E19" : "\u0E19\u0E49\u0E2D\u0E22\u0E01\u0E27\u0E48\u0E32"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u0E40\u0E01\u0E34\u0E19\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin ?? "\u0E04\u0E48\u0E32"} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "\u0E23\u0E32\u0E22\u0E01\u0E32\u0E23"}`; + return `\u0E40\u0E01\u0E34\u0E19\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin ?? "\u0E04\u0E48\u0E32"} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
"\u0E2D\u0E22\u0E48\u0E32\u0E07\u0E19\u0E49\u0E2D\u0E22" : "\u0E21\u0E32\u0E01\u0E01\u0E27\u0E48\u0E32"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0E19\u0E49\u0E2D\u0E22\u0E01\u0E27\u0E48\u0E32\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u0E19\u0E49\u0E2D\u0E22\u0E01\u0E27\u0E48\u0E32\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E15\u0E49\u0E2D\u0E07\u0E02\u0E36\u0E49\u0E19\u0E15\u0E49\u0E19\u0E14\u0E49\u0E27\u0E22 "${_issue.prefix}"`; + } + if (_issue.format === "ends_with") + return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E15\u0E49\u0E2D\u0E07\u0E25\u0E07\u0E17\u0E49\u0E32\u0E22\u0E14\u0E49\u0E27\u0E22 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E15\u0E49\u0E2D\u0E07\u0E21\u0E35 "${_issue.includes}" \u0E2D\u0E22\u0E39\u0E48\u0E43\u0E19\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21`; + if (_issue.format === "regex") + return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E15\u0E49\u0E2D\u0E07\u0E15\u0E23\u0E07\u0E01\u0E31\u0E1A\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E17\u0E35\u0E48\u0E01\u0E33\u0E2B\u0E19\u0E14 ${_issue.pattern}`; + return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\u0E15\u0E31\u0E27\u0E40\u0E25\u0E02\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E15\u0E49\u0E2D\u0E07\u0E40\u0E1B\u0E47\u0E19\u0E08\u0E33\u0E19\u0E27\u0E19\u0E17\u0E35\u0E48\u0E2B\u0E32\u0E23\u0E14\u0E49\u0E27\u0E22 ${issue2.divisor} \u0E44\u0E14\u0E49\u0E25\u0E07\u0E15\u0E31\u0E27`; + case "unrecognized_keys": + return `\u0E1E\u0E1A\u0E04\u0E35\u0E22\u0E4C\u0E17\u0E35\u0E48\u0E44\u0E21\u0E48\u0E23\u0E39\u0E49\u0E08\u0E31\u0E01: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u0E04\u0E35\u0E22\u0E4C\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07\u0E43\u0E19 ${issue2.origin}`; + case "invalid_union": + return "\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E44\u0E21\u0E48\u0E15\u0E23\u0E07\u0E01\u0E31\u0E1A\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E22\u0E39\u0E40\u0E19\u0E35\u0E22\u0E19\u0E17\u0E35\u0E48\u0E01\u0E33\u0E2B\u0E19\u0E14\u0E44\u0E27\u0E49"; + case "invalid_element": + return `\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07\u0E43\u0E19 ${issue2.origin}`; + default: + return `\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07`; + } + }; +}; +function th_default() { + return { + localeError: error39() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/tr.js +var error40 = () => { + const Sizable = { + string: { unit: "karakter", verb: "olmal\u0131" }, + file: { unit: "bayt", verb: "olmal\u0131" }, + array: { unit: "\xF6\u011Fe", verb: "olmal\u0131" }, + set: { unit: "\xF6\u011Fe", verb: "olmal\u0131" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "girdi", + email: "e-posta adresi", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO tarih ve saat", + date: "ISO tarih", + time: "ISO saat", + duration: "ISO s\xFCre", + ipv4: "IPv4 adresi", + ipv6: "IPv6 adresi", + cidrv4: "IPv4 aral\u0131\u011F\u0131", + cidrv6: "IPv6 aral\u0131\u011F\u0131", + base64: "base64 ile \u015Fifrelenmi\u015F metin", + base64url: "base64url ile \u015Fifrelenmi\u015F metin", + json_string: "JSON dizesi", + e164: "E.164 say\u0131s\u0131", + jwt: "JWT", + template_literal: "\u015Eablon dizesi" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Ge\xE7ersiz de\u011Fer: beklenen instanceof ${issue2.expected}, al\u0131nan ${received}`; + } + return `Ge\xE7ersiz de\u011Fer: beklenen ${expected}, al\u0131nan ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Ge\xE7ersiz de\u011Fer: beklenen ${stringifyPrimitive(issue2.values[0])}`; + return `Ge\xE7ersiz se\xE7enek: a\u015Fa\u011F\u0131dakilerden biri olmal\u0131: ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\xC7ok b\xFCy\xFCk: beklenen ${issue2.origin ?? "de\u011Fer"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\xF6\u011Fe"}`; + return `\xC7ok b\xFCy\xFCk: beklenen ${issue2.origin ?? "de\u011Fer"} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\xC7ok k\xFC\xE7\xFCk: beklenen ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + return `\xC7ok k\xFC\xE7\xFCk: beklenen ${issue2.origin} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Ge\xE7ersiz metin: "${_issue.prefix}" ile ba\u015Flamal\u0131`; + if (_issue.format === "ends_with") + return `Ge\xE7ersiz metin: "${_issue.suffix}" ile bitmeli`; + if (_issue.format === "includes") + return `Ge\xE7ersiz metin: "${_issue.includes}" i\xE7ermeli`; + if (_issue.format === "regex") + return `Ge\xE7ersiz metin: ${_issue.pattern} desenine uymal\u0131`; + return `Ge\xE7ersiz ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Ge\xE7ersiz say\u0131: ${issue2.divisor} ile tam b\xF6l\xFCnebilmeli`; + case "unrecognized_keys": + return `Tan\u0131nmayan anahtar${issue2.keys.length > 1 ? 
"lar" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `${issue2.origin} i\xE7inde ge\xE7ersiz anahtar`; + case "invalid_union": + return "Ge\xE7ersiz de\u011Fer"; + case "invalid_element": + return `${issue2.origin} i\xE7inde ge\xE7ersiz de\u011Fer`; + default: + return `Ge\xE7ersiz de\u011Fer`; + } + }; +}; +function tr_default() { + return { + localeError: error40() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/uk.js +var error41 = () => { + const Sizable = { + string: { unit: "\u0441\u0438\u043C\u0432\u043E\u043B\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" }, + file: { unit: "\u0431\u0430\u0439\u0442\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" }, + array: { unit: "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" }, + set: { unit: "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456", + email: "\u0430\u0434\u0440\u0435\u0441\u0430 \u0435\u043B\u0435\u043A\u0442\u0440\u043E\u043D\u043D\u043E\u0457 \u043F\u043E\u0448\u0442\u0438", + url: "URL", + emoji: "\u0435\u043C\u043E\u0434\u0437\u0456", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\u0434\u0430\u0442\u0430 \u0442\u0430 \u0447\u0430\u0441 ISO", + date: "\u0434\u0430\u0442\u0430 ISO", + time: "\u0447\u0430\u0441 ISO", + duration: "\u0442\u0440\u0438\u0432\u0430\u043B\u0456\u0441\u0442\u044C ISO", + ipv4: "\u0430\u0434\u0440\u0435\u0441\u0430 IPv4", + ipv6: "\u0430\u0434\u0440\u0435\u0441\u0430 IPv6", + cidrv4: "\u0434\u0456\u0430\u043F\u0430\u0437\u043E\u043D IPv4", + cidrv6: "\u0434\u0456\u0430\u043F\u0430\u0437\u043E\u043D IPv6", + base64: "\u0440\u044F\u0434\u043E\u043A \u0443 \u043A\u043E\u0434\u0443\u0432\u0430\u043D\u043D\u0456 base64", + base64url: "\u0440\u044F\u0434\u043E\u043A \u0443 \u043A\u043E\u0434\u0443\u0432\u0430\u043D\u043D\u0456 base64url", + json_string: "\u0440\u044F\u0434\u043E\u043A JSON", + e164: "\u043D\u043E\u043C\u0435\u0440 E.164", + jwt: "JWT", + template_literal: "\u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0447\u0438\u0441\u043B\u043E", + array: "\u043C\u0430\u0441\u0438\u0432" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? 
receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F instanceof ${issue2.expected}, \u043E\u0442\u0440\u0438\u043C\u0430\u043D\u043E ${received}`; + } + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F ${expected}, \u043E\u0442\u0440\u0438\u043C\u0430\u043D\u043E ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F ${stringifyPrimitive(issue2.values[0])}`; + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0430 \u043E\u043F\u0446\u0456\u044F: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F \u043E\u0434\u043D\u0435 \u0437 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u0432\u0435\u043B\u0438\u043A\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin ?? "\u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0456\u0432"}`; + return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u0432\u0435\u043B\u0438\u043A\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin ?? 
"\u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F"} \u0431\u0443\u0434\u0435 ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u043C\u0430\u043B\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u043C\u0430\u043B\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin} \u0431\u0443\u0434\u0435 ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u043F\u043E\u0447\u0438\u043D\u0430\u0442\u0438\u0441\u044F \u0437 "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u0437\u0430\u043A\u0456\u043D\u0447\u0443\u0432\u0430\u0442\u0438\u0441\u044F \u043D\u0430 "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u043C\u0456\u0441\u0442\u0438\u0442\u0438 "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u0432\u0456\u0434\u043F\u043E\u0432\u0456\u0434\u0430\u0442\u0438 \u0448\u0430\u0431\u043B\u043E\u043D\u0443 ${_issue.pattern}`; + return 
`\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0435 \u0447\u0438\u0441\u043B\u043E: \u043F\u043E\u0432\u0438\u043D\u043D\u043E \u0431\u0443\u0442\u0438 \u043A\u0440\u0430\u0442\u043D\u0438\u043C ${issue2.divisor}`; + case "unrecognized_keys": + return `\u041D\u0435\u0440\u043E\u0437\u043F\u0456\u0437\u043D\u0430\u043D\u0438\u0439 \u043A\u043B\u044E\u0447${issue2.keys.length > 1 ? "\u0456" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u043A\u043B\u044E\u0447 \u0443 ${issue2.origin}`; + case "invalid_union": + return "\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456"; + case "invalid_element": + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F \u0443 ${issue2.origin}`; + default: + return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456`; + } + }; +}; +function uk_default() { + return { + localeError: error41() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ua.js +function ua_default() { + return uk_default(); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ur.js +var error42 = () => { + const Sizable = { + string: { unit: "\u062D\u0631\u0648\u0641", verb: "\u06C1\u0648\u0646\u0627" }, + file: { unit: "\u0628\u0627\u0626\u0679\u0633", verb: "\u06C1\u0648\u0646\u0627" }, + array: { unit: "\u0622\u0626\u0679\u0645\u0632", verb: "\u06C1\u0648\u0646\u0627" }, + set: { unit: "\u0622\u0626\u0679\u0645\u0632", verb: "\u06C1\u0648\u0646\u0627" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0627\u0646 \u067E\u0679", + email: "\u0627\u06CC \u0645\u06CC\u0644 \u0627\u06CC\u0688\u0631\u06CC\u0633", + url: "\u06CC\u0648 \u0622\u0631 \u0627\u06CC\u0644", + emoji: "\u0627\u06CC\u0645\u0648\u062C\u06CC", + uuid: "\u06CC\u0648 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", + uuidv4: "\u06CC\u0648 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC \u0648\u06CC 4", + uuidv6: "\u06CC\u0648 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC \u0648\u06CC 6", + nanoid: "\u0646\u06CC\u0646\u0648 \u0622\u0626\u06CC \u0688\u06CC", + guid: "\u062C\u06CC \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", + cuid: "\u0633\u06CC \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", + cuid2: "\u0633\u06CC \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC 2", + ulid: "\u06CC\u0648 \u0627\u06CC\u0644 \u0622\u0626\u06CC \u0688\u06CC", + xid: "\u0627\u06CC\u06A9\u0633 \u0622\u0626\u06CC \u0688\u06CC", + ksuid: "\u06A9\u06D2 \u0627\u06CC\u0633 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", + datetime: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u0688\u06CC\u0679 \u0679\u0627\u0626\u0645", + date: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u062A\u0627\u0631\u06CC\u062E", + time: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u0648\u0642\u062A", + duration: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u0645\u062F\u062A", + ipv4: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 4 \u0627\u06CC\u0688\u0631\u06CC\u0633", + ipv6: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 6 \u0627\u06CC\u0688\u0631\u06CC\u0633", + cidrv4: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 4 \u0631\u06CC\u0646\u062C", + cidrv6: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 6 \u0631\u06CC\u0646\u062C", + base64: "\u0628\u06CC\u0633 64 \u0627\u0646 \u06A9\u0648\u0688\u0688 \u0633\u0679\u0631\u0646\u06AF", + base64url: "\u0628\u06CC\u0633 64 \u06CC\u0648 \u0622\u0631 \u0627\u06CC\u0644 \u0627\u0646 \u06A9\u0648\u0688\u0688 \u0633\u0679\u0631\u0646\u06AF", 
+ json_string: "\u062C\u06D2 \u0627\u06CC\u0633 \u0627\u0648 \u0627\u06CC\u0646 \u0633\u0679\u0631\u0646\u06AF", + e164: "\u0627\u06CC 164 \u0646\u0645\u0628\u0631", + jwt: "\u062C\u06D2 \u0688\u0628\u0644\u06CC\u0648 \u0679\u06CC", + template_literal: "\u0627\u0646 \u067E\u0679" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u0646\u0645\u0628\u0631", + array: "\u0622\u0631\u06D2", + null: "\u0646\u0644" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679: instanceof ${issue2.expected} \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627\u060C ${received} \u0645\u0648\u0635\u0648\u0644 \u06C1\u0648\u0627`; + } + return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679: ${expected} \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627\u060C ${received} \u0645\u0648\u0635\u0648\u0644 \u06C1\u0648\u0627`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679: ${stringifyPrimitive(issue2.values[0])} \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; + return `\u063A\u0644\u0637 \u0622\u067E\u0634\u0646: ${joinValues(issue2.values, "|")} \u0645\u06CC\u06BA \u0633\u06D2 \u0627\u06CC\u06A9 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u0628\u06C1\u062A \u0628\u0691\u0627: ${issue2.origin ?? "\u0648\u06CC\u0644\u06CC\u0648"} \u06A9\u06D2 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0627\u0635\u0631"} \u06C1\u0648\u0646\u06D2 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u06D2`; + return `\u0628\u06C1\u062A \u0628\u0691\u0627: ${issue2.origin ?? 
"\u0648\u06CC\u0644\u06CC\u0648"} \u06A9\u0627 ${adj}${issue2.maximum.toString()} \u06C1\u0648\u0646\u0627 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u0628\u06C1\u062A \u0686\u06BE\u0648\u0679\u0627: ${issue2.origin} \u06A9\u06D2 ${adj}${issue2.minimum.toString()} ${sizing.unit} \u06C1\u0648\u0646\u06D2 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u06D2`; + } + return `\u0628\u06C1\u062A \u0686\u06BE\u0648\u0679\u0627: ${issue2.origin} \u06A9\u0627 ${adj}${issue2.minimum.toString()} \u06C1\u0648\u0646\u0627 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: "${_issue.prefix}" \u0633\u06D2 \u0634\u0631\u0648\u0639 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; + } + if (_issue.format === "ends_with") + return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: "${_issue.suffix}" \u067E\u0631 \u062E\u062A\u0645 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; + if (_issue.format === "includes") + return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: "${_issue.includes}" \u0634\u0627\u0645\u0644 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; + if (_issue.format === "regex") + return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: \u067E\u06CC\u0679\u0631\u0646 ${_issue.pattern} \u0633\u06D2 \u0645\u06CC\u0686 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; + return `\u063A\u0644\u0637 ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\u063A\u0644\u0637 \u0646\u0645\u0628\u0631: ${issue2.divisor} \u06A9\u0627 \u0645\u0636\u0627\u0639\u0641 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; + case "unrecognized_keys": + return `\u063A\u06CC\u0631 \u062A\u0633\u0644\u06CC\u0645 \u0634\u062F\u06C1 \u06A9\u06CC${issue2.keys.length > 1 ? "\u0632" : ""}: ${joinValues(issue2.keys, "\u060C ")}`; + case "invalid_key": + return `${issue2.origin} \u0645\u06CC\u06BA \u063A\u0644\u0637 \u06A9\u06CC`; + case "invalid_union": + return "\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679"; + case "invalid_element": + return `${issue2.origin} \u0645\u06CC\u06BA \u063A\u0644\u0637 \u0648\u06CC\u0644\u06CC\u0648`; + default: + return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679`; + } + }; +}; +function ur_default() { + return { + localeError: error42() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/uz.js +var error43 = () => { + const Sizable = { + string: { unit: "belgi", verb: "bo\u2018lishi kerak" }, + file: { unit: "bayt", verb: "bo\u2018lishi kerak" }, + array: { unit: "element", verb: "bo\u2018lishi kerak" }, + set: { unit: "element", verb: "bo\u2018lishi kerak" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "kirish", + email: "elektron pochta manzili", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO sana va vaqti", + date: "ISO sana", + time: "ISO vaqt", + duration: "ISO davomiylik", + ipv4: "IPv4 manzil", + ipv6: "IPv6 manzil", + mac: "MAC manzil", + cidrv4: "IPv4 diapazon", + cidrv6: "IPv6 diapazon", + base64: "base64 kodlangan satr", + base64url: "base64url kodlangan satr", + json_string: "JSON satr", + e164: "E.164 raqam", + jwt: "JWT", + template_literal: "kirish" + }; + const TypeDictionary = { + nan: "NaN", + number: "raqam", + array: "massiv" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `Noto\u2018g\u2018ri kirish: kutilgan instanceof ${issue2.expected}, qabul qilingan ${received}`; + } + return `Noto\u2018g\u2018ri kirish: kutilgan ${expected}, qabul qilingan ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `Noto\u2018g\u2018ri kirish: kutilgan ${stringifyPrimitive(issue2.values[0])}`; + return `Noto\u2018g\u2018ri variant: quyidagilardan biri kutilgan ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Juda katta: kutilgan ${issue2.origin ?? "qiymat"} ${adj}${issue2.maximum.toString()} ${sizing.unit} ${sizing.verb}`; + return `Juda katta: kutilgan ${issue2.origin ?? "qiymat"} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Juda kichik: kutilgan ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} ${sizing.verb}`; + } + return `Juda kichik: kutilgan ${issue2.origin} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Noto\u2018g\u2018ri satr: "${_issue.prefix}" bilan boshlanishi kerak`; + if (_issue.format === "ends_with") + return `Noto\u2018g\u2018ri satr: "${_issue.suffix}" bilan tugashi kerak`; + if (_issue.format === "includes") + return `Noto\u2018g\u2018ri satr: "${_issue.includes}" ni o\u2018z ichiga olishi kerak`; + if (_issue.format === "regex") + return `Noto\u2018g\u2018ri satr: ${_issue.pattern} shabloniga mos kelishi kerak`; + return `Noto\u2018g\u2018ri ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `Noto\u2018g\u2018ri raqam: ${issue2.divisor} ning karralisi bo\u2018lishi kerak`; + case "unrecognized_keys": + return `Noma\u2019lum kalit${issue2.keys.length > 1 ? "lar" : ""}: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `${issue2.origin} dagi kalit noto\u2018g\u2018ri`; + case "invalid_union": + return "Noto\u2018g\u2018ri kirish"; + case "invalid_element": + return `${issue2.origin} da noto\u2018g\u2018ri qiymat`; + default: + return `Noto\u2018g\u2018ri kirish`; + } + }; +}; +function uz_default() { + return { + localeError: error43() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/vi.js +var error44 = () => { + const Sizable = { + string: { unit: "k\xFD t\u1EF1", verb: "c\xF3" }, + file: { unit: "byte", verb: "c\xF3" }, + array: { unit: "ph\u1EA7n t\u1EED", verb: "c\xF3" }, + set: { unit: "ph\u1EA7n t\u1EED", verb: "c\xF3" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u0111\u1EA7u v\xE0o", + email: "\u0111\u1ECBa ch\u1EC9 email", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ng\xE0y gi\u1EDD ISO", + date: "ng\xE0y ISO", + time: "gi\u1EDD ISO", + duration: "kho\u1EA3ng th\u1EDDi gian ISO", + ipv4: "\u0111\u1ECBa ch\u1EC9 IPv4", + ipv6: "\u0111\u1ECBa ch\u1EC9 IPv6", + cidrv4: "d\u1EA3i IPv4", + cidrv6: "d\u1EA3i IPv6", + base64: "chu\u1ED7i m\xE3 h\xF3a base64", + base64url: "chu\u1ED7i m\xE3 h\xF3a base64url", + json_string: "chu\u1ED7i JSON", + e164: "s\u1ED1 E.164", + jwt: "JWT", + template_literal: "\u0111\u1EA7u v\xE0o" + }; + const TypeDictionary = { + nan: "NaN", + number: "s\u1ED1", + array: "m\u1EA3ng" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i instanceof ${issue2.expected}, nh\u1EADn \u0111\u01B0\u1EE3c ${received}`; + } + return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i ${expected}, nh\u1EADn \u0111\u01B0\u1EE3c ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i ${stringifyPrimitive(issue2.values[0])}`; + return `T\xF9y ch\u1ECDn kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i m\u1ED9t trong c\xE1c gi\xE1 tr\u1ECB ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `Qu\xE1 l\u1EDBn: mong \u0111\u1EE3i ${issue2.origin ?? 
"gi\xE1 tr\u1ECB"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "ph\u1EA7n t\u1EED"}`; + return `Qu\xE1 l\u1EDBn: mong \u0111\u1EE3i ${issue2.origin ?? "gi\xE1 tr\u1ECB"} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `Qu\xE1 nh\u1ECF: mong \u0111\u1EE3i ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `Qu\xE1 nh\u1ECF: mong \u0111\u1EE3i ${issue2.origin} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i b\u1EAFt \u0111\u1EA7u b\u1EB1ng "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i k\u1EBFt th\xFAc b\u1EB1ng "${_issue.suffix}"`; + if (_issue.format === "includes") + return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i bao g\u1ED3m "${_issue.includes}"`; + if (_issue.format === "regex") + return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i kh\u1EDBp v\u1EDBi m\u1EABu ${_issue.pattern}`; + return `${FormatDictionary[_issue.format] ?? 
issue2.format} kh\xF4ng h\u1EE3p l\u1EC7`; + } + case "not_multiple_of": + return `S\u1ED1 kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i l\xE0 b\u1ED9i s\u1ED1 c\u1EE7a ${issue2.divisor}`; + case "unrecognized_keys": + return `Kh\xF3a kh\xF4ng \u0111\u01B0\u1EE3c nh\u1EADn d\u1EA1ng: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `Kh\xF3a kh\xF4ng h\u1EE3p l\u1EC7 trong ${issue2.origin}`; + case "invalid_union": + return "\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7"; + case "invalid_element": + return `Gi\xE1 tr\u1ECB kh\xF4ng h\u1EE3p l\u1EC7 trong ${issue2.origin}`; + default: + return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7`; + } + }; +}; +function vi_default() { + return { + localeError: error44() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/zh-CN.js +var error45 = () => { + const Sizable = { + string: { unit: "\u5B57\u7B26", verb: "\u5305\u542B" }, + file: { unit: "\u5B57\u8282", verb: "\u5305\u542B" }, + array: { unit: "\u9879", verb: "\u5305\u542B" }, + set: { unit: "\u9879", verb: "\u5305\u542B" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u8F93\u5165", + email: "\u7535\u5B50\u90AE\u4EF6", + url: "URL", + emoji: "\u8868\u60C5\u7B26\u53F7", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO\u65E5\u671F\u65F6\u95F4", + date: "ISO\u65E5\u671F", + time: "ISO\u65F6\u95F4", + duration: "ISO\u65F6\u957F", + ipv4: "IPv4\u5730\u5740", + ipv6: "IPv6\u5730\u5740", + cidrv4: "IPv4\u7F51\u6BB5", + cidrv6: "IPv6\u7F51\u6BB5", + base64: "base64\u7F16\u7801\u5B57\u7B26\u4E32", + base64url: "base64url\u7F16\u7801\u5B57\u7B26\u4E32", + json_string: "JSON\u5B57\u7B26\u4E32", + e164: "E.164\u53F7\u7801", + jwt: "JWT", + template_literal: "\u8F93\u5165" + }; + const TypeDictionary = { + nan: "NaN", + number: "\u6570\u5B57", + array: "\u6570\u7EC4", + null: "\u7A7A\u503C(null)" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u65E0\u6548\u8F93\u5165\uFF1A\u671F\u671B instanceof ${issue2.expected}\uFF0C\u5B9E\u9645\u63A5\u6536 ${received}`; + } + return `\u65E0\u6548\u8F93\u5165\uFF1A\u671F\u671B ${expected}\uFF0C\u5B9E\u9645\u63A5\u6536 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u65E0\u6548\u8F93\u5165\uFF1A\u671F\u671B ${stringifyPrimitive(issue2.values[0])}`; + return `\u65E0\u6548\u9009\u9879\uFF1A\u671F\u671B\u4EE5\u4E0B\u4E4B\u4E00 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u6570\u503C\u8FC7\u5927\uFF1A\u671F\u671B ${issue2.origin ?? "\u503C"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"\u4E2A\u5143\u7D20"}`; + return `\u6570\u503C\u8FC7\u5927\uFF1A\u671F\u671B ${issue2.origin ?? "\u503C"} ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u6570\u503C\u8FC7\u5C0F\uFF1A\u671F\u671B ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u6570\u503C\u8FC7\u5C0F\uFF1A\u671F\u671B ${issue2.origin} ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u4EE5 "${_issue.prefix}" \u5F00\u5934`; + if (_issue.format === "ends_with") + return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u4EE5 "${_issue.suffix}" \u7ED3\u5C3E`; + if (_issue.format === "includes") + return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u5305\u542B "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u6EE1\u8DB3\u6B63\u5219\u8868\u8FBE\u5F0F ${_issue.pattern}`; + return `\u65E0\u6548${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `\u65E0\u6548\u6570\u5B57\uFF1A\u5FC5\u987B\u662F ${issue2.divisor} \u7684\u500D\u6570`; + case "unrecognized_keys": + return `\u51FA\u73B0\u672A\u77E5\u7684\u952E(key): ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `${issue2.origin} \u4E2D\u7684\u952E(key)\u65E0\u6548`; + case "invalid_union": + return "\u65E0\u6548\u8F93\u5165"; + case "invalid_element": + return `${issue2.origin} \u4E2D\u5305\u542B\u65E0\u6548\u503C(value)`; + default: + return `\u65E0\u6548\u8F93\u5165`; + } + }; +}; +function zh_CN_default() { + return { + localeError: error45() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/zh-TW.js +var error46 = () => { + const Sizable = { + string: { unit: "\u5B57\u5143", verb: "\u64C1\u6709" }, + file: { unit: "\u4F4D\u5143\u7D44", verb: "\u64C1\u6709" }, + array: { unit: "\u9805\u76EE", verb: "\u64C1\u6709" }, + set: { unit: "\u9805\u76EE", verb: "\u64C1\u6709" } + }; + function getSizing(origin) { + return Sizable[origin] ?? 
null; + } + const FormatDictionary = { + regex: "\u8F38\u5165", + email: "\u90F5\u4EF6\u5730\u5740", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "ISO \u65E5\u671F\u6642\u9593", + date: "ISO \u65E5\u671F", + time: "ISO \u6642\u9593", + duration: "ISO \u671F\u9593", + ipv4: "IPv4 \u4F4D\u5740", + ipv6: "IPv6 \u4F4D\u5740", + cidrv4: "IPv4 \u7BC4\u570D", + cidrv6: "IPv6 \u7BC4\u570D", + base64: "base64 \u7DE8\u78BC\u5B57\u4E32", + base64url: "base64url \u7DE8\u78BC\u5B57\u4E32", + json_string: "JSON \u5B57\u4E32", + e164: "E.164 \u6578\u503C", + jwt: "JWT", + template_literal: "\u8F38\u5165" + }; + const TypeDictionary = { + nan: "NaN" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] ?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\u7121\u6548\u7684\u8F38\u5165\u503C\uFF1A\u9810\u671F\u70BA instanceof ${issue2.expected}\uFF0C\u4F46\u6536\u5230 ${received}`; + } + return `\u7121\u6548\u7684\u8F38\u5165\u503C\uFF1A\u9810\u671F\u70BA ${expected}\uFF0C\u4F46\u6536\u5230 ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\u7121\u6548\u7684\u8F38\u5165\u503C\uFF1A\u9810\u671F\u70BA ${stringifyPrimitive(issue2.values[0])}`; + return `\u7121\u6548\u7684\u9078\u9805\uFF1A\u9810\u671F\u70BA\u4EE5\u4E0B\u5176\u4E2D\u4E4B\u4E00 ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `\u6578\u503C\u904E\u5927\uFF1A\u9810\u671F ${issue2.origin ?? "\u503C"} \u61C9\u70BA ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"\u500B\u5143\u7D20"}`; + return `\u6578\u503C\u904E\u5927\uFF1A\u9810\u671F ${issue2.origin ?? "\u503C"} \u61C9\u70BA ${adj}${issue2.maximum.toString()}`; + } + case "too_small": { + const adj = issue2.inclusive ? ">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) { + return `\u6578\u503C\u904E\u5C0F\uFF1A\u9810\u671F ${issue2.origin} \u61C9\u70BA ${adj}${issue2.minimum.toString()} ${sizing.unit}`; + } + return `\u6578\u503C\u904E\u5C0F\uFF1A\u9810\u671F ${issue2.origin} \u61C9\u70BA ${adj}${issue2.minimum.toString()}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") { + return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u4EE5 "${_issue.prefix}" \u958B\u982D`; + } + if (_issue.format === "ends_with") + return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u4EE5 "${_issue.suffix}" \u7D50\u5C3E`; + if (_issue.format === "includes") + return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u5305\u542B "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u7B26\u5408\u683C\u5F0F ${_issue.pattern}`; + return `\u7121\u6548\u7684 ${FormatDictionary[_issue.format] ?? issue2.format}`; + } + case "not_multiple_of": + return `\u7121\u6548\u7684\u6578\u5B57\uFF1A\u5FC5\u9808\u70BA ${issue2.divisor} \u7684\u500D\u6578`; + case "unrecognized_keys": + return `\u7121\u6CD5\u8B58\u5225\u7684\u9375\u503C${issue2.keys.length > 1 ? 
"\u5011" : ""}\uFF1A${joinValues(issue2.keys, "\u3001")}`; + case "invalid_key": + return `${issue2.origin} \u4E2D\u6709\u7121\u6548\u7684\u9375\u503C`; + case "invalid_union": + return "\u7121\u6548\u7684\u8F38\u5165\u503C"; + case "invalid_element": + return `${issue2.origin} \u4E2D\u6709\u7121\u6548\u7684\u503C`; + default: + return `\u7121\u6548\u7684\u8F38\u5165\u503C`; + } + }; +}; +function zh_TW_default() { + return { + localeError: error46() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/yo.js +var error47 = () => { + const Sizable = { + string: { unit: "\xE0mi", verb: "n\xED" }, + file: { unit: "bytes", verb: "n\xED" }, + array: { unit: "nkan", verb: "n\xED" }, + set: { unit: "nkan", verb: "n\xED" } + }; + function getSizing(origin) { + return Sizable[origin] ?? null; + } + const FormatDictionary = { + regex: "\u1EB9\u0300r\u1ECD \xECb\xE1w\u1ECDl\xE9", + email: "\xE0d\xEDr\u1EB9\u0301s\xEC \xECm\u1EB9\u0301l\xEC", + url: "URL", + emoji: "emoji", + uuid: "UUID", + uuidv4: "UUIDv4", + uuidv6: "UUIDv6", + nanoid: "nanoid", + guid: "GUID", + cuid: "cuid", + cuid2: "cuid2", + ulid: "ULID", + xid: "XID", + ksuid: "KSUID", + datetime: "\xE0k\xF3k\xF2 ISO", + date: "\u1ECDj\u1ECD\u0301 ISO", + time: "\xE0k\xF3k\xF2 ISO", + duration: "\xE0k\xF3k\xF2 t\xF3 p\xE9 ISO", + ipv4: "\xE0d\xEDr\u1EB9\u0301s\xEC IPv4", + ipv6: "\xE0d\xEDr\u1EB9\u0301s\xEC IPv6", + cidrv4: "\xE0gb\xE8gb\xE8 IPv4", + cidrv6: "\xE0gb\xE8gb\xE8 IPv6", + base64: "\u1ECD\u0300r\u1ECD\u0300 t\xED a k\u1ECD\u0301 n\xED base64", + base64url: "\u1ECD\u0300r\u1ECD\u0300 base64url", + json_string: "\u1ECD\u0300r\u1ECD\u0300 JSON", + e164: "n\u1ECD\u0301mb\xE0 E.164", + jwt: "JWT", + template_literal: "\u1EB9\u0300r\u1ECD \xECb\xE1w\u1ECDl\xE9" + }; + const TypeDictionary = { + nan: "NaN", + number: "n\u1ECD\u0301mb\xE0", + array: "akop\u1ECD" + }; + return (issue2) => { + switch (issue2.code) { + case "invalid_type": { + const expected = TypeDictionary[issue2.expected] 
?? issue2.expected; + const receivedType = parsedType(issue2.input); + const received = TypeDictionary[receivedType] ?? receivedType; + if (/^[A-Z]/.test(issue2.expected)) { + return `\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e: a n\xED l\xE1ti fi instanceof ${issue2.expected}, \xE0m\u1ECD\u0300 a r\xED ${received}`; + } + return `\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e: a n\xED l\xE1ti fi ${expected}, \xE0m\u1ECD\u0300 a r\xED ${received}`; + } + case "invalid_value": + if (issue2.values.length === 1) + return `\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e: a n\xED l\xE1ti fi ${stringifyPrimitive(issue2.values[0])}`; + return `\xC0\u1E63\xE0y\xE0n a\u1E63\xEC\u1E63e: yan \u1ECD\u0300kan l\xE1ra ${joinValues(issue2.values, "|")}`; + case "too_big": { + const adj = issue2.inclusive ? "<=" : "<"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `T\xF3 p\u1ECD\u0300 j\xF9: a n\xED l\xE1ti j\u1EB9\u0301 p\xE9 ${issue2.origin ?? "iye"} ${sizing.verb} ${adj}${issue2.maximum} ${sizing.unit}`; + return `T\xF3 p\u1ECD\u0300 j\xF9: a n\xED l\xE1ti j\u1EB9\u0301 ${adj}${issue2.maximum}`; + } + case "too_small": { + const adj = issue2.inclusive ? 
">=" : ">"; + const sizing = getSizing(issue2.origin); + if (sizing) + return `K\xE9r\xE9 ju: a n\xED l\xE1ti j\u1EB9\u0301 p\xE9 ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum} ${sizing.unit}`; + return `K\xE9r\xE9 ju: a n\xED l\xE1ti j\u1EB9\u0301 ${adj}${issue2.minimum}`; + } + case "invalid_format": { + const _issue = issue2; + if (_issue.format === "starts_with") + return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 b\u1EB9\u0300r\u1EB9\u0300 p\u1EB9\u0300l\xFA "${_issue.prefix}"`; + if (_issue.format === "ends_with") + return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 par\xED p\u1EB9\u0300l\xFA "${_issue.suffix}"`; + if (_issue.format === "includes") + return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 n\xED "${_issue.includes}"`; + if (_issue.format === "regex") + return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 b\xE1 \xE0p\u1EB9\u1EB9r\u1EB9 mu ${_issue.pattern}`; + return `A\u1E63\xEC\u1E63e: ${FormatDictionary[_issue.format] ?? 
issue2.format}`; + } + case "not_multiple_of": + return `N\u1ECD\u0301mb\xE0 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 j\u1EB9\u0301 \xE8y\xE0 p\xEDp\xEDn ti ${issue2.divisor}`; + case "unrecognized_keys": + return `B\u1ECDt\xECn\xEC \xE0\xECm\u1ECD\u0300: ${joinValues(issue2.keys, ", ")}`; + case "invalid_key": + return `B\u1ECDt\xECn\xEC a\u1E63\xEC\u1E63e n\xEDn\xFA ${issue2.origin}`; + case "invalid_union": + return "\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e"; + case "invalid_element": + return `Iye a\u1E63\xEC\u1E63e n\xEDn\xFA ${issue2.origin}`; + default: + return "\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e"; + } + }; +}; +function yo_default() { + return { + localeError: error47() + }; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/registries.js +var _a; +var $output = /* @__PURE__ */ Symbol("ZodOutput"); +var $input = /* @__PURE__ */ Symbol("ZodInput"); +var $ZodRegistry = class { + constructor() { + this._map = /* @__PURE__ */ new WeakMap(); + this._idmap = /* @__PURE__ */ new Map(); + } + add(schema, ..._meta) { + const meta3 = _meta[0]; + this._map.set(schema, meta3); + if (meta3 && typeof meta3 === "object" && "id" in meta3) { + this._idmap.set(meta3.id, schema); + } + return this; + } + clear() { + this._map = /* @__PURE__ */ new WeakMap(); + this._idmap = /* @__PURE__ */ new Map(); + return this; + } + remove(schema) { + const meta3 = this._map.get(schema); + if (meta3 && typeof meta3 === "object" && "id" in meta3) { + this._idmap.delete(meta3.id); + } + this._map.delete(schema); + return this; + } + get(schema) { + const p = schema._zod.parent; + if (p) { + const pm = { ...this.get(p) ?? {} }; + delete pm.id; + const f = { ...pm, ...this._map.get(schema) }; + return Object.keys(f).length ? f : void 0; + } + return this._map.get(schema); + } + has(schema) { + return this._map.has(schema); + } +}; +function registry() { + return new $ZodRegistry(); +} +(_a = globalThis).__zod_globalRegistry ?? 
(_a.__zod_globalRegistry = registry()); +var globalRegistry = globalThis.__zod_globalRegistry; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/api.js +// @__NO_SIDE_EFFECTS__ +function _string(Class2, params) { + return new Class2({ + type: "string", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _coercedString(Class2, params) { + return new Class2({ + type: "string", + coerce: true, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _email(Class2, params) { + return new Class2({ + type: "string", + format: "email", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _guid(Class2, params) { + return new Class2({ + type: "string", + format: "guid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uuid(Class2, params) { + return new Class2({ + type: "string", + format: "uuid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uuidv4(Class2, params) { + return new Class2({ + type: "string", + format: "uuid", + check: "string_format", + abort: false, + version: "v4", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uuidv6(Class2, params) { + return new Class2({ + type: "string", + format: "uuid", + check: "string_format", + abort: false, + version: "v6", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uuidv7(Class2, params) { + return new Class2({ + type: "string", + format: "uuid", + check: "string_format", + abort: false, + version: "v7", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _url(Class2, params) { + return new Class2({ + type: "string", + format: "url", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _emoji2(Class2, params) { + return new Class2({ + type: 
"string", + format: "emoji", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _nanoid(Class2, params) { + return new Class2({ + type: "string", + format: "nanoid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _cuid(Class2, params) { + return new Class2({ + type: "string", + format: "cuid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _cuid2(Class2, params) { + return new Class2({ + type: "string", + format: "cuid2", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _ulid(Class2, params) { + return new Class2({ + type: "string", + format: "ulid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _xid(Class2, params) { + return new Class2({ + type: "string", + format: "xid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _ksuid(Class2, params) { + return new Class2({ + type: "string", + format: "ksuid", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _ipv4(Class2, params) { + return new Class2({ + type: "string", + format: "ipv4", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _ipv6(Class2, params) { + return new Class2({ + type: "string", + format: "ipv6", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _mac(Class2, params) { + return new Class2({ + type: "string", + format: "mac", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _cidrv4(Class2, params) { + return new Class2({ + type: "string", + format: 
"cidrv4", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _cidrv6(Class2, params) { + return new Class2({ + type: "string", + format: "cidrv6", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _base64(Class2, params) { + return new Class2({ + type: "string", + format: "base64", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _base64url(Class2, params) { + return new Class2({ + type: "string", + format: "base64url", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _e164(Class2, params) { + return new Class2({ + type: "string", + format: "e164", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _jwt(Class2, params) { + return new Class2({ + type: "string", + format: "jwt", + check: "string_format", + abort: false, + ...normalizeParams(params) + }); +} +var TimePrecision = { + Any: null, + Minute: -1, + Second: 0, + Millisecond: 3, + Microsecond: 6 +}; +// @__NO_SIDE_EFFECTS__ +function _isoDateTime(Class2, params) { + return new Class2({ + type: "string", + format: "datetime", + check: "string_format", + offset: false, + local: false, + precision: null, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _isoDate(Class2, params) { + return new Class2({ + type: "string", + format: "date", + check: "string_format", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _isoTime(Class2, params) { + return new Class2({ + type: "string", + format: "time", + check: "string_format", + precision: null, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _isoDuration(Class2, params) { + return new Class2({ + type: "string", + format: "duration", + check: "string_format", + 
...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _number(Class2, params) { + return new Class2({ + type: "number", + checks: [], + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _coercedNumber(Class2, params) { + return new Class2({ + type: "number", + coerce: true, + checks: [], + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _int(Class2, params) { + return new Class2({ + type: "number", + check: "number_format", + abort: false, + format: "safeint", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _float32(Class2, params) { + return new Class2({ + type: "number", + check: "number_format", + abort: false, + format: "float32", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _float64(Class2, params) { + return new Class2({ + type: "number", + check: "number_format", + abort: false, + format: "float64", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _int32(Class2, params) { + return new Class2({ + type: "number", + check: "number_format", + abort: false, + format: "int32", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uint32(Class2, params) { + return new Class2({ + type: "number", + check: "number_format", + abort: false, + format: "uint32", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _boolean(Class2, params) { + return new Class2({ + type: "boolean", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _coercedBoolean(Class2, params) { + return new Class2({ + type: "boolean", + coerce: true, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _bigint(Class2, params) { + return new Class2({ + type: "bigint", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _coercedBigint(Class2, params) { + return new Class2({ + type: "bigint", + coerce: true, + ...normalizeParams(params) + }); +} +// 
@__NO_SIDE_EFFECTS__ +function _int64(Class2, params) { + return new Class2({ + type: "bigint", + check: "bigint_format", + abort: false, + format: "int64", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uint64(Class2, params) { + return new Class2({ + type: "bigint", + check: "bigint_format", + abort: false, + format: "uint64", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _symbol(Class2, params) { + return new Class2({ + type: "symbol", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _undefined2(Class2, params) { + return new Class2({ + type: "undefined", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _null2(Class2, params) { + return new Class2({ + type: "null", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _any(Class2) { + return new Class2({ + type: "any" + }); +} +// @__NO_SIDE_EFFECTS__ +function _unknown(Class2) { + return new Class2({ + type: "unknown" + }); +} +// @__NO_SIDE_EFFECTS__ +function _never(Class2, params) { + return new Class2({ + type: "never", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _void(Class2, params) { + return new Class2({ + type: "void", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _date(Class2, params) { + return new Class2({ + type: "date", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _coercedDate(Class2, params) { + return new Class2({ + type: "date", + coerce: true, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _nan(Class2, params) { + return new Class2({ + type: "nan", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _lt(value, params) { + return new $ZodCheckLessThan({ + check: "less_than", + ...normalizeParams(params), + value, + inclusive: false + }); +} +// @__NO_SIDE_EFFECTS__ +function _lte(value, params) { + return new $ZodCheckLessThan({ + check: 
"less_than", + ...normalizeParams(params), + value, + inclusive: true + }); +} +// @__NO_SIDE_EFFECTS__ +function _gt(value, params) { + return new $ZodCheckGreaterThan({ + check: "greater_than", + ...normalizeParams(params), + value, + inclusive: false + }); +} +// @__NO_SIDE_EFFECTS__ +function _gte(value, params) { + return new $ZodCheckGreaterThan({ + check: "greater_than", + ...normalizeParams(params), + value, + inclusive: true + }); +} +// @__NO_SIDE_EFFECTS__ +function _positive(params) { + return /* @__PURE__ */ _gt(0, params); +} +// @__NO_SIDE_EFFECTS__ +function _negative(params) { + return /* @__PURE__ */ _lt(0, params); +} +// @__NO_SIDE_EFFECTS__ +function _nonpositive(params) { + return /* @__PURE__ */ _lte(0, params); +} +// @__NO_SIDE_EFFECTS__ +function _nonnegative(params) { + return /* @__PURE__ */ _gte(0, params); +} +// @__NO_SIDE_EFFECTS__ +function _multipleOf(value, params) { + return new $ZodCheckMultipleOf({ + check: "multiple_of", + ...normalizeParams(params), + value + }); +} +// @__NO_SIDE_EFFECTS__ +function _maxSize(maximum, params) { + return new $ZodCheckMaxSize({ + check: "max_size", + ...normalizeParams(params), + maximum + }); +} +// @__NO_SIDE_EFFECTS__ +function _minSize(minimum, params) { + return new $ZodCheckMinSize({ + check: "min_size", + ...normalizeParams(params), + minimum + }); +} +// @__NO_SIDE_EFFECTS__ +function _size(size, params) { + return new $ZodCheckSizeEquals({ + check: "size_equals", + ...normalizeParams(params), + size + }); +} +// @__NO_SIDE_EFFECTS__ +function _maxLength(maximum, params) { + const ch = new $ZodCheckMaxLength({ + check: "max_length", + ...normalizeParams(params), + maximum + }); + return ch; +} +// @__NO_SIDE_EFFECTS__ +function _minLength(minimum, params) { + return new $ZodCheckMinLength({ + check: "min_length", + ...normalizeParams(params), + minimum + }); +} +// @__NO_SIDE_EFFECTS__ +function _length(length, params) { + return new $ZodCheckLengthEquals({ + check: "length_equals", + 
...normalizeParams(params), + length + }); +} +// @__NO_SIDE_EFFECTS__ +function _regex(pattern, params) { + return new $ZodCheckRegex({ + check: "string_format", + format: "regex", + ...normalizeParams(params), + pattern + }); +} +// @__NO_SIDE_EFFECTS__ +function _lowercase(params) { + return new $ZodCheckLowerCase({ + check: "string_format", + format: "lowercase", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _uppercase(params) { + return new $ZodCheckUpperCase({ + check: "string_format", + format: "uppercase", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _includes(includes, params) { + return new $ZodCheckIncludes({ + check: "string_format", + format: "includes", + ...normalizeParams(params), + includes + }); +} +// @__NO_SIDE_EFFECTS__ +function _startsWith(prefix, params) { + return new $ZodCheckStartsWith({ + check: "string_format", + format: "starts_with", + ...normalizeParams(params), + prefix + }); +} +// @__NO_SIDE_EFFECTS__ +function _endsWith(suffix, params) { + return new $ZodCheckEndsWith({ + check: "string_format", + format: "ends_with", + ...normalizeParams(params), + suffix + }); +} +// @__NO_SIDE_EFFECTS__ +function _property(property, schema, params) { + return new $ZodCheckProperty({ + check: "property", + property, + schema, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _mime(types, params) { + return new $ZodCheckMimeType({ + check: "mime_type", + mime: types, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _overwrite(tx) { + return new $ZodCheckOverwrite({ + check: "overwrite", + tx + }); +} +// @__NO_SIDE_EFFECTS__ +function _normalize(form) { + return /* @__PURE__ */ _overwrite((input) => input.normalize(form)); +} +// @__NO_SIDE_EFFECTS__ +function _trim() { + return /* @__PURE__ */ _overwrite((input) => input.trim()); +} +// @__NO_SIDE_EFFECTS__ +function _toLowerCase() { + return /* @__PURE__ */ _overwrite((input) => 
input.toLowerCase()); +} +// @__NO_SIDE_EFFECTS__ +function _toUpperCase() { + return /* @__PURE__ */ _overwrite((input) => input.toUpperCase()); +} +// @__NO_SIDE_EFFECTS__ +function _slugify() { + return /* @__PURE__ */ _overwrite((input) => slugify(input)); +} +// @__NO_SIDE_EFFECTS__ +function _array(Class2, element, params) { + return new Class2({ + type: "array", + element, + // get element() { + // return element; + // }, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _union(Class2, options, params) { + return new Class2({ + type: "union", + options, + ...normalizeParams(params) + }); +} +function _xor(Class2, options, params) { + return new Class2({ + type: "union", + options, + inclusive: false, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _discriminatedUnion(Class2, discriminator, options, params) { + return new Class2({ + type: "union", + options, + discriminator, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _intersection(Class2, left, right) { + return new Class2({ + type: "intersection", + left, + right + }); +} +// @__NO_SIDE_EFFECTS__ +function _tuple(Class2, items, _paramsOrRest, _params) { + const hasRest = _paramsOrRest instanceof $ZodType; + const params = hasRest ? _params : _paramsOrRest; + const rest = hasRest ? 
_paramsOrRest : null; + return new Class2({ + type: "tuple", + items, + rest, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _record(Class2, keyType, valueType, params) { + return new Class2({ + type: "record", + keyType, + valueType, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _map(Class2, keyType, valueType, params) { + return new Class2({ + type: "map", + keyType, + valueType, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _set(Class2, valueType, params) { + return new Class2({ + type: "set", + valueType, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _enum(Class2, values, params) { + const entries = Array.isArray(values) ? Object.fromEntries(values.map((v) => [v, v])) : values; + return new Class2({ + type: "enum", + entries, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _nativeEnum(Class2, entries, params) { + return new Class2({ + type: "enum", + entries, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _literal(Class2, value, params) { + return new Class2({ + type: "literal", + values: Array.isArray(value) ? value : [value], + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _file(Class2, params) { + return new Class2({ + type: "file", + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _transform(Class2, fn) { + return new Class2({ + type: "transform", + transform: fn + }); +} +// @__NO_SIDE_EFFECTS__ +function _optional(Class2, innerType) { + return new Class2({ + type: "optional", + innerType + }); +} +// @__NO_SIDE_EFFECTS__ +function _nullable(Class2, innerType) { + return new Class2({ + type: "nullable", + innerType + }); +} +// @__NO_SIDE_EFFECTS__ +function _default(Class2, innerType, defaultValue) { + return new Class2({ + type: "default", + innerType, + get defaultValue() { + return typeof defaultValue === "function" ? 
defaultValue() : shallowClone(defaultValue); + } + }); +} +// @__NO_SIDE_EFFECTS__ +function _nonoptional(Class2, innerType, params) { + return new Class2({ + type: "nonoptional", + innerType, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _success(Class2, innerType) { + return new Class2({ + type: "success", + innerType + }); +} +// @__NO_SIDE_EFFECTS__ +function _catch(Class2, innerType, catchValue) { + return new Class2({ + type: "catch", + innerType, + catchValue: typeof catchValue === "function" ? catchValue : () => catchValue + }); +} +// @__NO_SIDE_EFFECTS__ +function _pipe(Class2, in_, out) { + return new Class2({ + type: "pipe", + in: in_, + out + }); +} +// @__NO_SIDE_EFFECTS__ +function _readonly(Class2, innerType) { + return new Class2({ + type: "readonly", + innerType + }); +} +// @__NO_SIDE_EFFECTS__ +function _templateLiteral(Class2, parts, params) { + return new Class2({ + type: "template_literal", + parts, + ...normalizeParams(params) + }); +} +// @__NO_SIDE_EFFECTS__ +function _lazy(Class2, getter) { + return new Class2({ + type: "lazy", + getter + }); +} +// @__NO_SIDE_EFFECTS__ +function _promise(Class2, innerType) { + return new Class2({ + type: "promise", + innerType + }); +} +// @__NO_SIDE_EFFECTS__ +function _custom(Class2, fn, _params) { + const norm = normalizeParams(_params); + norm.abort ?? 
(norm.abort = true); + const schema = new Class2({ + type: "custom", + check: "custom", + fn, + ...norm + }); + return schema; +} +// @__NO_SIDE_EFFECTS__ +function _refine(Class2, fn, _params) { + const schema = new Class2({ + type: "custom", + check: "custom", + fn, + ...normalizeParams(_params) + }); + return schema; +} +// @__NO_SIDE_EFFECTS__ +function _superRefine(fn) { + const ch = /* @__PURE__ */ _check((payload) => { + payload.addIssue = (issue2) => { + if (typeof issue2 === "string") { + payload.issues.push(issue(issue2, payload.value, ch._zod.def)); + } else { + const _issue = issue2; + if (_issue.fatal) + _issue.continue = false; + _issue.code ?? (_issue.code = "custom"); + _issue.input ?? (_issue.input = payload.value); + _issue.inst ?? (_issue.inst = ch); + _issue.continue ?? (_issue.continue = !ch._zod.def.abort); + payload.issues.push(issue(_issue)); + } + }; + return fn(payload.value, payload); + }); + return ch; +} +// @__NO_SIDE_EFFECTS__ +function _check(fn, params) { + const ch = new $ZodCheck({ + check: "custom", + ...normalizeParams(params) + }); + ch._zod.check = fn; + return ch; +} +// @__NO_SIDE_EFFECTS__ +function describe(description) { + const ch = new $ZodCheck({ check: "describe" }); + ch._zod.onattach = [ + (inst) => { + const existing = globalRegistry.get(inst) ?? {}; + globalRegistry.add(inst, { ...existing, description }); + } + ]; + ch._zod.check = () => { + }; + return ch; +} +// @__NO_SIDE_EFFECTS__ +function meta(metadata) { + const ch = new $ZodCheck({ check: "meta" }); + ch._zod.onattach = [ + (inst) => { + const existing = globalRegistry.get(inst) ?? {}; + globalRegistry.add(inst, { ...existing, ...metadata }); + } + ]; + ch._zod.check = () => { + }; + return ch; +} +// @__NO_SIDE_EFFECTS__ +function _stringbool(Classes, _params) { + const params = normalizeParams(_params); + let truthyArray = params.truthy ?? ["true", "1", "yes", "on", "y", "enabled"]; + let falsyArray = params.falsy ?? 
["false", "0", "no", "off", "n", "disabled"]; + if (params.case !== "sensitive") { + truthyArray = truthyArray.map((v) => typeof v === "string" ? v.toLowerCase() : v); + falsyArray = falsyArray.map((v) => typeof v === "string" ? v.toLowerCase() : v); + } + const truthySet = new Set(truthyArray); + const falsySet = new Set(falsyArray); + const _Codec = Classes.Codec ?? $ZodCodec; + const _Boolean = Classes.Boolean ?? $ZodBoolean; + const _String = Classes.String ?? $ZodString; + const stringSchema = new _String({ type: "string", error: params.error }); + const booleanSchema = new _Boolean({ type: "boolean", error: params.error }); + const codec2 = new _Codec({ + type: "pipe", + in: stringSchema, + out: booleanSchema, + transform: ((input, payload) => { + let data = input; + if (params.case !== "sensitive") + data = data.toLowerCase(); + if (truthySet.has(data)) { + return true; + } else if (falsySet.has(data)) { + return false; + } else { + payload.issues.push({ + code: "invalid_value", + expected: "stringbool", + values: [...truthySet, ...falsySet], + input: payload.value, + inst: codec2, + continue: false + }); + return {}; + } + }), + reverseTransform: ((input, _payload) => { + if (input === true) { + return truthyArray[0] || "true"; + } else { + return falsyArray[0] || "false"; + } + }), + error: params.error + }); + return codec2; +} +// @__NO_SIDE_EFFECTS__ +function _stringFormat(Class2, format, fnOrRegex, _params = {}) { + const params = normalizeParams(_params); + const def = { + ...normalizeParams(_params), + check: "string_format", + type: "string", + format, + fn: typeof fnOrRegex === "function" ? fnOrRegex : (val) => fnOrRegex.test(val), + ...params + }; + if (fnOrRegex instanceof RegExp) { + def.pattern = fnOrRegex; + } + const inst = new Class2(def); + return inst; +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/to-json-schema.js +function initializeContext(params) { + let target = params?.target ?? 
"draft-2020-12"; + if (target === "draft-4") + target = "draft-04"; + if (target === "draft-7") + target = "draft-07"; + return { + processors: params.processors ?? {}, + metadataRegistry: params?.metadata ?? globalRegistry, + target, + unrepresentable: params?.unrepresentable ?? "throw", + override: params?.override ?? (() => { + }), + io: params?.io ?? "output", + counter: 0, + seen: /* @__PURE__ */ new Map(), + cycles: params?.cycles ?? "ref", + reused: params?.reused ?? "inline", + external: params?.external ?? void 0 + }; +} +function process2(schema, ctx, _params = { path: [], schemaPath: [] }) { + var _a3; + const def = schema._zod.def; + const seen = ctx.seen.get(schema); + if (seen) { + seen.count++; + const isCycle = _params.schemaPath.includes(schema); + if (isCycle) { + seen.cycle = _params.path; + } + return seen.schema; + } + const result = { schema: {}, count: 1, cycle: void 0, path: _params.path }; + ctx.seen.set(schema, result); + const overrideSchema = schema._zod.toJSONSchema?.(); + if (overrideSchema) { + result.schema = overrideSchema; + } else { + const params = { + ..._params, + schemaPath: [..._params.schemaPath, schema], + path: _params.path + }; + if (schema._zod.processJSONSchema) { + schema._zod.processJSONSchema(ctx, result.schema, params); + } else { + const _json = result.schema; + const processor = ctx.processors[def.type]; + if (!processor) { + throw new Error(`[toJSONSchema]: Non-representable type encountered: ${def.type}`); + } + processor(schema, ctx, _json, params); + } + const parent = schema._zod.parent; + if (parent) { + if (!result.ref) + result.ref = parent; + process2(parent, ctx, params); + ctx.seen.get(parent).isParent = true; + } + } + const meta3 = ctx.metadataRegistry.get(schema); + if (meta3) + Object.assign(result.schema, meta3); + if (ctx.io === "input" && isTransforming(schema)) { + delete result.schema.examples; + delete result.schema.default; + } + if (ctx.io === "input" && result.schema._prefault) + (_a3 = 
result.schema).default ?? (_a3.default = result.schema._prefault); + delete result.schema._prefault; + const _result = ctx.seen.get(schema); + return _result.schema; +} +function extractDefs(ctx, schema) { + const root = ctx.seen.get(schema); + if (!root) + throw new Error("Unprocessed schema. This is a bug in Zod."); + const idToSchema = /* @__PURE__ */ new Map(); + for (const entry of ctx.seen.entries()) { + const id = ctx.metadataRegistry.get(entry[0])?.id; + if (id) { + const existing = idToSchema.get(id); + if (existing && existing !== entry[0]) { + throw new Error(`Duplicate schema id "${id}" detected during JSON Schema conversion. Two different schemas cannot share the same id when converted together.`); + } + idToSchema.set(id, entry[0]); + } + } + const makeURI = (entry) => { + const defsSegment = ctx.target === "draft-2020-12" ? "$defs" : "definitions"; + if (ctx.external) { + const externalId = ctx.external.registry.get(entry[0])?.id; + const uriGenerator = ctx.external.uri ?? ((id2) => id2); + if (externalId) { + return { ref: uriGenerator(externalId) }; + } + const id = entry[1].defId ?? entry[1].schema.id ?? `schema${ctx.counter++}`; + entry[1].defId = id; + return { defId: id, ref: `${uriGenerator("__shared")}#/${defsSegment}/${id}` }; + } + if (entry[1] === root) { + return { ref: "#" }; + } + const uriPrefix = `#`; + const defUriPrefix = `${uriPrefix}/${defsSegment}/`; + const defId = entry[1].schema.id ?? 
`__schema${ctx.counter++}`; + return { defId, ref: defUriPrefix + defId }; + }; + const extractToDef = (entry) => { + if (entry[1].schema.$ref) { + return; + } + const seen = entry[1]; + const { ref, defId } = makeURI(entry); + seen.def = { ...seen.schema }; + if (defId) + seen.defId = defId; + const schema2 = seen.schema; + for (const key in schema2) { + delete schema2[key]; + } + schema2.$ref = ref; + }; + if (ctx.cycles === "throw") { + for (const entry of ctx.seen.entries()) { + const seen = entry[1]; + if (seen.cycle) { + throw new Error(`Cycle detected: #/${seen.cycle?.join("/")}/ + +Set the \`cycles\` parameter to \`"ref"\` to resolve cyclical schemas with defs.`); + } + } + } + for (const entry of ctx.seen.entries()) { + const seen = entry[1]; + if (schema === entry[0]) { + extractToDef(entry); + continue; + } + if (ctx.external) { + const ext = ctx.external.registry.get(entry[0])?.id; + if (schema !== entry[0] && ext) { + extractToDef(entry); + continue; + } + } + const id = ctx.metadataRegistry.get(entry[0])?.id; + if (id) { + extractToDef(entry); + continue; + } + if (seen.cycle) { + extractToDef(entry); + continue; + } + if (seen.count > 1) { + if (ctx.reused === "ref") { + extractToDef(entry); + continue; + } + } + } +} +function finalize(ctx, schema) { + const root = ctx.seen.get(schema); + if (!root) + throw new Error("Unprocessed schema. This is a bug in Zod."); + const flattenRef = (zodSchema) => { + const seen = ctx.seen.get(zodSchema); + if (seen.ref === null) + return; + const schema2 = seen.def ?? seen.schema; + const _cached = { ...schema2 }; + const ref = seen.ref; + seen.ref = null; + if (ref) { + flattenRef(ref); + const refSeen = ctx.seen.get(ref); + const refSchema = refSeen.schema; + if (refSchema.$ref && (ctx.target === "draft-07" || ctx.target === "draft-04" || ctx.target === "openapi-3.0")) { + schema2.allOf = schema2.allOf ?? 
[]; + schema2.allOf.push(refSchema); + } else { + Object.assign(schema2, refSchema); + } + Object.assign(schema2, _cached); + const isParentRef = zodSchema._zod.parent === ref; + if (isParentRef) { + for (const key in schema2) { + if (key === "$ref" || key === "allOf") + continue; + if (!(key in _cached)) { + delete schema2[key]; + } + } + } + if (refSchema.$ref && refSeen.def) { + for (const key in schema2) { + if (key === "$ref" || key === "allOf") + continue; + if (key in refSeen.def && JSON.stringify(schema2[key]) === JSON.stringify(refSeen.def[key])) { + delete schema2[key]; + } + } + } + } + const parent = zodSchema._zod.parent; + if (parent && parent !== ref) { + flattenRef(parent); + const parentSeen = ctx.seen.get(parent); + if (parentSeen?.schema.$ref) { + schema2.$ref = parentSeen.schema.$ref; + if (parentSeen.def) { + for (const key in schema2) { + if (key === "$ref" || key === "allOf") + continue; + if (key in parentSeen.def && JSON.stringify(schema2[key]) === JSON.stringify(parentSeen.def[key])) { + delete schema2[key]; + } + } + } + } + } + ctx.override({ + zodSchema, + jsonSchema: schema2, + path: seen.path ?? [] + }); + }; + for (const entry of [...ctx.seen.entries()].reverse()) { + flattenRef(entry[0]); + } + const result = {}; + if (ctx.target === "draft-2020-12") { + result.$schema = "https://json-schema.org/draft/2020-12/schema"; + } else if (ctx.target === "draft-07") { + result.$schema = "http://json-schema.org/draft-07/schema#"; + } else if (ctx.target === "draft-04") { + result.$schema = "http://json-schema.org/draft-04/schema#"; + } else if (ctx.target === "openapi-3.0") { + } else { + } + if (ctx.external?.uri) { + const id = ctx.external.registry.get(schema)?.id; + if (!id) + throw new Error("Schema is missing an `id` property"); + result.$id = ctx.external.uri(id); + } + Object.assign(result, root.def ?? root.schema); + const defs = ctx.external?.defs ?? 
{}; + for (const entry of ctx.seen.entries()) { + const seen = entry[1]; + if (seen.def && seen.defId) { + defs[seen.defId] = seen.def; + } + } + if (ctx.external) { + } else { + if (Object.keys(defs).length > 0) { + if (ctx.target === "draft-2020-12") { + result.$defs = defs; + } else { + result.definitions = defs; + } + } + } + try { + const finalized = JSON.parse(JSON.stringify(result)); + Object.defineProperty(finalized, "~standard", { + value: { + ...schema["~standard"], + jsonSchema: { + input: createStandardJSONSchemaMethod(schema, "input", ctx.processors), + output: createStandardJSONSchemaMethod(schema, "output", ctx.processors) + } + }, + enumerable: false, + writable: false + }); + return finalized; + } catch (_err) { + throw new Error("Error converting schema to JSON."); + } +} +function isTransforming(_schema, _ctx) { + const ctx = _ctx ?? { seen: /* @__PURE__ */ new Set() }; + if (ctx.seen.has(_schema)) + return false; + ctx.seen.add(_schema); + const def = _schema._zod.def; + if (def.type === "transform") + return true; + if (def.type === "array") + return isTransforming(def.element, ctx); + if (def.type === "set") + return isTransforming(def.valueType, ctx); + if (def.type === "lazy") + return isTransforming(def.getter(), ctx); + if (def.type === "promise" || def.type === "optional" || def.type === "nonoptional" || def.type === "nullable" || def.type === "readonly" || def.type === "default" || def.type === "prefault") { + return isTransforming(def.innerType, ctx); + } + if (def.type === "intersection") { + return isTransforming(def.left, ctx) || isTransforming(def.right, ctx); + } + if (def.type === "record" || def.type === "map") { + return isTransforming(def.keyType, ctx) || isTransforming(def.valueType, ctx); + } + if (def.type === "pipe") { + return isTransforming(def.in, ctx) || isTransforming(def.out, ctx); + } + if (def.type === "object") { + for (const key in def.shape) { + if (isTransforming(def.shape[key], ctx)) + return true; + } + return 
false; + } + if (def.type === "union") { + for (const option of def.options) { + if (isTransforming(option, ctx)) + return true; + } + return false; + } + if (def.type === "tuple") { + for (const item of def.items) { + if (isTransforming(item, ctx)) + return true; + } + if (def.rest && isTransforming(def.rest, ctx)) + return true; + return false; + } + return false; +} +var createToJSONSchemaMethod = (schema, processors = {}) => (params) => { + const ctx = initializeContext({ ...params, processors }); + process2(schema, ctx); + extractDefs(ctx, schema); + return finalize(ctx, schema); +}; +var createStandardJSONSchemaMethod = (schema, io, processors = {}) => (params) => { + const { libraryOptions, target } = params ?? {}; + const ctx = initializeContext({ ...libraryOptions ?? {}, target, io, processors }); + process2(schema, ctx); + extractDefs(ctx, schema); + return finalize(ctx, schema); +}; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/json-schema-processors.js +var formatMap = { + guid: "uuid", + url: "uri", + datetime: "date-time", + json_string: "json-string", + regex: "" + // do not set +}; +var stringProcessor = (schema, ctx, _json, _params) => { + const json2 = _json; + json2.type = "string"; + const { minimum, maximum, format, patterns, contentEncoding } = schema._zod.bag; + if (typeof minimum === "number") + json2.minLength = minimum; + if (typeof maximum === "number") + json2.maxLength = maximum; + if (format) { + json2.format = formatMap[format] ?? 
format; + if (json2.format === "") + delete json2.format; + if (format === "time") { + delete json2.format; + } + } + if (contentEncoding) + json2.contentEncoding = contentEncoding; + if (patterns && patterns.size > 0) { + const regexes = [...patterns]; + if (regexes.length === 1) + json2.pattern = regexes[0].source; + else if (regexes.length > 1) { + json2.allOf = [ + ...regexes.map((regex) => ({ + ...ctx.target === "draft-07" || ctx.target === "draft-04" || ctx.target === "openapi-3.0" ? { type: "string" } : {}, + pattern: regex.source + })) + ]; + } + } +}; +var numberProcessor = (schema, ctx, _json, _params) => { + const json2 = _json; + const { minimum, maximum, format, multipleOf, exclusiveMaximum, exclusiveMinimum } = schema._zod.bag; + if (typeof format === "string" && format.includes("int")) + json2.type = "integer"; + else + json2.type = "number"; + if (typeof exclusiveMinimum === "number") { + if (ctx.target === "draft-04" || ctx.target === "openapi-3.0") { + json2.minimum = exclusiveMinimum; + json2.exclusiveMinimum = true; + } else { + json2.exclusiveMinimum = exclusiveMinimum; + } + } + if (typeof minimum === "number") { + json2.minimum = minimum; + if (typeof exclusiveMinimum === "number" && ctx.target !== "draft-04") { + if (exclusiveMinimum >= minimum) + delete json2.minimum; + else + delete json2.exclusiveMinimum; + } + } + if (typeof exclusiveMaximum === "number") { + if (ctx.target === "draft-04" || ctx.target === "openapi-3.0") { + json2.maximum = exclusiveMaximum; + json2.exclusiveMaximum = true; + } else { + json2.exclusiveMaximum = exclusiveMaximum; + } + } + if (typeof maximum === "number") { + json2.maximum = maximum; + if (typeof exclusiveMaximum === "number" && ctx.target !== "draft-04") { + if (exclusiveMaximum <= maximum) + delete json2.maximum; + else + delete json2.exclusiveMaximum; + } + } + if (typeof multipleOf === "number") + json2.multipleOf = multipleOf; +}; +var booleanProcessor = (_schema, _ctx, json2, _params) => { + 
json2.type = "boolean"; +}; +var bigintProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("BigInt cannot be represented in JSON Schema"); + } +}; +var symbolProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Symbols cannot be represented in JSON Schema"); + } +}; +var nullProcessor = (_schema, ctx, json2, _params) => { + if (ctx.target === "openapi-3.0") { + json2.type = "string"; + json2.nullable = true; + json2.enum = [null]; + } else { + json2.type = "null"; + } +}; +var undefinedProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Undefined cannot be represented in JSON Schema"); + } +}; +var voidProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Void cannot be represented in JSON Schema"); + } +}; +var neverProcessor = (_schema, _ctx, json2, _params) => { + json2.not = {}; +}; +var anyProcessor = (_schema, _ctx, _json, _params) => { +}; +var unknownProcessor = (_schema, _ctx, _json, _params) => { +}; +var dateProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Date cannot be represented in JSON Schema"); + } +}; +var enumProcessor = (schema, _ctx, json2, _params) => { + const def = schema._zod.def; + const values = getEnumValues(def.entries); + if (values.every((v) => typeof v === "number")) + json2.type = "number"; + if (values.every((v) => typeof v === "string")) + json2.type = "string"; + json2.enum = values; +}; +var literalProcessor = (schema, ctx, json2, _params) => { + const def = schema._zod.def; + const vals = []; + for (const val of def.values) { + if (val === void 0) { + if (ctx.unrepresentable === "throw") { + throw new Error("Literal `undefined` cannot be represented in JSON Schema"); + } else { + } + } else if (typeof val === "bigint") { + if (ctx.unrepresentable === 
"throw") { + throw new Error("BigInt literals cannot be represented in JSON Schema"); + } else { + vals.push(Number(val)); + } + } else { + vals.push(val); + } + } + if (vals.length === 0) { + } else if (vals.length === 1) { + const val = vals[0]; + json2.type = val === null ? "null" : typeof val; + if (ctx.target === "draft-04" || ctx.target === "openapi-3.0") { + json2.enum = [val]; + } else { + json2.const = val; + } + } else { + if (vals.every((v) => typeof v === "number")) + json2.type = "number"; + if (vals.every((v) => typeof v === "string")) + json2.type = "string"; + if (vals.every((v) => typeof v === "boolean")) + json2.type = "boolean"; + if (vals.every((v) => v === null)) + json2.type = "null"; + json2.enum = vals; + } +}; +var nanProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("NaN cannot be represented in JSON Schema"); + } +}; +var templateLiteralProcessor = (schema, _ctx, json2, _params) => { + const _json = json2; + const pattern = schema._zod.pattern; + if (!pattern) + throw new Error("Pattern not found in template literal"); + _json.type = "string"; + _json.pattern = pattern.source; +}; +var fileProcessor = (schema, _ctx, json2, _params) => { + const _json = json2; + const file2 = { + type: "string", + format: "binary", + contentEncoding: "binary" + }; + const { minimum, maximum, mime } = schema._zod.bag; + if (minimum !== void 0) + file2.minLength = minimum; + if (maximum !== void 0) + file2.maxLength = maximum; + if (mime) { + if (mime.length === 1) { + file2.contentMediaType = mime[0]; + Object.assign(_json, file2); + } else { + Object.assign(_json, file2); + _json.anyOf = mime.map((m) => ({ contentMediaType: m })); + } + } else { + Object.assign(_json, file2); + } +}; +var successProcessor = (_schema, _ctx, json2, _params) => { + json2.type = "boolean"; +}; +var customProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Custom 
types cannot be represented in JSON Schema"); + } +}; +var functionProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Function types cannot be represented in JSON Schema"); + } +}; +var transformProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Transforms cannot be represented in JSON Schema"); + } +}; +var mapProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Map cannot be represented in JSON Schema"); + } +}; +var setProcessor = (_schema, ctx, _json, _params) => { + if (ctx.unrepresentable === "throw") { + throw new Error("Set cannot be represented in JSON Schema"); + } +}; +var arrayProcessor = (schema, ctx, _json, params) => { + const json2 = _json; + const def = schema._zod.def; + const { minimum, maximum } = schema._zod.bag; + if (typeof minimum === "number") + json2.minItems = minimum; + if (typeof maximum === "number") + json2.maxItems = maximum; + json2.type = "array"; + json2.items = process2(def.element, ctx, { ...params, path: [...params.path, "items"] }); +}; +var objectProcessor = (schema, ctx, _json, params) => { + const json2 = _json; + const def = schema._zod.def; + json2.type = "object"; + json2.properties = {}; + const shape = def.shape; + for (const key in shape) { + json2.properties[key] = process2(shape[key], ctx, { + ...params, + path: [...params.path, "properties", key] + }); + } + const allKeys = new Set(Object.keys(shape)); + const requiredKeys = new Set([...allKeys].filter((key) => { + const v = def.shape[key]._zod; + if (ctx.io === "input") { + return v.optin === void 0; + } else { + return v.optout === void 0; + } + })); + if (requiredKeys.size > 0) { + json2.required = Array.from(requiredKeys); + } + if (def.catchall?._zod.def.type === "never") { + json2.additionalProperties = false; + } else if (!def.catchall) { + if (ctx.io === "output") + 
json2.additionalProperties = false; + } else if (def.catchall) { + json2.additionalProperties = process2(def.catchall, ctx, { + ...params, + path: [...params.path, "additionalProperties"] + }); + } +}; +var unionProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + const isExclusive = def.inclusive === false; + const options = def.options.map((x, i) => process2(x, ctx, { + ...params, + path: [...params.path, isExclusive ? "oneOf" : "anyOf", i] + })); + if (isExclusive) { + json2.oneOf = options; + } else { + json2.anyOf = options; + } +}; +var intersectionProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + const a = process2(def.left, ctx, { + ...params, + path: [...params.path, "allOf", 0] + }); + const b = process2(def.right, ctx, { + ...params, + path: [...params.path, "allOf", 1] + }); + const isSimpleIntersection = (val) => "allOf" in val && Object.keys(val).length === 1; + const allOf = [ + ...isSimpleIntersection(a) ? a.allOf : [a], + ...isSimpleIntersection(b) ? b.allOf : [b] + ]; + json2.allOf = allOf; +}; +var tupleProcessor = (schema, ctx, _json, params) => { + const json2 = _json; + const def = schema._zod.def; + json2.type = "array"; + const prefixPath = ctx.target === "draft-2020-12" ? "prefixItems" : "items"; + const restPath = ctx.target === "draft-2020-12" ? "items" : ctx.target === "openapi-3.0" ? "items" : "additionalItems"; + const prefixItems = def.items.map((x, i) => process2(x, ctx, { + ...params, + path: [...params.path, prefixPath, i] + })); + const rest = def.rest ? process2(def.rest, ctx, { + ...params, + path: [...params.path, restPath, ...ctx.target === "openapi-3.0" ? 
[def.items.length] : []] + }) : null; + if (ctx.target === "draft-2020-12") { + json2.prefixItems = prefixItems; + if (rest) { + json2.items = rest; + } + } else if (ctx.target === "openapi-3.0") { + json2.items = { + anyOf: prefixItems + }; + if (rest) { + json2.items.anyOf.push(rest); + } + json2.minItems = prefixItems.length; + if (!rest) { + json2.maxItems = prefixItems.length; + } + } else { + json2.items = prefixItems; + if (rest) { + json2.additionalItems = rest; + } + } + const { minimum, maximum } = schema._zod.bag; + if (typeof minimum === "number") + json2.minItems = minimum; + if (typeof maximum === "number") + json2.maxItems = maximum; +}; +var recordProcessor = (schema, ctx, _json, params) => { + const json2 = _json; + const def = schema._zod.def; + json2.type = "object"; + const keyType = def.keyType; + const keyBag = keyType._zod.bag; + const patterns = keyBag?.patterns; + if (def.mode === "loose" && patterns && patterns.size > 0) { + const valueSchema = process2(def.valueType, ctx, { + ...params, + path: [...params.path, "patternProperties", "*"] + }); + json2.patternProperties = {}; + for (const pattern of patterns) { + json2.patternProperties[pattern.source] = valueSchema; + } + } else { + if (ctx.target === "draft-07" || ctx.target === "draft-2020-12") { + json2.propertyNames = process2(def.keyType, ctx, { + ...params, + path: [...params.path, "propertyNames"] + }); + } + json2.additionalProperties = process2(def.valueType, ctx, { + ...params, + path: [...params.path, "additionalProperties"] + }); + } + const keyValues = keyType._zod.values; + if (keyValues) { + const validKeyValues = [...keyValues].filter((v) => typeof v === "string" || typeof v === "number"); + if (validKeyValues.length > 0) { + json2.required = validKeyValues; + } + } +}; +var nullableProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + const inner = process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + if (ctx.target === 
"openapi-3.0") { + seen.ref = def.innerType; + json2.nullable = true; + } else { + json2.anyOf = [inner, { type: "null" }]; + } +}; +var nonoptionalProcessor = (schema, ctx, _json, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; +}; +var defaultProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; + json2.default = JSON.parse(JSON.stringify(def.defaultValue)); +}; +var prefaultProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; + if (ctx.io === "input") + json2._prefault = JSON.parse(JSON.stringify(def.defaultValue)); +}; +var catchProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; + let catchValue; + try { + catchValue = def.catchValue(void 0); + } catch { + throw new Error("Dynamic catch values are not supported in JSON Schema"); + } + json2.default = catchValue; +}; +var pipeProcessor = (schema, ctx, _json, params) => { + const def = schema._zod.def; + const innerType = ctx.io === "input" ? def.in._zod.def.type === "transform" ? 
def.out : def.in : def.out; + process2(innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = innerType; +}; +var readonlyProcessor = (schema, ctx, json2, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; + json2.readOnly = true; +}; +var promiseProcessor = (schema, ctx, _json, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; +}; +var optionalProcessor = (schema, ctx, _json, params) => { + const def = schema._zod.def; + process2(def.innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = def.innerType; +}; +var lazyProcessor = (schema, ctx, _json, params) => { + const innerType = schema._zod.innerType; + process2(innerType, ctx, params); + const seen = ctx.seen.get(schema); + seen.ref = innerType; +}; +var allProcessors = { + string: stringProcessor, + number: numberProcessor, + boolean: booleanProcessor, + bigint: bigintProcessor, + symbol: symbolProcessor, + null: nullProcessor, + undefined: undefinedProcessor, + void: voidProcessor, + never: neverProcessor, + any: anyProcessor, + unknown: unknownProcessor, + date: dateProcessor, + enum: enumProcessor, + literal: literalProcessor, + nan: nanProcessor, + template_literal: templateLiteralProcessor, + file: fileProcessor, + success: successProcessor, + custom: customProcessor, + function: functionProcessor, + transform: transformProcessor, + map: mapProcessor, + set: setProcessor, + array: arrayProcessor, + object: objectProcessor, + union: unionProcessor, + intersection: intersectionProcessor, + tuple: tupleProcessor, + record: recordProcessor, + nullable: nullableProcessor, + nonoptional: nonoptionalProcessor, + default: defaultProcessor, + prefault: prefaultProcessor, + catch: catchProcessor, + pipe: pipeProcessor, + readonly: readonlyProcessor, + promise: promiseProcessor, + 
optional: optionalProcessor, + lazy: lazyProcessor +}; +function toJSONSchema(input, params) { + if ("_idmap" in input) { + const registry2 = input; + const ctx2 = initializeContext({ ...params, processors: allProcessors }); + const defs = {}; + for (const entry of registry2._idmap.entries()) { + const [_, schema] = entry; + process2(schema, ctx2); + } + const schemas = {}; + const external = { + registry: registry2, + uri: params?.uri, + defs + }; + ctx2.external = external; + for (const entry of registry2._idmap.entries()) { + const [key, schema] = entry; + extractDefs(ctx2, schema); + schemas[key] = finalize(ctx2, schema); + } + if (Object.keys(defs).length > 0) { + const defsSegment = ctx2.target === "draft-2020-12" ? "$defs" : "definitions"; + schemas.__shared = { + [defsSegment]: defs + }; + } + return { schemas }; + } + const ctx = initializeContext({ ...params, processors: allProcessors }); + process2(input, ctx); + extractDefs(ctx, input); + return finalize(ctx, input); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/json-schema-generator.js +var JSONSchemaGenerator = class { + /** @deprecated Access via ctx instead */ + get metadataRegistry() { + return this.ctx.metadataRegistry; + } + /** @deprecated Access via ctx instead */ + get target() { + return this.ctx.target; + } + /** @deprecated Access via ctx instead */ + get unrepresentable() { + return this.ctx.unrepresentable; + } + /** @deprecated Access via ctx instead */ + get override() { + return this.ctx.override; + } + /** @deprecated Access via ctx instead */ + get io() { + return this.ctx.io; + } + /** @deprecated Access via ctx instead */ + get counter() { + return this.ctx.counter; + } + set counter(value) { + this.ctx.counter = value; + } + /** @deprecated Access via ctx instead */ + get seen() { + return this.ctx.seen; + } + constructor(params) { + let normalizedTarget = params?.target ?? 
"draft-2020-12"; + if (normalizedTarget === "draft-4") + normalizedTarget = "draft-04"; + if (normalizedTarget === "draft-7") + normalizedTarget = "draft-07"; + this.ctx = initializeContext({ + processors: allProcessors, + target: normalizedTarget, + ...params?.metadata && { metadata: params.metadata }, + ...params?.unrepresentable && { unrepresentable: params.unrepresentable }, + ...params?.override && { override: params.override }, + ...params?.io && { io: params.io } + }); + } + /** + * Process a schema to prepare it for JSON Schema generation. + * This must be called before emit(). + */ + process(schema, _params = { path: [], schemaPath: [] }) { + return process2(schema, this.ctx, _params); + } + /** + * Emit the final JSON Schema after processing. + * Must call process() first. + */ + emit(schema, _params) { + if (_params) { + if (_params.cycles) + this.ctx.cycles = _params.cycles; + if (_params.reused) + this.ctx.reused = _params.reused; + if (_params.external) + this.ctx.external = _params.external; + } + extractDefs(this.ctx, schema); + const result = finalize(this.ctx, schema); + const { "~standard": _, ...plainResult } = result; + return plainResult; + } +}; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/json-schema.js +var json_schema_exports = {}; + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/schemas.js +var schemas_exports2 = {}; +__export(schemas_exports2, { + ZodAny: () => ZodAny, + ZodArray: () => ZodArray, + ZodBase64: () => ZodBase64, + ZodBase64URL: () => ZodBase64URL, + ZodBigInt: () => ZodBigInt, + ZodBigIntFormat: () => ZodBigIntFormat, + ZodBoolean: () => ZodBoolean, + ZodCIDRv4: () => ZodCIDRv4, + ZodCIDRv6: () => ZodCIDRv6, + ZodCUID: () => ZodCUID, + ZodCUID2: () => ZodCUID2, + ZodCatch: () => ZodCatch, + ZodCodec: () => ZodCodec, + ZodCustom: () => ZodCustom, + ZodCustomStringFormat: () => ZodCustomStringFormat, + ZodDate: () => ZodDate, + ZodDefault: () => ZodDefault, + ZodDiscriminatedUnion: () 
=> ZodDiscriminatedUnion, + ZodE164: () => ZodE164, + ZodEmail: () => ZodEmail, + ZodEmoji: () => ZodEmoji, + ZodEnum: () => ZodEnum, + ZodExactOptional: () => ZodExactOptional, + ZodFile: () => ZodFile, + ZodFunction: () => ZodFunction, + ZodGUID: () => ZodGUID, + ZodIPv4: () => ZodIPv4, + ZodIPv6: () => ZodIPv6, + ZodIntersection: () => ZodIntersection, + ZodJWT: () => ZodJWT, + ZodKSUID: () => ZodKSUID, + ZodLazy: () => ZodLazy, + ZodLiteral: () => ZodLiteral, + ZodMAC: () => ZodMAC, + ZodMap: () => ZodMap, + ZodNaN: () => ZodNaN, + ZodNanoID: () => ZodNanoID, + ZodNever: () => ZodNever, + ZodNonOptional: () => ZodNonOptional, + ZodNull: () => ZodNull, + ZodNullable: () => ZodNullable, + ZodNumber: () => ZodNumber, + ZodNumberFormat: () => ZodNumberFormat, + ZodObject: () => ZodObject, + ZodOptional: () => ZodOptional, + ZodPipe: () => ZodPipe, + ZodPrefault: () => ZodPrefault, + ZodPromise: () => ZodPromise, + ZodReadonly: () => ZodReadonly, + ZodRecord: () => ZodRecord, + ZodSet: () => ZodSet, + ZodString: () => ZodString, + ZodStringFormat: () => ZodStringFormat, + ZodSuccess: () => ZodSuccess, + ZodSymbol: () => ZodSymbol, + ZodTemplateLiteral: () => ZodTemplateLiteral, + ZodTransform: () => ZodTransform, + ZodTuple: () => ZodTuple, + ZodType: () => ZodType, + ZodULID: () => ZodULID, + ZodURL: () => ZodURL, + ZodUUID: () => ZodUUID, + ZodUndefined: () => ZodUndefined, + ZodUnion: () => ZodUnion, + ZodUnknown: () => ZodUnknown, + ZodVoid: () => ZodVoid, + ZodXID: () => ZodXID, + ZodXor: () => ZodXor, + _ZodString: () => _ZodString, + _default: () => _default2, + _function: () => _function, + any: () => any, + array: () => array, + base64: () => base642, + base64url: () => base64url2, + bigint: () => bigint2, + boolean: () => boolean2, + catch: () => _catch2, + check: () => check, + cidrv4: () => cidrv42, + cidrv6: () => cidrv62, + codec: () => codec, + cuid: () => cuid3, + cuid2: () => cuid22, + custom: () => custom, + date: () => date3, + describe: () => 
describe2, + discriminatedUnion: () => discriminatedUnion, + e164: () => e1642, + email: () => email2, + emoji: () => emoji2, + enum: () => _enum2, + exactOptional: () => exactOptional, + file: () => file, + float32: () => float32, + float64: () => float64, + function: () => _function, + guid: () => guid2, + hash: () => hash, + hex: () => hex2, + hostname: () => hostname2, + httpUrl: () => httpUrl, + instanceof: () => _instanceof, + int: () => int, + int32: () => int32, + int64: () => int64, + intersection: () => intersection, + ipv4: () => ipv42, + ipv6: () => ipv62, + json: () => json, + jwt: () => jwt, + keyof: () => keyof, + ksuid: () => ksuid2, + lazy: () => lazy, + literal: () => literal, + looseObject: () => looseObject, + looseRecord: () => looseRecord, + mac: () => mac2, + map: () => map, + meta: () => meta2, + nan: () => nan, + nanoid: () => nanoid2, + nativeEnum: () => nativeEnum, + never: () => never, + nonoptional: () => nonoptional, + null: () => _null3, + nullable: () => nullable, + nullish: () => nullish2, + number: () => number2, + object: () => object, + optional: () => optional, + partialRecord: () => partialRecord, + pipe: () => pipe, + prefault: () => prefault, + preprocess: () => preprocess, + promise: () => promise, + readonly: () => readonly, + record: () => record, + refine: () => refine, + set: () => set, + strictObject: () => strictObject, + string: () => string2, + stringFormat: () => stringFormat, + stringbool: () => stringbool, + success: () => success, + superRefine: () => superRefine, + symbol: () => symbol, + templateLiteral: () => templateLiteral, + transform: () => transform, + tuple: () => tuple, + uint32: () => uint32, + uint64: () => uint64, + ulid: () => ulid2, + undefined: () => _undefined3, + union: () => union, + unknown: () => unknown, + url: () => url, + uuid: () => uuid2, + uuidv4: () => uuidv4, + uuidv6: () => uuidv6, + uuidv7: () => uuidv7, + void: () => _void2, + xid: () => xid2, + xor: () => xor +}); + +// 
../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/checks.js +var checks_exports2 = {}; +__export(checks_exports2, { + endsWith: () => _endsWith, + gt: () => _gt, + gte: () => _gte, + includes: () => _includes, + length: () => _length, + lowercase: () => _lowercase, + lt: () => _lt, + lte: () => _lte, + maxLength: () => _maxLength, + maxSize: () => _maxSize, + mime: () => _mime, + minLength: () => _minLength, + minSize: () => _minSize, + multipleOf: () => _multipleOf, + negative: () => _negative, + nonnegative: () => _nonnegative, + nonpositive: () => _nonpositive, + normalize: () => _normalize, + overwrite: () => _overwrite, + positive: () => _positive, + property: () => _property, + regex: () => _regex, + size: () => _size, + slugify: () => _slugify, + startsWith: () => _startsWith, + toLowerCase: () => _toLowerCase, + toUpperCase: () => _toUpperCase, + trim: () => _trim, + uppercase: () => _uppercase +}); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/iso.js +var iso_exports = {}; +__export(iso_exports, { + ZodISODate: () => ZodISODate, + ZodISODateTime: () => ZodISODateTime, + ZodISODuration: () => ZodISODuration, + ZodISOTime: () => ZodISOTime, + date: () => date2, + datetime: () => datetime2, + duration: () => duration2, + time: () => time2 +}); +var ZodISODateTime = /* @__PURE__ */ $constructor("ZodISODateTime", (inst, def) => { + $ZodISODateTime.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function datetime2(params) { + return _isoDateTime(ZodISODateTime, params); +} +var ZodISODate = /* @__PURE__ */ $constructor("ZodISODate", (inst, def) => { + $ZodISODate.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function date2(params) { + return _isoDate(ZodISODate, params); +} +var ZodISOTime = /* @__PURE__ */ $constructor("ZodISOTime", (inst, def) => { + $ZodISOTime.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function time2(params) { + return _isoTime(ZodISOTime, params); +} +var 
ZodISODuration = /* @__PURE__ */ $constructor("ZodISODuration", (inst, def) => { + $ZodISODuration.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function duration2(params) { + return _isoDuration(ZodISODuration, params); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/errors.js +var initializer2 = (inst, issues) => { + $ZodError.init(inst, issues); + inst.name = "ZodError"; + Object.defineProperties(inst, { + format: { + value: (mapper) => formatError(inst, mapper) + // enumerable: false, + }, + flatten: { + value: (mapper) => flattenError(inst, mapper) + // enumerable: false, + }, + addIssue: { + value: (issue2) => { + inst.issues.push(issue2); + inst.message = JSON.stringify(inst.issues, jsonStringifyReplacer, 2); + } + // enumerable: false, + }, + addIssues: { + value: (issues2) => { + inst.issues.push(...issues2); + inst.message = JSON.stringify(inst.issues, jsonStringifyReplacer, 2); + } + // enumerable: false, + }, + isEmpty: { + get() { + return inst.issues.length === 0; + } + // enumerable: false, + } + }); +}; +var ZodError = $constructor("ZodError", initializer2); +var ZodRealError = $constructor("ZodError", initializer2, { + Parent: Error +}); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/parse.js +var parse2 = /* @__PURE__ */ _parse(ZodRealError); +var parseAsync2 = /* @__PURE__ */ _parseAsync(ZodRealError); +var safeParse2 = /* @__PURE__ */ _safeParse(ZodRealError); +var safeParseAsync2 = /* @__PURE__ */ _safeParseAsync(ZodRealError); +var encode2 = /* @__PURE__ */ _encode(ZodRealError); +var decode2 = /* @__PURE__ */ _decode(ZodRealError); +var encodeAsync2 = /* @__PURE__ */ _encodeAsync(ZodRealError); +var decodeAsync2 = /* @__PURE__ */ _decodeAsync(ZodRealError); +var safeEncode2 = /* @__PURE__ */ _safeEncode(ZodRealError); +var safeDecode2 = /* @__PURE__ */ _safeDecode(ZodRealError); +var safeEncodeAsync2 = /* @__PURE__ */ _safeEncodeAsync(ZodRealError); +var safeDecodeAsync2 = /* @__PURE__ 
*/ _safeDecodeAsync(ZodRealError); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/schemas.js +var ZodType = /* @__PURE__ */ $constructor("ZodType", (inst, def) => { + $ZodType.init(inst, def); + Object.assign(inst["~standard"], { + jsonSchema: { + input: createStandardJSONSchemaMethod(inst, "input"), + output: createStandardJSONSchemaMethod(inst, "output") + } + }); + inst.toJSONSchema = createToJSONSchemaMethod(inst, {}); + inst.def = def; + inst.type = def.type; + Object.defineProperty(inst, "_def", { value: def }); + inst.check = (...checks) => { + return inst.clone(util_exports.mergeDefs(def, { + checks: [ + ...def.checks ?? [], + ...checks.map((ch) => typeof ch === "function" ? { _zod: { check: ch, def: { check: "custom" }, onattach: [] } } : ch) + ] + }), { + parent: true + }); + }; + inst.with = inst.check; + inst.clone = (def2, params) => clone(inst, def2, params); + inst.brand = () => inst; + inst.register = ((reg, meta3) => { + reg.add(inst, meta3); + return inst; + }); + inst.parse = (data, params) => parse2(inst, data, params, { callee: inst.parse }); + inst.safeParse = (data, params) => safeParse2(inst, data, params); + inst.parseAsync = async (data, params) => parseAsync2(inst, data, params, { callee: inst.parseAsync }); + inst.safeParseAsync = async (data, params) => safeParseAsync2(inst, data, params); + inst.spa = inst.safeParseAsync; + inst.encode = (data, params) => encode2(inst, data, params); + inst.decode = (data, params) => decode2(inst, data, params); + inst.encodeAsync = async (data, params) => encodeAsync2(inst, data, params); + inst.decodeAsync = async (data, params) => decodeAsync2(inst, data, params); + inst.safeEncode = (data, params) => safeEncode2(inst, data, params); + inst.safeDecode = (data, params) => safeDecode2(inst, data, params); + inst.safeEncodeAsync = async (data, params) => safeEncodeAsync2(inst, data, params); + inst.safeDecodeAsync = async (data, params) => safeDecodeAsync2(inst, data, params); + 
inst.refine = (check2, params) => inst.check(refine(check2, params)); + inst.superRefine = (refinement) => inst.check(superRefine(refinement)); + inst.overwrite = (fn) => inst.check(_overwrite(fn)); + inst.optional = () => optional(inst); + inst.exactOptional = () => exactOptional(inst); + inst.nullable = () => nullable(inst); + inst.nullish = () => optional(nullable(inst)); + inst.nonoptional = (params) => nonoptional(inst, params); + inst.array = () => array(inst); + inst.or = (arg) => union([inst, arg]); + inst.and = (arg) => intersection(inst, arg); + inst.transform = (tx) => pipe(inst, transform(tx)); + inst.default = (def2) => _default2(inst, def2); + inst.prefault = (def2) => prefault(inst, def2); + inst.catch = (params) => _catch2(inst, params); + inst.pipe = (target) => pipe(inst, target); + inst.readonly = () => readonly(inst); + inst.describe = (description) => { + const cl = inst.clone(); + globalRegistry.add(cl, { description }); + return cl; + }; + Object.defineProperty(inst, "description", { + get() { + return globalRegistry.get(inst)?.description; + }, + configurable: true + }); + inst.meta = (...args) => { + if (args.length === 0) { + return globalRegistry.get(inst); + } + const cl = inst.clone(); + globalRegistry.add(cl, args[0]); + return cl; + }; + inst.isOptional = () => inst.safeParse(void 0).success; + inst.isNullable = () => inst.safeParse(null).success; + inst.apply = (fn) => fn(inst); + return inst; +}); +var _ZodString = /* @__PURE__ */ $constructor("_ZodString", (inst, def) => { + $ZodString.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => stringProcessor(inst, ctx, json2, params); + const bag = inst._zod.bag; + inst.format = bag.format ?? null; + inst.minLength = bag.minimum ?? null; + inst.maxLength = bag.maximum ?? 
null; + inst.regex = (...args) => inst.check(_regex(...args)); + inst.includes = (...args) => inst.check(_includes(...args)); + inst.startsWith = (...args) => inst.check(_startsWith(...args)); + inst.endsWith = (...args) => inst.check(_endsWith(...args)); + inst.min = (...args) => inst.check(_minLength(...args)); + inst.max = (...args) => inst.check(_maxLength(...args)); + inst.length = (...args) => inst.check(_length(...args)); + inst.nonempty = (...args) => inst.check(_minLength(1, ...args)); + inst.lowercase = (params) => inst.check(_lowercase(params)); + inst.uppercase = (params) => inst.check(_uppercase(params)); + inst.trim = () => inst.check(_trim()); + inst.normalize = (...args) => inst.check(_normalize(...args)); + inst.toLowerCase = () => inst.check(_toLowerCase()); + inst.toUpperCase = () => inst.check(_toUpperCase()); + inst.slugify = () => inst.check(_slugify()); +}); +var ZodString = /* @__PURE__ */ $constructor("ZodString", (inst, def) => { + $ZodString.init(inst, def); + _ZodString.init(inst, def); + inst.email = (params) => inst.check(_email(ZodEmail, params)); + inst.url = (params) => inst.check(_url(ZodURL, params)); + inst.jwt = (params) => inst.check(_jwt(ZodJWT, params)); + inst.emoji = (params) => inst.check(_emoji2(ZodEmoji, params)); + inst.guid = (params) => inst.check(_guid(ZodGUID, params)); + inst.uuid = (params) => inst.check(_uuid(ZodUUID, params)); + inst.uuidv4 = (params) => inst.check(_uuidv4(ZodUUID, params)); + inst.uuidv6 = (params) => inst.check(_uuidv6(ZodUUID, params)); + inst.uuidv7 = (params) => inst.check(_uuidv7(ZodUUID, params)); + inst.nanoid = (params) => inst.check(_nanoid(ZodNanoID, params)); + inst.guid = (params) => inst.check(_guid(ZodGUID, params)); + inst.cuid = (params) => inst.check(_cuid(ZodCUID, params)); + inst.cuid2 = (params) => inst.check(_cuid2(ZodCUID2, params)); + inst.ulid = (params) => inst.check(_ulid(ZodULID, params)); + inst.base64 = (params) => inst.check(_base64(ZodBase64, params)); + 
inst.base64url = (params) => inst.check(_base64url(ZodBase64URL, params)); + inst.xid = (params) => inst.check(_xid(ZodXID, params)); + inst.ksuid = (params) => inst.check(_ksuid(ZodKSUID, params)); + inst.ipv4 = (params) => inst.check(_ipv4(ZodIPv4, params)); + inst.ipv6 = (params) => inst.check(_ipv6(ZodIPv6, params)); + inst.cidrv4 = (params) => inst.check(_cidrv4(ZodCIDRv4, params)); + inst.cidrv6 = (params) => inst.check(_cidrv6(ZodCIDRv6, params)); + inst.e164 = (params) => inst.check(_e164(ZodE164, params)); + inst.datetime = (params) => inst.check(datetime2(params)); + inst.date = (params) => inst.check(date2(params)); + inst.time = (params) => inst.check(time2(params)); + inst.duration = (params) => inst.check(duration2(params)); +}); +function string2(params) { + return _string(ZodString, params); +} +var ZodStringFormat = /* @__PURE__ */ $constructor("ZodStringFormat", (inst, def) => { + $ZodStringFormat.init(inst, def); + _ZodString.init(inst, def); +}); +var ZodEmail = /* @__PURE__ */ $constructor("ZodEmail", (inst, def) => { + $ZodEmail.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function email2(params) { + return _email(ZodEmail, params); +} +var ZodGUID = /* @__PURE__ */ $constructor("ZodGUID", (inst, def) => { + $ZodGUID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function guid2(params) { + return _guid(ZodGUID, params); +} +var ZodUUID = /* @__PURE__ */ $constructor("ZodUUID", (inst, def) => { + $ZodUUID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function uuid2(params) { + return _uuid(ZodUUID, params); +} +function uuidv4(params) { + return _uuidv4(ZodUUID, params); +} +function uuidv6(params) { + return _uuidv6(ZodUUID, params); +} +function uuidv7(params) { + return _uuidv7(ZodUUID, params); +} +var ZodURL = /* @__PURE__ */ $constructor("ZodURL", (inst, def) => { + $ZodURL.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function url(params) { + return _url(ZodURL, params); +} +function 
httpUrl(params) { + return _url(ZodURL, { + protocol: /^https?$/, + hostname: regexes_exports.domain, + ...util_exports.normalizeParams(params) + }); +} +var ZodEmoji = /* @__PURE__ */ $constructor("ZodEmoji", (inst, def) => { + $ZodEmoji.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function emoji2(params) { + return _emoji2(ZodEmoji, params); +} +var ZodNanoID = /* @__PURE__ */ $constructor("ZodNanoID", (inst, def) => { + $ZodNanoID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function nanoid2(params) { + return _nanoid(ZodNanoID, params); +} +var ZodCUID = /* @__PURE__ */ $constructor("ZodCUID", (inst, def) => { + $ZodCUID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function cuid3(params) { + return _cuid(ZodCUID, params); +} +var ZodCUID2 = /* @__PURE__ */ $constructor("ZodCUID2", (inst, def) => { + $ZodCUID2.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function cuid22(params) { + return _cuid2(ZodCUID2, params); +} +var ZodULID = /* @__PURE__ */ $constructor("ZodULID", (inst, def) => { + $ZodULID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function ulid2(params) { + return _ulid(ZodULID, params); +} +var ZodXID = /* @__PURE__ */ $constructor("ZodXID", (inst, def) => { + $ZodXID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function xid2(params) { + return _xid(ZodXID, params); +} +var ZodKSUID = /* @__PURE__ */ $constructor("ZodKSUID", (inst, def) => { + $ZodKSUID.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function ksuid2(params) { + return _ksuid(ZodKSUID, params); +} +var ZodIPv4 = /* @__PURE__ */ $constructor("ZodIPv4", (inst, def) => { + $ZodIPv4.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function ipv42(params) { + return _ipv4(ZodIPv4, params); +} +var ZodMAC = /* @__PURE__ */ $constructor("ZodMAC", (inst, def) => { + $ZodMAC.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function mac2(params) { + return _mac(ZodMAC, params); +} +var 
ZodIPv6 = /* @__PURE__ */ $constructor("ZodIPv6", (inst, def) => { + $ZodIPv6.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function ipv62(params) { + return _ipv6(ZodIPv6, params); +} +var ZodCIDRv4 = /* @__PURE__ */ $constructor("ZodCIDRv4", (inst, def) => { + $ZodCIDRv4.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function cidrv42(params) { + return _cidrv4(ZodCIDRv4, params); +} +var ZodCIDRv6 = /* @__PURE__ */ $constructor("ZodCIDRv6", (inst, def) => { + $ZodCIDRv6.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function cidrv62(params) { + return _cidrv6(ZodCIDRv6, params); +} +var ZodBase64 = /* @__PURE__ */ $constructor("ZodBase64", (inst, def) => { + $ZodBase64.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function base642(params) { + return _base64(ZodBase64, params); +} +var ZodBase64URL = /* @__PURE__ */ $constructor("ZodBase64URL", (inst, def) => { + $ZodBase64URL.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function base64url2(params) { + return _base64url(ZodBase64URL, params); +} +var ZodE164 = /* @__PURE__ */ $constructor("ZodE164", (inst, def) => { + $ZodE164.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function e1642(params) { + return _e164(ZodE164, params); +} +var ZodJWT = /* @__PURE__ */ $constructor("ZodJWT", (inst, def) => { + $ZodJWT.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function jwt(params) { + return _jwt(ZodJWT, params); +} +var ZodCustomStringFormat = /* @__PURE__ */ $constructor("ZodCustomStringFormat", (inst, def) => { + $ZodCustomStringFormat.init(inst, def); + ZodStringFormat.init(inst, def); +}); +function stringFormat(format, fnOrRegex, _params = {}) { + return _stringFormat(ZodCustomStringFormat, format, fnOrRegex, _params); +} +function hostname2(_params) { + return _stringFormat(ZodCustomStringFormat, "hostname", regexes_exports.hostname, _params); +} +function hex2(_params) { + return _stringFormat(ZodCustomStringFormat, "hex", 
regexes_exports.hex, _params); +} +function hash(alg, params) { + const enc = params?.enc ?? "hex"; + const format = `${alg}_${enc}`; + const regex = regexes_exports[format]; + if (!regex) + throw new Error(`Unrecognized hash format: ${format}`); + return _stringFormat(ZodCustomStringFormat, format, regex, params); +} +var ZodNumber = /* @__PURE__ */ $constructor("ZodNumber", (inst, def) => { + $ZodNumber.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => numberProcessor(inst, ctx, json2, params); + inst.gt = (value, params) => inst.check(_gt(value, params)); + inst.gte = (value, params) => inst.check(_gte(value, params)); + inst.min = (value, params) => inst.check(_gte(value, params)); + inst.lt = (value, params) => inst.check(_lt(value, params)); + inst.lte = (value, params) => inst.check(_lte(value, params)); + inst.max = (value, params) => inst.check(_lte(value, params)); + inst.int = (params) => inst.check(int(params)); + inst.safe = (params) => inst.check(int(params)); + inst.positive = (params) => inst.check(_gt(0, params)); + inst.nonnegative = (params) => inst.check(_gte(0, params)); + inst.negative = (params) => inst.check(_lt(0, params)); + inst.nonpositive = (params) => inst.check(_lte(0, params)); + inst.multipleOf = (value, params) => inst.check(_multipleOf(value, params)); + inst.step = (value, params) => inst.check(_multipleOf(value, params)); + inst.finite = () => inst; + const bag = inst._zod.bag; + inst.minValue = Math.max(bag.minimum ?? Number.NEGATIVE_INFINITY, bag.exclusiveMinimum ?? Number.NEGATIVE_INFINITY) ?? null; + inst.maxValue = Math.min(bag.maximum ?? Number.POSITIVE_INFINITY, bag.exclusiveMaximum ?? Number.POSITIVE_INFINITY) ?? null; + inst.isInt = (bag.format ?? "").includes("int") || Number.isSafeInteger(bag.multipleOf ?? 0.5); + inst.isFinite = true; + inst.format = bag.format ?? 
null; +}); +function number2(params) { + return _number(ZodNumber, params); +} +var ZodNumberFormat = /* @__PURE__ */ $constructor("ZodNumberFormat", (inst, def) => { + $ZodNumberFormat.init(inst, def); + ZodNumber.init(inst, def); +}); +function int(params) { + return _int(ZodNumberFormat, params); +} +function float32(params) { + return _float32(ZodNumberFormat, params); +} +function float64(params) { + return _float64(ZodNumberFormat, params); +} +function int32(params) { + return _int32(ZodNumberFormat, params); +} +function uint32(params) { + return _uint32(ZodNumberFormat, params); +} +var ZodBoolean = /* @__PURE__ */ $constructor("ZodBoolean", (inst, def) => { + $ZodBoolean.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => booleanProcessor(inst, ctx, json2, params); +}); +function boolean2(params) { + return _boolean(ZodBoolean, params); +} +var ZodBigInt = /* @__PURE__ */ $constructor("ZodBigInt", (inst, def) => { + $ZodBigInt.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => bigintProcessor(inst, ctx, json2, params); + inst.gte = (value, params) => inst.check(_gte(value, params)); + inst.min = (value, params) => inst.check(_gte(value, params)); + inst.gt = (value, params) => inst.check(_gt(value, params)); + inst.gte = (value, params) => inst.check(_gte(value, params)); + inst.min = (value, params) => inst.check(_gte(value, params)); + inst.lt = (value, params) => inst.check(_lt(value, params)); + inst.lte = (value, params) => inst.check(_lte(value, params)); + inst.max = (value, params) => inst.check(_lte(value, params)); + inst.positive = (params) => inst.check(_gt(BigInt(0), params)); + inst.negative = (params) => inst.check(_lt(BigInt(0), params)); + inst.nonpositive = (params) => inst.check(_lte(BigInt(0), params)); + inst.nonnegative = (params) => inst.check(_gte(BigInt(0), params)); + inst.multipleOf = (value, params) => inst.check(_multipleOf(value, 
params)); + const bag = inst._zod.bag; + inst.minValue = bag.minimum ?? null; + inst.maxValue = bag.maximum ?? null; + inst.format = bag.format ?? null; +}); +function bigint2(params) { + return _bigint(ZodBigInt, params); +} +var ZodBigIntFormat = /* @__PURE__ */ $constructor("ZodBigIntFormat", (inst, def) => { + $ZodBigIntFormat.init(inst, def); + ZodBigInt.init(inst, def); +}); +function int64(params) { + return _int64(ZodBigIntFormat, params); +} +function uint64(params) { + return _uint64(ZodBigIntFormat, params); +} +var ZodSymbol = /* @__PURE__ */ $constructor("ZodSymbol", (inst, def) => { + $ZodSymbol.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => symbolProcessor(inst, ctx, json2, params); +}); +function symbol(params) { + return _symbol(ZodSymbol, params); +} +var ZodUndefined = /* @__PURE__ */ $constructor("ZodUndefined", (inst, def) => { + $ZodUndefined.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => undefinedProcessor(inst, ctx, json2, params); +}); +function _undefined3(params) { + return _undefined2(ZodUndefined, params); +} +var ZodNull = /* @__PURE__ */ $constructor("ZodNull", (inst, def) => { + $ZodNull.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => nullProcessor(inst, ctx, json2, params); +}); +function _null3(params) { + return _null2(ZodNull, params); +} +var ZodAny = /* @__PURE__ */ $constructor("ZodAny", (inst, def) => { + $ZodAny.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => anyProcessor(inst, ctx, json2, params); +}); +function any() { + return _any(ZodAny); +} +var ZodUnknown = /* @__PURE__ */ $constructor("ZodUnknown", (inst, def) => { + $ZodUnknown.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => unknownProcessor(inst, ctx, json2, params); +}); +function unknown() { + return 
_unknown(ZodUnknown); +} +var ZodNever = /* @__PURE__ */ $constructor("ZodNever", (inst, def) => { + $ZodNever.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => neverProcessor(inst, ctx, json2, params); +}); +function never(params) { + return _never(ZodNever, params); +} +var ZodVoid = /* @__PURE__ */ $constructor("ZodVoid", (inst, def) => { + $ZodVoid.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => voidProcessor(inst, ctx, json2, params); +}); +function _void2(params) { + return _void(ZodVoid, params); +} +var ZodDate = /* @__PURE__ */ $constructor("ZodDate", (inst, def) => { + $ZodDate.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => dateProcessor(inst, ctx, json2, params); + inst.min = (value, params) => inst.check(_gte(value, params)); + inst.max = (value, params) => inst.check(_lte(value, params)); + const c = inst._zod.bag; + inst.minDate = c.minimum ? new Date(c.minimum) : null; + inst.maxDate = c.maximum ? 
new Date(c.maximum) : null; +}); +function date3(params) { + return _date(ZodDate, params); +} +var ZodArray = /* @__PURE__ */ $constructor("ZodArray", (inst, def) => { + $ZodArray.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => arrayProcessor(inst, ctx, json2, params); + inst.element = def.element; + inst.min = (minLength, params) => inst.check(_minLength(minLength, params)); + inst.nonempty = (params) => inst.check(_minLength(1, params)); + inst.max = (maxLength, params) => inst.check(_maxLength(maxLength, params)); + inst.length = (len, params) => inst.check(_length(len, params)); + inst.unwrap = () => inst.element; +}); +function array(element, params) { + return _array(ZodArray, element, params); +} +function keyof(schema) { + const shape = schema._zod.def.shape; + return _enum2(Object.keys(shape)); +} +var ZodObject = /* @__PURE__ */ $constructor("ZodObject", (inst, def) => { + $ZodObjectJIT.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => objectProcessor(inst, ctx, json2, params); + util_exports.defineLazy(inst, "shape", () => { + return def.shape; + }); + inst.keyof = () => _enum2(Object.keys(inst._zod.def.shape)); + inst.catchall = (catchall) => inst.clone({ ...inst._zod.def, catchall }); + inst.passthrough = () => inst.clone({ ...inst._zod.def, catchall: unknown() }); + inst.loose = () => inst.clone({ ...inst._zod.def, catchall: unknown() }); + inst.strict = () => inst.clone({ ...inst._zod.def, catchall: never() }); + inst.strip = () => inst.clone({ ...inst._zod.def, catchall: void 0 }); + inst.extend = (incoming) => { + return util_exports.extend(inst, incoming); + }; + inst.safeExtend = (incoming) => { + return util_exports.safeExtend(inst, incoming); + }; + inst.merge = (other) => util_exports.merge(inst, other); + inst.pick = (mask) => util_exports.pick(inst, mask); + inst.omit = (mask) => util_exports.omit(inst, mask); + inst.partial = (...args) => 
util_exports.partial(ZodOptional, inst, args[0]); + inst.required = (...args) => util_exports.required(ZodNonOptional, inst, args[0]); +}); +function object(shape, params) { + const def = { + type: "object", + shape: shape ?? {}, + ...util_exports.normalizeParams(params) + }; + return new ZodObject(def); +} +function strictObject(shape, params) { + return new ZodObject({ + type: "object", + shape, + catchall: never(), + ...util_exports.normalizeParams(params) + }); +} +function looseObject(shape, params) { + return new ZodObject({ + type: "object", + shape, + catchall: unknown(), + ...util_exports.normalizeParams(params) + }); +} +var ZodUnion = /* @__PURE__ */ $constructor("ZodUnion", (inst, def) => { + $ZodUnion.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => unionProcessor(inst, ctx, json2, params); + inst.options = def.options; +}); +function union(options, params) { + return new ZodUnion({ + type: "union", + options, + ...util_exports.normalizeParams(params) + }); +} +var ZodXor = /* @__PURE__ */ $constructor("ZodXor", (inst, def) => { + ZodUnion.init(inst, def); + $ZodXor.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => unionProcessor(inst, ctx, json2, params); + inst.options = def.options; +}); +function xor(options, params) { + return new ZodXor({ + type: "union", + options, + inclusive: false, + ...util_exports.normalizeParams(params) + }); +} +var ZodDiscriminatedUnion = /* @__PURE__ */ $constructor("ZodDiscriminatedUnion", (inst, def) => { + ZodUnion.init(inst, def); + $ZodDiscriminatedUnion.init(inst, def); +}); +function discriminatedUnion(discriminator, options, params) { + return new ZodDiscriminatedUnion({ + type: "union", + options, + discriminator, + ...util_exports.normalizeParams(params) + }); +} +var ZodIntersection = /* @__PURE__ */ $constructor("ZodIntersection", (inst, def) => { + $ZodIntersection.init(inst, def); + ZodType.init(inst, def); + 
inst._zod.processJSONSchema = (ctx, json2, params) => intersectionProcessor(inst, ctx, json2, params); +}); +function intersection(left, right) { + return new ZodIntersection({ + type: "intersection", + left, + right + }); +} +var ZodTuple = /* @__PURE__ */ $constructor("ZodTuple", (inst, def) => { + $ZodTuple.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => tupleProcessor(inst, ctx, json2, params); + inst.rest = (rest) => inst.clone({ + ...inst._zod.def, + rest + }); +}); +function tuple(items, _paramsOrRest, _params) { + const hasRest = _paramsOrRest instanceof $ZodType; + const params = hasRest ? _params : _paramsOrRest; + const rest = hasRest ? _paramsOrRest : null; + return new ZodTuple({ + type: "tuple", + items, + rest, + ...util_exports.normalizeParams(params) + }); +} +var ZodRecord = /* @__PURE__ */ $constructor("ZodRecord", (inst, def) => { + $ZodRecord.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => recordProcessor(inst, ctx, json2, params); + inst.keyType = def.keyType; + inst.valueType = def.valueType; +}); +function record(keyType, valueType, params) { + return new ZodRecord({ + type: "record", + keyType, + valueType, + ...util_exports.normalizeParams(params) + }); +} +function partialRecord(keyType, valueType, params) { + const k = clone(keyType); + k._zod.values = void 0; + return new ZodRecord({ + type: "record", + keyType: k, + valueType, + ...util_exports.normalizeParams(params) + }); +} +function looseRecord(keyType, valueType, params) { + return new ZodRecord({ + type: "record", + keyType, + valueType, + mode: "loose", + ...util_exports.normalizeParams(params) + }); +} +var ZodMap = /* @__PURE__ */ $constructor("ZodMap", (inst, def) => { + $ZodMap.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => mapProcessor(inst, ctx, json2, params); + inst.keyType = def.keyType; + inst.valueType = 
def.valueType; + inst.min = (...args) => inst.check(_minSize(...args)); + inst.nonempty = (params) => inst.check(_minSize(1, params)); + inst.max = (...args) => inst.check(_maxSize(...args)); + inst.size = (...args) => inst.check(_size(...args)); +}); +function map(keyType, valueType, params) { + return new ZodMap({ + type: "map", + keyType, + valueType, + ...util_exports.normalizeParams(params) + }); +} +var ZodSet = /* @__PURE__ */ $constructor("ZodSet", (inst, def) => { + $ZodSet.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => setProcessor(inst, ctx, json2, params); + inst.min = (...args) => inst.check(_minSize(...args)); + inst.nonempty = (params) => inst.check(_minSize(1, params)); + inst.max = (...args) => inst.check(_maxSize(...args)); + inst.size = (...args) => inst.check(_size(...args)); +}); +function set(valueType, params) { + return new ZodSet({ + type: "set", + valueType, + ...util_exports.normalizeParams(params) + }); +} +var ZodEnum = /* @__PURE__ */ $constructor("ZodEnum", (inst, def) => { + $ZodEnum.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => enumProcessor(inst, ctx, json2, params); + inst.enum = def.entries; + inst.options = Object.values(def.entries); + const keys = new Set(Object.keys(def.entries)); + inst.extract = (values, params) => { + const newEntries = {}; + for (const value of values) { + if (keys.has(value)) { + newEntries[value] = def.entries[value]; + } else + throw new Error(`Key ${value} not found in enum`); + } + return new ZodEnum({ + ...def, + checks: [], + ...util_exports.normalizeParams(params), + entries: newEntries + }); + }; + inst.exclude = (values, params) => { + const newEntries = { ...def.entries }; + for (const value of values) { + if (keys.has(value)) { + delete newEntries[value]; + } else + throw new Error(`Key ${value} not found in enum`); + } + return new ZodEnum({ + ...def, + checks: [], + 
...util_exports.normalizeParams(params), + entries: newEntries + }); + }; +}); +function _enum2(values, params) { + const entries = Array.isArray(values) ? Object.fromEntries(values.map((v) => [v, v])) : values; + return new ZodEnum({ + type: "enum", + entries, + ...util_exports.normalizeParams(params) + }); +} +function nativeEnum(entries, params) { + return new ZodEnum({ + type: "enum", + entries, + ...util_exports.normalizeParams(params) + }); +} +var ZodLiteral = /* @__PURE__ */ $constructor("ZodLiteral", (inst, def) => { + $ZodLiteral.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => literalProcessor(inst, ctx, json2, params); + inst.values = new Set(def.values); + Object.defineProperty(inst, "value", { + get() { + if (def.values.length > 1) { + throw new Error("This schema contains multiple valid literal values. Use `.values` instead."); + } + return def.values[0]; + } + }); +}); +function literal(value, params) { + return new ZodLiteral({ + type: "literal", + values: Array.isArray(value) ? value : [value], + ...util_exports.normalizeParams(params) + }); +} +var ZodFile = /* @__PURE__ */ $constructor("ZodFile", (inst, def) => { + $ZodFile.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => fileProcessor(inst, ctx, json2, params); + inst.min = (size, params) => inst.check(_minSize(size, params)); + inst.max = (size, params) => inst.check(_maxSize(size, params)); + inst.mime = (types, params) => inst.check(_mime(Array.isArray(types) ? 
types : [types], params)); +}); +function file(params) { + return _file(ZodFile, params); +} +var ZodTransform = /* @__PURE__ */ $constructor("ZodTransform", (inst, def) => { + $ZodTransform.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => transformProcessor(inst, ctx, json2, params); + inst._zod.parse = (payload, _ctx) => { + if (_ctx.direction === "backward") { + throw new $ZodEncodeError(inst.constructor.name); + } + payload.addIssue = (issue2) => { + if (typeof issue2 === "string") { + payload.issues.push(util_exports.issue(issue2, payload.value, def)); + } else { + const _issue = issue2; + if (_issue.fatal) + _issue.continue = false; + _issue.code ?? (_issue.code = "custom"); + _issue.input ?? (_issue.input = payload.value); + _issue.inst ?? (_issue.inst = inst); + payload.issues.push(util_exports.issue(_issue)); + } + }; + const output = def.transform(payload.value, payload); + if (output instanceof Promise) { + return output.then((output2) => { + payload.value = output2; + return payload; + }); + } + payload.value = output; + return payload; + }; +}); +function transform(fn) { + return new ZodTransform({ + type: "transform", + transform: fn + }); +} +var ZodOptional = /* @__PURE__ */ $constructor("ZodOptional", (inst, def) => { + $ZodOptional.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => optionalProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function optional(innerType) { + return new ZodOptional({ + type: "optional", + innerType + }); +} +var ZodExactOptional = /* @__PURE__ */ $constructor("ZodExactOptional", (inst, def) => { + $ZodExactOptional.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => optionalProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function exactOptional(innerType) { + return new ZodExactOptional({ + 
type: "optional", + innerType + }); +} +var ZodNullable = /* @__PURE__ */ $constructor("ZodNullable", (inst, def) => { + $ZodNullable.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => nullableProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function nullable(innerType) { + return new ZodNullable({ + type: "nullable", + innerType + }); +} +function nullish2(innerType) { + return optional(nullable(innerType)); +} +var ZodDefault = /* @__PURE__ */ $constructor("ZodDefault", (inst, def) => { + $ZodDefault.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => defaultProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; + inst.removeDefault = inst.unwrap; +}); +function _default2(innerType, defaultValue) { + return new ZodDefault({ + type: "default", + innerType, + get defaultValue() { + return typeof defaultValue === "function" ? defaultValue() : util_exports.shallowClone(defaultValue); + } + }); +} +var ZodPrefault = /* @__PURE__ */ $constructor("ZodPrefault", (inst, def) => { + $ZodPrefault.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => prefaultProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function prefault(innerType, defaultValue) { + return new ZodPrefault({ + type: "prefault", + innerType, + get defaultValue() { + return typeof defaultValue === "function" ? 
defaultValue() : util_exports.shallowClone(defaultValue); + } + }); +} +var ZodNonOptional = /* @__PURE__ */ $constructor("ZodNonOptional", (inst, def) => { + $ZodNonOptional.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => nonoptionalProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function nonoptional(innerType, params) { + return new ZodNonOptional({ + type: "nonoptional", + innerType, + ...util_exports.normalizeParams(params) + }); +} +var ZodSuccess = /* @__PURE__ */ $constructor("ZodSuccess", (inst, def) => { + $ZodSuccess.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => successProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function success(innerType) { + return new ZodSuccess({ + type: "success", + innerType + }); +} +var ZodCatch = /* @__PURE__ */ $constructor("ZodCatch", (inst, def) => { + $ZodCatch.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => catchProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; + inst.removeCatch = inst.unwrap; +}); +function _catch2(innerType, catchValue) { + return new ZodCatch({ + type: "catch", + innerType, + catchValue: typeof catchValue === "function" ? 
catchValue : () => catchValue + }); +} +var ZodNaN = /* @__PURE__ */ $constructor("ZodNaN", (inst, def) => { + $ZodNaN.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => nanProcessor(inst, ctx, json2, params); +}); +function nan(params) { + return _nan(ZodNaN, params); +} +var ZodPipe = /* @__PURE__ */ $constructor("ZodPipe", (inst, def) => { + $ZodPipe.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => pipeProcessor(inst, ctx, json2, params); + inst.in = def.in; + inst.out = def.out; +}); +function pipe(in_, out) { + return new ZodPipe({ + type: "pipe", + in: in_, + out + // ...util.normalizeParams(params), + }); +} +var ZodCodec = /* @__PURE__ */ $constructor("ZodCodec", (inst, def) => { + ZodPipe.init(inst, def); + $ZodCodec.init(inst, def); +}); +function codec(in_, out, params) { + return new ZodCodec({ + type: "pipe", + in: in_, + out, + transform: params.decode, + reverseTransform: params.encode + }); +} +var ZodReadonly = /* @__PURE__ */ $constructor("ZodReadonly", (inst, def) => { + $ZodReadonly.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => readonlyProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function readonly(innerType) { + return new ZodReadonly({ + type: "readonly", + innerType + }); +} +var ZodTemplateLiteral = /* @__PURE__ */ $constructor("ZodTemplateLiteral", (inst, def) => { + $ZodTemplateLiteral.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => templateLiteralProcessor(inst, ctx, json2, params); +}); +function templateLiteral(parts, params) { + return new ZodTemplateLiteral({ + type: "template_literal", + parts, + ...util_exports.normalizeParams(params) + }); +} +var ZodLazy = /* @__PURE__ */ $constructor("ZodLazy", (inst, def) => { + $ZodLazy.init(inst, def); + ZodType.init(inst, def); + 
inst._zod.processJSONSchema = (ctx, json2, params) => lazyProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.getter(); +}); +function lazy(getter) { + return new ZodLazy({ + type: "lazy", + getter + }); +} +var ZodPromise = /* @__PURE__ */ $constructor("ZodPromise", (inst, def) => { + $ZodPromise.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => promiseProcessor(inst, ctx, json2, params); + inst.unwrap = () => inst._zod.def.innerType; +}); +function promise(innerType) { + return new ZodPromise({ + type: "promise", + innerType + }); +} +var ZodFunction = /* @__PURE__ */ $constructor("ZodFunction", (inst, def) => { + $ZodFunction.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => functionProcessor(inst, ctx, json2, params); +}); +function _function(params) { + return new ZodFunction({ + type: "function", + input: Array.isArray(params?.input) ? tuple(params?.input) : params?.input ?? array(unknown()), + output: params?.output ?? unknown() + }); +} +var ZodCustom = /* @__PURE__ */ $constructor("ZodCustom", (inst, def) => { + $ZodCustom.init(inst, def); + ZodType.init(inst, def); + inst._zod.processJSONSchema = (ctx, json2, params) => customProcessor(inst, ctx, json2, params); +}); +function check(fn) { + const ch = new $ZodCheck({ + check: "custom" + // ...util.normalizeParams(params), + }); + ch._zod.check = fn; + return ch; +} +function custom(fn, _params) { + return _custom(ZodCustom, fn ?? 
(() => true), _params); +} +function refine(fn, _params = {}) { + return _refine(ZodCustom, fn, _params); +} +function superRefine(fn) { + return _superRefine(fn); +} +var describe2 = describe; +var meta2 = meta; +function _instanceof(cls, params = {}) { + const inst = new ZodCustom({ + type: "custom", + check: "custom", + fn: (data) => data instanceof cls, + abort: true, + ...util_exports.normalizeParams(params) + }); + inst._zod.bag.Class = cls; + inst._zod.check = (payload) => { + if (!(payload.value instanceof cls)) { + payload.issues.push({ + code: "invalid_type", + expected: cls.name, + input: payload.value, + inst, + path: [...inst._zod.def.path ?? []] + }); + } + }; + return inst; +} +var stringbool = (...args) => _stringbool({ + Codec: ZodCodec, + Boolean: ZodBoolean, + String: ZodString +}, ...args); +function json(params) { + const jsonSchema = lazy(() => { + return union([string2(params), number2(), boolean2(), _null3(), array(jsonSchema), record(string2(), jsonSchema)]); + }); + return jsonSchema; +} +function preprocess(fn, schema) { + return pipe(transform(fn), schema); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/compat.js +var ZodIssueCode = { + invalid_type: "invalid_type", + too_big: "too_big", + too_small: "too_small", + invalid_format: "invalid_format", + not_multiple_of: "not_multiple_of", + unrecognized_keys: "unrecognized_keys", + invalid_union: "invalid_union", + invalid_key: "invalid_key", + invalid_element: "invalid_element", + invalid_value: "invalid_value", + custom: "custom" +}; +function setErrorMap(map2) { + config({ + customError: map2 + }); +} +function getErrorMap() { + return config().customError; +} +var ZodFirstPartyTypeKind; +/* @__PURE__ */ (function(ZodFirstPartyTypeKind2) { +})(ZodFirstPartyTypeKind || (ZodFirstPartyTypeKind = {})); + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/from-json-schema.js +var z = { + ...schemas_exports2, + ...checks_exports2, + iso: iso_exports +}; 
+var RECOGNIZED_KEYS = /* @__PURE__ */ new Set([ + // Schema identification + "$schema", + "$ref", + "$defs", + "definitions", + // Core schema keywords + "$id", + "id", + "$comment", + "$anchor", + "$vocabulary", + "$dynamicRef", + "$dynamicAnchor", + // Type + "type", + "enum", + "const", + // Composition + "anyOf", + "oneOf", + "allOf", + "not", + // Object + "properties", + "required", + "additionalProperties", + "patternProperties", + "propertyNames", + "minProperties", + "maxProperties", + // Array + "items", + "prefixItems", + "additionalItems", + "minItems", + "maxItems", + "uniqueItems", + "contains", + "minContains", + "maxContains", + // String + "minLength", + "maxLength", + "pattern", + "format", + // Number + "minimum", + "maximum", + "exclusiveMinimum", + "exclusiveMaximum", + "multipleOf", + // Already handled metadata + "description", + "default", + // Content + "contentEncoding", + "contentMediaType", + "contentSchema", + // Unsupported (error-throwing) + "unevaluatedItems", + "unevaluatedProperties", + "if", + "then", + "else", + "dependentSchemas", + "dependentRequired", + // OpenAPI + "nullable", + "readOnly" +]); +function detectVersion(schema, defaultTarget) { + const $schema = schema.$schema; + if ($schema === "https://json-schema.org/draft/2020-12/schema") { + return "draft-2020-12"; + } + if ($schema === "http://json-schema.org/draft-07/schema#") { + return "draft-7"; + } + if ($schema === "http://json-schema.org/draft-04/schema#") { + return "draft-4"; + } + return defaultTarget ?? "draft-2020-12"; +} +function resolveRef(ref, ctx) { + if (!ref.startsWith("#")) { + throw new Error("External $ref is not supported, only local refs (#/...) are allowed"); + } + const path = ref.slice(1).split("/").filter(Boolean); + if (path.length === 0) { + return ctx.rootSchema; + } + const defsKey = ctx.version === "draft-2020-12" ? 
"$defs" : "definitions"; + if (path[0] === defsKey) { + const key = path[1]; + if (!key || !ctx.defs[key]) { + throw new Error(`Reference not found: ${ref}`); + } + return ctx.defs[key]; + } + throw new Error(`Reference not found: ${ref}`); +} +function convertBaseSchema(schema, ctx) { + if (schema.not !== void 0) { + if (typeof schema.not === "object" && Object.keys(schema.not).length === 0) { + return z.never(); + } + throw new Error("not is not supported in Zod (except { not: {} } for never)"); + } + if (schema.unevaluatedItems !== void 0) { + throw new Error("unevaluatedItems is not supported"); + } + if (schema.unevaluatedProperties !== void 0) { + throw new Error("unevaluatedProperties is not supported"); + } + if (schema.if !== void 0 || schema.then !== void 0 || schema.else !== void 0) { + throw new Error("Conditional schemas (if/then/else) are not supported"); + } + if (schema.dependentSchemas !== void 0 || schema.dependentRequired !== void 0) { + throw new Error("dependentSchemas and dependentRequired are not supported"); + } + if (schema.$ref) { + const refPath = schema.$ref; + if (ctx.refs.has(refPath)) { + return ctx.refs.get(refPath); + } + if (ctx.processing.has(refPath)) { + return z.lazy(() => { + if (!ctx.refs.has(refPath)) { + throw new Error(`Circular reference not resolved: ${refPath}`); + } + return ctx.refs.get(refPath); + }); + } + ctx.processing.add(refPath); + const resolved = resolveRef(refPath, ctx); + const zodSchema2 = convertSchema(resolved, ctx); + ctx.refs.set(refPath, zodSchema2); + ctx.processing.delete(refPath); + return zodSchema2; + } + if (schema.enum !== void 0) { + const enumValues = schema.enum; + if (ctx.version === "openapi-3.0" && schema.nullable === true && enumValues.length === 1 && enumValues[0] === null) { + return z.null(); + } + if (enumValues.length === 0) { + return z.never(); + } + if (enumValues.length === 1) { + return z.literal(enumValues[0]); + } + if (enumValues.every((v) => typeof v === "string")) { + 
return z.enum(enumValues); + } + const literalSchemas = enumValues.map((v) => z.literal(v)); + if (literalSchemas.length < 2) { + return literalSchemas[0]; + } + return z.union([literalSchemas[0], literalSchemas[1], ...literalSchemas.slice(2)]); + } + if (schema.const !== void 0) { + return z.literal(schema.const); + } + const type = schema.type; + if (Array.isArray(type)) { + const typeSchemas = type.map((t) => { + const typeSchema = { ...schema, type: t }; + return convertBaseSchema(typeSchema, ctx); + }); + if (typeSchemas.length === 0) { + return z.never(); + } + if (typeSchemas.length === 1) { + return typeSchemas[0]; + } + return z.union(typeSchemas); + } + if (!type) { + return z.any(); + } + let zodSchema; + switch (type) { + case "string": { + let stringSchema = z.string(); + if (schema.format) { + const format = schema.format; + if (format === "email") { + stringSchema = stringSchema.check(z.email()); + } else if (format === "uri" || format === "uri-reference") { + stringSchema = stringSchema.check(z.url()); + } else if (format === "uuid" || format === "guid") { + stringSchema = stringSchema.check(z.uuid()); + } else if (format === "date-time") { + stringSchema = stringSchema.check(z.iso.datetime()); + } else if (format === "date") { + stringSchema = stringSchema.check(z.iso.date()); + } else if (format === "time") { + stringSchema = stringSchema.check(z.iso.time()); + } else if (format === "duration") { + stringSchema = stringSchema.check(z.iso.duration()); + } else if (format === "ipv4") { + stringSchema = stringSchema.check(z.ipv4()); + } else if (format === "ipv6") { + stringSchema = stringSchema.check(z.ipv6()); + } else if (format === "mac") { + stringSchema = stringSchema.check(z.mac()); + } else if (format === "cidr") { + stringSchema = stringSchema.check(z.cidrv4()); + } else if (format === "cidr-v6") { + stringSchema = stringSchema.check(z.cidrv6()); + } else if (format === "base64") { + stringSchema = stringSchema.check(z.base64()); + } else if 
(format === "base64url") { + stringSchema = stringSchema.check(z.base64url()); + } else if (format === "e164") { + stringSchema = stringSchema.check(z.e164()); + } else if (format === "jwt") { + stringSchema = stringSchema.check(z.jwt()); + } else if (format === "emoji") { + stringSchema = stringSchema.check(z.emoji()); + } else if (format === "nanoid") { + stringSchema = stringSchema.check(z.nanoid()); + } else if (format === "cuid") { + stringSchema = stringSchema.check(z.cuid()); + } else if (format === "cuid2") { + stringSchema = stringSchema.check(z.cuid2()); + } else if (format === "ulid") { + stringSchema = stringSchema.check(z.ulid()); + } else if (format === "xid") { + stringSchema = stringSchema.check(z.xid()); + } else if (format === "ksuid") { + stringSchema = stringSchema.check(z.ksuid()); + } + } + if (typeof schema.minLength === "number") { + stringSchema = stringSchema.min(schema.minLength); + } + if (typeof schema.maxLength === "number") { + stringSchema = stringSchema.max(schema.maxLength); + } + if (schema.pattern) { + stringSchema = stringSchema.regex(new RegExp(schema.pattern)); + } + zodSchema = stringSchema; + break; + } + case "number": + case "integer": { + let numberSchema = type === "integer" ? 
z.number().int() : z.number(); + if (typeof schema.minimum === "number") { + numberSchema = numberSchema.min(schema.minimum); + } + if (typeof schema.maximum === "number") { + numberSchema = numberSchema.max(schema.maximum); + } + if (typeof schema.exclusiveMinimum === "number") { + numberSchema = numberSchema.gt(schema.exclusiveMinimum); + } else if (schema.exclusiveMinimum === true && typeof schema.minimum === "number") { + numberSchema = numberSchema.gt(schema.minimum); + } + if (typeof schema.exclusiveMaximum === "number") { + numberSchema = numberSchema.lt(schema.exclusiveMaximum); + } else if (schema.exclusiveMaximum === true && typeof schema.maximum === "number") { + numberSchema = numberSchema.lt(schema.maximum); + } + if (typeof schema.multipleOf === "number") { + numberSchema = numberSchema.multipleOf(schema.multipleOf); + } + zodSchema = numberSchema; + break; + } + case "boolean": { + zodSchema = z.boolean(); + break; + } + case "null": { + zodSchema = z.null(); + break; + } + case "object": { + const shape = {}; + const properties = schema.properties || {}; + const requiredSet = new Set(schema.required || []); + for (const [key, propSchema] of Object.entries(properties)) { + const propZodSchema = convertSchema(propSchema, ctx); + shape[key] = requiredSet.has(key) ? propZodSchema : propZodSchema.optional(); + } + if (schema.propertyNames) { + const keySchema = convertSchema(schema.propertyNames, ctx); + const valueSchema = schema.additionalProperties && typeof schema.additionalProperties === "object" ? 
convertSchema(schema.additionalProperties, ctx) : z.any(); + if (Object.keys(shape).length === 0) { + zodSchema = z.record(keySchema, valueSchema); + break; + } + const objectSchema2 = z.object(shape).passthrough(); + const recordSchema = z.looseRecord(keySchema, valueSchema); + zodSchema = z.intersection(objectSchema2, recordSchema); + break; + } + if (schema.patternProperties) { + const patternProps = schema.patternProperties; + const patternKeys = Object.keys(patternProps); + const looseRecords = []; + for (const pattern of patternKeys) { + const patternValue = convertSchema(patternProps[pattern], ctx); + const keySchema = z.string().regex(new RegExp(pattern)); + looseRecords.push(z.looseRecord(keySchema, patternValue)); + } + const schemasToIntersect = []; + if (Object.keys(shape).length > 0) { + schemasToIntersect.push(z.object(shape).passthrough()); + } + schemasToIntersect.push(...looseRecords); + if (schemasToIntersect.length === 0) { + zodSchema = z.object({}).passthrough(); + } else if (schemasToIntersect.length === 1) { + zodSchema = schemasToIntersect[0]; + } else { + let result = z.intersection(schemasToIntersect[0], schemasToIntersect[1]); + for (let i = 2; i < schemasToIntersect.length; i++) { + result = z.intersection(result, schemasToIntersect[i]); + } + zodSchema = result; + } + break; + } + const objectSchema = z.object(shape); + if (schema.additionalProperties === false) { + zodSchema = objectSchema.strict(); + } else if (typeof schema.additionalProperties === "object") { + zodSchema = objectSchema.catchall(convertSchema(schema.additionalProperties, ctx)); + } else { + zodSchema = objectSchema.passthrough(); + } + break; + } + case "array": { + const prefixItems = schema.prefixItems; + const items = schema.items; + if (prefixItems && Array.isArray(prefixItems)) { + const tupleItems = prefixItems.map((item) => convertSchema(item, ctx)); + const rest = items && typeof items === "object" && !Array.isArray(items) ? 
convertSchema(items, ctx) : void 0; + if (rest) { + zodSchema = z.tuple(tupleItems).rest(rest); + } else { + zodSchema = z.tuple(tupleItems); + } + if (typeof schema.minItems === "number") { + zodSchema = zodSchema.check(z.minLength(schema.minItems)); + } + if (typeof schema.maxItems === "number") { + zodSchema = zodSchema.check(z.maxLength(schema.maxItems)); + } + } else if (Array.isArray(items)) { + const tupleItems = items.map((item) => convertSchema(item, ctx)); + const rest = schema.additionalItems && typeof schema.additionalItems === "object" ? convertSchema(schema.additionalItems, ctx) : void 0; + if (rest) { + zodSchema = z.tuple(tupleItems).rest(rest); + } else { + zodSchema = z.tuple(tupleItems); + } + if (typeof schema.minItems === "number") { + zodSchema = zodSchema.check(z.minLength(schema.minItems)); + } + if (typeof schema.maxItems === "number") { + zodSchema = zodSchema.check(z.maxLength(schema.maxItems)); + } + } else if (items !== void 0) { + const element = convertSchema(items, ctx); + let arraySchema = z.array(element); + if (typeof schema.minItems === "number") { + arraySchema = arraySchema.min(schema.minItems); + } + if (typeof schema.maxItems === "number") { + arraySchema = arraySchema.max(schema.maxItems); + } + zodSchema = arraySchema; + } else { + zodSchema = z.array(z.any()); + } + break; + } + default: + throw new Error(`Unsupported type: ${type}`); + } + if (schema.description) { + zodSchema = zodSchema.describe(schema.description); + } + if (schema.default !== void 0) { + zodSchema = zodSchema.default(schema.default); + } + return zodSchema; +} +function convertSchema(schema, ctx) { + if (typeof schema === "boolean") { + return schema ? 
z.any() : z.never(); + } + let baseSchema = convertBaseSchema(schema, ctx); + const hasExplicitType = schema.type || schema.enum !== void 0 || schema.const !== void 0; + if (schema.anyOf && Array.isArray(schema.anyOf)) { + const options = schema.anyOf.map((s) => convertSchema(s, ctx)); + const anyOfUnion = z.union(options); + baseSchema = hasExplicitType ? z.intersection(baseSchema, anyOfUnion) : anyOfUnion; + } + if (schema.oneOf && Array.isArray(schema.oneOf)) { + const options = schema.oneOf.map((s) => convertSchema(s, ctx)); + const oneOfUnion = z.xor(options); + baseSchema = hasExplicitType ? z.intersection(baseSchema, oneOfUnion) : oneOfUnion; + } + if (schema.allOf && Array.isArray(schema.allOf)) { + if (schema.allOf.length === 0) { + baseSchema = hasExplicitType ? baseSchema : z.any(); + } else { + let result = hasExplicitType ? baseSchema : convertSchema(schema.allOf[0], ctx); + const startIdx = hasExplicitType ? 0 : 1; + for (let i = startIdx; i < schema.allOf.length; i++) { + result = z.intersection(result, convertSchema(schema.allOf[i], ctx)); + } + baseSchema = result; + } + } + if (schema.nullable === true && ctx.version === "openapi-3.0") { + baseSchema = z.nullable(baseSchema); + } + if (schema.readOnly === true) { + baseSchema = z.readonly(baseSchema); + } + const extraMeta = {}; + const coreMetadataKeys = ["$id", "id", "$comment", "$anchor", "$vocabulary", "$dynamicRef", "$dynamicAnchor"]; + for (const key of coreMetadataKeys) { + if (key in schema) { + extraMeta[key] = schema[key]; + } + } + const contentMetadataKeys = ["contentEncoding", "contentMediaType", "contentSchema"]; + for (const key of contentMetadataKeys) { + if (key in schema) { + extraMeta[key] = schema[key]; + } + } + for (const key of Object.keys(schema)) { + if (!RECOGNIZED_KEYS.has(key)) { + extraMeta[key] = schema[key]; + } + } + if (Object.keys(extraMeta).length > 0) { + ctx.registry.add(baseSchema, extraMeta); + } + return baseSchema; +} +function fromJSONSchema(schema, 
params) { + if (typeof schema === "boolean") { + return schema ? z.any() : z.never(); + } + const version2 = detectVersion(schema, params?.defaultTarget); + const defs = schema.$defs || schema.definitions || {}; + const ctx = { + version: version2, + defs, + refs: /* @__PURE__ */ new Map(), + processing: /* @__PURE__ */ new Set(), + rootSchema: schema, + registry: params?.registry ?? globalRegistry + }; + return convertSchema(schema, ctx); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/coerce.js +var coerce_exports = {}; +__export(coerce_exports, { + bigint: () => bigint3, + boolean: () => boolean3, + date: () => date4, + number: () => number3, + string: () => string3 +}); +function string3(params) { + return _coercedString(ZodString, params); +} +function number3(params) { + return _coercedNumber(ZodNumber, params); +} +function boolean3(params) { + return _coercedBoolean(ZodBoolean, params); +} +function bigint3(params) { + return _coercedBigint(ZodBigInt, params); +} +function date4(params) { + return _coercedDate(ZodDate, params); +} + +// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/external.js +config(en_default()); + +// ../../node_modules/.pnpm/@scure+base@2.0.0/node_modules/@scure/base/index.js +function isBytes(a) { + return a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; +} +function isArrayOf(isString, arr) { + if (!Array.isArray(arr)) + return false; + if (arr.length === 0) + return true; + if (isString) { + return arr.every((item) => typeof item === "string"); + } else { + return arr.every((item) => Number.isSafeInteger(item)); + } +} +function astr(label, input) { + if (typeof input !== "string") + throw new Error(`${label}: string expected`); + return true; +} +function anumber(n) { + if (!Number.isSafeInteger(n)) + throw new Error(`invalid integer: ${n}`); +} +function aArr(input) { + if (!Array.isArray(input)) + throw new Error("array expected"); +} +function 
astrArr(label, input) { + if (!isArrayOf(true, input)) + throw new Error(`${label}: array of strings expected`); +} +function anumArr(label, input) { + if (!isArrayOf(false, input)) + throw new Error(`${label}: array of numbers expected`); +} +// @__NO_SIDE_EFFECTS__ +function chain(...args) { + const id = (a) => a; + const wrap = (a, b) => (c) => a(b(c)); + const encode3 = args.map((x) => x.encode).reduceRight(wrap, id); + const decode3 = args.map((x) => x.decode).reduce(wrap, id); + return { encode: encode3, decode: decode3 }; +} +// @__NO_SIDE_EFFECTS__ +function alphabet(letters) { + const lettersA = typeof letters === "string" ? letters.split("") : letters; + const len = lettersA.length; + astrArr("alphabet", lettersA); + const indexes = new Map(lettersA.map((l, i) => [l, i])); + return { + encode: (digits) => { + aArr(digits); + return digits.map((i) => { + if (!Number.isSafeInteger(i) || i < 0 || i >= len) + throw new Error(`alphabet.encode: digit index outside alphabet "${i}". Allowed: ${letters}`); + return lettersA[i]; + }); + }, + decode: (input) => { + aArr(input); + return input.map((letter) => { + astr("alphabet.decode", letter); + const i = indexes.get(letter); + if (i === void 0) + throw new Error(`Unknown letter: "${letter}". Allowed: ${letters}`); + return i; + }); + } + }; +} +// @__NO_SIDE_EFFECTS__ +function join(separator = "") { + astr("join", separator); + return { + encode: (from) => { + astrArr("join.decode", from); + return from.join(separator); + }, + decode: (to) => { + astr("join.decode", to); + return to.split(separator); + } + }; +} +var gcd = (a, b) => b === 0 ? 
a : gcd(b, a % b); +var radix2carry = /* @__NO_SIDE_EFFECTS__ */ (from, to) => from + (to - gcd(from, to)); +var powers = /* @__PURE__ */ (() => { + let res = []; + for (let i = 0; i < 40; i++) + res.push(2 ** i); + return res; +})(); +function convertRadix2(data, from, to, padding) { + aArr(data); + if (from <= 0 || from > 32) + throw new Error(`convertRadix2: wrong from=${from}`); + if (to <= 0 || to > 32) + throw new Error(`convertRadix2: wrong to=${to}`); + if (/* @__PURE__ */ radix2carry(from, to) > 32) { + throw new Error(`convertRadix2: carry overflow from=${from} to=${to} carryBits=${/* @__PURE__ */ radix2carry(from, to)}`); + } + let carry = 0; + let pos = 0; + const max = powers[from]; + const mask = powers[to] - 1; + const res = []; + for (const n of data) { + anumber(n); + if (n >= max) + throw new Error(`convertRadix2: invalid data word=${n} from=${from}`); + carry = carry << from | n; + if (pos + from > 32) + throw new Error(`convertRadix2: carry overflow pos=${pos} from=${from}`); + pos += from; + for (; pos >= to; pos -= to) + res.push((carry >> pos - to & mask) >>> 0); + const pow = powers[pos]; + if (pow === void 0) + throw new Error("invalid carry"); + carry &= pow - 1; + } + carry = carry << to - pos & mask; + if (!padding && pos >= from) + throw new Error("Excess padding"); + if (!padding && carry > 0) + throw new Error(`Non-zero padding: ${carry}`); + if (padding && pos > 0) + res.push(carry >>> 0); + return res; +} +// @__NO_SIDE_EFFECTS__ +function radix2(bits, revPadding = false) { + anumber(bits); + if (bits <= 0 || bits > 32) + throw new Error("radix2: bits should be in (0..32]"); + if (/* @__PURE__ */ radix2carry(8, bits) > 32 || /* @__PURE__ */ radix2carry(bits, 8) > 32) + throw new Error("radix2: carry overflow"); + return { + encode: (bytes) => { + if (!isBytes(bytes)) + throw new Error("radix2.encode input should be Uint8Array"); + return convertRadix2(Array.from(bytes), 8, bits, !revPadding); + }, + decode: (digits) => { + 
anumArr("radix2.decode", digits); + return Uint8Array.from(convertRadix2(digits, bits, 8, revPadding)); + } + }; +} +var base64urlnopad = /* @__PURE__ */ chain(/* @__PURE__ */ radix2(6), /* @__PURE__ */ alphabet("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"), /* @__PURE__ */ join("")); + +// ../../packages/protocol/src/errors.ts +var ProtocolParseError = class extends Error { + code; + constructor(code, message) { + super(message); + this.name = "ProtocolParseError"; + this.code = code; + } +}; + +// ../../packages/protocol/src/base64url.ts +function invalidBase64url(input) { + return new ProtocolParseError( + "INVALID_BASE64URL", + `Invalid base64url input: ${input}` + ); +} +function encodeBase64url(input) { + return base64urlnopad.encode(input); +} +function decodeBase64url(input) { + if (input.length === 0) { + return new Uint8Array(); + } + try { + return base64urlnopad.decode(input); + } catch { + throw invalidBase64url(input); + } +} + +// ../../node_modules/.pnpm/ulid@3.0.2/node_modules/ulid/dist/node/index.js +import crypto2 from "crypto"; +var ENCODING = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"; +var ENCODING_LEN = 32; +var RANDOM_LEN = 16; +var TIME_LEN = 10; +var TIME_MAX = 281474976710655; +var ULIDErrorCode; +(function(ULIDErrorCode2) { + ULIDErrorCode2["Base32IncorrectEncoding"] = "B32_ENC_INVALID"; + ULIDErrorCode2["DecodeTimeInvalidCharacter"] = "DEC_TIME_CHAR"; + ULIDErrorCode2["DecodeTimeValueMalformed"] = "DEC_TIME_MALFORMED"; + ULIDErrorCode2["EncodeTimeNegative"] = "ENC_TIME_NEG"; + ULIDErrorCode2["EncodeTimeSizeExceeded"] = "ENC_TIME_SIZE_EXCEED"; + ULIDErrorCode2["EncodeTimeValueMalformed"] = "ENC_TIME_MALFORMED"; + ULIDErrorCode2["PRNGDetectFailure"] = "PRNG_DETECT"; + ULIDErrorCode2["ULIDInvalid"] = "ULID_INVALID"; + ULIDErrorCode2["Unexpected"] = "UNEXPECTED"; + ULIDErrorCode2["UUIDInvalid"] = "UUID_INVALID"; +})(ULIDErrorCode || (ULIDErrorCode = {})); +var ULIDError = class extends Error { + constructor(errorCode, message) 
{ + super(`${message} (${errorCode})`); + this.name = "ULIDError"; + this.code = errorCode; + } +}; +function decodeTime(id) { + if (id.length !== TIME_LEN + RANDOM_LEN) { + throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, "Malformed ULID"); + } + const time3 = id.substr(0, TIME_LEN).toUpperCase().split("").reverse().reduce((carry, char, index) => { + const encodingIndex = ENCODING.indexOf(char); + if (encodingIndex === -1) { + throw new ULIDError(ULIDErrorCode.DecodeTimeInvalidCharacter, `Time decode error: Invalid character: ${char}`); + } + return carry += encodingIndex * Math.pow(ENCODING_LEN, index); + }, 0); + if (time3 > TIME_MAX) { + throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, `Malformed ULID: timestamp too large: ${time3}`); + } + return time3; +} +function isValid(id) { + return typeof id === "string" && id.length === TIME_LEN + RANDOM_LEN && id.toUpperCase().split("").every((char) => ENCODING.indexOf(char) !== -1); +} + +// ../../packages/protocol/src/ulid.ts +var ULID_PATTERN = /^[0-9A-HJKMNP-TV-Z]{26}$/; +function invalidUlid(value) { + return new ProtocolParseError("INVALID_ULID", `Invalid ULID: ${value}`); +} +function parseUlid(value) { + if (!ULID_PATTERN.test(value) || !isValid(value)) { + throw invalidUlid(value); + } + return { + value, + timestampMs: decodeTime(value) + }; +} + +// ../../packages/protocol/src/did.ts +function invalidDid(value) { + return new ProtocolParseError("INVALID_DID", `Invalid DID: ${value}`); +} +function ensureDidUlid(value) { + try { + parseUlid(value); + } catch { + throw invalidDid(value); + } +} +function parseDid(value) { + const parts = value.split(":"); + if (parts.length !== 4) { + throw invalidDid(value); + } + const [scheme, method, rawKind, rawUlid] = parts; + if (scheme !== "did" || method !== "claw") { + throw invalidDid(value); + } + if (rawKind !== "human" && rawKind !== "agent") { + throw invalidDid(value); + } + ensureDidUlid(rawUlid); + return { + kind: rawKind, + ulid: 
rawUlid + }; +} + +// ../../packages/protocol/src/text.ts +function hasControlChars(value) { + for (const char of value) { + const code = char.charCodeAt(0); + if (code <= 31 || code === 127) { + return true; + } + } + return false; +} + +// ../../packages/protocol/src/ait.ts +var MAX_AGENT_DESCRIPTION_LENGTH = 280; +var AGENT_NAME_REGEX = /^[A-Za-z0-9._ -]{1,64}$/; +var MAX_FRAMEWORK_LENGTH = 32; +var ED25519_PUBLIC_KEY_LENGTH = 32; +function validateAgentName(name) { + return AGENT_NAME_REGEX.test(name); +} +var aitClaimsSchema = external_exports.object({ + iss: external_exports.string().min(1, "iss is required"), + sub: external_exports.string().min(1, "sub is required"), + ownerDid: external_exports.string().min(1, "ownerDid is required"), + name: external_exports.string().refine(validateAgentName, "name contains invalid characters or length"), + framework: external_exports.string().min(1, "framework is required").max(MAX_FRAMEWORK_LENGTH).refine( + (value) => !hasControlChars(value), + "framework contains control characters" + ), + description: external_exports.string().max(MAX_AGENT_DESCRIPTION_LENGTH).refine( + (value) => !hasControlChars(value), + "description contains control characters" + ).optional(), + cnf: external_exports.object({ + jwk: external_exports.object({ + kty: external_exports.literal("OKP"), + crv: external_exports.literal("Ed25519"), + x: external_exports.string().min(1) + }).strict() + }).strict(), + iat: external_exports.number().int().nonnegative(), + nbf: external_exports.number().int().nonnegative(), + exp: external_exports.number().int().nonnegative(), + jti: external_exports.string().min(1) +}).strict().superRefine((claims, ctx) => { + try { + const parsedSub = parseDid(claims.sub); + if (parsedSub.kind !== "agent") { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "sub must be an agent DID", + path: ["sub"] + }); + } + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "sub 
must be a valid DID", + path: ["sub"] + }); + } + try { + const parsedOwnerDid = parseDid(claims.ownerDid); + if (parsedOwnerDid.kind !== "human") { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "ownerDid must be a human DID", + path: ["ownerDid"] + }); + } + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "ownerDid must be a valid DID", + path: ["ownerDid"] + }); + } + try { + const decodedPublicKey = decodeBase64url(claims.cnf.jwk.x); + if (decodedPublicKey.length !== ED25519_PUBLIC_KEY_LENGTH) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "cnf.jwk.x must decode to 32-byte Ed25519 public key", + path: ["cnf", "jwk", "x"] + }); + } + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "cnf.jwk.x must be valid base64url", + path: ["cnf", "jwk", "x"] + }); + } + try { + parseUlid(claims.jti); + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "jti must be a valid ULID", + path: ["jti"] + }); + } + if (claims.exp <= claims.nbf) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "exp must be greater than nbf", + path: ["exp"] + }); + } + if (claims.exp <= claims.iat) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "exp must be greater than iat", + path: ["exp"] + }); + } +}); + +// ../../packages/protocol/src/crl.ts +var crlClaimsSchema = external_exports.object({ + iss: external_exports.string().min(1, "iss is required"), + jti: external_exports.string().min(1, "jti is required"), + iat: external_exports.number().int().nonnegative(), + exp: external_exports.number().int().nonnegative(), + revocations: external_exports.array( + external_exports.object({ + jti: external_exports.string().min(1, "revocation.jti is required"), + agentDid: external_exports.string().min(1, "agentDid is required"), + reason: external_exports.string().max(280).optional(), + 
revokedAt: external_exports.number().int().nonnegative() + }).strict().superRefine((revocation, ctx) => { + if (hasControlChars(revocation.agentDid)) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "revocation.agentDid contains control characters", + path: ["agentDid"] + }); + } + }) + ).min(1, "revocations must include at least one entry") +}).strict().superRefine((claims, ctx) => { + if (claims.exp <= claims.iat) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "exp must be greater than iat", + path: ["exp"] + }); + } + for (const [index, revocation] of claims.revocations.entries()) { + try { + const parsedAgentDid = parseDid(revocation.agentDid); + if (parsedAgentDid.kind !== "agent") { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "revocation.agentDid must refer to an agent DID", + path: ["revocations", index, "agentDid"] + }); + } + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "revocation.agentDid must be a valid DID", + path: ["revocations", index, "agentDid"] + }); + } + try { + parseUlid(revocation.jti); + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "revocation.jti must be a valid ULID", + path: ["revocations", index, "jti"] + }); + } + } +}); + +// ../../packages/protocol/src/endpoints.ts +var AGENT_AUTH_REFRESH_PATH = "/v1/agents/auth/refresh"; + +// ../../packages/protocol/src/http-signing.ts +var CLAW_PROOF_CANONICAL_VERSION = "CLAW-PROOF-V1"; +function canonicalizeRequest(input) { + return [ + CLAW_PROOF_CANONICAL_VERSION, + input.method.toUpperCase(), + input.pathWithQuery, + input.timestamp, + input.nonce, + input.bodyHash + ].join("\n"); +} + +// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/request/constants.js +var GET_MATCH_RESULT = /* @__PURE__ */ Symbol(); + +// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/utils/body.js +var parseBody = async (request, 
options = /* @__PURE__ */ Object.create(null)) => { + const { all = false, dot = false } = options; + const headers = request instanceof HonoRequest ? request.raw.headers : request.headers; + const contentType = headers.get("Content-Type"); + if (contentType?.startsWith("multipart/form-data") || contentType?.startsWith("application/x-www-form-urlencoded")) { + return parseFormData(request, { all, dot }); + } + return {}; +}; +async function parseFormData(request, options) { + const formData = await request.formData(); + if (formData) { + return convertFormDataToBodyData(formData, options); + } + return {}; +} +function convertFormDataToBodyData(formData, options) { + const form = /* @__PURE__ */ Object.create(null); + formData.forEach((value, key) => { + const shouldParseAllValues = options.all || key.endsWith("[]"); + if (!shouldParseAllValues) { + form[key] = value; + } else { + handleParsingAllValues(form, key, value); + } + }); + if (options.dot) { + Object.entries(form).forEach(([key, value]) => { + const shouldParseDotValues = key.includes("."); + if (shouldParseDotValues) { + handleParsingNestedValues(form, key, value); + delete form[key]; + } + }); + } + return form; +} +var handleParsingAllValues = (form, key, value) => { + if (form[key] !== void 0) { + if (Array.isArray(form[key])) { + ; + form[key].push(value); + } else { + form[key] = [form[key], value]; + } + } else { + if (!key.endsWith("[]")) { + form[key] = value; + } else { + form[key] = [value]; + } + } +}; +var handleParsingNestedValues = (form, key, value) => { + let nestedForm = form; + const keys = key.split("."); + keys.forEach((key2, index) => { + if (index === keys.length - 1) { + nestedForm[key2] = value; + } else { + if (!nestedForm[key2] || typeof nestedForm[key2] !== "object" || Array.isArray(nestedForm[key2]) || nestedForm[key2] instanceof File) { + nestedForm[key2] = /* @__PURE__ */ Object.create(null); + } + nestedForm = nestedForm[key2]; + } + }); +}; + +// 
../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/utils/url.js +var tryDecode = (str, decoder) => { + try { + return decoder(str); + } catch { + return str.replace(/(?:%[0-9A-Fa-f]{2})+/g, (match2) => { + try { + return decoder(match2); + } catch { + return match2; + } + }); + } +}; +var _decodeURI = (value) => { + if (!/[%+]/.test(value)) { + return value; + } + if (value.indexOf("+") !== -1) { + value = value.replace(/\+/g, " "); + } + return value.indexOf("%") !== -1 ? tryDecode(value, decodeURIComponent_) : value; +}; +var _getQueryParam = (url2, key, multiple) => { + let encoded; + if (!multiple && key && !/[%+]/.test(key)) { + let keyIndex2 = url2.indexOf("?", 8); + if (keyIndex2 === -1) { + return void 0; + } + if (!url2.startsWith(key, keyIndex2 + 1)) { + keyIndex2 = url2.indexOf(`&${key}`, keyIndex2 + 1); + } + while (keyIndex2 !== -1) { + const trailingKeyCode = url2.charCodeAt(keyIndex2 + key.length + 1); + if (trailingKeyCode === 61) { + const valueIndex = keyIndex2 + key.length + 2; + const endIndex = url2.indexOf("&", valueIndex); + return _decodeURI(url2.slice(valueIndex, endIndex === -1 ? void 0 : endIndex)); + } else if (trailingKeyCode == 38 || isNaN(trailingKeyCode)) { + return ""; + } + keyIndex2 = url2.indexOf(`&${key}`, keyIndex2 + 1); + } + encoded = /[%+]/.test(url2); + if (!encoded) { + return void 0; + } + } + const results = {}; + encoded ??= /[%+]/.test(url2); + let keyIndex = url2.indexOf("?", 8); + while (keyIndex !== -1) { + const nextKeyIndex = url2.indexOf("&", keyIndex + 1); + let valueIndex = url2.indexOf("=", keyIndex); + if (valueIndex > nextKeyIndex && nextKeyIndex !== -1) { + valueIndex = -1; + } + let name = url2.slice( + keyIndex + 1, + valueIndex === -1 ? nextKeyIndex === -1 ? 
void 0 : nextKeyIndex : valueIndex + ); + if (encoded) { + name = _decodeURI(name); + } + keyIndex = nextKeyIndex; + if (name === "") { + continue; + } + let value; + if (valueIndex === -1) { + value = ""; + } else { + value = url2.slice(valueIndex + 1, nextKeyIndex === -1 ? void 0 : nextKeyIndex); + if (encoded) { + value = _decodeURI(value); + } + } + if (multiple) { + if (!(results[name] && Array.isArray(results[name]))) { + results[name] = []; + } + ; + results[name].push(value); + } else { + results[name] ??= value; + } + } + return key ? results[key] : results; +}; +var getQueryParam = _getQueryParam; +var getQueryParams = (url2, key) => { + return _getQueryParam(url2, key, true); +}; +var decodeURIComponent_ = decodeURIComponent; + +// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/request.js +var tryDecodeURIComponent = (str) => tryDecode(str, decodeURIComponent_); +var HonoRequest = class { + /** + * `.raw` can get the raw Request object. + * + * @see {@link https://hono.dev/docs/api/request#raw} + * + * @example + * ```ts + * // For Cloudflare Workers + * app.post('/', async (c) => { + * const metadata = c.req.raw.cf?.hostMetadata? + * ... + * }) + * ``` + */ + raw; + #validatedData; + // Short name of validatedData + #matchResult; + routeIndex = 0; + /** + * `.path` can get the pathname of the request. + * + * @see {@link https://hono.dev/docs/api/request#path} + * + * @example + * ```ts + * app.get('/about/me', (c) => { + * const pathname = c.req.path // `/about/me` + * }) + * ``` + */ + path; + bodyCache = {}; + constructor(request, path = "/", matchResult = [[]]) { + this.raw = request; + this.path = path; + this.#matchResult = matchResult; + this.#validatedData = {}; + } + param(key) { + return key ? this.#getDecodedParam(key) : this.#getAllDecodedParams(); + } + #getDecodedParam(key) { + const paramKey = this.#matchResult[0][this.routeIndex][1][key]; + const param = this.#getParamValue(paramKey); + return param && /\%/.test(param) ? 
tryDecodeURIComponent(param) : param; + } + #getAllDecodedParams() { + const decoded = {}; + const keys = Object.keys(this.#matchResult[0][this.routeIndex][1]); + for (const key of keys) { + const value = this.#getParamValue(this.#matchResult[0][this.routeIndex][1][key]); + if (value !== void 0) { + decoded[key] = /\%/.test(value) ? tryDecodeURIComponent(value) : value; + } + } + return decoded; + } + #getParamValue(paramKey) { + return this.#matchResult[1] ? this.#matchResult[1][paramKey] : paramKey; + } + query(key) { + return getQueryParam(this.url, key); + } + queries(key) { + return getQueryParams(this.url, key); + } + header(name) { + if (name) { + return this.raw.headers.get(name) ?? void 0; + } + const headerData = {}; + this.raw.headers.forEach((value, key) => { + headerData[key] = value; + }); + return headerData; + } + async parseBody(options) { + return this.bodyCache.parsedBody ??= await parseBody(this, options); + } + #cachedBody = (key) => { + const { bodyCache, raw } = this; + const cachedBody = bodyCache[key]; + if (cachedBody) { + return cachedBody; + } + const anyCachedKey = Object.keys(bodyCache)[0]; + if (anyCachedKey) { + return bodyCache[anyCachedKey].then((body) => { + if (anyCachedKey === "json") { + body = JSON.stringify(body); + } + return new Response(body)[key](); + }); + } + return bodyCache[key] = raw[key](); + }; + /** + * `.json()` can parse Request body of type `application/json` + * + * @see {@link https://hono.dev/docs/api/request#json} + * + * @example + * ```ts + * app.post('/entry', async (c) => { + * const body = await c.req.json() + * }) + * ``` + */ + json() { + return this.#cachedBody("text").then((text) => JSON.parse(text)); + } + /** + * `.text()` can parse Request body of type `text/plain` + * + * @see {@link https://hono.dev/docs/api/request#text} + * + * @example + * ```ts + * app.post('/entry', async (c) => { + * const body = await c.req.text() + * }) + * ``` + */ + text() { + return this.#cachedBody("text"); + } + 
/** + * `.arrayBuffer()` parse Request body as an `ArrayBuffer` + * + * @see {@link https://hono.dev/docs/api/request#arraybuffer} + * + * @example + * ```ts + * app.post('/entry', async (c) => { + * const body = await c.req.arrayBuffer() + * }) + * ``` + */ + arrayBuffer() { + return this.#cachedBody("arrayBuffer"); + } + /** + * Parses the request body as a `Blob`. + * @example + * ```ts + * app.post('/entry', async (c) => { + * const body = await c.req.blob(); + * }); + * ``` + * @see https://hono.dev/docs/api/request#blob + */ + blob() { + return this.#cachedBody("blob"); + } + /** + * Parses the request body as `FormData`. + * @example + * ```ts + * app.post('/entry', async (c) => { + * const body = await c.req.formData(); + * }); + * ``` + * @see https://hono.dev/docs/api/request#formdata + */ + formData() { + return this.#cachedBody("formData"); + } + /** + * Adds validated data to the request. + * + * @param target - The target of the validation. + * @param data - The validated data to add. + */ + addValidatedData(target, data) { + this.#validatedData[target] = data; + } + valid(target) { + return this.#validatedData[target]; + } + /** + * `.url()` can get the request url strings. + * + * @see {@link https://hono.dev/docs/api/request#url} + * + * @example + * ```ts + * app.get('/about/me', (c) => { + * const url = c.req.url // `http://localhost:8787/about/me` + * ... + * }) + * ``` + */ + get url() { + return this.raw.url; + } + /** + * `.method()` can get the method name of the request. + * + * @see {@link https://hono.dev/docs/api/request#method} + * + * @example + * ```ts + * app.get('/about/me', (c) => { + * const method = c.req.method // `GET` + * }) + * ``` + */ + get method() { + return this.raw.method; + } + get [GET_MATCH_RESULT]() { + return this.#matchResult; + } + /** + * `.matchedRoutes()` can return a matched route in the handler + * + * @deprecated + * + * Use matchedRoutes helper defined in "hono/route" instead. 
+ * + * @see {@link https://hono.dev/docs/api/request#matchedroutes} + * + * @example + * ```ts + * app.use('*', async function logger(c, next) { + * await next() + * c.req.matchedRoutes.forEach(({ handler, method, path }, i) => { + * const name = handler.name || (handler.length < 2 ? '[handler]' : '[middleware]') + * console.log( + * method, + * ' ', + * path, + * ' '.repeat(Math.max(10 - path.length, 0)), + * name, + * i === c.req.routeIndex ? '<- respond from here' : '' + * ) + * }) + * }) + * ``` + */ + get matchedRoutes() { + return this.#matchResult[0].map(([[, route]]) => route); + } + /** + * `routePath()` can retrieve the path registered within the handler + * + * @deprecated + * + * Use routePath helper defined in "hono/route" instead. + * + * @see {@link https://hono.dev/docs/api/request#routepath} + * + * @example + * ```ts + * app.get('/posts/:id', (c) => { + * return c.json({ path: c.req.routePath }) + * }) + * ``` + */ + get routePath() { + return this.#matchResult[0].map(([[, route]]) => route)[this.routeIndex].path; + } +}; + +// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/router/reg-exp-router/node.js +var regExpMetaChars = new Set(".\\+*[^]$()"); + +// ../../packages/sdk/src/exceptions.ts +var AppError = class extends Error { + code; + status; + details; + expose; + constructor(options) { + super(options.message); + this.name = "AppError"; + this.code = options.code; + this.status = options.status; + this.details = options.details; + this.expose = options.expose ?? 
options.status < 500; + } +}; + +// ../../node_modules/.pnpm/@noble+ed25519@3.0.0/node_modules/@noble/ed25519/index.js +var ed25519_CURVE = { + p: 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffedn, + n: 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3edn, + h: 8n, + a: 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffecn, + d: 0x52036cee2b6ffe738cc740797779e89800700a4d4141d8ab75eb4dca135978a3n, + Gx: 0x216936d3cd6e53fec0a4e231fdd6dc5c692cc7609525a7b2c9562d608f25d51an, + Gy: 0x6666666666666666666666666666666666666666666666666666666666666658n +}; +var { p: P, n: N, Gx, Gy, a: _a2, d: _d, h } = ed25519_CURVE; +var L = 32; +var L2 = 64; +var captureTrace = (...args) => { + if ("captureStackTrace" in Error && typeof Error.captureStackTrace === "function") { + Error.captureStackTrace(...args); + } +}; +var err = (message = "") => { + const e = new Error(message); + captureTrace(e, err); + throw e; +}; +var isBig = (n) => typeof n === "bigint"; +var isStr = (s) => typeof s === "string"; +var isBytes2 = (a) => a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; +var abytes = (value, length, title = "") => { + const bytes = isBytes2(value); + const len = value?.length; + const needsLen = length !== void 0; + if (!bytes || needsLen && len !== length) { + const prefix = title && `"${title}" `; + const ofLen = needsLen ? ` of length ${length}` : ""; + const got = bytes ? 
`length=${len}` : `type=${typeof value}`; + err(prefix + "expected Uint8Array" + ofLen + ", got " + got); + } + return value; +}; +var u8n = (len) => new Uint8Array(len); +var u8fr = (buf) => Uint8Array.from(buf); +var padh = (n, pad) => n.toString(16).padStart(pad, "0"); +var bytesToHex = (b) => Array.from(abytes(b)).map((e) => padh(e, 2)).join(""); +var C = { _0: 48, _9: 57, A: 65, F: 70, a: 97, f: 102 }; +var _ch = (ch) => { + if (ch >= C._0 && ch <= C._9) + return ch - C._0; + if (ch >= C.A && ch <= C.F) + return ch - (C.A - 10); + if (ch >= C.a && ch <= C.f) + return ch - (C.a - 10); + return; +}; +var hexToBytes = (hex3) => { + const e = "hex invalid"; + if (!isStr(hex3)) + return err(e); + const hl = hex3.length; + const al = hl / 2; + if (hl % 2) + return err(e); + const array2 = u8n(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = _ch(hex3.charCodeAt(hi)); + const n2 = _ch(hex3.charCodeAt(hi + 1)); + if (n1 === void 0 || n2 === void 0) + return err(e); + array2[ai] = n1 * 16 + n2; + } + return array2; +}; +var cr = () => globalThis?.crypto; +var subtle = () => cr()?.subtle ?? err("crypto.subtle must be defined, consider polyfill"); +var concatBytes = (...arrs) => { + const r = u8n(arrs.reduce((sum, a) => sum + abytes(a).length, 0)); + let pad = 0; + arrs.forEach((a) => { + r.set(a, pad); + pad += a.length; + }); + return r; +}; +var big = BigInt; +var assertRange = (n, min, max, msg = "bad number: out of range") => isBig(n) && min <= n && n < max ? n : err(msg); +var M = (a, b = P) => { + const r = a % b; + return r >= 0n ? r : b + r; +}; +var modN = (a) => M(a, N); +var invert = (num, md) => { + if (num === 0n || md <= 0n) + err("no inverse n=" + num + " mod=" + md); + let a = M(num, md), b = md, x = 0n, y = 1n, u = 1n, v = 0n; + while (a !== 0n) { + const q = b / a, r = b % a; + const m = x - u * q, n = y - v * q; + b = a, a = r, x = u, y = v, u = m, v = n; + } + return b === 1n ? 
M(x, md) : err("no inverse"); +}; +var apoint = (p) => p instanceof Point ? p : err("Point expected"); +var B256 = 2n ** 256n; +var Point = class _Point { + static BASE; + static ZERO; + X; + Y; + Z; + T; + constructor(X, Y, Z, T) { + const max = B256; + this.X = assertRange(X, 0n, max); + this.Y = assertRange(Y, 0n, max); + this.Z = assertRange(Z, 1n, max); + this.T = assertRange(T, 0n, max); + Object.freeze(this); + } + static CURVE() { + return ed25519_CURVE; + } + static fromAffine(p) { + return new _Point(p.x, p.y, 1n, M(p.x * p.y)); + } + /** RFC8032 5.1.3: Uint8Array to Point. */ + static fromBytes(hex3, zip215 = false) { + const d = _d; + const normed = u8fr(abytes(hex3, L)); + const lastByte = hex3[31]; + normed[31] = lastByte & ~128; + const y = bytesToNumLE(normed); + const max = zip215 ? B256 : P; + assertRange(y, 0n, max); + const y2 = M(y * y); + const u = M(y2 - 1n); + const v = M(d * y2 + 1n); + let { isValid: isValid2, value: x } = uvRatio(u, v); + if (!isValid2) + err("bad point: y not sqrt"); + const isXOdd = (x & 1n) === 1n; + const isLastByteOdd = (lastByte & 128) !== 0; + if (!zip215 && x === 0n && isLastByteOdd) + err("bad point: x==0, isLastByteOdd"); + if (isLastByteOdd !== isXOdd) + x = M(-x); + return new _Point(x, y, 1n, M(x * y)); + } + static fromHex(hex3, zip215) { + return _Point.fromBytes(hexToBytes(hex3), zip215); + } + get x() { + return this.toAffine().x; + } + get y() { + return this.toAffine().y; + } + /** Checks if the point is valid and on-curve. 
*/ + assertValidity() { + const a = _a2; + const d = _d; + const p = this; + if (p.is0()) + return err("bad point: ZERO"); + const { X, Y, Z, T } = p; + const X2 = M(X * X); + const Y2 = M(Y * Y); + const Z2 = M(Z * Z); + const Z4 = M(Z2 * Z2); + const aX2 = M(X2 * a); + const left = M(Z2 * M(aX2 + Y2)); + const right = M(Z4 + M(d * M(X2 * Y2))); + if (left !== right) + return err("bad point: equation left != right (1)"); + const XY = M(X * Y); + const ZT = M(Z * T); + if (XY !== ZT) + return err("bad point: equation left != right (2)"); + return this; + } + /** Equality check: compare points P&Q. */ + equals(other) { + const { X: X1, Y: Y1, Z: Z1 } = this; + const { X: X2, Y: Y2, Z: Z2 } = apoint(other); + const X1Z2 = M(X1 * Z2); + const X2Z1 = M(X2 * Z1); + const Y1Z2 = M(Y1 * Z2); + const Y2Z1 = M(Y2 * Z1); + return X1Z2 === X2Z1 && Y1Z2 === Y2Z1; + } + is0() { + return this.equals(I); + } + /** Flip point over y coordinate. */ + negate() { + return new _Point(M(-this.X), this.Y, this.Z, M(-this.T)); + } + /** Point doubling. Complete formula. Cost: `4M + 4S + 1*a + 6add + 1*2`. */ + double() { + const { X: X1, Y: Y1, Z: Z1 } = this; + const a = _a2; + const A = M(X1 * X1); + const B = M(Y1 * Y1); + const C2 = M(2n * M(Z1 * Z1)); + const D = M(a * A); + const x1y1 = X1 + Y1; + const E = M(M(x1y1 * x1y1) - A - B); + const G2 = D + B; + const F = G2 - C2; + const H = D - B; + const X3 = M(E * F); + const Y3 = M(G2 * H); + const T3 = M(E * H); + const Z3 = M(F * G2); + return new _Point(X3, Y3, Z3, T3); + } + /** Point addition. Complete formula. Cost: `8M + 1*k + 8add + 1*2`. 
*/ + add(other) { + const { X: X1, Y: Y1, Z: Z1, T: T1 } = this; + const { X: X2, Y: Y2, Z: Z2, T: T2 } = apoint(other); + const a = _a2; + const d = _d; + const A = M(X1 * X2); + const B = M(Y1 * Y2); + const C2 = M(T1 * d * T2); + const D = M(Z1 * Z2); + const E = M((X1 + Y1) * (X2 + Y2) - A - B); + const F = M(D - C2); + const G2 = M(D + C2); + const H = M(B - a * A); + const X3 = M(E * F); + const Y3 = M(G2 * H); + const T3 = M(E * H); + const Z3 = M(F * G2); + return new _Point(X3, Y3, Z3, T3); + } + subtract(other) { + return this.add(apoint(other).negate()); + } + /** + * Point-by-scalar multiplication. Scalar must be in range 1 <= n < CURVE.n. + * Uses {@link wNAF} for base point. + * Uses fake point to mitigate side-channel leakage. + * @param n scalar by which point is multiplied + * @param safe safe mode guards against timing attacks; unsafe mode is faster + */ + multiply(n, safe = true) { + if (!safe && (n === 0n || this.is0())) + return I; + assertRange(n, 1n, N); + if (n === 1n) + return this; + if (this.equals(G)) + return wNAF(n).p; + let p = I; + let f = G; + for (let d = this; n > 0n; d = d.double(), n >>= 1n) { + if (n & 1n) + p = p.add(d); + else if (safe) + f = f.add(d); + } + return p; + } + multiplyUnsafe(scalar) { + return this.multiply(scalar, false); + } + /** Convert point to 2d xy affine point. (X, Y, Z) ∋ (x=X/Z, y=Y/Z) */ + toAffine() { + const { X, Y, Z } = this; + if (this.equals(I)) + return { x: 0n, y: 1n }; + const iz = invert(Z, P); + if (M(Z * iz) !== 1n) + err("invalid inverse"); + const x = M(X * iz); + const y = M(Y * iz); + return { x, y }; + } + toBytes() { + const { x, y } = this.assertValidity().toAffine(); + const b = numTo32bLE(y); + b[31] |= x & 1n ? 
128 : 0; + return b; + } + toHex() { + return bytesToHex(this.toBytes()); + } + clearCofactor() { + return this.multiply(big(h), false); + } + isSmallOrder() { + return this.clearCofactor().is0(); + } + isTorsionFree() { + let p = this.multiply(N / 2n, false).double(); + if (N % 2n) + p = p.add(this); + return p.is0(); + } +}; +var G = new Point(Gx, Gy, 1n, M(Gx * Gy)); +var I = new Point(0n, 1n, 1n, 0n); +Point.BASE = G; +Point.ZERO = I; +var numTo32bLE = (num) => hexToBytes(padh(assertRange(num, 0n, B256), L2)).reverse(); +var bytesToNumLE = (b) => big("0x" + bytesToHex(u8fr(abytes(b)).reverse())); +var pow2 = (x, power) => { + let r = x; + while (power-- > 0n) { + r *= r; + r %= P; + } + return r; +}; +var pow_2_252_3 = (x) => { + const x2 = x * x % P; + const b2 = x2 * x % P; + const b4 = pow2(b2, 2n) * b2 % P; + const b5 = pow2(b4, 1n) * x % P; + const b10 = pow2(b5, 5n) * b5 % P; + const b20 = pow2(b10, 10n) * b10 % P; + const b40 = pow2(b20, 20n) * b20 % P; + const b80 = pow2(b40, 40n) * b40 % P; + const b160 = pow2(b80, 80n) * b80 % P; + const b240 = pow2(b160, 80n) * b80 % P; + const b250 = pow2(b240, 10n) * b10 % P; + const pow_p_5_8 = pow2(b250, 2n) * x % P; + return { pow_p_5_8, b2 }; +}; +var RM1 = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0n; +var uvRatio = (u, v) => { + const v3 = M(v * v * v); + const v7 = M(v3 * v3 * v); + const pow = pow_2_252_3(u * v7).pow_p_5_8; + let x = M(u * v3 * pow); + const vx2 = M(v * x * x); + const root1 = x; + const root2 = M(x * RM1); + const useRoot1 = vx2 === u; + const useRoot2 = vx2 === M(-u); + const noRoot = vx2 === M(-u * RM1); + if (useRoot1) + x = root1; + if (useRoot2 || noRoot) + x = root2; + if ((M(x) & 1n) === 1n) + x = M(-x); + return { isValid: useRoot1 || useRoot2, value: x }; +}; +var modL_LE = (hash2) => modN(bytesToNumLE(hash2)); +var sha512a = (...m) => hashes.sha512Async(concatBytes(...m)); +var hash2extK = (hashed) => { + const head = hashed.slice(0, L); + head[0] &= 248; + 
head[31] &= 127; + head[31] |= 64; + const prefix = hashed.slice(L, L2); + const scalar = modL_LE(head); + const point = G.multiply(scalar); + const pointBytes = point.toBytes(); + return { head, prefix, scalar, point, pointBytes }; +}; +var getExtendedPublicKeyAsync = (secretKey) => sha512a(abytes(secretKey, L)).then(hash2extK); +var hashFinishA = (res) => sha512a(res.hashable).then(res.finish); +var _sign = (e, rBytes, msg) => { + const { pointBytes: P2, scalar: s } = e; + const r = modL_LE(rBytes); + const R = G.multiply(r).toBytes(); + const hashable = concatBytes(R, P2, msg); + const finish = (hashed) => { + const S = modN(r + modL_LE(hashed) * s); + return abytes(concatBytes(R, numTo32bLE(S)), L2); + }; + return { hashable, finish }; +}; +var signAsync = async (message, secretKey) => { + const m = abytes(message); + const e = await getExtendedPublicKeyAsync(secretKey); + const rBytes = await sha512a(e.prefix, m); + return hashFinishA(_sign(e, rBytes, m)); +}; +var hashes = { + sha512Async: async (message) => { + const s = subtle(); + const m = concatBytes(message); + return u8n(await s.digest("SHA-512", m.buffer)); + }, + sha512: void 0 +}; +var W = 8; +var scalarBits = 256; +var pwindows = Math.ceil(scalarBits / W) + 1; +var pwindowSize = 2 ** (W - 1); +var precompute = () => { + const points = []; + let p = G; + let b = p; + for (let w = 0; w < pwindows; w++) { + b = p; + points.push(b); + for (let i = 1; i < pwindowSize; i++) { + b = b.add(p); + points.push(b); + } + p = b.double(); + } + return points; +}; +var Gpows = void 0; +var ctneg = (cnd, p) => { + const n = p.negate(); + return cnd ? 
n : p; +}; +var wNAF = (n) => { + const comp = Gpows || (Gpows = precompute()); + let p = I; + let f = G; + const pow_2_w = 2 ** W; + const maxNum = pow_2_w; + const mask = big(pow_2_w - 1); + const shiftBy = big(W); + for (let w = 0; w < pwindows; w++) { + let wbits = Number(n & mask); + n >>= shiftBy; + if (wbits > pwindowSize) { + wbits -= maxNum; + n += 1n; + } + const off = w * pwindowSize; + const offF = off; + const offP = off + Math.abs(wbits) - 1; + const isEven = w % 2 !== 0; + const isNeg = wbits < 0; + if (wbits === 0) { + f = f.add(ctneg(isEven, comp[offF])); + } else { + p = p.add(ctneg(isNeg, comp[offP])); + } + } + if (n !== 0n) + err("invalid wnaf"); + return { p, f }; +}; + +// ../../packages/sdk/src/crypto/ed25519.ts +async function signEd25519(message, secretKey) { + return signAsync(message, secretKey); +} +function encodeEd25519SignatureBase64url(signature) { + return encodeBase64url(signature); +} + +// ../../packages/sdk/src/http/constants.ts +var X_CLAW_TIMESTAMP = "X-Claw-Timestamp"; +var X_CLAW_NONCE = "X-Claw-Nonce"; +var X_CLAW_BODY_SHA256 = "X-Claw-Body-SHA256"; +var X_CLAW_PROOF = "X-Claw-Proof"; + +// ../../packages/sdk/src/http/utils.ts +var textEncoder = new TextEncoder(); +var ED25519_SECRET_KEY_LENGTH = 32; +function getCrypto() { + return globalThis.crypto; +} +function ensureString(value, label) { + if (typeof value !== "string" || value.trim() === "") { + throw new AppError({ + code: "HTTP_SIGNATURE_INVALID_INPUT", + message: "Input must be a non-empty string", + status: 400, + details: { + field: label + } + }); + } + return value; +} +function ensureBodyBytes(body) { + if (body === void 0) { + return new Uint8Array(); + } + if (!(body instanceof Uint8Array)) { + throw new AppError({ + code: "HTTP_SIGNATURE_INVALID_INPUT", + message: "body must be a Uint8Array when provided", + status: 400, + details: { + field: "body" + } + }); + } + return body; +} +function ensureSecretKey(key) { + if (!(key instanceof Uint8Array) || 
key.length !== ED25519_SECRET_KEY_LENGTH) { + throw new AppError({ + code: "HTTP_SIGNATURE_MISSING_SECRET", + message: "Secret key is required to sign HTTP requests", + status: 500, + details: { + keyLength: key instanceof Uint8Array ? key.length : null, + expectedKeyLength: ED25519_SECRET_KEY_LENGTH + } + }); + } +} +async function hashBodySha256Base64url(body) { + const cryptoObject = getCrypto(); + if (typeof cryptoObject !== "object" || typeof cryptoObject?.subtle !== "object" || typeof cryptoObject?.subtle?.digest !== "function") { + throw new AppError({ + code: "HTTP_SIGNATURE_CRYPTO_UNAVAILABLE", + message: "Web Crypto API is required for HTTP signing", + status: 500, + details: { + runtime: typeof cryptoObject + } + }); + } + const digest = await cryptoObject.subtle.digest("SHA-256", body); + return encodeBase64url(new Uint8Array(digest)); +} + +// ../../packages/sdk/src/http/sign.ts +async function signHttpRequest(input) { + ensureSecretKey(input.secretKey); + const method = ensureString(input.method, "method"); + const pathWithQuery = ensureString(input.pathWithQuery, "pathWithQuery"); + const timestamp = ensureString(input.timestamp, "timestamp"); + const nonce = ensureString(input.nonce, "nonce"); + const body = ensureBodyBytes(input.body); + const bodyHash = await hashBodySha256Base64url(body); + const canonicalRequest = canonicalizeRequest({ + method, + pathWithQuery, + timestamp, + nonce, + bodyHash + }); + const signature = await signEd25519( + textEncoder.encode(canonicalRequest), + input.secretKey + ); + const proof = encodeEd25519SignatureBase64url(signature); + return { + canonicalRequest, + proof, + headers: { + [X_CLAW_TIMESTAMP]: timestamp, + [X_CLAW_NONCE]: nonce, + [X_CLAW_BODY_SHA256]: bodyHash, + [X_CLAW_PROOF]: proof + } + }; +} + +// ../../packages/sdk/src/agent-auth-client.ts +var refreshSingleFlights = /* @__PURE__ */ new Map(); +var isRecord = (value) => { + return typeof value === "object" && value !== null; +}; +var 
parseNonEmptyString = (value) => { + if (typeof value !== "string") { + return ""; + } + return value.trim(); +}; +var parseJsonResponse = async (response) => { + try { + return await response.json(); + } catch { + return void 0; + } +}; +var toPathWithQuery = (requestUrl) => { + const parsed = new URL(requestUrl); + return `${parsed.pathname}${parsed.search}`; +}; +var parseRegistryErrorEnvelope = (payload) => { + if (!isRecord(payload)) { + return void 0; + } + const errorValue = payload.error; + if (!isRecord(errorValue)) { + return void 0; + } + return { + error: { + code: parseNonEmptyString(errorValue.code) || void 0, + message: parseNonEmptyString(errorValue.message) || void 0 + } + }; +}; +var parseAgentAuthBundle = (payload) => { + if (!isRecord(payload)) { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID_RESPONSE", + message: "Registry returned an invalid refresh response payload", + status: 502, + expose: true + }); + } + const source = isRecord(payload.agentAuth) ? 
payload.agentAuth : payload; + const tokenType = source.tokenType; + const accessToken = source.accessToken; + const accessExpiresAt = source.accessExpiresAt; + const refreshToken = source.refreshToken; + const refreshExpiresAt = source.refreshExpiresAt; + if (tokenType !== "Bearer" || typeof accessToken !== "string" || typeof accessExpiresAt !== "string" || typeof refreshToken !== "string" || typeof refreshExpiresAt !== "string") { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID_RESPONSE", + message: "Registry returned an invalid refresh response payload", + status: 502, + expose: true + }); + } + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt + }; +}; +var toRefreshHttpError = (status, responseBody) => { + const parsedEnvelope = parseRegistryErrorEnvelope(responseBody); + const registryCode = parsedEnvelope?.error?.code; + const registryMessage = parsedEnvelope?.error?.message; + if (status === 400) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_INVALID", + message: registryMessage ?? "Refresh request is invalid (400).", + status, + expose: true, + details: { + registryCode, + registryMessage + } + }); + } + if (status === 401) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_UNAUTHORIZED", + message: registryMessage ?? "Refresh rejected (401). Agent credentials are invalid, revoked, or expired.", + status, + expose: true, + details: { + registryCode, + registryMessage + } + }); + } + if (status === 409) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_CONFLICT", + message: registryMessage ?? "Refresh conflict (409). Retry request.", + status, + expose: true, + details: { + registryCode, + registryMessage + } + }); + } + if (status >= 500) { + return new AppError({ + code: "AGENT_AUTH_REFRESH_SERVER_ERROR", + message: `Registry server error (${status}). 
Try again later.`, + status: 503, + expose: true, + details: { + status + } + }); + } + return new AppError({ + code: "AGENT_AUTH_REFRESH_FAILED", + message: registryMessage ?? `Registry request failed during refresh (${status}).`, + status, + expose: true, + details: { + registryCode, + registryMessage, + status + } + }); +}; +var toRegistryAgentAuthRefreshRequestUrl = (registryUrl) => { + const normalizedBaseUrl = registryUrl.endsWith("/") ? registryUrl : `${registryUrl}/`; + return new URL( + AGENT_AUTH_REFRESH_PATH.slice(1), + normalizedBaseUrl + ).toString(); +}; +async function runRefreshSingleFlight(options) { + const existing = refreshSingleFlights.get(options.key); + if (existing) { + return existing; + } + const inFlight = options.run().finally(() => { + if (refreshSingleFlights.get(options.key) === inFlight) { + refreshSingleFlights.delete(options.key); + } + }); + refreshSingleFlights.set(options.key, inFlight); + return inFlight; +} +async function refreshAgentAuthWithClawProof(input) { + const fetchImpl = input.fetchImpl ?? globalThis.fetch; + if (typeof fetchImpl !== "function") { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_NETWORK", + message: "fetch implementation is required", + status: 500, + expose: true + }); + } + const refreshUrl = toRegistryAgentAuthRefreshRequestUrl(input.registryUrl); + const refreshBody = JSON.stringify({ + refreshToken: input.refreshToken + }); + const nowMs = input.nowMs?.() ?? 
Date.now(); + const timestamp = String(Math.floor(nowMs / 1e3)); + const nonce = encodeBase64url(crypto.getRandomValues(new Uint8Array(16))); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(refreshUrl), + timestamp, + nonce, + body: new TextEncoder().encode(refreshBody), + secretKey: input.secretKey + }); + let response; + try { + response = await fetchImpl(refreshUrl, { + method: "POST", + headers: { + authorization: `Claw ${input.ait}`, + "content-type": "application/json", + ...signed.headers + }, + body: refreshBody + }); + } catch { + throw new AppError({ + code: "AGENT_AUTH_REFRESH_NETWORK", + message: "Unable to connect to the registry. Check network access and registryUrl.", + status: 503, + expose: true + }); + } + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw toRefreshHttpError(response.status, responseBody); + } + return parseAgentAuthBundle(responseBody); +} +function isRetryableAuthExpiryError(error48) { + if (!(error48 instanceof AppError)) { + return false; + } + return error48.status === 401; +} +async function executeWithAgentAuthRefreshRetry(input) { + const shouldRetry = input.shouldRetry ?? 
isRetryableAuthExpiryError; + const currentAuth = await input.getAuth(); + try { + return await input.perform(currentAuth); + } catch (error48) { + if (!shouldRetry(error48)) { + throw error48; + } + const refreshedAuth = await runRefreshSingleFlight({ + key: input.key, + run: async () => { + const latestAuth = await input.getAuth(); + const nextAuth = await input.refreshAuth(latestAuth); + await input.persistAuth(nextAuth); + return nextAuth; + } + }); + return input.perform(refreshedAuth); + } +} + +// ../../packages/sdk/src/runtime-environment.ts +var runtimeEnvironmentValues = [ + "development", + "production", + "test" +]; + +// ../../packages/sdk/src/config.ts +var environmentSchema = external_exports.enum(runtimeEnvironmentValues); +var registrySigningKeyStatusSchema = external_exports.enum(["active", "revoked"]); +var ED25519_PUBLIC_KEY_LENGTH2 = 32; +var registrySigningPublicKeySchema = external_exports.object({ + kid: external_exports.string().min(1), + alg: external_exports.literal("EdDSA"), + crv: external_exports.literal("Ed25519"), + x: external_exports.string().min(1), + status: registrySigningKeyStatusSchema +}).superRefine((value, ctx) => { + let decodedPublicKey; + try { + decodedPublicKey = decodeBase64url(value.x); + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + path: ["x"], + message: "x must be valid base64url" + }); + return; + } + if (decodedPublicKey.length !== ED25519_PUBLIC_KEY_LENGTH2) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + path: ["x"], + message: "x must decode to 32-byte Ed25519 public key" + }); + } +}); +var registrySigningKeysSchema = external_exports.array(registrySigningPublicKeySchema).superRefine((keys, ctx) => { + const seenKids = /* @__PURE__ */ new Set(); + for (const [index, key] of keys.entries()) { + if (seenKids.has(key.kid)) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + path: [index, "kid"], + message: `Duplicate kid "${key.kid}" is not 
allowed` + }); + } else { + seenKids.add(key.kid); + } + } +}); +var registrySigningKeysEnvSchema = external_exports.string().min(1).transform((value, ctx) => { + let parsed; + try { + parsed = JSON.parse(value); + } catch { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: "REGISTRY_SIGNING_KEYS must be valid JSON" + }); + return external_exports.NEVER; + } + const keys = registrySigningKeysSchema.safeParse(parsed); + if (!keys.success) { + for (const issue2 of keys.error.issues) { + ctx.addIssue({ + code: external_exports.ZodIssueCode.custom, + message: issue2.message, + path: issue2.path + }); + } + return external_exports.NEVER; + } + return keys.data; +}); +var registryConfigSchema = external_exports.object({ + ENVIRONMENT: environmentSchema, + APP_VERSION: external_exports.string().min(1).optional(), + BOOTSTRAP_SECRET: external_exports.string().min(1).optional(), + REGISTRY_SIGNING_KEY: external_exports.string().min(1).optional(), + REGISTRY_SIGNING_KEYS: registrySigningKeysEnvSchema.optional() +}); + +// ../../packages/sdk/src/crl/cache.ts +var DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1e3; +var DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1e3; + +// ../../packages/sdk/src/security/nonce-cache.ts +var DEFAULT_NONCE_TTL_MS = 5 * 60 * 1e3; + +// src/transforms/peers-config.ts +import { chmod, mkdir, readFile, writeFile } from "fs/promises"; +import { homedir } from "os"; +import { dirname, join as join2 } from "path"; +var CLAWDENTITY_DIR = ".clawdentity"; +var PEERS_FILENAME = "peers.json"; +var PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; +function isRecord2(value) { + return typeof value === "object" && value !== null; +} +function getErrorCode(error48) { + if (!isRecord2(error48)) { + return void 0; + } + return typeof error48.code === "string" ? 
error48.code : void 0; +} +function parseNonEmptyString2(value, label) { + if (typeof value !== "string") { + throw new Error(`${label} must be a string`); + } + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error(`${label} must not be empty`); + } + return trimmed; +} +function parsePeerAlias(value) { + const alias = parseNonEmptyString2(value, "peer alias"); + if (alias.length > 128) { + throw new Error("peer alias must be at most 128 characters"); + } + if (!PEER_ALIAS_PATTERN.test(alias)) { + throw new Error( + "peer alias must use only letters, numbers, dot, underscore, or hyphen" + ); + } + return alias; +} +function parseDid2(value) { + const did = parseNonEmptyString2(value, "did"); + if (!did.startsWith("did:")) { + throw new Error("did must start with 'did:'"); + } + return did; +} +function parseProxyUrl(value) { + const candidate = parseNonEmptyString2(value, "proxyUrl"); + try { + return new URL(candidate).toString(); + } catch { + throw new Error("proxyUrl must be a valid URL"); + } +} +function parsePeerName(value) { + if (value === void 0) { + return void 0; + } + return parseNonEmptyString2(value, "name"); +} +function parsePeerEntry(value) { + if (!isRecord2(value)) { + throw new Error("peer entry must be an object"); + } + const did = parseDid2(value.did); + const proxyUrl = parseProxyUrl(value.proxyUrl); + const name = parsePeerName(value.name); + if (name === void 0) { + return { did, proxyUrl }; + } + return { did, proxyUrl, name }; +} +function parsePeersConfig(value, source) { + if (!isRecord2(value)) { + throw new Error( + `Peer config validation failed at ${source}: root must be an object` + ); + } + const peersRaw = value.peers; + if (peersRaw === void 0) { + return { peers: {} }; + } + if (!isRecord2(peersRaw)) { + throw new Error( + `Peer config validation failed at ${source}: peers must be an object` + ); + } + const peers = {}; + for (const [alias, peerValue] of Object.entries(peersRaw)) { + const 
normalizedAlias = parsePeerAlias(alias); + try { + peers[normalizedAlias] = parsePeerEntry(peerValue); + } catch (error48) { + const reason = error48 instanceof Error ? error48.message : String(error48); + throw new Error( + `Peer config validation failed at ${source}: peers.${normalizedAlias}: ${reason}` + ); + } + } + return { peers }; +} +function resolvePeersConfigPath(options = {}) { + if (typeof options.configPath === "string" && options.configPath.trim().length > 0) { + return options.configPath.trim(); + } + if (typeof options.configDir === "string" && options.configDir.trim().length > 0) { + return join2(options.configDir.trim(), PEERS_FILENAME); + } + const home = typeof options.homeDir === "string" && options.homeDir.trim().length > 0 ? options.homeDir.trim() : homedir(); + return join2(home, CLAWDENTITY_DIR, PEERS_FILENAME); +} +async function loadPeersConfig(options = {}) { + const configPath = resolvePeersConfigPath(options); + let rawJson; + try { + rawJson = await readFile(configPath, "utf8"); + } catch (error48) { + if (getErrorCode(error48) === "ENOENT") { + return { peers: {} }; + } + throw error48; + } + let parsed; + try { + parsed = JSON.parse(rawJson); + } catch { + throw new Error(`Peer config at ${configPath} is not valid JSON`); + } + return parsePeersConfig(parsed, configPath); +} + +// src/transforms/registry-auth.ts +import { + chmod as chmod2, + open, + readFile as readFile2, + rename, + stat, + unlink, + writeFile as writeFile2 +} from "fs/promises"; +import { join as join3 } from "path"; +var CLAWDENTITY_DIR2 = ".clawdentity"; +var AGENTS_DIR = "agents"; +var REGISTRY_AUTH_FILENAME = "registry-auth.json"; +var FILE_MODE = 384; +var LOCK_RETRY_DELAY_MS = 50; +var LOCK_MAX_ATTEMPTS = 200; +var STALE_LOCK_AGE_MS = 3e4; +function isRecord3(value) { + return typeof value === "object" && value !== null; +} +function getErrorCode2(error48) { + if (!isRecord3(error48)) { + return void 0; + } + return typeof error48.code === "string" ? 
error48.code : void 0; +} +function sleep(delayMs) { + return new Promise((resolve) => { + setTimeout(resolve, delayMs); + }); +} +function parseAgentAuthBundle2(payload, options) { + if (!isRecord3(payload)) { + throw new Error( + `Agent "${options.agentName}" has invalid ${REGISTRY_AUTH_FILENAME}` + ); + } + const tokenType = payload.tokenType; + const accessToken = payload.accessToken; + const accessExpiresAt = payload.accessExpiresAt; + const refreshToken = payload.refreshToken; + const refreshExpiresAt = payload.refreshExpiresAt; + if (tokenType !== "Bearer" || typeof accessToken !== "string" || typeof accessExpiresAt !== "string" || typeof refreshToken !== "string" || typeof refreshExpiresAt !== "string") { + throw new Error( + `Agent "${options.agentName}" has invalid ${REGISTRY_AUTH_FILENAME}` + ); + } + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt + }; +} +function resolveAgentRegistryAuthPath(input) { + return join3( + input.homeDir, + CLAWDENTITY_DIR2, + AGENTS_DIR, + input.agentName, + REGISTRY_AUTH_FILENAME + ); +} +async function readAgentRegistryAuth(input) { + const registryAuthPath = resolveAgentRegistryAuthPath(input); + let rawRegistryAuth; + try { + rawRegistryAuth = await readFile2(registryAuthPath, "utf8"); + } catch (error48) { + if (getErrorCode2(error48) === "ENOENT") { + throw new Error( + `Agent "${input.agentName}" has no ${REGISTRY_AUTH_FILENAME}. 
Recreate agent identity or re-run auth bootstrap.` + ); + } + throw error48; + } + let parsed; + try { + parsed = JSON.parse(rawRegistryAuth); + } catch { + throw new Error( + `Agent "${input.agentName}" has invalid ${REGISTRY_AUTH_FILENAME} (must be valid JSON)` + ); + } + return parseAgentAuthBundle2(parsed, { agentName: input.agentName }); +} +async function writeAgentRegistryAuthAtomic(input) { + const registryAuthPath = resolveAgentRegistryAuthPath(input); + const tempPath = `${registryAuthPath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + const content = `${JSON.stringify(input.auth, null, 2)} +`; + await writeFile2(tempPath, content, "utf8"); + await chmod2(tempPath, FILE_MODE); + try { + await rename(tempPath, registryAuthPath); + await chmod2(registryAuthPath, FILE_MODE); + } catch (error48) { + try { + await unlink(tempPath); + } catch { + } + throw error48; + } +} +async function withAgentRegistryAuthLock(input) { + const registryAuthPath = resolveAgentRegistryAuthPath(input); + const lockPath = `${registryAuthPath}.lock`; + let lockAcquired = false; + for (let attempt = 0; attempt < LOCK_MAX_ATTEMPTS; attempt += 1) { + try { + const lockHandle = await open(lockPath, "wx", FILE_MODE); + await lockHandle.writeFile(`${Date.now()}`); + await lockHandle.close(); + lockAcquired = true; + break; + } catch (error48) { + if (getErrorCode2(error48) !== "EEXIST") { + throw error48; + } + try { + const lockStat = await stat(lockPath); + if (Date.now() - lockStat.mtimeMs > STALE_LOCK_AGE_MS) { + await unlink(lockPath); + continue; + } + } catch (statError) { + if (getErrorCode2(statError) !== "ENOENT") { + throw statError; + } + } + await sleep(LOCK_RETRY_DELAY_MS); + } + } + if (!lockAcquired) { + throw new Error( + `Timed out waiting for ${REGISTRY_AUTH_FILENAME} lock for agent "${input.agentName}"` + ); + } + try { + return await input.operation(); + } finally { + try { + await unlink(lockPath); + } catch { + } + } +} + +// 
src/transforms/relay-to-peer.ts +var CLAWDENTITY_DIR3 = ".clawdentity"; +var AGENTS_DIR2 = "agents"; +var SECRET_KEY_FILENAME = "secret.key"; +var AIT_FILENAME = "ait.jwt"; +var IDENTITY_FILENAME = "identity.json"; +var AGENT_NAME_ENV = "CLAWDENTITY_AGENT_NAME"; +var OPENCLAW_AGENT_NAME_FILENAME = "openclaw-agent-name"; +var NONCE_SIZE = 16; +var AGENT_ACCESS_HEADER = "x-claw-agent-access"; +var textEncoder2 = new TextEncoder(); +function isRecord4(value) { + return typeof value === "object" && value !== null; +} +function getErrorCode3(error48) { + if (!isRecord4(error48)) { + return void 0; + } + return typeof error48.code === "string" ? error48.code : void 0; +} +function parseRequiredString(value) { + if (typeof value !== "string") { + throw new Error("Input value must be a string"); + } + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error("Input value must not be empty"); + } + return trimmed; +} +function parseIdentityRegistryUrl(payload, options) { + if (!isRecord4(payload) || typeof payload.registryUrl !== "string") { + throw new Error( + `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)` + ); + } + const registryUrl = payload.registryUrl.trim(); + if (registryUrl.length === 0) { + throw new Error( + `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)` + ); + } + return registryUrl; +} +function resolvePathWithQuery(url2) { + return `${url2.pathname}${url2.search}`; +} +function resolveRelayFetch(fetchImpl) { + const resolved = fetchImpl ?? 
globalThis.fetch; + if (typeof resolved !== "function") { + throw new Error("fetch implementation is required"); + } + return resolved; +} +async function tryReadTrimmedFile(filePath, _label) { + let raw; + try { + raw = await readFile3(filePath, "utf8"); + } catch (error48) { + if (getErrorCode3(error48) === "ENOENT") { + return void 0; + } + throw error48; + } + const trimmed = raw.trim(); + if (trimmed.length === 0) { + throw new Error("Required file content is empty"); + } + return trimmed; +} +async function readTrimmedFile(filePath, label) { + const content = await tryReadTrimmedFile(filePath, label); + if (content === void 0) { + throw new Error("Required file is missing"); + } + return content; +} +async function resolveAgentName(input) { + const overridden = input.overrideName?.trim(); + if (overridden) { + return overridden; + } + const envAgentName = process.env[AGENT_NAME_ENV]?.trim(); + if (envAgentName) { + return envAgentName; + } + const selectedAgentPath = join4( + input.homeDir, + CLAWDENTITY_DIR3, + OPENCLAW_AGENT_NAME_FILENAME + ); + const selectedAgentName = await tryReadTrimmedFile( + selectedAgentPath, + OPENCLAW_AGENT_NAME_FILENAME + ); + if (selectedAgentName) { + return selectedAgentName; + } + const agentsDirectory = join4(input.homeDir, CLAWDENTITY_DIR3, AGENTS_DIR2); + let entries; + try { + entries = await readdir(agentsDirectory, { + withFileTypes: true + }); + } catch (error48) { + if (getErrorCode3(error48) === "ENOENT") { + throw new Error("No local agents found. Select one before relay setup."); + } + throw error48; + } + const agentNames = entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name).sort(); + if (agentNames.length === 1) { + return agentNames[0]; + } + if (agentNames.length === 0) { + throw new Error("No local agents found. Select one before relay setup."); + } + throw new Error( + "Multiple local agents found. Configure a selected relay agent first." 
+ ); +} +async function readAgentCredentials(input) { + const agentDir = join4( + input.homeDir, + CLAWDENTITY_DIR3, + AGENTS_DIR2, + input.agentName + ); + const secretPath = join4(agentDir, SECRET_KEY_FILENAME); + const aitPath = join4(agentDir, AIT_FILENAME); + const identityPath = join4(agentDir, IDENTITY_FILENAME); + const [encodedSecret, ait, rawIdentity] = await Promise.all([ + readTrimmedFile(secretPath, SECRET_KEY_FILENAME), + readTrimmedFile(aitPath, AIT_FILENAME), + readTrimmedFile(identityPath, IDENTITY_FILENAME) + ]); + let secretKey; + try { + secretKey = decodeBase64url(encodedSecret); + } catch { + throw new Error("Agent secret key is invalid"); + } + let parsedIdentity; + try { + parsedIdentity = JSON.parse(rawIdentity); + } catch { + throw new Error( + `Agent "${input.agentName}" has invalid ${IDENTITY_FILENAME} (must be valid JSON)` + ); + } + const registryUrl = parseIdentityRegistryUrl(parsedIdentity, { + agentName: input.agentName + }); + return { + ait, + secretKey, + registryUrl + }; +} +function removePeerField(payload) { + const outbound = {}; + for (const [key, value] of Object.entries(payload)) { + if (key !== "peer") { + outbound[key] = value; + } + } + return outbound; +} +function isRetryableRelayAuthError(error48) { + return error48 instanceof AppError && error48.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && error48.status === 401; +} +function buildRefreshSingleFlightKey(input) { + return `${input.homeDir}:${input.agentName}`; +} +async function relayPayloadToPeer(payload, options = {}) { + if (!isRecord4(payload)) { + return payload; + } + const peerAliasValue = payload.peer; + if (peerAliasValue === void 0) { + return payload; + } + const peerAlias = parseRequiredString(peerAliasValue); + const peersConfig = await loadPeersConfig(options); + const peerEntry = peersConfig.peers[peerAlias]; + if (!peerEntry) { + throw new Error("Peer alias is not configured"); + } + const home = typeof options.homeDir === "string" && 
options.homeDir.trim().length > 0 ? options.homeDir.trim() : homedir2(); + const agentName = await resolveAgentName({ + overrideName: options.agentName, + homeDir: home + }); + const { ait, secretKey, registryUrl } = await readAgentCredentials({ + agentName, + homeDir: home + }); + const outboundPayload = removePeerField(payload); + const body = JSON.stringify(outboundPayload); + const peerUrl = new URL(peerEntry.proxyUrl); + const fetchImpl = resolveRelayFetch(options.fetchImpl); + const refreshSingleFlightKey = buildRefreshSingleFlightKey({ + homeDir: home, + agentName + }); + const sendRelayRequest = async (auth) => { + const unixSeconds = Math.floor( + (options.clock ?? Date.now)() / 1e3 + ).toString(); + const nonce = encodeBase64url( + (options.randomBytesImpl ?? randomBytes)(NONCE_SIZE) + ); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: resolvePathWithQuery(peerUrl), + timestamp: unixSeconds, + nonce, + body: textEncoder2.encode(body), + secretKey + }); + return fetchImpl(peerUrl.toString(), { + method: "POST", + headers: { + Authorization: `Claw ${ait}`, + "Content-Type": "application/json", + [AGENT_ACCESS_HEADER]: auth.accessToken, + ...signed.headers + }, + body + }); + }; + const performRelay = async (auth) => { + const response = await sendRelayRequest(auth); + if (!response.ok) { + if (response.status === 401) { + throw new AppError({ + code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", + message: "Peer relay rejected agent auth credentials", + status: 401, + expose: true + }); + } + throw new Error("Peer relay request failed"); + } + return null; + }; + const result = await executeWithAgentAuthRefreshRetry({ + key: refreshSingleFlightKey, + shouldRetry: isRetryableRelayAuthError, + getAuth: async () => readAgentRegistryAuth({ + homeDir: home, + agentName + }), + persistAuth: async () => { + }, + refreshAuth: async (currentAuth) => withAgentRegistryAuthLock({ + homeDir: home, + agentName, + operation: async () => { + const 
latestAuth = await readAgentRegistryAuth({ + homeDir: home, + agentName + }); + if (latestAuth.refreshToken !== currentAuth.refreshToken) { + return latestAuth; + } + let refreshedAuth; + try { + refreshedAuth = await refreshAgentAuthWithClawProof({ + registryUrl, + ait, + secretKey, + refreshToken: latestAuth.refreshToken, + fetchImpl + }); + } catch (error48) { + const afterFailureAuth = await readAgentRegistryAuth({ + homeDir: home, + agentName + }); + if (afterFailureAuth.refreshToken !== latestAuth.refreshToken) { + return afterFailureAuth; + } + throw error48; + } + await writeAgentRegistryAuthAtomic({ + homeDir: home, + agentName, + auth: refreshedAuth + }); + return refreshedAuth; + } + }), + perform: performRelay + }); + return result; +} +async function relayToPeer(ctx) { + return relayPayloadToPeer(ctx?.payload); +} +export { + relayToPeer as default, + relayPayloadToPeer +}; +/*! Bundled license information: + +@scure/base/index.js: + (*! scure-base - MIT License (c) 2022 Paul Miller (paulmillr.com) *) + +@noble/ed25519/index.js: + (*! 
noble-ed25519 - MIT License (c) 2019 Paul Miller (paulmillr.com) *) +*/ From f51afee6e7b7a2609779d3f476807b8bcd58a6bc Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 08:23:08 +0530 Subject: [PATCH 068/190] feat(issue-83): add relay connector runtime and autostart workflow --- README.md | 27 + apps/cli/AGENTS.md | 3 +- apps/cli/package.json | 2 + apps/cli/scripts/AGENTS.md | 4 + apps/cli/scripts/openclaw-relay-docker-e2e.sh | 294 + .../openclaw-skill/dist/relay-to-peer.mjs | 16099 +--------------- apps/cli/src/AGENTS.md | 5 + apps/cli/src/commands/AGENTS.md | 10 + apps/cli/src/commands/connector.test.ts | 344 + apps/cli/src/commands/connector.ts | 1080 ++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/openclaw-skill/AGENTS.md | 18 +- apps/openclaw-skill/src/AGENTS.md | 18 +- .../src/transforms/relay-to-peer.test.ts | 278 +- .../src/transforms/relay-to-peer.ts | 411 +- apps/proxy/.env.example | 13 +- apps/proxy/AGENTS.md | 13 +- apps/proxy/package.json | 11 +- apps/proxy/src/AGENTS.md | 8 +- apps/proxy/src/agent-hook-route.test.ts | 396 +- apps/proxy/src/agent-hook-route.ts | 210 +- apps/proxy/src/agent-relay-session.test.ts | 268 + apps/proxy/src/agent-relay-session.ts | 325 + apps/proxy/src/auth-middleware.test.ts | 94 +- apps/proxy/src/auth-middleware.ts | 3 +- apps/proxy/src/config.test.ts | 37 +- apps/proxy/src/config.ts | 4 +- apps/proxy/src/index.test.ts | 7 + apps/proxy/src/relay-connect-route.test.ts | 126 + apps/proxy/src/relay-connect-route.ts | 91 + apps/proxy/src/server.test.ts | 6 +- apps/proxy/src/server.ts | 22 +- apps/proxy/src/worker.test.ts | 48 +- apps/proxy/src/worker.ts | 63 +- apps/proxy/wrangler.jsonc | 56 + packages/connector/AGENTS.md | 18 + packages/connector/package.json | 31 + packages/connector/src/client.test.ts | 369 + packages/connector/src/client.ts | 675 + packages/connector/src/constants.ts | 27 + packages/connector/src/frames.test.ts | 106 + packages/connector/src/frames.ts | 220 + 
packages/connector/src/index.ts | 54 + packages/connector/src/runtime.ts | 594 + packages/connector/tsconfig.json | 9 + packages/connector/tsup.config.ts | 8 + packages/connector/vitest.config.ts | 7 + packages/protocol/AGENTS.md | 1 + packages/protocol/src/endpoints.ts | 2 + packages/protocol/src/index.test.ts | 4 + packages/protocol/src/index.ts | 2 + pnpm-lock.yaml | 49 + tsconfig.base.json | 1 + 54 files changed, 5606 insertions(+), 16975 deletions(-) create mode 100755 apps/cli/scripts/openclaw-relay-docker-e2e.sh create mode 100644 apps/cli/src/commands/connector.test.ts create mode 100644 apps/cli/src/commands/connector.ts create mode 100644 apps/proxy/src/agent-relay-session.test.ts create mode 100644 apps/proxy/src/agent-relay-session.ts create mode 100644 apps/proxy/src/relay-connect-route.test.ts create mode 100644 apps/proxy/src/relay-connect-route.ts create mode 100644 packages/connector/AGENTS.md create mode 100644 packages/connector/package.json create mode 100644 packages/connector/src/client.test.ts create mode 100644 packages/connector/src/client.ts create mode 100644 packages/connector/src/constants.ts create mode 100644 packages/connector/src/frames.test.ts create mode 100644 packages/connector/src/frames.ts create mode 100644 packages/connector/src/index.ts create mode 100644 packages/connector/src/runtime.ts create mode 100644 packages/connector/tsconfig.json create mode 100644 packages/connector/tsup.config.ts create mode 100644 packages/connector/vitest.config.ts diff --git a/README.md b/README.md index 321681c..f2b2fed 100644 --- a/README.md +++ b/README.md @@ -481,6 +481,9 @@ clawdentity/ - `clawdentity api-key list` to view PAT metadata (`id`, `name`, `status`, `createdAt`, `lastUsedAt`). - `clawdentity api-key revoke ` to invalidate a PAT without rotating unrelated keys. - `clawdentity share` for contact-card exchange (DID, verify URL, endpoint). +- `clawdentity connector start ` to run local relay connector runtime. 
+- `clawdentity connector service install ` to configure connector autostart after reboot/login (`launchd` on macOS, `systemd --user` on Linux). +- `clawdentity connector service uninstall ` to remove connector autostart service. ### 5) Onboarding and control model @@ -515,6 +518,30 @@ When `--skill` mode is detected, installer logic prepares OpenClaw runtime artif Install is idempotent and logs deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). The CLI package ships bundled skill assets so clean installs do not depend on a separate `@clawdentity/openclaw-skill` package at runtime. +### Docker E2E relay check (skill + invite flow) + +For user-like OpenClaw relay validation with existing Docker agents, run: + +```bash +pnpm -F @clawdentity/cli run test:e2e:openclaw-docker +``` + +Defaults target: +- `clawdbot-agent-alpha-1` (`http://127.0.0.1:18789`) +- `clawdbot-agent-beta-1` (`http://127.0.0.1:19001`) + +This script validates: +- invite-code onboarding setup in both containers +- skill-created artifact presence +- bidirectional multi-message relay +- edge cases: unknown peer alias, connector offline, connector recovery + +Common environment overrides: +- `CLAWDENTITY_E2E_PAT` (required if registry is already bootstrapped) +- `RESET_MODE=skill|full|none` (default `skill`) +- `ALPHA_CONTAINER`, `BETA_CONTAINER` +- `REGISTRY_URL`, `PROXY_HOOK_URL`, `PROXY_WS_URL` + --- ## MVP goals diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 68762a5..79fbb60 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -9,7 +9,7 @@ - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Keep `src/postinstall.ts` as a thin install entrypoint only; it should detect npm `--skill` mode and call shared installer helpers without mutating runtime CLI command wiring. - Implement command groups under `src/commands/*` and register them from `createProgram()`. 
-- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`) so automation and docs do not drift. +- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`, `connector`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. @@ -27,6 +27,7 @@ - CLI verification caches live under `~/.clawdentity/cache/` and must never include private keys or PATs. - Agent identities live at `~/.clawdentity/agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. - OpenClaw setup runtime hint lives at `~/.clawdentity/openclaw-relay.json` and stores `openclawBaseUrl` for proxy fallback. +- Connector runtime defaults to local outbound handoff endpoint `http://127.0.0.1:19400/v1/outbound`; keep transform and CLI defaults aligned. - Reject `.` and `..` as agent names before any filesystem operation to prevent directory traversal outside `~/.clawdentity/agents/`. - Resolve values with explicit precedence: environment variables > config file > built-in defaults. - Keep API tokens masked in human-facing output (`show`, success logs, debug prints). 
diff --git a/apps/cli/package.json b/apps/cli/package.json index c95e4ef..a934cf8 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -27,9 +27,11 @@ "postinstall": "node ./postinstall.mjs", "sync:skill-bundle": "node ./scripts/sync-skill-bundle.mjs", "test": "vitest run", + "test:e2e:openclaw-docker": "bash ./scripts/openclaw-relay-docker-e2e.sh", "typecheck": "tsc --noEmit" }, "dependencies": { + "@clawdentity/connector": "workspace:*", "commander": "^13.1.0" }, "devDependencies": { diff --git a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md index 5ed7530..dd099fb 100644 --- a/apps/cli/scripts/AGENTS.md +++ b/apps/cli/scripts/AGENTS.md @@ -5,6 +5,10 @@ ## Rules - `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. +- `openclaw-relay-docker-e2e.sh` is the source of truth for local Docker-based OpenClaw relay E2E validation (invite onboarding, skill artifacts, bidirectional relay, and connector failure/recovery checks). - Scripts must fail with actionable errors when required source artifacts are missing. +- Docker E2E scripts must keep reset behavior explicit (`RESET_MODE=skill|full|none`) and must only remove known skill-generated files in skill-reset mode. +- Docker E2E relay scripts should accept `CLAWDENTITY_E2E_PAT`, but when absent they must first attempt to reuse existing container CLI config PAT before fallback bootstrap so pre-bootstrapped environments remain runnable. +- Connector startup failures in Docker E2E should include the agent DID in diagnostics so operator allowlist mismatches can be fixed quickly. - Keep script output concise and stable for CI/release logs. - Do not add install-time network fetches to packaging scripts. 
diff --git a/apps/cli/scripts/openclaw-relay-docker-e2e.sh b/apps/cli/scripts/openclaw-relay-docker-e2e.sh new file mode 100755 index 0000000..8a6359e --- /dev/null +++ b/apps/cli/scripts/openclaw-relay-docker-e2e.sh @@ -0,0 +1,294 @@ +#!/usr/bin/env bash + +set -euo pipefail + +ALPHA_CONTAINER="${ALPHA_CONTAINER:-clawdbot-agent-alpha-1}" +BETA_CONTAINER="${BETA_CONTAINER:-clawdbot-agent-beta-1}" + +ALPHA_AGENT_NAME="${ALPHA_AGENT_NAME:-alpha}" +BETA_AGENT_NAME="${BETA_AGENT_NAME:-beta}" + +REGISTRY_URL="${REGISTRY_URL:-http://host.docker.internal:8787}" +PROXY_HOOK_URL="${PROXY_HOOK_URL:-http://host.docker.internal:8788/hooks/agent}" +PROXY_WS_URL="${PROXY_WS_URL:-ws://host.docker.internal:8788/v1/relay/connect}" + +ALPHA_HOST_OPENCLAW_URL="${ALPHA_HOST_OPENCLAW_URL:-http://127.0.0.1:18789}" +BETA_HOST_OPENCLAW_URL="${BETA_HOST_OPENCLAW_URL:-http://127.0.0.1:19001}" +CONTAINER_OPENCLAW_BASE_URL="${CONTAINER_OPENCLAW_BASE_URL:-http://127.0.0.1:18789}" + +ALPHA_HOOK_TOKEN="${ALPHA_HOOK_TOKEN:-alpha-hook-secret}" +BETA_HOOK_TOKEN="${BETA_HOOK_TOKEN:-beta-hook-secret}" +BOOTSTRAP_SECRET="${BOOTSTRAP_SECRET:-clawdentity-local-bootstrap}" +CLI_GLOBAL_PACKAGE_ROOT="${CLI_GLOBAL_PACKAGE_ROOT:-/home/node/.local/lib/node_modules/@clawdentity/cli}" + +RESET_MODE="${RESET_MODE:-skill}" +CLAWDENTITY_E2E_PAT="${CLAWDENTITY_E2E_PAT:-}" + +log() { + printf '[openclaw-relay-e2e] %s\n' "$*" +} + +fail() { + printf '[openclaw-relay-e2e] ERROR: %s\n' "$*" >&2 + exit 1 +} + +require_command() { + command -v "$1" >/dev/null 2>&1 || fail "Missing required command: $1" +} + +require_running_container() { + local container="$1" + local running + running="$(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null || true)" + [[ "$running" == "true" ]] || fail "Container is not running: $container" +} + +container_exec() { + local container="$1" + shift + docker exec "$container" sh -lc "$*" +} + +container_has_file() { + local container="$1" + local file_path="$2" + container_exec 
"$container" "test -f $file_path" +} + +extract_invite_code() { + printf '%s\n' "$1" | sed -n 's/^Invite code: //p' | head -n 1 +} + +extract_pat() { + printf '%s\n' "$1" | grep -Eo 'clw_pat_[A-Za-z0-9_-]+' | head -n 1 +} + +read_config_pat() { + local container="$1" + container_exec "$container" "node -e 'const fs=require(\"fs\");const p=process.env.HOME+\"/.clawdentity/config.json\";if(!fs.existsSync(p)){process.exit(0);}const cfg=JSON.parse(fs.readFileSync(p,\"utf8\"));if(typeof cfg.apiKey===\"string\"&&cfg.apiKey.trim().length>0){process.stdout.write(cfg.apiKey.trim());}'" +} + +read_agent_did() { + local container="$1" + local agent_name="$2" + container_exec "$container" "node -e 'const fs=require(\"fs\");const p=process.env.HOME+\"/.clawdentity/agents/$agent_name/identity.json\";const id=JSON.parse(fs.readFileSync(p,\"utf8\"));process.stdout.write(id.did);'" +} + +reset_skill_state() { + local container="$1" + local agent_name="$2" + + container_exec "$container" "rm -f ~/.clawdentity/peers.json ~/.clawdentity/openclaw-agent-name ~/.clawdentity/openclaw-relay.json ~/.openclaw/hooks/transforms/relay-to-peer.mjs" + container_exec "$container" "rm -rf ~/.openclaw/workspace/skills/clawdentity-openclaw-relay" + + if [[ "$RESET_MODE" == "full" ]]; then + container_exec "$container" "rm -rf ~/.clawdentity/agents/$agent_name" + fi +} + +install_skill_assets() { + local container="$1" + container_exec "$container" "test -f \"$CLI_GLOBAL_PACKAGE_ROOT/postinstall.mjs\"" + container_exec "$container" "npm_config_skill=true node \"$CLI_GLOBAL_PACKAGE_ROOT/postinstall.mjs\" >/dev/null" +} + +ensure_agent_identity() { + local container="$1" + local agent_name="$2" + if container_exec "$container" "clawdentity agent inspect \"$agent_name\" >/dev/null 2>&1"; then + log "$container: agent '$agent_name' already exists" + return + fi + + log "$container: creating agent '$agent_name'" + container_exec "$container" "clawdentity agent create \"$agent_name\" --framework openclaw 
>/dev/null" +} + +configure_registry() { + local container="$1" + local pat="$2" + container_exec "$container" "clawdentity config init >/dev/null" + container_exec "$container" "clawdentity config set registryUrl \"$REGISTRY_URL\" >/dev/null" + container_exec "$container" "clawdentity config set apiKey \"$pat\" >/dev/null" +} + +stop_connector() { + local container="$1" + local agent_name="$2" + + container_exec "$container" "if [ -f /tmp/clawdentity-connector-$agent_name.pid ]; then kill \$(cat /tmp/clawdentity-connector-$agent_name.pid) 2>/dev/null || true; fi" + container_exec "$container" "for pid in \$(ps -ef | awk '/clawdentity connector start $agent_name/ && !/awk/ {print \$2}'); do kill \"\$pid\" 2>/dev/null || true; done" +} + +start_connector() { + local container="$1" + local agent_name="$2" + local hook_token="$3" + local agent_did="$4" + + stop_connector "$container" "$agent_name" + container_exec "$container" "nohup clawdentity connector start \"$agent_name\" --proxy-ws-url \"$PROXY_WS_URL\" --openclaw-hook-token \"$hook_token\" >/tmp/clawdentity-connector-$agent_name.log 2>&1 & echo \$! > /tmp/clawdentity-connector-$agent_name.pid" + + local waited=0 + while true; do + if container_exec "$container" "grep -q 'connector.websocket.connected' /tmp/clawdentity-connector-$agent_name.log"; then + log "$container: connector '$agent_name' connected" + break + fi + + waited=$((waited + 1)) + if [[ $waited -ge 30 ]]; then + container_exec "$container" "tail -n 120 /tmp/clawdentity-connector-$agent_name.log" || true + fail "$container: connector '$agent_name' did not connect within timeout. 
Ensure proxy allowlist includes DID $agent_did" + fi + sleep 1 + done +} + +send_peer_message() { + local sender_url="$1" + local hook_token="$2" + local peer_alias="$3" + local session_id="$4" + local message="$5" + local expected_status="$6" + + local response_body + response_body="$(mktemp)" + local status + status="$( + curl -sS \ + -o "$response_body" \ + -w '%{http_code}' \ + -X POST "$sender_url/hooks/send-to-peer" \ + -H 'content-type: application/json' \ + -H "x-openclaw-token: $hook_token" \ + --data "{\"peer\":\"$peer_alias\",\"sessionId\":\"$session_id\",\"message\":\"$message\"}" + )" + + if [[ "$status" != "$expected_status" ]]; then + log "send-to-peer failed: expected $expected_status, got $status" + cat "$response_body" >&2 + rm -f "$response_body" + fail "Unexpected send-to-peer status" + fi + + log "send-to-peer ok: $sender_url -> $peer_alias ($status) | $message" + rm -f "$response_body" +} + +run() { + require_command docker + require_command curl + require_command node + + require_running_container "$ALPHA_CONTAINER" + require_running_container "$BETA_CONTAINER" + + log "Validating clawdentity CLI availability in containers" + container_exec "$ALPHA_CONTAINER" "clawdentity --help >/dev/null" || fail "$ALPHA_CONTAINER missing clawdentity CLI" + container_exec "$BETA_CONTAINER" "clawdentity --help >/dev/null" || fail "$BETA_CONTAINER missing clawdentity CLI" + + if [[ "$RESET_MODE" != "none" ]]; then + log "Reset mode: $RESET_MODE" + reset_skill_state "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME" + reset_skill_state "$BETA_CONTAINER" "$BETA_AGENT_NAME" + fi + + log "Installing skill artifacts via npm --skill postinstall flow" + install_skill_assets "$ALPHA_CONTAINER" + install_skill_assets "$BETA_CONTAINER" + + local pat="$CLAWDENTITY_E2E_PAT" + if [[ -z "$pat" ]]; then + pat="$(read_config_pat "$ALPHA_CONTAINER")" + fi + + if [[ -z "$pat" ]]; then + log "No CLAWDENTITY_E2E_PAT provided; attempting bootstrap on $ALPHA_CONTAINER" + local 
bootstrap_output + if ! bootstrap_output="$(container_exec "$ALPHA_CONTAINER" "clawdentity admin bootstrap --bootstrap-secret \"$BOOTSTRAP_SECRET\"" 2>&1)"; then + printf '%s\n' "$bootstrap_output" >&2 + fail "Bootstrap failed. Set CLAWDENTITY_E2E_PAT for pre-bootstrapped environments." + fi + + pat="$(extract_pat "$bootstrap_output")" + [[ -n "$pat" ]] || fail "Failed to extract PAT from bootstrap output" + fi + log "Using PAT for CLI config in both containers" + + configure_registry "$ALPHA_CONTAINER" "$pat" + configure_registry "$BETA_CONTAINER" "$pat" + + ensure_agent_identity "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME" + ensure_agent_identity "$BETA_CONTAINER" "$BETA_AGENT_NAME" + + local alpha_did beta_did + alpha_did="$(read_agent_did "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME")" + beta_did="$(read_agent_did "$BETA_CONTAINER" "$BETA_AGENT_NAME")" + log "Alpha DID: $alpha_did" + log "Beta DID: $beta_did" + + log "Creating invite code in $BETA_CONTAINER for peer alias '$BETA_AGENT_NAME'" + local beta_invite_output beta_invite_code + beta_invite_output="$( + container_exec "$BETA_CONTAINER" \ + "clawdentity openclaw invite --did \"$beta_did\" --proxy-url \"$PROXY_HOOK_URL\" --peer-alias \"$BETA_AGENT_NAME\"" + )" + beta_invite_code="$(extract_invite_code "$beta_invite_output")" + [[ -n "$beta_invite_code" ]] || fail "Failed to extract beta invite code" + + log "Creating invite code in $ALPHA_CONTAINER for peer alias '$ALPHA_AGENT_NAME'" + local alpha_invite_output alpha_invite_code + alpha_invite_output="$( + container_exec "$ALPHA_CONTAINER" \ + "clawdentity openclaw invite --did \"$alpha_did\" --proxy-url \"$PROXY_HOOK_URL\" --peer-alias \"$ALPHA_AGENT_NAME\"" + )" + alpha_invite_code="$(extract_invite_code "$alpha_invite_output")" + [[ -n "$alpha_invite_code" ]] || fail "Failed to extract alpha invite code" + + log "Running invite onboarding setup inside $ALPHA_CONTAINER" + container_exec "$ALPHA_CONTAINER" \ + "clawdentity openclaw setup \"$ALPHA_AGENT_NAME\" 
--invite-code \"$beta_invite_code\" --openclaw-base-url \"$CONTAINER_OPENCLAW_BASE_URL\" >/dev/null" + + log "Running invite onboarding setup inside $BETA_CONTAINER" + container_exec "$BETA_CONTAINER" \ + "clawdentity openclaw setup \"$BETA_AGENT_NAME\" --invite-code \"$alpha_invite_code\" --openclaw-base-url \"$CONTAINER_OPENCLAW_BASE_URL\" >/dev/null" + + log "Verifying skill-created artifacts" + container_has_file "$ALPHA_CONTAINER" '$HOME/.clawdentity/peers.json' || fail "Alpha peers.json missing" + container_has_file "$ALPHA_CONTAINER" '$HOME/.clawdentity/openclaw-agent-name' || fail "Alpha openclaw-agent-name missing" + container_has_file "$ALPHA_CONTAINER" '$HOME/.clawdentity/openclaw-relay.json' || fail "Alpha openclaw-relay.json missing" + container_has_file "$ALPHA_CONTAINER" '$HOME/.openclaw/hooks/transforms/relay-to-peer.mjs' || fail "Alpha relay transform missing" + container_has_file "$ALPHA_CONTAINER" '$HOME/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md' || fail "Alpha skill bundle missing" + container_has_file "$BETA_CONTAINER" '$HOME/.clawdentity/peers.json' || fail "Beta peers.json missing" + container_has_file "$BETA_CONTAINER" '$HOME/.clawdentity/openclaw-agent-name' || fail "Beta openclaw-agent-name missing" + container_has_file "$BETA_CONTAINER" '$HOME/.clawdentity/openclaw-relay.json' || fail "Beta openclaw-relay.json missing" + container_has_file "$BETA_CONTAINER" '$HOME/.openclaw/hooks/transforms/relay-to-peer.mjs' || fail "Beta relay transform missing" + container_has_file "$BETA_CONTAINER" '$HOME/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md' || fail "Beta skill bundle missing" + + log "Starting connector runtimes" + start_connector "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME" "$ALPHA_HOOK_TOKEN" "$alpha_did" + start_connector "$BETA_CONTAINER" "$BETA_AGENT_NAME" "$BETA_HOOK_TOKEN" "$beta_did" + + log "Running bidirectional multi-message relay" + send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" 
"$BETA_AGENT_NAME" "relay-alpha-beta" "alpha to beta m1" "204" + send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta" "alpha to beta m2" "204" + send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta" "alpha to beta m3" "204" + send_peer_message "$BETA_HOST_OPENCLAW_URL" "$BETA_HOOK_TOKEN" "$ALPHA_AGENT_NAME" "relay-beta-alpha" "beta to alpha m1" "204" + send_peer_message "$BETA_HOST_OPENCLAW_URL" "$BETA_HOOK_TOKEN" "$ALPHA_AGENT_NAME" "relay-beta-alpha" "beta to alpha m2" "204" + + log "Running edge case: unknown peer alias" + send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "unknown-peer" "relay-alpha-invalid-peer" "should fail with unknown peer alias" "500" + + log "Running edge case: connector offline and recovery" + stop_connector "$BETA_CONTAINER" "$BETA_AGENT_NAME" + send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta-offline" "should fail while beta connector is offline" "500" + start_connector "$BETA_CONTAINER" "$BETA_AGENT_NAME" "$BETA_HOOK_TOKEN" "$beta_did" + send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta-recovered" "should succeed after beta connector reconnect" "204" + + log "E2E complete: invite onboarding + skill artifacts + bidirectional relay + edge cases" +} + +run diff --git a/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs b/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs index b35a138..4dd38d6 100644 --- a/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs +++ b/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs @@ -1,15605 +1,20 @@ -var __defProp = Object.defineProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; - -// src/transforms/relay-to-peer.ts -import { randomBytes } from "crypto"; -import { 
readdir, readFile as readFile3 } from "fs/promises"; -import { homedir as homedir2 } from "os"; -import { join as join4 } from "path"; - -// ../../packages/protocol/src/agent-registration-proof.ts -var AGENT_REGISTRATION_PROOF_VERSION = "clawdentity.register.v1"; -var AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE = `${AGENT_REGISTRATION_PROOF_VERSION} -challengeId:{challengeId} -nonce:{nonce} -ownerDid:{ownerDid} -publicKey:{publicKey} -name:{name} -framework:{framework} -ttlDays:{ttlDays}`; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/external.js -var external_exports = {}; -__export(external_exports, { - $brand: () => $brand, - $input: () => $input, - $output: () => $output, - NEVER: () => NEVER, - TimePrecision: () => TimePrecision, - ZodAny: () => ZodAny, - ZodArray: () => ZodArray, - ZodBase64: () => ZodBase64, - ZodBase64URL: () => ZodBase64URL, - ZodBigInt: () => ZodBigInt, - ZodBigIntFormat: () => ZodBigIntFormat, - ZodBoolean: () => ZodBoolean, - ZodCIDRv4: () => ZodCIDRv4, - ZodCIDRv6: () => ZodCIDRv6, - ZodCUID: () => ZodCUID, - ZodCUID2: () => ZodCUID2, - ZodCatch: () => ZodCatch, - ZodCodec: () => ZodCodec, - ZodCustom: () => ZodCustom, - ZodCustomStringFormat: () => ZodCustomStringFormat, - ZodDate: () => ZodDate, - ZodDefault: () => ZodDefault, - ZodDiscriminatedUnion: () => ZodDiscriminatedUnion, - ZodE164: () => ZodE164, - ZodEmail: () => ZodEmail, - ZodEmoji: () => ZodEmoji, - ZodEnum: () => ZodEnum, - ZodError: () => ZodError, - ZodExactOptional: () => ZodExactOptional, - ZodFile: () => ZodFile, - ZodFirstPartyTypeKind: () => ZodFirstPartyTypeKind, - ZodFunction: () => ZodFunction, - ZodGUID: () => ZodGUID, - ZodIPv4: () => ZodIPv4, - ZodIPv6: () => ZodIPv6, - ZodISODate: () => ZodISODate, - ZodISODateTime: () => ZodISODateTime, - ZodISODuration: () => ZodISODuration, - ZodISOTime: () => ZodISOTime, - ZodIntersection: () => ZodIntersection, - ZodIssueCode: () => ZodIssueCode, - ZodJWT: () => ZodJWT, - ZodKSUID: () => ZodKSUID, 
- ZodLazy: () => ZodLazy, - ZodLiteral: () => ZodLiteral, - ZodMAC: () => ZodMAC, - ZodMap: () => ZodMap, - ZodNaN: () => ZodNaN, - ZodNanoID: () => ZodNanoID, - ZodNever: () => ZodNever, - ZodNonOptional: () => ZodNonOptional, - ZodNull: () => ZodNull, - ZodNullable: () => ZodNullable, - ZodNumber: () => ZodNumber, - ZodNumberFormat: () => ZodNumberFormat, - ZodObject: () => ZodObject, - ZodOptional: () => ZodOptional, - ZodPipe: () => ZodPipe, - ZodPrefault: () => ZodPrefault, - ZodPromise: () => ZodPromise, - ZodReadonly: () => ZodReadonly, - ZodRealError: () => ZodRealError, - ZodRecord: () => ZodRecord, - ZodSet: () => ZodSet, - ZodString: () => ZodString, - ZodStringFormat: () => ZodStringFormat, - ZodSuccess: () => ZodSuccess, - ZodSymbol: () => ZodSymbol, - ZodTemplateLiteral: () => ZodTemplateLiteral, - ZodTransform: () => ZodTransform, - ZodTuple: () => ZodTuple, - ZodType: () => ZodType, - ZodULID: () => ZodULID, - ZodURL: () => ZodURL, - ZodUUID: () => ZodUUID, - ZodUndefined: () => ZodUndefined, - ZodUnion: () => ZodUnion, - ZodUnknown: () => ZodUnknown, - ZodVoid: () => ZodVoid, - ZodXID: () => ZodXID, - ZodXor: () => ZodXor, - _ZodString: () => _ZodString, - _default: () => _default2, - _function: () => _function, - any: () => any, - array: () => array, - base64: () => base642, - base64url: () => base64url2, - bigint: () => bigint2, - boolean: () => boolean2, - catch: () => _catch2, - check: () => check, - cidrv4: () => cidrv42, - cidrv6: () => cidrv62, - clone: () => clone, - codec: () => codec, - coerce: () => coerce_exports, - config: () => config, - core: () => core_exports2, - cuid: () => cuid3, - cuid2: () => cuid22, - custom: () => custom, - date: () => date3, - decode: () => decode2, - decodeAsync: () => decodeAsync2, - describe: () => describe2, - discriminatedUnion: () => discriminatedUnion, - e164: () => e1642, - email: () => email2, - emoji: () => emoji2, - encode: () => encode2, - encodeAsync: () => encodeAsync2, - endsWith: () => 
_endsWith, - enum: () => _enum2, - exactOptional: () => exactOptional, - file: () => file, - flattenError: () => flattenError, - float32: () => float32, - float64: () => float64, - formatError: () => formatError, - fromJSONSchema: () => fromJSONSchema, - function: () => _function, - getErrorMap: () => getErrorMap, - globalRegistry: () => globalRegistry, - gt: () => _gt, - gte: () => _gte, - guid: () => guid2, - hash: () => hash, - hex: () => hex2, - hostname: () => hostname2, - httpUrl: () => httpUrl, - includes: () => _includes, - instanceof: () => _instanceof, - int: () => int, - int32: () => int32, - int64: () => int64, - intersection: () => intersection, - ipv4: () => ipv42, - ipv6: () => ipv62, - iso: () => iso_exports, - json: () => json, - jwt: () => jwt, - keyof: () => keyof, - ksuid: () => ksuid2, - lazy: () => lazy, - length: () => _length, - literal: () => literal, - locales: () => locales_exports, - looseObject: () => looseObject, - looseRecord: () => looseRecord, - lowercase: () => _lowercase, - lt: () => _lt, - lte: () => _lte, - mac: () => mac2, - map: () => map, - maxLength: () => _maxLength, - maxSize: () => _maxSize, - meta: () => meta2, - mime: () => _mime, - minLength: () => _minLength, - minSize: () => _minSize, - multipleOf: () => _multipleOf, - nan: () => nan, - nanoid: () => nanoid2, - nativeEnum: () => nativeEnum, - negative: () => _negative, - never: () => never, - nonnegative: () => _nonnegative, - nonoptional: () => nonoptional, - nonpositive: () => _nonpositive, - normalize: () => _normalize, - null: () => _null3, - nullable: () => nullable, - nullish: () => nullish2, - number: () => number2, - object: () => object, - optional: () => optional, - overwrite: () => _overwrite, - parse: () => parse2, - parseAsync: () => parseAsync2, - partialRecord: () => partialRecord, - pipe: () => pipe, - positive: () => _positive, - prefault: () => prefault, - preprocess: () => preprocess, - prettifyError: () => prettifyError, - promise: () => promise, 
- property: () => _property, - readonly: () => readonly, - record: () => record, - refine: () => refine, - regex: () => _regex, - regexes: () => regexes_exports, - registry: () => registry, - safeDecode: () => safeDecode2, - safeDecodeAsync: () => safeDecodeAsync2, - safeEncode: () => safeEncode2, - safeEncodeAsync: () => safeEncodeAsync2, - safeParse: () => safeParse2, - safeParseAsync: () => safeParseAsync2, - set: () => set, - setErrorMap: () => setErrorMap, - size: () => _size, - slugify: () => _slugify, - startsWith: () => _startsWith, - strictObject: () => strictObject, - string: () => string2, - stringFormat: () => stringFormat, - stringbool: () => stringbool, - success: () => success, - superRefine: () => superRefine, - symbol: () => symbol, - templateLiteral: () => templateLiteral, - toJSONSchema: () => toJSONSchema, - toLowerCase: () => _toLowerCase, - toUpperCase: () => _toUpperCase, - transform: () => transform, - treeifyError: () => treeifyError, - trim: () => _trim, - tuple: () => tuple, - uint32: () => uint32, - uint64: () => uint64, - ulid: () => ulid2, - undefined: () => _undefined3, - union: () => union, - unknown: () => unknown, - uppercase: () => _uppercase, - url: () => url, - util: () => util_exports, - uuid: () => uuid2, - uuidv4: () => uuidv4, - uuidv6: () => uuidv6, - uuidv7: () => uuidv7, - void: () => _void2, - xid: () => xid2, - xor: () => xor -}); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/index.js -var core_exports2 = {}; -__export(core_exports2, { - $ZodAny: () => $ZodAny, - $ZodArray: () => $ZodArray, - $ZodAsyncError: () => $ZodAsyncError, - $ZodBase64: () => $ZodBase64, - $ZodBase64URL: () => $ZodBase64URL, - $ZodBigInt: () => $ZodBigInt, - $ZodBigIntFormat: () => $ZodBigIntFormat, - $ZodBoolean: () => $ZodBoolean, - $ZodCIDRv4: () => $ZodCIDRv4, - $ZodCIDRv6: () => $ZodCIDRv6, - $ZodCUID: () => $ZodCUID, - $ZodCUID2: () => $ZodCUID2, - $ZodCatch: () => $ZodCatch, - $ZodCheck: () => $ZodCheck, - 
$ZodCheckBigIntFormat: () => $ZodCheckBigIntFormat, - $ZodCheckEndsWith: () => $ZodCheckEndsWith, - $ZodCheckGreaterThan: () => $ZodCheckGreaterThan, - $ZodCheckIncludes: () => $ZodCheckIncludes, - $ZodCheckLengthEquals: () => $ZodCheckLengthEquals, - $ZodCheckLessThan: () => $ZodCheckLessThan, - $ZodCheckLowerCase: () => $ZodCheckLowerCase, - $ZodCheckMaxLength: () => $ZodCheckMaxLength, - $ZodCheckMaxSize: () => $ZodCheckMaxSize, - $ZodCheckMimeType: () => $ZodCheckMimeType, - $ZodCheckMinLength: () => $ZodCheckMinLength, - $ZodCheckMinSize: () => $ZodCheckMinSize, - $ZodCheckMultipleOf: () => $ZodCheckMultipleOf, - $ZodCheckNumberFormat: () => $ZodCheckNumberFormat, - $ZodCheckOverwrite: () => $ZodCheckOverwrite, - $ZodCheckProperty: () => $ZodCheckProperty, - $ZodCheckRegex: () => $ZodCheckRegex, - $ZodCheckSizeEquals: () => $ZodCheckSizeEquals, - $ZodCheckStartsWith: () => $ZodCheckStartsWith, - $ZodCheckStringFormat: () => $ZodCheckStringFormat, - $ZodCheckUpperCase: () => $ZodCheckUpperCase, - $ZodCodec: () => $ZodCodec, - $ZodCustom: () => $ZodCustom, - $ZodCustomStringFormat: () => $ZodCustomStringFormat, - $ZodDate: () => $ZodDate, - $ZodDefault: () => $ZodDefault, - $ZodDiscriminatedUnion: () => $ZodDiscriminatedUnion, - $ZodE164: () => $ZodE164, - $ZodEmail: () => $ZodEmail, - $ZodEmoji: () => $ZodEmoji, - $ZodEncodeError: () => $ZodEncodeError, - $ZodEnum: () => $ZodEnum, - $ZodError: () => $ZodError, - $ZodExactOptional: () => $ZodExactOptional, - $ZodFile: () => $ZodFile, - $ZodFunction: () => $ZodFunction, - $ZodGUID: () => $ZodGUID, - $ZodIPv4: () => $ZodIPv4, - $ZodIPv6: () => $ZodIPv6, - $ZodISODate: () => $ZodISODate, - $ZodISODateTime: () => $ZodISODateTime, - $ZodISODuration: () => $ZodISODuration, - $ZodISOTime: () => $ZodISOTime, - $ZodIntersection: () => $ZodIntersection, - $ZodJWT: () => $ZodJWT, - $ZodKSUID: () => $ZodKSUID, - $ZodLazy: () => $ZodLazy, - $ZodLiteral: () => $ZodLiteral, - $ZodMAC: () => $ZodMAC, - $ZodMap: () => $ZodMap, - 
$ZodNaN: () => $ZodNaN, - $ZodNanoID: () => $ZodNanoID, - $ZodNever: () => $ZodNever, - $ZodNonOptional: () => $ZodNonOptional, - $ZodNull: () => $ZodNull, - $ZodNullable: () => $ZodNullable, - $ZodNumber: () => $ZodNumber, - $ZodNumberFormat: () => $ZodNumberFormat, - $ZodObject: () => $ZodObject, - $ZodObjectJIT: () => $ZodObjectJIT, - $ZodOptional: () => $ZodOptional, - $ZodPipe: () => $ZodPipe, - $ZodPrefault: () => $ZodPrefault, - $ZodPromise: () => $ZodPromise, - $ZodReadonly: () => $ZodReadonly, - $ZodRealError: () => $ZodRealError, - $ZodRecord: () => $ZodRecord, - $ZodRegistry: () => $ZodRegistry, - $ZodSet: () => $ZodSet, - $ZodString: () => $ZodString, - $ZodStringFormat: () => $ZodStringFormat, - $ZodSuccess: () => $ZodSuccess, - $ZodSymbol: () => $ZodSymbol, - $ZodTemplateLiteral: () => $ZodTemplateLiteral, - $ZodTransform: () => $ZodTransform, - $ZodTuple: () => $ZodTuple, - $ZodType: () => $ZodType, - $ZodULID: () => $ZodULID, - $ZodURL: () => $ZodURL, - $ZodUUID: () => $ZodUUID, - $ZodUndefined: () => $ZodUndefined, - $ZodUnion: () => $ZodUnion, - $ZodUnknown: () => $ZodUnknown, - $ZodVoid: () => $ZodVoid, - $ZodXID: () => $ZodXID, - $ZodXor: () => $ZodXor, - $brand: () => $brand, - $constructor: () => $constructor, - $input: () => $input, - $output: () => $output, - Doc: () => Doc, - JSONSchema: () => json_schema_exports, - JSONSchemaGenerator: () => JSONSchemaGenerator, - NEVER: () => NEVER, - TimePrecision: () => TimePrecision, - _any: () => _any, - _array: () => _array, - _base64: () => _base64, - _base64url: () => _base64url, - _bigint: () => _bigint, - _boolean: () => _boolean, - _catch: () => _catch, - _check: () => _check, - _cidrv4: () => _cidrv4, - _cidrv6: () => _cidrv6, - _coercedBigint: () => _coercedBigint, - _coercedBoolean: () => _coercedBoolean, - _coercedDate: () => _coercedDate, - _coercedNumber: () => _coercedNumber, - _coercedString: () => _coercedString, - _cuid: () => _cuid, - _cuid2: () => _cuid2, - _custom: () => _custom, - 
_date: () => _date, - _decode: () => _decode, - _decodeAsync: () => _decodeAsync, - _default: () => _default, - _discriminatedUnion: () => _discriminatedUnion, - _e164: () => _e164, - _email: () => _email, - _emoji: () => _emoji2, - _encode: () => _encode, - _encodeAsync: () => _encodeAsync, - _endsWith: () => _endsWith, - _enum: () => _enum, - _file: () => _file, - _float32: () => _float32, - _float64: () => _float64, - _gt: () => _gt, - _gte: () => _gte, - _guid: () => _guid, - _includes: () => _includes, - _int: () => _int, - _int32: () => _int32, - _int64: () => _int64, - _intersection: () => _intersection, - _ipv4: () => _ipv4, - _ipv6: () => _ipv6, - _isoDate: () => _isoDate, - _isoDateTime: () => _isoDateTime, - _isoDuration: () => _isoDuration, - _isoTime: () => _isoTime, - _jwt: () => _jwt, - _ksuid: () => _ksuid, - _lazy: () => _lazy, - _length: () => _length, - _literal: () => _literal, - _lowercase: () => _lowercase, - _lt: () => _lt, - _lte: () => _lte, - _mac: () => _mac, - _map: () => _map, - _max: () => _lte, - _maxLength: () => _maxLength, - _maxSize: () => _maxSize, - _mime: () => _mime, - _min: () => _gte, - _minLength: () => _minLength, - _minSize: () => _minSize, - _multipleOf: () => _multipleOf, - _nan: () => _nan, - _nanoid: () => _nanoid, - _nativeEnum: () => _nativeEnum, - _negative: () => _negative, - _never: () => _never, - _nonnegative: () => _nonnegative, - _nonoptional: () => _nonoptional, - _nonpositive: () => _nonpositive, - _normalize: () => _normalize, - _null: () => _null2, - _nullable: () => _nullable, - _number: () => _number, - _optional: () => _optional, - _overwrite: () => _overwrite, - _parse: () => _parse, - _parseAsync: () => _parseAsync, - _pipe: () => _pipe, - _positive: () => _positive, - _promise: () => _promise, - _property: () => _property, - _readonly: () => _readonly, - _record: () => _record, - _refine: () => _refine, - _regex: () => _regex, - _safeDecode: () => _safeDecode, - _safeDecodeAsync: () => 
_safeDecodeAsync, - _safeEncode: () => _safeEncode, - _safeEncodeAsync: () => _safeEncodeAsync, - _safeParse: () => _safeParse, - _safeParseAsync: () => _safeParseAsync, - _set: () => _set, - _size: () => _size, - _slugify: () => _slugify, - _startsWith: () => _startsWith, - _string: () => _string, - _stringFormat: () => _stringFormat, - _stringbool: () => _stringbool, - _success: () => _success, - _superRefine: () => _superRefine, - _symbol: () => _symbol, - _templateLiteral: () => _templateLiteral, - _toLowerCase: () => _toLowerCase, - _toUpperCase: () => _toUpperCase, - _transform: () => _transform, - _trim: () => _trim, - _tuple: () => _tuple, - _uint32: () => _uint32, - _uint64: () => _uint64, - _ulid: () => _ulid, - _undefined: () => _undefined2, - _union: () => _union, - _unknown: () => _unknown, - _uppercase: () => _uppercase, - _url: () => _url, - _uuid: () => _uuid, - _uuidv4: () => _uuidv4, - _uuidv6: () => _uuidv6, - _uuidv7: () => _uuidv7, - _void: () => _void, - _xid: () => _xid, - _xor: () => _xor, - clone: () => clone, - config: () => config, - createStandardJSONSchemaMethod: () => createStandardJSONSchemaMethod, - createToJSONSchemaMethod: () => createToJSONSchemaMethod, - decode: () => decode, - decodeAsync: () => decodeAsync, - describe: () => describe, - encode: () => encode, - encodeAsync: () => encodeAsync, - extractDefs: () => extractDefs, - finalize: () => finalize, - flattenError: () => flattenError, - formatError: () => formatError, - globalConfig: () => globalConfig, - globalRegistry: () => globalRegistry, - initializeContext: () => initializeContext, - isValidBase64: () => isValidBase64, - isValidBase64URL: () => isValidBase64URL, - isValidJWT: () => isValidJWT, - locales: () => locales_exports, - meta: () => meta, - parse: () => parse, - parseAsync: () => parseAsync, - prettifyError: () => prettifyError, - process: () => process2, - regexes: () => regexes_exports, - registry: () => registry, - safeDecode: () => safeDecode, - 
safeDecodeAsync: () => safeDecodeAsync, - safeEncode: () => safeEncode, - safeEncodeAsync: () => safeEncodeAsync, - safeParse: () => safeParse, - safeParseAsync: () => safeParseAsync, - toDotPath: () => toDotPath, - toJSONSchema: () => toJSONSchema, - treeifyError: () => treeifyError, - util: () => util_exports, - version: () => version -}); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/core.js -var NEVER = Object.freeze({ - status: "aborted" -}); -// @__NO_SIDE_EFFECTS__ -function $constructor(name, initializer3, params) { - function init(inst, def) { - if (!inst._zod) { - Object.defineProperty(inst, "_zod", { - value: { - def, - constr: _, - traits: /* @__PURE__ */ new Set() - }, - enumerable: false - }); - } - if (inst._zod.traits.has(name)) { - return; - } - inst._zod.traits.add(name); - initializer3(inst, def); - const proto = _.prototype; - const keys = Object.keys(proto); - for (let i = 0; i < keys.length; i++) { - const k = keys[i]; - if (!(k in inst)) { - inst[k] = proto[k].bind(inst); - } - } - } - const Parent = params?.Parent ?? Object; - class Definition extends Parent { - } - Object.defineProperty(Definition, "name", { value: name }); - function _(def) { - var _a3; - const inst = params?.Parent ? new Definition() : this; - init(inst, def); - (_a3 = inst._zod).deferred ?? (_a3.deferred = []); - for (const fn of inst._zod.deferred) { - fn(); - } - return inst; - } - Object.defineProperty(_, "init", { value: init }); - Object.defineProperty(_, Symbol.hasInstance, { - value: (inst) => { - if (params?.Parent && inst instanceof params.Parent) - return true; - return inst?._zod?.traits?.has(name); - } - }); - Object.defineProperty(_, "name", { value: name }); - return _; -} -var $brand = /* @__PURE__ */ Symbol("zod_brand"); -var $ZodAsyncError = class extends Error { - constructor() { - super(`Encountered Promise during synchronous parse. 
Use .parseAsync() instead.`); - } -}; -var $ZodEncodeError = class extends Error { - constructor(name) { - super(`Encountered unidirectional transform during encode: ${name}`); - this.name = "ZodEncodeError"; - } -}; -var globalConfig = {}; -function config(newConfig) { - if (newConfig) - Object.assign(globalConfig, newConfig); - return globalConfig; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/util.js -var util_exports = {}; -__export(util_exports, { - BIGINT_FORMAT_RANGES: () => BIGINT_FORMAT_RANGES, - Class: () => Class, - NUMBER_FORMAT_RANGES: () => NUMBER_FORMAT_RANGES, - aborted: () => aborted, - allowsEval: () => allowsEval, - assert: () => assert, - assertEqual: () => assertEqual, - assertIs: () => assertIs, - assertNever: () => assertNever, - assertNotEqual: () => assertNotEqual, - assignProp: () => assignProp, - base64ToUint8Array: () => base64ToUint8Array, - base64urlToUint8Array: () => base64urlToUint8Array, - cached: () => cached, - captureStackTrace: () => captureStackTrace, - cleanEnum: () => cleanEnum, - cleanRegex: () => cleanRegex, - clone: () => clone, - cloneDef: () => cloneDef, - createTransparentProxy: () => createTransparentProxy, - defineLazy: () => defineLazy, - esc: () => esc, - escapeRegex: () => escapeRegex, - extend: () => extend, - finalizeIssue: () => finalizeIssue, - floatSafeRemainder: () => floatSafeRemainder, - getElementAtPath: () => getElementAtPath, - getEnumValues: () => getEnumValues, - getLengthableOrigin: () => getLengthableOrigin, - getParsedType: () => getParsedType, - getSizableOrigin: () => getSizableOrigin, - hexToUint8Array: () => hexToUint8Array, - isObject: () => isObject, - isPlainObject: () => isPlainObject, - issue: () => issue, - joinValues: () => joinValues, - jsonStringifyReplacer: () => jsonStringifyReplacer, - merge: () => merge, - mergeDefs: () => mergeDefs, - normalizeParams: () => normalizeParams, - nullish: () => nullish, - numKeys: () => numKeys, - objectClone: () => objectClone, 
- omit: () => omit, - optionalKeys: () => optionalKeys, - parsedType: () => parsedType, - partial: () => partial, - pick: () => pick, - prefixIssues: () => prefixIssues, - primitiveTypes: () => primitiveTypes, - promiseAllObject: () => promiseAllObject, - propertyKeyTypes: () => propertyKeyTypes, - randomString: () => randomString, - required: () => required, - safeExtend: () => safeExtend, - shallowClone: () => shallowClone, - slugify: () => slugify, - stringifyPrimitive: () => stringifyPrimitive, - uint8ArrayToBase64: () => uint8ArrayToBase64, - uint8ArrayToBase64url: () => uint8ArrayToBase64url, - uint8ArrayToHex: () => uint8ArrayToHex, - unwrapMessage: () => unwrapMessage -}); -function assertEqual(val) { - return val; -} -function assertNotEqual(val) { - return val; -} -function assertIs(_arg) { -} -function assertNever(_x) { - throw new Error("Unexpected value in exhaustive check"); -} -function assert(_) { -} -function getEnumValues(entries) { - const numericValues = Object.values(entries).filter((v) => typeof v === "number"); - const values = Object.entries(entries).filter(([k, _]) => numericValues.indexOf(+k) === -1).map(([_, v]) => v); - return values; -} -function joinValues(array2, separator = "|") { - return array2.map((val) => stringifyPrimitive(val)).join(separator); -} -function jsonStringifyReplacer(_, value) { - if (typeof value === "bigint") - return value.toString(); - return value; -} -function cached(getter) { - const set2 = false; - return { - get value() { - if (!set2) { - const value = getter(); - Object.defineProperty(this, "value", { value }); - return value; - } - throw new Error("cached value already set"); - } - }; -} -function nullish(input) { - return input === null || input === void 0; -} -function cleanRegex(source) { - const start = source.startsWith("^") ? 1 : 0; - const end = source.endsWith("$") ? 
source.length - 1 : source.length; - return source.slice(start, end); -} -function floatSafeRemainder(val, step) { - const valDecCount = (val.toString().split(".")[1] || "").length; - const stepString = step.toString(); - let stepDecCount = (stepString.split(".")[1] || "").length; - if (stepDecCount === 0 && /\d?e-\d?/.test(stepString)) { - const match2 = stepString.match(/\d?e-(\d?)/); - if (match2?.[1]) { - stepDecCount = Number.parseInt(match2[1]); - } - } - const decCount = valDecCount > stepDecCount ? valDecCount : stepDecCount; - const valInt = Number.parseInt(val.toFixed(decCount).replace(".", "")); - const stepInt = Number.parseInt(step.toFixed(decCount).replace(".", "")); - return valInt % stepInt / 10 ** decCount; -} -var EVALUATING = /* @__PURE__ */ Symbol("evaluating"); -function defineLazy(object2, key, getter) { - let value = void 0; - Object.defineProperty(object2, key, { - get() { - if (value === EVALUATING) { - return void 0; - } - if (value === void 0) { - value = EVALUATING; - value = getter(); - } - return value; - }, - set(v) { - Object.defineProperty(object2, key, { - value: v - // configurable: true, - }); - }, - configurable: true - }); -} -function objectClone(obj) { - return Object.create(Object.getPrototypeOf(obj), Object.getOwnPropertyDescriptors(obj)); -} -function assignProp(target, prop, value) { - Object.defineProperty(target, prop, { - value, - writable: true, - enumerable: true, - configurable: true - }); -} -function mergeDefs(...defs) { - const mergedDescriptors = {}; - for (const def of defs) { - const descriptors = Object.getOwnPropertyDescriptors(def); - Object.assign(mergedDescriptors, descriptors); - } - return Object.defineProperties({}, mergedDescriptors); -} -function cloneDef(schema) { - return mergeDefs(schema._zod.def); -} -function getElementAtPath(obj, path) { - if (!path) - return obj; - return path.reduce((acc, key) => acc?.[key], obj); -} -function promiseAllObject(promisesObj) { - const keys = 
Object.keys(promisesObj); - const promises = keys.map((key) => promisesObj[key]); - return Promise.all(promises).then((results) => { - const resolvedObj = {}; - for (let i = 0; i < keys.length; i++) { - resolvedObj[keys[i]] = results[i]; - } - return resolvedObj; - }); -} -function randomString(length = 10) { - const chars = "abcdefghijklmnopqrstuvwxyz"; - let str = ""; - for (let i = 0; i < length; i++) { - str += chars[Math.floor(Math.random() * chars.length)]; - } - return str; -} -function esc(str) { - return JSON.stringify(str); -} -function slugify(input) { - return input.toLowerCase().trim().replace(/[^\w\s-]/g, "").replace(/[\s_-]+/g, "-").replace(/^-+|-+$/g, ""); -} -var captureStackTrace = "captureStackTrace" in Error ? Error.captureStackTrace : (..._args) => { -}; -function isObject(data) { - return typeof data === "object" && data !== null && !Array.isArray(data); -} -var allowsEval = cached(() => { - if (typeof navigator !== "undefined" && navigator?.userAgent?.includes("Cloudflare")) { - return false; - } - try { - const F = Function; - new F(""); - return true; - } catch (_) { - return false; - } -}); -function isPlainObject(o) { - if (isObject(o) === false) - return false; - const ctor = o.constructor; - if (ctor === void 0) - return true; - if (typeof ctor !== "function") - return true; - const prot = ctor.prototype; - if (isObject(prot) === false) - return false; - if (Object.prototype.hasOwnProperty.call(prot, "isPrototypeOf") === false) { - return false; - } - return true; -} -function shallowClone(o) { - if (isPlainObject(o)) - return { ...o }; - if (Array.isArray(o)) - return [...o]; - return o; -} -function numKeys(data) { - let keyCount = 0; - for (const key in data) { - if (Object.prototype.hasOwnProperty.call(data, key)) { - keyCount++; - } - } - return keyCount; -} -var getParsedType = (data) => { - const t = typeof data; - switch (t) { - case "undefined": - return "undefined"; - case "string": - return "string"; - case "number": - return 
Number.isNaN(data) ? "nan" : "number"; - case "boolean": - return "boolean"; - case "function": - return "function"; - case "bigint": - return "bigint"; - case "symbol": - return "symbol"; - case "object": - if (Array.isArray(data)) { - return "array"; - } - if (data === null) { - return "null"; - } - if (data.then && typeof data.then === "function" && data.catch && typeof data.catch === "function") { - return "promise"; - } - if (typeof Map !== "undefined" && data instanceof Map) { - return "map"; - } - if (typeof Set !== "undefined" && data instanceof Set) { - return "set"; - } - if (typeof Date !== "undefined" && data instanceof Date) { - return "date"; - } - if (typeof File !== "undefined" && data instanceof File) { - return "file"; - } - return "object"; - default: - throw new Error(`Unknown data type: ${t}`); - } -}; -var propertyKeyTypes = /* @__PURE__ */ new Set(["string", "number", "symbol"]); -var primitiveTypes = /* @__PURE__ */ new Set(["string", "number", "bigint", "boolean", "symbol", "undefined"]); -function escapeRegex(str) { - return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); -} -function clone(inst, def, params) { - const cl = new inst._zod.constr(def ?? inst._zod.def); - if (!def || params?.parent) - cl._zod.parent = inst; - return cl; -} -function normalizeParams(_params) { - const params = _params; - if (!params) - return {}; - if (typeof params === "string") - return { error: () => params }; - if (params?.message !== void 0) { - if (params?.error !== void 0) - throw new Error("Cannot specify both `message` and `error` params"); - params.error = params.message; - } - delete params.message; - if (typeof params.error === "string") - return { ...params, error: () => params.error }; - return params; -} -function createTransparentProxy(getter) { - let target; - return new Proxy({}, { - get(_, prop, receiver) { - target ?? (target = getter()); - return Reflect.get(target, prop, receiver); - }, - set(_, prop, value, receiver) { - target ?? 
(target = getter()); - return Reflect.set(target, prop, value, receiver); - }, - has(_, prop) { - target ?? (target = getter()); - return Reflect.has(target, prop); - }, - deleteProperty(_, prop) { - target ?? (target = getter()); - return Reflect.deleteProperty(target, prop); - }, - ownKeys(_) { - target ?? (target = getter()); - return Reflect.ownKeys(target); - }, - getOwnPropertyDescriptor(_, prop) { - target ?? (target = getter()); - return Reflect.getOwnPropertyDescriptor(target, prop); - }, - defineProperty(_, prop, descriptor) { - target ?? (target = getter()); - return Reflect.defineProperty(target, prop, descriptor); - } - }); -} -function stringifyPrimitive(value) { - if (typeof value === "bigint") - return value.toString() + "n"; - if (typeof value === "string") - return `"${value}"`; - return `${value}`; -} -function optionalKeys(shape) { - return Object.keys(shape).filter((k) => { - return shape[k]._zod.optin === "optional" && shape[k]._zod.optout === "optional"; - }); -} -var NUMBER_FORMAT_RANGES = { - safeint: [Number.MIN_SAFE_INTEGER, Number.MAX_SAFE_INTEGER], - int32: [-2147483648, 2147483647], - uint32: [0, 4294967295], - float32: [-34028234663852886e22, 34028234663852886e22], - float64: [-Number.MAX_VALUE, Number.MAX_VALUE] -}; -var BIGINT_FORMAT_RANGES = { - int64: [/* @__PURE__ */ BigInt("-9223372036854775808"), /* @__PURE__ */ BigInt("9223372036854775807")], - uint64: [/* @__PURE__ */ BigInt(0), /* @__PURE__ */ BigInt("18446744073709551615")] -}; -function pick(schema, mask) { - const currDef = schema._zod.def; - const checks = currDef.checks; - const hasChecks = checks && checks.length > 0; - if (hasChecks) { - throw new Error(".pick() cannot be used on object schemas containing refinements"); - } - const def = mergeDefs(schema._zod.def, { - get shape() { - const newShape = {}; - for (const key in mask) { - if (!(key in currDef.shape)) { - throw new Error(`Unrecognized key: "${key}"`); - } - if (!mask[key]) - continue; - newShape[key] = 
currDef.shape[key]; - } - assignProp(this, "shape", newShape); - return newShape; - }, - checks: [] - }); - return clone(schema, def); -} -function omit(schema, mask) { - const currDef = schema._zod.def; - const checks = currDef.checks; - const hasChecks = checks && checks.length > 0; - if (hasChecks) { - throw new Error(".omit() cannot be used on object schemas containing refinements"); - } - const def = mergeDefs(schema._zod.def, { - get shape() { - const newShape = { ...schema._zod.def.shape }; - for (const key in mask) { - if (!(key in currDef.shape)) { - throw new Error(`Unrecognized key: "${key}"`); - } - if (!mask[key]) - continue; - delete newShape[key]; - } - assignProp(this, "shape", newShape); - return newShape; - }, - checks: [] - }); - return clone(schema, def); -} -function extend(schema, shape) { - if (!isPlainObject(shape)) { - throw new Error("Invalid input to extend: expected a plain object"); - } - const checks = schema._zod.def.checks; - const hasChecks = checks && checks.length > 0; - if (hasChecks) { - const existingShape = schema._zod.def.shape; - for (const key in shape) { - if (Object.getOwnPropertyDescriptor(existingShape, key) !== void 0) { - throw new Error("Cannot overwrite keys on object schemas containing refinements. 
Use `.safeExtend()` instead."); - } - } - } - const def = mergeDefs(schema._zod.def, { - get shape() { - const _shape = { ...schema._zod.def.shape, ...shape }; - assignProp(this, "shape", _shape); - return _shape; - } - }); - return clone(schema, def); -} -function safeExtend(schema, shape) { - if (!isPlainObject(shape)) { - throw new Error("Invalid input to safeExtend: expected a plain object"); - } - const def = mergeDefs(schema._zod.def, { - get shape() { - const _shape = { ...schema._zod.def.shape, ...shape }; - assignProp(this, "shape", _shape); - return _shape; - } - }); - return clone(schema, def); -} -function merge(a, b) { - const def = mergeDefs(a._zod.def, { - get shape() { - const _shape = { ...a._zod.def.shape, ...b._zod.def.shape }; - assignProp(this, "shape", _shape); - return _shape; - }, - get catchall() { - return b._zod.def.catchall; - }, - checks: [] - // delete existing checks - }); - return clone(a, def); -} -function partial(Class2, schema, mask) { - const currDef = schema._zod.def; - const checks = currDef.checks; - const hasChecks = checks && checks.length > 0; - if (hasChecks) { - throw new Error(".partial() cannot be used on object schemas containing refinements"); - } - const def = mergeDefs(schema._zod.def, { - get shape() { - const oldShape = schema._zod.def.shape; - const shape = { ...oldShape }; - if (mask) { - for (const key in mask) { - if (!(key in oldShape)) { - throw new Error(`Unrecognized key: "${key}"`); - } - if (!mask[key]) - continue; - shape[key] = Class2 ? new Class2({ - type: "optional", - innerType: oldShape[key] - }) : oldShape[key]; - } - } else { - for (const key in oldShape) { - shape[key] = Class2 ? 
new Class2({ - type: "optional", - innerType: oldShape[key] - }) : oldShape[key]; - } - } - assignProp(this, "shape", shape); - return shape; - }, - checks: [] - }); - return clone(schema, def); -} -function required(Class2, schema, mask) { - const def = mergeDefs(schema._zod.def, { - get shape() { - const oldShape = schema._zod.def.shape; - const shape = { ...oldShape }; - if (mask) { - for (const key in mask) { - if (!(key in shape)) { - throw new Error(`Unrecognized key: "${key}"`); - } - if (!mask[key]) - continue; - shape[key] = new Class2({ - type: "nonoptional", - innerType: oldShape[key] - }); - } - } else { - for (const key in oldShape) { - shape[key] = new Class2({ - type: "nonoptional", - innerType: oldShape[key] - }); - } - } - assignProp(this, "shape", shape); - return shape; - } - }); - return clone(schema, def); -} -function aborted(x, startIndex = 0) { - if (x.aborted === true) - return true; - for (let i = startIndex; i < x.issues.length; i++) { - if (x.issues[i]?.continue !== true) { - return true; - } - } - return false; -} -function prefixIssues(path, issues) { - return issues.map((iss) => { - var _a3; - (_a3 = iss).path ?? (_a3.path = []); - iss.path.unshift(path); - return iss; - }); -} -function unwrapMessage(message) { - return typeof message === "string" ? message : message?.message; -} -function finalizeIssue(iss, ctx, config2) { - const full = { ...iss, path: iss.path ?? [] }; - if (!iss.message) { - const message = unwrapMessage(iss.inst?._zod.def?.error?.(iss)) ?? unwrapMessage(ctx?.error?.(iss)) ?? unwrapMessage(config2.customError?.(iss)) ?? unwrapMessage(config2.localeError?.(iss)) ?? 
"Invalid input"; - full.message = message; - } - delete full.inst; - delete full.continue; - if (!ctx?.reportInput) { - delete full.input; - } - return full; -} -function getSizableOrigin(input) { - if (input instanceof Set) - return "set"; - if (input instanceof Map) - return "map"; - if (input instanceof File) - return "file"; - return "unknown"; -} -function getLengthableOrigin(input) { - if (Array.isArray(input)) - return "array"; - if (typeof input === "string") - return "string"; - return "unknown"; -} -function parsedType(data) { - const t = typeof data; - switch (t) { - case "number": { - return Number.isNaN(data) ? "nan" : "number"; - } - case "object": { - if (data === null) { - return "null"; - } - if (Array.isArray(data)) { - return "array"; - } - const obj = data; - if (obj && Object.getPrototypeOf(obj) !== Object.prototype && "constructor" in obj && obj.constructor) { - return obj.constructor.name; - } - } - } - return t; -} -function issue(...args) { - const [iss, input, inst] = args; - if (typeof iss === "string") { - return { - message: iss, - code: "custom", - input, - inst - }; - } - return { ...iss }; -} -function cleanEnum(obj) { - return Object.entries(obj).filter(([k, _]) => { - return Number.isNaN(Number.parseInt(k, 10)); - }).map((el) => el[1]); -} -function base64ToUint8Array(base643) { - const binaryString = atob(base643); - const bytes = new Uint8Array(binaryString.length); - for (let i = 0; i < binaryString.length; i++) { - bytes[i] = binaryString.charCodeAt(i); - } - return bytes; -} -function uint8ArrayToBase64(bytes) { - let binaryString = ""; - for (let i = 0; i < bytes.length; i++) { - binaryString += String.fromCharCode(bytes[i]); - } - return btoa(binaryString); -} -function base64urlToUint8Array(base64url3) { - const base643 = base64url3.replace(/-/g, "+").replace(/_/g, "/"); - const padding = "=".repeat((4 - base643.length % 4) % 4); - return base64ToUint8Array(base643 + padding); -} -function uint8ArrayToBase64url(bytes) { - 
return uint8ArrayToBase64(bytes).replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, ""); -} -function hexToUint8Array(hex3) { - const cleanHex = hex3.replace(/^0x/, ""); - if (cleanHex.length % 2 !== 0) { - throw new Error("Invalid hex string length"); - } - const bytes = new Uint8Array(cleanHex.length / 2); - for (let i = 0; i < cleanHex.length; i += 2) { - bytes[i / 2] = Number.parseInt(cleanHex.slice(i, i + 2), 16); - } - return bytes; -} -function uint8ArrayToHex(bytes) { - return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); -} -var Class = class { - constructor(..._args) { - } -}; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/errors.js -var initializer = (inst, def) => { - inst.name = "$ZodError"; - Object.defineProperty(inst, "_zod", { - value: inst._zod, - enumerable: false - }); - Object.defineProperty(inst, "issues", { - value: def, - enumerable: false - }); - inst.message = JSON.stringify(def, jsonStringifyReplacer, 2); - Object.defineProperty(inst, "toString", { - value: () => inst.message, - enumerable: false - }); -}; -var $ZodError = $constructor("$ZodError", initializer); -var $ZodRealError = $constructor("$ZodError", initializer, { Parent: Error }); -function flattenError(error48, mapper = (issue2) => issue2.message) { - const fieldErrors = {}; - const formErrors = []; - for (const sub of error48.issues) { - if (sub.path.length > 0) { - fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || []; - fieldErrors[sub.path[0]].push(mapper(sub)); - } else { - formErrors.push(mapper(sub)); - } - } - return { formErrors, fieldErrors }; -} -function formatError(error48, mapper = (issue2) => issue2.message) { - const fieldErrors = { _errors: [] }; - const processError = (error49) => { - for (const issue2 of error49.issues) { - if (issue2.code === "invalid_union" && issue2.errors.length) { - issue2.errors.map((issues) => processError({ issues })); - } else if (issue2.code === "invalid_key") { - processError({ 
issues: issue2.issues }); - } else if (issue2.code === "invalid_element") { - processError({ issues: issue2.issues }); - } else if (issue2.path.length === 0) { - fieldErrors._errors.push(mapper(issue2)); - } else { - let curr = fieldErrors; - let i = 0; - while (i < issue2.path.length) { - const el = issue2.path[i]; - const terminal = i === issue2.path.length - 1; - if (!terminal) { - curr[el] = curr[el] || { _errors: [] }; - } else { - curr[el] = curr[el] || { _errors: [] }; - curr[el]._errors.push(mapper(issue2)); - } - curr = curr[el]; - i++; - } - } - } - }; - processError(error48); - return fieldErrors; -} -function treeifyError(error48, mapper = (issue2) => issue2.message) { - const result = { errors: [] }; - const processError = (error49, path = []) => { - var _a3, _b; - for (const issue2 of error49.issues) { - if (issue2.code === "invalid_union" && issue2.errors.length) { - issue2.errors.map((issues) => processError({ issues }, issue2.path)); - } else if (issue2.code === "invalid_key") { - processError({ issues: issue2.issues }, issue2.path); - } else if (issue2.code === "invalid_element") { - processError({ issues: issue2.issues }, issue2.path); - } else { - const fullpath = [...path, ...issue2.path]; - if (fullpath.length === 0) { - result.errors.push(mapper(issue2)); - continue; - } - let curr = result; - let i = 0; - while (i < fullpath.length) { - const el = fullpath[i]; - const terminal = i === fullpath.length - 1; - if (typeof el === "string") { - curr.properties ?? (curr.properties = {}); - (_a3 = curr.properties)[el] ?? (_a3[el] = { errors: [] }); - curr = curr.properties[el]; - } else { - curr.items ?? (curr.items = []); - (_b = curr.items)[el] ?? (_b[el] = { errors: [] }); - curr = curr.items[el]; - } - if (terminal) { - curr.errors.push(mapper(issue2)); - } - i++; - } - } - } - }; - processError(error48); - return result; -} -function toDotPath(_path) { - const segs = []; - const path = _path.map((seg) => typeof seg === "object" ? 
seg.key : seg); - for (const seg of path) { - if (typeof seg === "number") - segs.push(`[${seg}]`); - else if (typeof seg === "symbol") - segs.push(`[${JSON.stringify(String(seg))}]`); - else if (/[^\w$]/.test(seg)) - segs.push(`[${JSON.stringify(seg)}]`); - else { - if (segs.length) - segs.push("."); - segs.push(seg); - } - } - return segs.join(""); -} -function prettifyError(error48) { - const lines = []; - const issues = [...error48.issues].sort((a, b) => (a.path ?? []).length - (b.path ?? []).length); - for (const issue2 of issues) { - lines.push(`\u2716 ${issue2.message}`); - if (issue2.path?.length) - lines.push(` \u2192 at ${toDotPath(issue2.path)}`); - } - return lines.join("\n"); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/parse.js -var _parse = (_Err) => (schema, value, _ctx, _params) => { - const ctx = _ctx ? Object.assign(_ctx, { async: false }) : { async: false }; - const result = schema._zod.run({ value, issues: [] }, ctx); - if (result instanceof Promise) { - throw new $ZodAsyncError(); - } - if (result.issues.length) { - const e = new (_params?.Err ?? _Err)(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))); - captureStackTrace(e, _params?.callee); - throw e; - } - return result.value; -}; -var parse = /* @__PURE__ */ _parse($ZodRealError); -var _parseAsync = (_Err) => async (schema, value, _ctx, params) => { - const ctx = _ctx ? Object.assign(_ctx, { async: true }) : { async: true }; - let result = schema._zod.run({ value, issues: [] }, ctx); - if (result instanceof Promise) - result = await result; - if (result.issues.length) { - const e = new (params?.Err ?? _Err)(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))); - captureStackTrace(e, params?.callee); - throw e; - } - return result.value; -}; -var parseAsync = /* @__PURE__ */ _parseAsync($ZodRealError); -var _safeParse = (_Err) => (schema, value, _ctx) => { - const ctx = _ctx ? 
{ ..._ctx, async: false } : { async: false }; - const result = schema._zod.run({ value, issues: [] }, ctx); - if (result instanceof Promise) { - throw new $ZodAsyncError(); - } - return result.issues.length ? { - success: false, - error: new (_Err ?? $ZodError)(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) - } : { success: true, data: result.value }; -}; -var safeParse = /* @__PURE__ */ _safeParse($ZodRealError); -var _safeParseAsync = (_Err) => async (schema, value, _ctx) => { - const ctx = _ctx ? Object.assign(_ctx, { async: true }) : { async: true }; - let result = schema._zod.run({ value, issues: [] }, ctx); - if (result instanceof Promise) - result = await result; - return result.issues.length ? { - success: false, - error: new _Err(result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) - } : { success: true, data: result.value }; -}; -var safeParseAsync = /* @__PURE__ */ _safeParseAsync($ZodRealError); -var _encode = (_Err) => (schema, value, _ctx) => { - const ctx = _ctx ? Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; - return _parse(_Err)(schema, value, ctx); -}; -var encode = /* @__PURE__ */ _encode($ZodRealError); -var _decode = (_Err) => (schema, value, _ctx) => { - return _parse(_Err)(schema, value, _ctx); -}; -var decode = /* @__PURE__ */ _decode($ZodRealError); -var _encodeAsync = (_Err) => async (schema, value, _ctx) => { - const ctx = _ctx ? Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; - return _parseAsync(_Err)(schema, value, ctx); -}; -var encodeAsync = /* @__PURE__ */ _encodeAsync($ZodRealError); -var _decodeAsync = (_Err) => async (schema, value, _ctx) => { - return _parseAsync(_Err)(schema, value, _ctx); -}; -var decodeAsync = /* @__PURE__ */ _decodeAsync($ZodRealError); -var _safeEncode = (_Err) => (schema, value, _ctx) => { - const ctx = _ctx ? 
Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; - return _safeParse(_Err)(schema, value, ctx); -}; -var safeEncode = /* @__PURE__ */ _safeEncode($ZodRealError); -var _safeDecode = (_Err) => (schema, value, _ctx) => { - return _safeParse(_Err)(schema, value, _ctx); -}; -var safeDecode = /* @__PURE__ */ _safeDecode($ZodRealError); -var _safeEncodeAsync = (_Err) => async (schema, value, _ctx) => { - const ctx = _ctx ? Object.assign(_ctx, { direction: "backward" }) : { direction: "backward" }; - return _safeParseAsync(_Err)(schema, value, ctx); -}; -var safeEncodeAsync = /* @__PURE__ */ _safeEncodeAsync($ZodRealError); -var _safeDecodeAsync = (_Err) => async (schema, value, _ctx) => { - return _safeParseAsync(_Err)(schema, value, _ctx); -}; -var safeDecodeAsync = /* @__PURE__ */ _safeDecodeAsync($ZodRealError); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/regexes.js -var regexes_exports = {}; -__export(regexes_exports, { - base64: () => base64, - base64url: () => base64url, - bigint: () => bigint, - boolean: () => boolean, - browserEmail: () => browserEmail, - cidrv4: () => cidrv4, - cidrv6: () => cidrv6, - cuid: () => cuid, - cuid2: () => cuid2, - date: () => date, - datetime: () => datetime, - domain: () => domain, - duration: () => duration, - e164: () => e164, - email: () => email, - emoji: () => emoji, - extendedDuration: () => extendedDuration, - guid: () => guid, - hex: () => hex, - hostname: () => hostname, - html5Email: () => html5Email, - idnEmail: () => idnEmail, - integer: () => integer, - ipv4: () => ipv4, - ipv6: () => ipv6, - ksuid: () => ksuid, - lowercase: () => lowercase, - mac: () => mac, - md5_base64: () => md5_base64, - md5_base64url: () => md5_base64url, - md5_hex: () => md5_hex, - nanoid: () => nanoid, - null: () => _null, - number: () => number, - rfc5322Email: () => rfc5322Email, - sha1_base64: () => sha1_base64, - sha1_base64url: () => sha1_base64url, - sha1_hex: () => sha1_hex, - sha256_base64: 
() => sha256_base64, - sha256_base64url: () => sha256_base64url, - sha256_hex: () => sha256_hex, - sha384_base64: () => sha384_base64, - sha384_base64url: () => sha384_base64url, - sha384_hex: () => sha384_hex, - sha512_base64: () => sha512_base64, - sha512_base64url: () => sha512_base64url, - sha512_hex: () => sha512_hex, - string: () => string, - time: () => time, - ulid: () => ulid, - undefined: () => _undefined, - unicodeEmail: () => unicodeEmail, - uppercase: () => uppercase, - uuid: () => uuid, - uuid4: () => uuid4, - uuid6: () => uuid6, - uuid7: () => uuid7, - xid: () => xid -}); -var cuid = /^[cC][^\s-]{8,}$/; -var cuid2 = /^[0-9a-z]+$/; -var ulid = /^[0-9A-HJKMNP-TV-Za-hjkmnp-tv-z]{26}$/; -var xid = /^[0-9a-vA-V]{20}$/; -var ksuid = /^[A-Za-z0-9]{27}$/; -var nanoid = /^[a-zA-Z0-9_-]{21}$/; -var duration = /^P(?:(\d+W)|(?!.*W)(?=\d|T\d)(\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+([.,]\d+)?S)?)?)$/; -var extendedDuration = /^[-+]?P(?!$)(?:(?:[-+]?\d+Y)|(?:[-+]?\d+[.,]\d+Y$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:(?:[-+]?\d+W)|(?:[-+]?\d+[.,]\d+W$))?(?:(?:[-+]?\d+D)|(?:[-+]?\d+[.,]\d+D$))?(?:T(?=[\d+-])(?:(?:[-+]?\d+H)|(?:[-+]?\d+[.,]\d+H$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:[-+]?\d+(?:[.,]\d+)?S)?)??$/; -var guid = /^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$/; -var uuid = (version2) => { - if (!version2) - return /^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/; - return new RegExp(`^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-${version2}[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$`); -}; -var uuid4 = /* @__PURE__ */ uuid(4); -var uuid6 = /* @__PURE__ */ uuid(6); -var uuid7 = /* @__PURE__ */ uuid(7); -var email = /^(?!\.)(?!.*\.\.)([A-Za-z0-9_'+\-\.]*)[A-Za-z0-9_+-]@([A-Za-z0-9][A-Za-z0-9\-]*\.)+[A-Za-z]{2,}$/; -var html5Email = 
/^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; -var rfc5322Email = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; -var unicodeEmail = /^[^\s@"]{1,64}@[^\s@]{1,255}$/u; -var idnEmail = unicodeEmail; -var browserEmail = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; -var _emoji = `^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$`; -function emoji() { - return new RegExp(_emoji, "u"); -} -var ipv4 = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/; -var ipv6 = /^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:))$/; -var mac = (delimiter) => { - const escapedDelim = escapeRegex(delimiter ?? 
":"); - return new RegExp(`^(?:[0-9A-F]{2}${escapedDelim}){5}[0-9A-F]{2}$|^(?:[0-9a-f]{2}${escapedDelim}){5}[0-9a-f]{2}$`); -}; -var cidrv4 = /^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\/([0-9]|[1-2][0-9]|3[0-2])$/; -var cidrv6 = /^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|::|([0-9a-fA-F]{1,4})?::([0-9a-fA-F]{1,4}:?){0,6})\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])$/; -var base64 = /^$|^(?:[0-9a-zA-Z+/]{4})*(?:(?:[0-9a-zA-Z+/]{2}==)|(?:[0-9a-zA-Z+/]{3}=))?$/; -var base64url = /^[A-Za-z0-9_-]*$/; -var hostname = /^(?=.{1,253}\.?$)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[-0-9a-zA-Z]{0,61}[0-9a-zA-Z])?)*\.?$/; -var domain = /^([a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$/; -var e164 = /^\+[1-9]\d{6,14}$/; -var dateSource = `(?:(?:\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-(?:(?:0[13578]|1[02])-(?:0[1-9]|[12]\\d|3[01])|(?:0[469]|11)-(?:0[1-9]|[12]\\d|30)|(?:02)-(?:0[1-9]|1\\d|2[0-8])))`; -var date = /* @__PURE__ */ new RegExp(`^${dateSource}$`); -function timeSource(args) { - const hhmm = `(?:[01]\\d|2[0-3]):[0-5]\\d`; - const regex = typeof args.precision === "number" ? args.precision === -1 ? `${hhmm}` : args.precision === 0 ? `${hhmm}:[0-5]\\d` : `${hhmm}:[0-5]\\d\\.\\d{${args.precision}}` : `${hhmm}(?::[0-5]\\d(?:\\.\\d+)?)?`; - return regex; -} -function time(args) { - return new RegExp(`^${timeSource(args)}$`); -} -function datetime(args) { - const time3 = timeSource({ precision: args.precision }); - const opts = ["Z"]; - if (args.local) - opts.push(""); - if (args.offset) - opts.push(`([+-](?:[01]\\d|2[0-3]):[0-5]\\d)`); - const timeRegex = `${time3}(?:${opts.join("|")})`; - return new RegExp(`^${dateSource}T(?:${timeRegex})$`); -} -var string = (params) => { - const regex = params ? `[\\s\\S]{${params?.minimum ?? 0},${params?.maximum ?? 
""}}` : `[\\s\\S]*`; - return new RegExp(`^${regex}$`); -}; -var bigint = /^-?\d+n?$/; -var integer = /^-?\d+$/; -var number = /^-?\d+(?:\.\d+)?$/; -var boolean = /^(?:true|false)$/i; -var _null = /^null$/i; -var _undefined = /^undefined$/i; -var lowercase = /^[^A-Z]*$/; -var uppercase = /^[^a-z]*$/; -var hex = /^[0-9a-fA-F]*$/; -function fixedBase64(bodyLength, padding) { - return new RegExp(`^[A-Za-z0-9+/]{${bodyLength}}${padding}$`); -} -function fixedBase64url(length) { - return new RegExp(`^[A-Za-z0-9_-]{${length}}$`); -} -var md5_hex = /^[0-9a-fA-F]{32}$/; -var md5_base64 = /* @__PURE__ */ fixedBase64(22, "=="); -var md5_base64url = /* @__PURE__ */ fixedBase64url(22); -var sha1_hex = /^[0-9a-fA-F]{40}$/; -var sha1_base64 = /* @__PURE__ */ fixedBase64(27, "="); -var sha1_base64url = /* @__PURE__ */ fixedBase64url(27); -var sha256_hex = /^[0-9a-fA-F]{64}$/; -var sha256_base64 = /* @__PURE__ */ fixedBase64(43, "="); -var sha256_base64url = /* @__PURE__ */ fixedBase64url(43); -var sha384_hex = /^[0-9a-fA-F]{96}$/; -var sha384_base64 = /* @__PURE__ */ fixedBase64(64, ""); -var sha384_base64url = /* @__PURE__ */ fixedBase64url(64); -var sha512_hex = /^[0-9a-fA-F]{128}$/; -var sha512_base64 = /* @__PURE__ */ fixedBase64(86, "=="); -var sha512_base64url = /* @__PURE__ */ fixedBase64url(86); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/checks.js -var $ZodCheck = /* @__PURE__ */ $constructor("$ZodCheck", (inst, def) => { - var _a3; - inst._zod ?? (inst._zod = {}); - inst._zod.def = def; - (_a3 = inst._zod).onattach ?? (_a3.onattach = []); -}); -var numericOriginMap = { - number: "number", - bigint: "bigint", - object: "date" -}; -var $ZodCheckLessThan = /* @__PURE__ */ $constructor("$ZodCheckLessThan", (inst, def) => { - $ZodCheck.init(inst, def); - const origin = numericOriginMap[typeof def.value]; - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - const curr = (def.inclusive ? bag.maximum : bag.exclusiveMaximum) ?? 
Number.POSITIVE_INFINITY; - if (def.value < curr) { - if (def.inclusive) - bag.maximum = def.value; - else - bag.exclusiveMaximum = def.value; - } - }); - inst._zod.check = (payload) => { - if (def.inclusive ? payload.value <= def.value : payload.value < def.value) { - return; - } - payload.issues.push({ - origin, - code: "too_big", - maximum: typeof def.value === "object" ? def.value.getTime() : def.value, - input: payload.value, - inclusive: def.inclusive, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckGreaterThan = /* @__PURE__ */ $constructor("$ZodCheckGreaterThan", (inst, def) => { - $ZodCheck.init(inst, def); - const origin = numericOriginMap[typeof def.value]; - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - const curr = (def.inclusive ? bag.minimum : bag.exclusiveMinimum) ?? Number.NEGATIVE_INFINITY; - if (def.value > curr) { - if (def.inclusive) - bag.minimum = def.value; - else - bag.exclusiveMinimum = def.value; - } - }); - inst._zod.check = (payload) => { - if (def.inclusive ? payload.value >= def.value : payload.value > def.value) { - return; - } - payload.issues.push({ - origin, - code: "too_small", - minimum: typeof def.value === "object" ? def.value.getTime() : def.value, - input: payload.value, - inclusive: def.inclusive, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckMultipleOf = /* @__PURE__ */ $constructor("$ZodCheckMultipleOf", (inst, def) => { - $ZodCheck.init(inst, def); - inst._zod.onattach.push((inst2) => { - var _a3; - (_a3 = inst2._zod.bag).multipleOf ?? (_a3.multipleOf = def.value); - }); - inst._zod.check = (payload) => { - if (typeof payload.value !== typeof def.value) - throw new Error("Cannot mix number and bigint in multiple_of check."); - const isMultiple = typeof payload.value === "bigint" ? 
payload.value % def.value === BigInt(0) : floatSafeRemainder(payload.value, def.value) === 0; - if (isMultiple) - return; - payload.issues.push({ - origin: typeof payload.value, - code: "not_multiple_of", - divisor: def.value, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckNumberFormat = /* @__PURE__ */ $constructor("$ZodCheckNumberFormat", (inst, def) => { - $ZodCheck.init(inst, def); - def.format = def.format || "float64"; - const isInt = def.format?.includes("int"); - const origin = isInt ? "int" : "number"; - const [minimum, maximum] = NUMBER_FORMAT_RANGES[def.format]; - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.format = def.format; - bag.minimum = minimum; - bag.maximum = maximum; - if (isInt) - bag.pattern = integer; - }); - inst._zod.check = (payload) => { - const input = payload.value; - if (isInt) { - if (!Number.isInteger(input)) { - payload.issues.push({ - expected: origin, - format: def.format, - code: "invalid_type", - continue: false, - input, - inst - }); - return; - } - if (!Number.isSafeInteger(input)) { - if (input > 0) { - payload.issues.push({ - input, - code: "too_big", - maximum: Number.MAX_SAFE_INTEGER, - note: "Integers must be within the safe integer range.", - inst, - origin, - inclusive: true, - continue: !def.abort - }); - } else { - payload.issues.push({ - input, - code: "too_small", - minimum: Number.MIN_SAFE_INTEGER, - note: "Integers must be within the safe integer range.", - inst, - origin, - inclusive: true, - continue: !def.abort - }); - } - return; - } - } - if (input < minimum) { - payload.issues.push({ - origin: "number", - input, - code: "too_small", - minimum, - inclusive: true, - inst, - continue: !def.abort - }); - } - if (input > maximum) { - payload.issues.push({ - origin: "number", - input, - code: "too_big", - maximum, - inclusive: true, - inst, - continue: !def.abort - }); - } - }; -}); -var $ZodCheckBigIntFormat = /* @__PURE__ */ 
$constructor("$ZodCheckBigIntFormat", (inst, def) => { - $ZodCheck.init(inst, def); - const [minimum, maximum] = BIGINT_FORMAT_RANGES[def.format]; - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.format = def.format; - bag.minimum = minimum; - bag.maximum = maximum; - }); - inst._zod.check = (payload) => { - const input = payload.value; - if (input < minimum) { - payload.issues.push({ - origin: "bigint", - input, - code: "too_small", - minimum, - inclusive: true, - inst, - continue: !def.abort - }); - } - if (input > maximum) { - payload.issues.push({ - origin: "bigint", - input, - code: "too_big", - maximum, - inclusive: true, - inst, - continue: !def.abort - }); - } - }; -}); -var $ZodCheckMaxSize = /* @__PURE__ */ $constructor("$ZodCheckMaxSize", (inst, def) => { - var _a3; - $ZodCheck.init(inst, def); - (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { - const val = payload.value; - return !nullish(val) && val.size !== void 0; - }); - inst._zod.onattach.push((inst2) => { - const curr = inst2._zod.bag.maximum ?? Number.POSITIVE_INFINITY; - if (def.maximum < curr) - inst2._zod.bag.maximum = def.maximum; - }); - inst._zod.check = (payload) => { - const input = payload.value; - const size = input.size; - if (size <= def.maximum) - return; - payload.issues.push({ - origin: getSizableOrigin(input), - code: "too_big", - maximum: def.maximum, - inclusive: true, - input, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckMinSize = /* @__PURE__ */ $constructor("$ZodCheckMinSize", (inst, def) => { - var _a3; - $ZodCheck.init(inst, def); - (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { - const val = payload.value; - return !nullish(val) && val.size !== void 0; - }); - inst._zod.onattach.push((inst2) => { - const curr = inst2._zod.bag.minimum ?? 
Number.NEGATIVE_INFINITY; - if (def.minimum > curr) - inst2._zod.bag.minimum = def.minimum; - }); - inst._zod.check = (payload) => { - const input = payload.value; - const size = input.size; - if (size >= def.minimum) - return; - payload.issues.push({ - origin: getSizableOrigin(input), - code: "too_small", - minimum: def.minimum, - inclusive: true, - input, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckSizeEquals = /* @__PURE__ */ $constructor("$ZodCheckSizeEquals", (inst, def) => { - var _a3; - $ZodCheck.init(inst, def); - (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { - const val = payload.value; - return !nullish(val) && val.size !== void 0; - }); - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.minimum = def.size; - bag.maximum = def.size; - bag.size = def.size; - }); - inst._zod.check = (payload) => { - const input = payload.value; - const size = input.size; - if (size === def.size) - return; - const tooBig = size > def.size; - payload.issues.push({ - origin: getSizableOrigin(input), - ...tooBig ? { code: "too_big", maximum: def.size } : { code: "too_small", minimum: def.size }, - inclusive: true, - exact: true, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckMaxLength = /* @__PURE__ */ $constructor("$ZodCheckMaxLength", (inst, def) => { - var _a3; - $ZodCheck.init(inst, def); - (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { - const val = payload.value; - return !nullish(val) && val.length !== void 0; - }); - inst._zod.onattach.push((inst2) => { - const curr = inst2._zod.bag.maximum ?? 
Number.POSITIVE_INFINITY; - if (def.maximum < curr) - inst2._zod.bag.maximum = def.maximum; - }); - inst._zod.check = (payload) => { - const input = payload.value; - const length = input.length; - if (length <= def.maximum) - return; - const origin = getLengthableOrigin(input); - payload.issues.push({ - origin, - code: "too_big", - maximum: def.maximum, - inclusive: true, - input, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckMinLength = /* @__PURE__ */ $constructor("$ZodCheckMinLength", (inst, def) => { - var _a3; - $ZodCheck.init(inst, def); - (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { - const val = payload.value; - return !nullish(val) && val.length !== void 0; - }); - inst._zod.onattach.push((inst2) => { - const curr = inst2._zod.bag.minimum ?? Number.NEGATIVE_INFINITY; - if (def.minimum > curr) - inst2._zod.bag.minimum = def.minimum; - }); - inst._zod.check = (payload) => { - const input = payload.value; - const length = input.length; - if (length >= def.minimum) - return; - const origin = getLengthableOrigin(input); - payload.issues.push({ - origin, - code: "too_small", - minimum: def.minimum, - inclusive: true, - input, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckLengthEquals = /* @__PURE__ */ $constructor("$ZodCheckLengthEquals", (inst, def) => { - var _a3; - $ZodCheck.init(inst, def); - (_a3 = inst._zod.def).when ?? (_a3.when = (payload) => { - const val = payload.value; - return !nullish(val) && val.length !== void 0; - }); - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.minimum = def.length; - bag.maximum = def.length; - bag.length = def.length; - }); - inst._zod.check = (payload) => { - const input = payload.value; - const length = input.length; - if (length === def.length) - return; - const origin = getLengthableOrigin(input); - const tooBig = length > def.length; - payload.issues.push({ - origin, - ...tooBig ? 
{ code: "too_big", maximum: def.length } : { code: "too_small", minimum: def.length }, - inclusive: true, - exact: true, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckStringFormat = /* @__PURE__ */ $constructor("$ZodCheckStringFormat", (inst, def) => { - var _a3, _b; - $ZodCheck.init(inst, def); - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.format = def.format; - if (def.pattern) { - bag.patterns ?? (bag.patterns = /* @__PURE__ */ new Set()); - bag.patterns.add(def.pattern); - } - }); - if (def.pattern) - (_a3 = inst._zod).check ?? (_a3.check = (payload) => { - def.pattern.lastIndex = 0; - if (def.pattern.test(payload.value)) - return; - payload.issues.push({ - origin: "string", - code: "invalid_format", - format: def.format, - input: payload.value, - ...def.pattern ? { pattern: def.pattern.toString() } : {}, - inst, - continue: !def.abort - }); - }); - else - (_b = inst._zod).check ?? (_b.check = () => { - }); -}); -var $ZodCheckRegex = /* @__PURE__ */ $constructor("$ZodCheckRegex", (inst, def) => { - $ZodCheckStringFormat.init(inst, def); - inst._zod.check = (payload) => { - def.pattern.lastIndex = 0; - if (def.pattern.test(payload.value)) - return; - payload.issues.push({ - origin: "string", - code: "invalid_format", - format: "regex", - input: payload.value, - pattern: def.pattern.toString(), - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckLowerCase = /* @__PURE__ */ $constructor("$ZodCheckLowerCase", (inst, def) => { - def.pattern ?? (def.pattern = lowercase); - $ZodCheckStringFormat.init(inst, def); -}); -var $ZodCheckUpperCase = /* @__PURE__ */ $constructor("$ZodCheckUpperCase", (inst, def) => { - def.pattern ?? 
(def.pattern = uppercase); - $ZodCheckStringFormat.init(inst, def); -}); -var $ZodCheckIncludes = /* @__PURE__ */ $constructor("$ZodCheckIncludes", (inst, def) => { - $ZodCheck.init(inst, def); - const escapedRegex = escapeRegex(def.includes); - const pattern = new RegExp(typeof def.position === "number" ? `^.{${def.position}}${escapedRegex}` : escapedRegex); - def.pattern = pattern; - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.patterns ?? (bag.patterns = /* @__PURE__ */ new Set()); - bag.patterns.add(pattern); - }); - inst._zod.check = (payload) => { - if (payload.value.includes(def.includes, def.position)) - return; - payload.issues.push({ - origin: "string", - code: "invalid_format", - format: "includes", - includes: def.includes, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckStartsWith = /* @__PURE__ */ $constructor("$ZodCheckStartsWith", (inst, def) => { - $ZodCheck.init(inst, def); - const pattern = new RegExp(`^${escapeRegex(def.prefix)}.*`); - def.pattern ?? (def.pattern = pattern); - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.patterns ?? (bag.patterns = /* @__PURE__ */ new Set()); - bag.patterns.add(pattern); - }); - inst._zod.check = (payload) => { - if (payload.value.startsWith(def.prefix)) - return; - payload.issues.push({ - origin: "string", - code: "invalid_format", - format: "starts_with", - prefix: def.prefix, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckEndsWith = /* @__PURE__ */ $constructor("$ZodCheckEndsWith", (inst, def) => { - $ZodCheck.init(inst, def); - const pattern = new RegExp(`.*${escapeRegex(def.suffix)}$`); - def.pattern ?? (def.pattern = pattern); - inst._zod.onattach.push((inst2) => { - const bag = inst2._zod.bag; - bag.patterns ?? 
(bag.patterns = /* @__PURE__ */ new Set()); - bag.patterns.add(pattern); - }); - inst._zod.check = (payload) => { - if (payload.value.endsWith(def.suffix)) - return; - payload.issues.push({ - origin: "string", - code: "invalid_format", - format: "ends_with", - suffix: def.suffix, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -function handleCheckPropertyResult(result, payload, property) { - if (result.issues.length) { - payload.issues.push(...prefixIssues(property, result.issues)); - } -} -var $ZodCheckProperty = /* @__PURE__ */ $constructor("$ZodCheckProperty", (inst, def) => { - $ZodCheck.init(inst, def); - inst._zod.check = (payload) => { - const result = def.schema._zod.run({ - value: payload.value[def.property], - issues: [] - }, {}); - if (result instanceof Promise) { - return result.then((result2) => handleCheckPropertyResult(result2, payload, def.property)); - } - handleCheckPropertyResult(result, payload, def.property); - return; - }; -}); -var $ZodCheckMimeType = /* @__PURE__ */ $constructor("$ZodCheckMimeType", (inst, def) => { - $ZodCheck.init(inst, def); - const mimeSet = new Set(def.mime); - inst._zod.onattach.push((inst2) => { - inst2._zod.bag.mime = def.mime; - }); - inst._zod.check = (payload) => { - if (mimeSet.has(payload.value.type)) - return; - payload.issues.push({ - code: "invalid_value", - values: def.mime, - input: payload.value.type, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCheckOverwrite = /* @__PURE__ */ $constructor("$ZodCheckOverwrite", (inst, def) => { - $ZodCheck.init(inst, def); - inst._zod.check = (payload) => { - payload.value = def.tx(payload.value); - }; -}); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/doc.js -var Doc = class { - constructor(args = []) { - this.content = []; - this.indent = 0; - if (this) - this.args = args; - } - indented(fn) { - this.indent += 1; - fn(this); - this.indent -= 1; - } - write(arg) { - if (typeof arg === "function") { - arg(this, { 
execution: "sync" }); - arg(this, { execution: "async" }); - return; - } - const content = arg; - const lines = content.split("\n").filter((x) => x); - const minIndent = Math.min(...lines.map((x) => x.length - x.trimStart().length)); - const dedented = lines.map((x) => x.slice(minIndent)).map((x) => " ".repeat(this.indent * 2) + x); - for (const line of dedented) { - this.content.push(line); - } - } - compile() { - const F = Function; - const args = this?.args; - const content = this?.content ?? [``]; - const lines = [...content.map((x) => ` ${x}`)]; - return new F(...args, lines.join("\n")); - } -}; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/versions.js -var version = { - major: 4, - minor: 3, - patch: 6 -}; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/schemas.js -var $ZodType = /* @__PURE__ */ $constructor("$ZodType", (inst, def) => { - var _a3; - inst ?? (inst = {}); - inst._zod.def = def; - inst._zod.bag = inst._zod.bag || {}; - inst._zod.version = version; - const checks = [...inst._zod.def.checks ?? []]; - if (inst._zod.traits.has("$ZodCheck")) { - checks.unshift(inst); - } - for (const ch of checks) { - for (const fn of ch._zod.onattach) { - fn(inst); - } - } - if (checks.length === 0) { - (_a3 = inst._zod).deferred ?? (_a3.deferred = []); - inst._zod.deferred?.push(() => { - inst._zod.run = inst._zod.parse; - }); - } else { - const runChecks = (payload, checks2, ctx) => { - let isAborted = aborted(payload); - let asyncResult; - for (const ch of checks2) { - if (ch._zod.def.when) { - const shouldRun = ch._zod.def.when(payload); - if (!shouldRun) - continue; - } else if (isAborted) { - continue; - } - const currLen = payload.issues.length; - const _ = ch._zod.check(payload); - if (_ instanceof Promise && ctx?.async === false) { - throw new $ZodAsyncError(); - } - if (asyncResult || _ instanceof Promise) { - asyncResult = (asyncResult ?? 
Promise.resolve()).then(async () => { - await _; - const nextLen = payload.issues.length; - if (nextLen === currLen) - return; - if (!isAborted) - isAborted = aborted(payload, currLen); - }); - } else { - const nextLen = payload.issues.length; - if (nextLen === currLen) - continue; - if (!isAborted) - isAborted = aborted(payload, currLen); - } - } - if (asyncResult) { - return asyncResult.then(() => { - return payload; - }); - } - return payload; - }; - const handleCanaryResult = (canary, payload, ctx) => { - if (aborted(canary)) { - canary.aborted = true; - return canary; - } - const checkResult = runChecks(payload, checks, ctx); - if (checkResult instanceof Promise) { - if (ctx.async === false) - throw new $ZodAsyncError(); - return checkResult.then((checkResult2) => inst._zod.parse(checkResult2, ctx)); - } - return inst._zod.parse(checkResult, ctx); - }; - inst._zod.run = (payload, ctx) => { - if (ctx.skipChecks) { - return inst._zod.parse(payload, ctx); - } - if (ctx.direction === "backward") { - const canary = inst._zod.parse({ value: payload.value, issues: [] }, { ...ctx, skipChecks: true }); - if (canary instanceof Promise) { - return canary.then((canary2) => { - return handleCanaryResult(canary2, payload, ctx); - }); - } - return handleCanaryResult(canary, payload, ctx); - } - const result = inst._zod.parse(payload, ctx); - if (result instanceof Promise) { - if (ctx.async === false) - throw new $ZodAsyncError(); - return result.then((result2) => runChecks(result2, checks, ctx)); - } - return runChecks(result, checks, ctx); - }; - } - defineLazy(inst, "~standard", () => ({ - validate: (value) => { - try { - const r = safeParse(inst, value); - return r.success ? { value: r.data } : { issues: r.error?.issues }; - } catch (_) { - return safeParseAsync(inst, value).then((r) => r.success ? 
{ value: r.data } : { issues: r.error?.issues }); - } - }, - vendor: "zod", - version: 1 - })); -}); -var $ZodString = /* @__PURE__ */ $constructor("$ZodString", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.pattern = [...inst?._zod.bag?.patterns ?? []].pop() ?? string(inst._zod.bag); - inst._zod.parse = (payload, _) => { - if (def.coerce) - try { - payload.value = String(payload.value); - } catch (_2) { - } - if (typeof payload.value === "string") - return payload; - payload.issues.push({ - expected: "string", - code: "invalid_type", - input: payload.value, - inst - }); - return payload; - }; -}); -var $ZodStringFormat = /* @__PURE__ */ $constructor("$ZodStringFormat", (inst, def) => { - $ZodCheckStringFormat.init(inst, def); - $ZodString.init(inst, def); -}); -var $ZodGUID = /* @__PURE__ */ $constructor("$ZodGUID", (inst, def) => { - def.pattern ?? (def.pattern = guid); - $ZodStringFormat.init(inst, def); -}); -var $ZodUUID = /* @__PURE__ */ $constructor("$ZodUUID", (inst, def) => { - if (def.version) { - const versionMap = { - v1: 1, - v2: 2, - v3: 3, - v4: 4, - v5: 5, - v6: 6, - v7: 7, - v8: 8 - }; - const v = versionMap[def.version]; - if (v === void 0) - throw new Error(`Invalid UUID version: "${def.version}"`); - def.pattern ?? (def.pattern = uuid(v)); - } else - def.pattern ?? (def.pattern = uuid()); - $ZodStringFormat.init(inst, def); -}); -var $ZodEmail = /* @__PURE__ */ $constructor("$ZodEmail", (inst, def) => { - def.pattern ?? 
(def.pattern = email); - $ZodStringFormat.init(inst, def); -}); -var $ZodURL = /* @__PURE__ */ $constructor("$ZodURL", (inst, def) => { - $ZodStringFormat.init(inst, def); - inst._zod.check = (payload) => { - try { - const trimmed = payload.value.trim(); - const url2 = new URL(trimmed); - if (def.hostname) { - def.hostname.lastIndex = 0; - if (!def.hostname.test(url2.hostname)) { - payload.issues.push({ - code: "invalid_format", - format: "url", - note: "Invalid hostname", - pattern: def.hostname.source, - input: payload.value, - inst, - continue: !def.abort - }); - } - } - if (def.protocol) { - def.protocol.lastIndex = 0; - if (!def.protocol.test(url2.protocol.endsWith(":") ? url2.protocol.slice(0, -1) : url2.protocol)) { - payload.issues.push({ - code: "invalid_format", - format: "url", - note: "Invalid protocol", - pattern: def.protocol.source, - input: payload.value, - inst, - continue: !def.abort - }); - } - } - if (def.normalize) { - payload.value = url2.href; - } else { - payload.value = trimmed; - } - return; - } catch (_) { - payload.issues.push({ - code: "invalid_format", - format: "url", - input: payload.value, - inst, - continue: !def.abort - }); - } - }; -}); -var $ZodEmoji = /* @__PURE__ */ $constructor("$ZodEmoji", (inst, def) => { - def.pattern ?? (def.pattern = emoji()); - $ZodStringFormat.init(inst, def); -}); -var $ZodNanoID = /* @__PURE__ */ $constructor("$ZodNanoID", (inst, def) => { - def.pattern ?? (def.pattern = nanoid); - $ZodStringFormat.init(inst, def); -}); -var $ZodCUID = /* @__PURE__ */ $constructor("$ZodCUID", (inst, def) => { - def.pattern ?? (def.pattern = cuid); - $ZodStringFormat.init(inst, def); -}); -var $ZodCUID2 = /* @__PURE__ */ $constructor("$ZodCUID2", (inst, def) => { - def.pattern ?? (def.pattern = cuid2); - $ZodStringFormat.init(inst, def); -}); -var $ZodULID = /* @__PURE__ */ $constructor("$ZodULID", (inst, def) => { - def.pattern ?? 
(def.pattern = ulid); - $ZodStringFormat.init(inst, def); -}); -var $ZodXID = /* @__PURE__ */ $constructor("$ZodXID", (inst, def) => { - def.pattern ?? (def.pattern = xid); - $ZodStringFormat.init(inst, def); -}); -var $ZodKSUID = /* @__PURE__ */ $constructor("$ZodKSUID", (inst, def) => { - def.pattern ?? (def.pattern = ksuid); - $ZodStringFormat.init(inst, def); -}); -var $ZodISODateTime = /* @__PURE__ */ $constructor("$ZodISODateTime", (inst, def) => { - def.pattern ?? (def.pattern = datetime(def)); - $ZodStringFormat.init(inst, def); -}); -var $ZodISODate = /* @__PURE__ */ $constructor("$ZodISODate", (inst, def) => { - def.pattern ?? (def.pattern = date); - $ZodStringFormat.init(inst, def); -}); -var $ZodISOTime = /* @__PURE__ */ $constructor("$ZodISOTime", (inst, def) => { - def.pattern ?? (def.pattern = time(def)); - $ZodStringFormat.init(inst, def); -}); -var $ZodISODuration = /* @__PURE__ */ $constructor("$ZodISODuration", (inst, def) => { - def.pattern ?? (def.pattern = duration); - $ZodStringFormat.init(inst, def); -}); -var $ZodIPv4 = /* @__PURE__ */ $constructor("$ZodIPv4", (inst, def) => { - def.pattern ?? (def.pattern = ipv4); - $ZodStringFormat.init(inst, def); - inst._zod.bag.format = `ipv4`; -}); -var $ZodIPv6 = /* @__PURE__ */ $constructor("$ZodIPv6", (inst, def) => { - def.pattern ?? (def.pattern = ipv6); - $ZodStringFormat.init(inst, def); - inst._zod.bag.format = `ipv6`; - inst._zod.check = (payload) => { - try { - new URL(`http://[${payload.value}]`); - } catch { - payload.issues.push({ - code: "invalid_format", - format: "ipv6", - input: payload.value, - inst, - continue: !def.abort - }); - } - }; -}); -var $ZodMAC = /* @__PURE__ */ $constructor("$ZodMAC", (inst, def) => { - def.pattern ?? (def.pattern = mac(def.delimiter)); - $ZodStringFormat.init(inst, def); - inst._zod.bag.format = `mac`; -}); -var $ZodCIDRv4 = /* @__PURE__ */ $constructor("$ZodCIDRv4", (inst, def) => { - def.pattern ?? 
(def.pattern = cidrv4); - $ZodStringFormat.init(inst, def); -}); -var $ZodCIDRv6 = /* @__PURE__ */ $constructor("$ZodCIDRv6", (inst, def) => { - def.pattern ?? (def.pattern = cidrv6); - $ZodStringFormat.init(inst, def); - inst._zod.check = (payload) => { - const parts = payload.value.split("/"); - try { - if (parts.length !== 2) - throw new Error(); - const [address, prefix] = parts; - if (!prefix) - throw new Error(); - const prefixNum = Number(prefix); - if (`${prefixNum}` !== prefix) - throw new Error(); - if (prefixNum < 0 || prefixNum > 128) - throw new Error(); - new URL(`http://[${address}]`); - } catch { - payload.issues.push({ - code: "invalid_format", - format: "cidrv6", - input: payload.value, - inst, - continue: !def.abort - }); - } - }; -}); -function isValidBase64(data) { - if (data === "") - return true; - if (data.length % 4 !== 0) - return false; - try { - atob(data); - return true; - } catch { - return false; - } -} -var $ZodBase64 = /* @__PURE__ */ $constructor("$ZodBase64", (inst, def) => { - def.pattern ?? (def.pattern = base64); - $ZodStringFormat.init(inst, def); - inst._zod.bag.contentEncoding = "base64"; - inst._zod.check = (payload) => { - if (isValidBase64(payload.value)) - return; - payload.issues.push({ - code: "invalid_format", - format: "base64", - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -function isValidBase64URL(data) { - if (!base64url.test(data)) - return false; - const base643 = data.replace(/[-_]/g, (c) => c === "-" ? "+" : "/"); - const padded = base643.padEnd(Math.ceil(base643.length / 4) * 4, "="); - return isValidBase64(padded); -} -var $ZodBase64URL = /* @__PURE__ */ $constructor("$ZodBase64URL", (inst, def) => { - def.pattern ?? 
(def.pattern = base64url); - $ZodStringFormat.init(inst, def); - inst._zod.bag.contentEncoding = "base64url"; - inst._zod.check = (payload) => { - if (isValidBase64URL(payload.value)) - return; - payload.issues.push({ - code: "invalid_format", - format: "base64url", - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodE164 = /* @__PURE__ */ $constructor("$ZodE164", (inst, def) => { - def.pattern ?? (def.pattern = e164); - $ZodStringFormat.init(inst, def); -}); -function isValidJWT(token, algorithm = null) { - try { - const tokensParts = token.split("."); - if (tokensParts.length !== 3) - return false; - const [header] = tokensParts; - if (!header) - return false; - const parsedHeader = JSON.parse(atob(header)); - if ("typ" in parsedHeader && parsedHeader?.typ !== "JWT") - return false; - if (!parsedHeader.alg) - return false; - if (algorithm && (!("alg" in parsedHeader) || parsedHeader.alg !== algorithm)) - return false; - return true; - } catch { - return false; - } -} -var $ZodJWT = /* @__PURE__ */ $constructor("$ZodJWT", (inst, def) => { - $ZodStringFormat.init(inst, def); - inst._zod.check = (payload) => { - if (isValidJWT(payload.value, def.alg)) - return; - payload.issues.push({ - code: "invalid_format", - format: "jwt", - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodCustomStringFormat = /* @__PURE__ */ $constructor("$ZodCustomStringFormat", (inst, def) => { - $ZodStringFormat.init(inst, def); - inst._zod.check = (payload) => { - if (def.fn(payload.value)) - return; - payload.issues.push({ - code: "invalid_format", - format: def.format, - input: payload.value, - inst, - continue: !def.abort - }); - }; -}); -var $ZodNumber = /* @__PURE__ */ $constructor("$ZodNumber", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.pattern = inst._zod.bag.pattern ?? 
number; - inst._zod.parse = (payload, _ctx) => { - if (def.coerce) - try { - payload.value = Number(payload.value); - } catch (_) { - } - const input = payload.value; - if (typeof input === "number" && !Number.isNaN(input) && Number.isFinite(input)) { - return payload; - } - const received = typeof input === "number" ? Number.isNaN(input) ? "NaN" : !Number.isFinite(input) ? "Infinity" : void 0 : void 0; - payload.issues.push({ - expected: "number", - code: "invalid_type", - input, - inst, - ...received ? { received } : {} - }); - return payload; - }; -}); -var $ZodNumberFormat = /* @__PURE__ */ $constructor("$ZodNumberFormat", (inst, def) => { - $ZodCheckNumberFormat.init(inst, def); - $ZodNumber.init(inst, def); -}); -var $ZodBoolean = /* @__PURE__ */ $constructor("$ZodBoolean", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.pattern = boolean; - inst._zod.parse = (payload, _ctx) => { - if (def.coerce) - try { - payload.value = Boolean(payload.value); - } catch (_) { - } - const input = payload.value; - if (typeof input === "boolean") - return payload; - payload.issues.push({ - expected: "boolean", - code: "invalid_type", - input, - inst - }); - return payload; - }; -}); -var $ZodBigInt = /* @__PURE__ */ $constructor("$ZodBigInt", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.pattern = bigint; - inst._zod.parse = (payload, _ctx) => { - if (def.coerce) - try { - payload.value = BigInt(payload.value); - } catch (_) { - } - if (typeof payload.value === "bigint") - return payload; - payload.issues.push({ - expected: "bigint", - code: "invalid_type", - input: payload.value, - inst - }); - return payload; - }; -}); -var $ZodBigIntFormat = /* @__PURE__ */ $constructor("$ZodBigIntFormat", (inst, def) => { - $ZodCheckBigIntFormat.init(inst, def); - $ZodBigInt.init(inst, def); -}); -var $ZodSymbol = /* @__PURE__ */ $constructor("$ZodSymbol", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, _ctx) => { - const input = 
payload.value; - if (typeof input === "symbol") - return payload; - payload.issues.push({ - expected: "symbol", - code: "invalid_type", - input, - inst - }); - return payload; - }; -}); -var $ZodUndefined = /* @__PURE__ */ $constructor("$ZodUndefined", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.pattern = _undefined; - inst._zod.values = /* @__PURE__ */ new Set([void 0]); - inst._zod.optin = "optional"; - inst._zod.optout = "optional"; - inst._zod.parse = (payload, _ctx) => { - const input = payload.value; - if (typeof input === "undefined") - return payload; - payload.issues.push({ - expected: "undefined", - code: "invalid_type", - input, - inst - }); - return payload; - }; -}); -var $ZodNull = /* @__PURE__ */ $constructor("$ZodNull", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.pattern = _null; - inst._zod.values = /* @__PURE__ */ new Set([null]); - inst._zod.parse = (payload, _ctx) => { - const input = payload.value; - if (input === null) - return payload; - payload.issues.push({ - expected: "null", - code: "invalid_type", - input, - inst - }); - return payload; - }; -}); -var $ZodAny = /* @__PURE__ */ $constructor("$ZodAny", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload) => payload; -}); -var $ZodUnknown = /* @__PURE__ */ $constructor("$ZodUnknown", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload) => payload; -}); -var $ZodNever = /* @__PURE__ */ $constructor("$ZodNever", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, _ctx) => { - payload.issues.push({ - expected: "never", - code: "invalid_type", - input: payload.value, - inst - }); - return payload; - }; -}); -var $ZodVoid = /* @__PURE__ */ $constructor("$ZodVoid", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, _ctx) => { - const input = payload.value; - if (typeof input === "undefined") - return payload; - payload.issues.push({ - expected: "void", - code: "invalid_type", - 
input, - inst - }); - return payload; - }; -}); -var $ZodDate = /* @__PURE__ */ $constructor("$ZodDate", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, _ctx) => { - if (def.coerce) { - try { - payload.value = new Date(payload.value); - } catch (_err) { - } - } - const input = payload.value; - const isDate = input instanceof Date; - const isValidDate = isDate && !Number.isNaN(input.getTime()); - if (isValidDate) - return payload; - payload.issues.push({ - expected: "date", - code: "invalid_type", - input, - ...isDate ? { received: "Invalid Date" } : {}, - inst - }); - return payload; - }; -}); -function handleArrayResult(result, final, index) { - if (result.issues.length) { - final.issues.push(...prefixIssues(index, result.issues)); - } - final.value[index] = result.value; -} -var $ZodArray = /* @__PURE__ */ $constructor("$ZodArray", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - if (!Array.isArray(input)) { - payload.issues.push({ - expected: "array", - code: "invalid_type", - input, - inst - }); - return payload; - } - payload.value = Array(input.length); - const proms = []; - for (let i = 0; i < input.length; i++) { - const item = input[i]; - const result = def.element._zod.run({ - value: item, - issues: [] - }, ctx); - if (result instanceof Promise) { - proms.push(result.then((result2) => handleArrayResult(result2, payload, i))); - } else { - handleArrayResult(result, payload, i); - } - } - if (proms.length) { - return Promise.all(proms).then(() => payload); - } - return payload; - }; -}); -function handlePropertyResult(result, final, key, input, isOptionalOut) { - if (result.issues.length) { - if (isOptionalOut && !(key in input)) { - return; - } - final.issues.push(...prefixIssues(key, result.issues)); - } - if (result.value === void 0) { - if (key in input) { - final.value[key] = void 0; - } - } else { - final.value[key] = result.value; - } -} -function 
normalizeDef(def) { - const keys = Object.keys(def.shape); - for (const k of keys) { - if (!def.shape?.[k]?._zod?.traits?.has("$ZodType")) { - throw new Error(`Invalid element at key "${k}": expected a Zod schema`); - } - } - const okeys = optionalKeys(def.shape); - return { - ...def, - keys, - keySet: new Set(keys), - numKeys: keys.length, - optionalKeys: new Set(okeys) - }; -} -function handleCatchall(proms, input, payload, ctx, def, inst) { - const unrecognized = []; - const keySet = def.keySet; - const _catchall = def.catchall._zod; - const t = _catchall.def.type; - const isOptionalOut = _catchall.optout === "optional"; - for (const key in input) { - if (keySet.has(key)) - continue; - if (t === "never") { - unrecognized.push(key); - continue; - } - const r = _catchall.run({ value: input[key], issues: [] }, ctx); - if (r instanceof Promise) { - proms.push(r.then((r2) => handlePropertyResult(r2, payload, key, input, isOptionalOut))); - } else { - handlePropertyResult(r, payload, key, input, isOptionalOut); - } - } - if (unrecognized.length) { - payload.issues.push({ - code: "unrecognized_keys", - keys: unrecognized, - input, - inst - }); - } - if (!proms.length) - return payload; - return Promise.all(proms).then(() => { - return payload; - }); -} -var $ZodObject = /* @__PURE__ */ $constructor("$ZodObject", (inst, def) => { - $ZodType.init(inst, def); - const desc = Object.getOwnPropertyDescriptor(def, "shape"); - if (!desc?.get) { - const sh = def.shape; - Object.defineProperty(def, "shape", { - get: () => { - const newSh = { ...sh }; - Object.defineProperty(def, "shape", { - value: newSh - }); - return newSh; - } - }); - } - const _normalized = cached(() => normalizeDef(def)); - defineLazy(inst._zod, "propValues", () => { - const shape = def.shape; - const propValues = {}; - for (const key in shape) { - const field = shape[key]._zod; - if (field.values) { - propValues[key] ?? 
(propValues[key] = /* @__PURE__ */ new Set()); - for (const v of field.values) - propValues[key].add(v); - } - } - return propValues; - }); - const isObject2 = isObject; - const catchall = def.catchall; - let value; - inst._zod.parse = (payload, ctx) => { - value ?? (value = _normalized.value); - const input = payload.value; - if (!isObject2(input)) { - payload.issues.push({ - expected: "object", - code: "invalid_type", - input, - inst - }); - return payload; - } - payload.value = {}; - const proms = []; - const shape = value.shape; - for (const key of value.keys) { - const el = shape[key]; - const isOptionalOut = el._zod.optout === "optional"; - const r = el._zod.run({ value: input[key], issues: [] }, ctx); - if (r instanceof Promise) { - proms.push(r.then((r2) => handlePropertyResult(r2, payload, key, input, isOptionalOut))); - } else { - handlePropertyResult(r, payload, key, input, isOptionalOut); - } - } - if (!catchall) { - return proms.length ? Promise.all(proms).then(() => payload) : payload; - } - return handleCatchall(proms, input, payload, ctx, _normalized.value, inst); - }; -}); -var $ZodObjectJIT = /* @__PURE__ */ $constructor("$ZodObjectJIT", (inst, def) => { - $ZodObject.init(inst, def); - const superParse = inst._zod.parse; - const _normalized = cached(() => normalizeDef(def)); - const generateFastpass = (shape) => { - const doc = new Doc(["shape", "payload", "ctx"]); - const normalized = _normalized.value; - const parseStr = (key) => { - const k = esc(key); - return `shape[${k}]._zod.run({ value: input[${k}], issues: [] }, ctx)`; - }; - doc.write(`const input = payload.value;`); - const ids = /* @__PURE__ */ Object.create(null); - let counter = 0; - for (const key of normalized.keys) { - ids[key] = `key_${counter++}`; - } - doc.write(`const newResult = {};`); - for (const key of normalized.keys) { - const id = ids[key]; - const k = esc(key); - const schema = shape[key]; - const isOptionalOut = schema?._zod?.optout === "optional"; - doc.write(`const 
${id} = ${parseStr(key)};`); - if (isOptionalOut) { - doc.write(` - if (${id}.issues.length) { - if (${k} in input) { - payload.issues = payload.issues.concat(${id}.issues.map(iss => ({ - ...iss, - path: iss.path ? [${k}, ...iss.path] : [${k}] - }))); - } - } - - if (${id}.value === undefined) { - if (${k} in input) { - newResult[${k}] = undefined; - } - } else { - newResult[${k}] = ${id}.value; - } - - `); - } else { - doc.write(` - if (${id}.issues.length) { - payload.issues = payload.issues.concat(${id}.issues.map(iss => ({ - ...iss, - path: iss.path ? [${k}, ...iss.path] : [${k}] - }))); - } - - if (${id}.value === undefined) { - if (${k} in input) { - newResult[${k}] = undefined; - } - } else { - newResult[${k}] = ${id}.value; - } - - `); - } - } - doc.write(`payload.value = newResult;`); - doc.write(`return payload;`); - const fn = doc.compile(); - return (payload, ctx) => fn(shape, payload, ctx); - }; - let fastpass; - const isObject2 = isObject; - const jit = !globalConfig.jitless; - const allowsEval2 = allowsEval; - const fastEnabled = jit && allowsEval2.value; - const catchall = def.catchall; - let value; - inst._zod.parse = (payload, ctx) => { - value ?? 
(value = _normalized.value); - const input = payload.value; - if (!isObject2(input)) { - payload.issues.push({ - expected: "object", - code: "invalid_type", - input, - inst - }); - return payload; - } - if (jit && fastEnabled && ctx?.async === false && ctx.jitless !== true) { - if (!fastpass) - fastpass = generateFastpass(def.shape); - payload = fastpass(payload, ctx); - if (!catchall) - return payload; - return handleCatchall([], input, payload, ctx, value, inst); - } - return superParse(payload, ctx); - }; -}); -function handleUnionResults(results, final, inst, ctx) { - for (const result of results) { - if (result.issues.length === 0) { - final.value = result.value; - return final; - } - } - const nonaborted = results.filter((r) => !aborted(r)); - if (nonaborted.length === 1) { - final.value = nonaborted[0].value; - return nonaborted[0]; - } - final.issues.push({ - code: "invalid_union", - input: final.value, - inst, - errors: results.map((result) => result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) - }); - return final; -} -var $ZodUnion = /* @__PURE__ */ $constructor("$ZodUnion", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "optin", () => def.options.some((o) => o._zod.optin === "optional") ? "optional" : void 0); - defineLazy(inst._zod, "optout", () => def.options.some((o) => o._zod.optout === "optional") ? 
"optional" : void 0); - defineLazy(inst._zod, "values", () => { - if (def.options.every((o) => o._zod.values)) { - return new Set(def.options.flatMap((option) => Array.from(option._zod.values))); - } - return void 0; - }); - defineLazy(inst._zod, "pattern", () => { - if (def.options.every((o) => o._zod.pattern)) { - const patterns = def.options.map((o) => o._zod.pattern); - return new RegExp(`^(${patterns.map((p) => cleanRegex(p.source)).join("|")})$`); - } - return void 0; - }); - const single = def.options.length === 1; - const first = def.options[0]._zod.run; - inst._zod.parse = (payload, ctx) => { - if (single) { - return first(payload, ctx); - } - let async = false; - const results = []; - for (const option of def.options) { - const result = option._zod.run({ - value: payload.value, - issues: [] - }, ctx); - if (result instanceof Promise) { - results.push(result); - async = true; - } else { - if (result.issues.length === 0) - return result; - results.push(result); - } - } - if (!async) - return handleUnionResults(results, payload, inst, ctx); - return Promise.all(results).then((results2) => { - return handleUnionResults(results2, payload, inst, ctx); - }); - }; -}); -function handleExclusiveUnionResults(results, final, inst, ctx) { - const successes = results.filter((r) => r.issues.length === 0); - if (successes.length === 1) { - final.value = successes[0].value; - return final; - } - if (successes.length === 0) { - final.issues.push({ - code: "invalid_union", - input: final.value, - inst, - errors: results.map((result) => result.issues.map((iss) => finalizeIssue(iss, ctx, config()))) - }); - } else { - final.issues.push({ - code: "invalid_union", - input: final.value, - inst, - errors: [], - inclusive: false - }); - } - return final; -} -var $ZodXor = /* @__PURE__ */ $constructor("$ZodXor", (inst, def) => { - $ZodUnion.init(inst, def); - def.inclusive = false; - const single = def.options.length === 1; - const first = def.options[0]._zod.run; - 
inst._zod.parse = (payload, ctx) => { - if (single) { - return first(payload, ctx); - } - let async = false; - const results = []; - for (const option of def.options) { - const result = option._zod.run({ - value: payload.value, - issues: [] - }, ctx); - if (result instanceof Promise) { - results.push(result); - async = true; - } else { - results.push(result); - } - } - if (!async) - return handleExclusiveUnionResults(results, payload, inst, ctx); - return Promise.all(results).then((results2) => { - return handleExclusiveUnionResults(results2, payload, inst, ctx); - }); - }; -}); -var $ZodDiscriminatedUnion = /* @__PURE__ */ $constructor("$ZodDiscriminatedUnion", (inst, def) => { - def.inclusive = false; - $ZodUnion.init(inst, def); - const _super = inst._zod.parse; - defineLazy(inst._zod, "propValues", () => { - const propValues = {}; - for (const option of def.options) { - const pv = option._zod.propValues; - if (!pv || Object.keys(pv).length === 0) - throw new Error(`Invalid discriminated union option at index "${def.options.indexOf(option)}"`); - for (const [k, v] of Object.entries(pv)) { - if (!propValues[k]) - propValues[k] = /* @__PURE__ */ new Set(); - for (const val of v) { - propValues[k].add(val); - } - } - } - return propValues; - }); - const disc = cached(() => { - const opts = def.options; - const map2 = /* @__PURE__ */ new Map(); - for (const o of opts) { - const values = o._zod.propValues?.[def.discriminator]; - if (!values || values.size === 0) - throw new Error(`Invalid discriminated union option at index "${def.options.indexOf(o)}"`); - for (const v of values) { - if (map2.has(v)) { - throw new Error(`Duplicate discriminator value "${String(v)}"`); - } - map2.set(v, o); - } - } - return map2; - }); - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - if (!isObject(input)) { - payload.issues.push({ - code: "invalid_type", - expected: "object", - input, - inst - }); - return payload; - } - const opt = 
disc.value.get(input?.[def.discriminator]); - if (opt) { - return opt._zod.run(payload, ctx); - } - if (def.unionFallback) { - return _super(payload, ctx); - } - payload.issues.push({ - code: "invalid_union", - errors: [], - note: "No matching discriminator", - discriminator: def.discriminator, - input, - path: [def.discriminator], - inst - }); - return payload; - }; -}); -var $ZodIntersection = /* @__PURE__ */ $constructor("$ZodIntersection", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - const left = def.left._zod.run({ value: input, issues: [] }, ctx); - const right = def.right._zod.run({ value: input, issues: [] }, ctx); - const async = left instanceof Promise || right instanceof Promise; - if (async) { - return Promise.all([left, right]).then(([left2, right2]) => { - return handleIntersectionResults(payload, left2, right2); - }); - } - return handleIntersectionResults(payload, left, right); - }; -}); -function mergeValues(a, b) { - if (a === b) { - return { valid: true, data: a }; - } - if (a instanceof Date && b instanceof Date && +a === +b) { - return { valid: true, data: a }; - } - if (isPlainObject(a) && isPlainObject(b)) { - const bKeys = Object.keys(b); - const sharedKeys = Object.keys(a).filter((key) => bKeys.indexOf(key) !== -1); - const newObj = { ...a, ...b }; - for (const key of sharedKeys) { - const sharedValue = mergeValues(a[key], b[key]); - if (!sharedValue.valid) { - return { - valid: false, - mergeErrorPath: [key, ...sharedValue.mergeErrorPath] - }; - } - newObj[key] = sharedValue.data; - } - return { valid: true, data: newObj }; - } - if (Array.isArray(a) && Array.isArray(b)) { - if (a.length !== b.length) { - return { valid: false, mergeErrorPath: [] }; - } - const newArray = []; - for (let index = 0; index < a.length; index++) { - const itemA = a[index]; - const itemB = b[index]; - const sharedValue = mergeValues(itemA, itemB); - if (!sharedValue.valid) { - return { - 
valid: false, - mergeErrorPath: [index, ...sharedValue.mergeErrorPath] - }; - } - newArray.push(sharedValue.data); - } - return { valid: true, data: newArray }; - } - return { valid: false, mergeErrorPath: [] }; -} -function handleIntersectionResults(result, left, right) { - const unrecKeys = /* @__PURE__ */ new Map(); - let unrecIssue; - for (const iss of left.issues) { - if (iss.code === "unrecognized_keys") { - unrecIssue ?? (unrecIssue = iss); - for (const k of iss.keys) { - if (!unrecKeys.has(k)) - unrecKeys.set(k, {}); - unrecKeys.get(k).l = true; - } - } else { - result.issues.push(iss); - } - } - for (const iss of right.issues) { - if (iss.code === "unrecognized_keys") { - for (const k of iss.keys) { - if (!unrecKeys.has(k)) - unrecKeys.set(k, {}); - unrecKeys.get(k).r = true; - } - } else { - result.issues.push(iss); - } - } - const bothKeys = [...unrecKeys].filter(([, f]) => f.l && f.r).map(([k]) => k); - if (bothKeys.length && unrecIssue) { - result.issues.push({ ...unrecIssue, keys: bothKeys }); - } - if (aborted(result)) - return result; - const merged = mergeValues(left.value, right.value); - if (!merged.valid) { - throw new Error(`Unmergable intersection. Error path: ${JSON.stringify(merged.mergeErrorPath)}`); - } - result.value = merged.data; - return result; -} -var $ZodTuple = /* @__PURE__ */ $constructor("$ZodTuple", (inst, def) => { - $ZodType.init(inst, def); - const items = def.items; - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - if (!Array.isArray(input)) { - payload.issues.push({ - input, - inst, - expected: "tuple", - code: "invalid_type" - }); - return payload; - } - payload.value = []; - const proms = []; - const reversedIndex = [...items].reverse().findIndex((item) => item._zod.optin !== "optional"); - const optStart = reversedIndex === -1 ? 
0 : items.length - reversedIndex; - if (!def.rest) { - const tooBig = input.length > items.length; - const tooSmall = input.length < optStart - 1; - if (tooBig || tooSmall) { - payload.issues.push({ - ...tooBig ? { code: "too_big", maximum: items.length, inclusive: true } : { code: "too_small", minimum: items.length }, - input, - inst, - origin: "array" - }); - return payload; - } - } - let i = -1; - for (const item of items) { - i++; - if (i >= input.length) { - if (i >= optStart) - continue; - } - const result = item._zod.run({ - value: input[i], - issues: [] - }, ctx); - if (result instanceof Promise) { - proms.push(result.then((result2) => handleTupleResult(result2, payload, i))); - } else { - handleTupleResult(result, payload, i); - } - } - if (def.rest) { - const rest = input.slice(items.length); - for (const el of rest) { - i++; - const result = def.rest._zod.run({ - value: el, - issues: [] - }, ctx); - if (result instanceof Promise) { - proms.push(result.then((result2) => handleTupleResult(result2, payload, i))); - } else { - handleTupleResult(result, payload, i); - } - } - } - if (proms.length) - return Promise.all(proms).then(() => payload); - return payload; - }; -}); -function handleTupleResult(result, final, index) { - if (result.issues.length) { - final.issues.push(...prefixIssues(index, result.issues)); - } - final.value[index] = result.value; -} -var $ZodRecord = /* @__PURE__ */ $constructor("$ZodRecord", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - if (!isPlainObject(input)) { - payload.issues.push({ - expected: "record", - code: "invalid_type", - input, - inst - }); - return payload; - } - const proms = []; - const values = def.keyType._zod.values; - if (values) { - payload.value = {}; - const recordKeys = /* @__PURE__ */ new Set(); - for (const key of values) { - if (typeof key === "string" || typeof key === "number" || typeof key === "symbol") { - recordKeys.add(typeof key 
=== "number" ? key.toString() : key); - const result = def.valueType._zod.run({ value: input[key], issues: [] }, ctx); - if (result instanceof Promise) { - proms.push(result.then((result2) => { - if (result2.issues.length) { - payload.issues.push(...prefixIssues(key, result2.issues)); - } - payload.value[key] = result2.value; - })); - } else { - if (result.issues.length) { - payload.issues.push(...prefixIssues(key, result.issues)); - } - payload.value[key] = result.value; - } - } - } - let unrecognized; - for (const key in input) { - if (!recordKeys.has(key)) { - unrecognized = unrecognized ?? []; - unrecognized.push(key); - } - } - if (unrecognized && unrecognized.length > 0) { - payload.issues.push({ - code: "unrecognized_keys", - input, - inst, - keys: unrecognized - }); - } - } else { - payload.value = {}; - for (const key of Reflect.ownKeys(input)) { - if (key === "__proto__") - continue; - let keyResult = def.keyType._zod.run({ value: key, issues: [] }, ctx); - if (keyResult instanceof Promise) { - throw new Error("Async schemas not supported in object keys currently"); - } - const checkNumericKey = typeof key === "string" && number.test(key) && keyResult.issues.length; - if (checkNumericKey) { - const retryResult = def.keyType._zod.run({ value: Number(key), issues: [] }, ctx); - if (retryResult instanceof Promise) { - throw new Error("Async schemas not supported in object keys currently"); - } - if (retryResult.issues.length === 0) { - keyResult = retryResult; - } - } - if (keyResult.issues.length) { - if (def.mode === "loose") { - payload.value[key] = input[key]; - } else { - payload.issues.push({ - code: "invalid_key", - origin: "record", - issues: keyResult.issues.map((iss) => finalizeIssue(iss, ctx, config())), - input: key, - path: [key], - inst - }); - } - continue; - } - const result = def.valueType._zod.run({ value: input[key], issues: [] }, ctx); - if (result instanceof Promise) { - proms.push(result.then((result2) => { - if (result2.issues.length) 
{ - payload.issues.push(...prefixIssues(key, result2.issues)); - } - payload.value[keyResult.value] = result2.value; - })); - } else { - if (result.issues.length) { - payload.issues.push(...prefixIssues(key, result.issues)); - } - payload.value[keyResult.value] = result.value; - } - } - } - if (proms.length) { - return Promise.all(proms).then(() => payload); - } - return payload; - }; -}); -var $ZodMap = /* @__PURE__ */ $constructor("$ZodMap", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - if (!(input instanceof Map)) { - payload.issues.push({ - expected: "map", - code: "invalid_type", - input, - inst - }); - return payload; - } - const proms = []; - payload.value = /* @__PURE__ */ new Map(); - for (const [key, value] of input) { - const keyResult = def.keyType._zod.run({ value: key, issues: [] }, ctx); - const valueResult = def.valueType._zod.run({ value, issues: [] }, ctx); - if (keyResult instanceof Promise || valueResult instanceof Promise) { - proms.push(Promise.all([keyResult, valueResult]).then(([keyResult2, valueResult2]) => { - handleMapResult(keyResult2, valueResult2, payload, key, input, inst, ctx); - })); - } else { - handleMapResult(keyResult, valueResult, payload, key, input, inst, ctx); - } - } - if (proms.length) - return Promise.all(proms).then(() => payload); - return payload; - }; -}); -function handleMapResult(keyResult, valueResult, final, key, input, inst, ctx) { - if (keyResult.issues.length) { - if (propertyKeyTypes.has(typeof key)) { - final.issues.push(...prefixIssues(key, keyResult.issues)); - } else { - final.issues.push({ - code: "invalid_key", - origin: "map", - input, - inst, - issues: keyResult.issues.map((iss) => finalizeIssue(iss, ctx, config())) - }); - } - } - if (valueResult.issues.length) { - if (propertyKeyTypes.has(typeof key)) { - final.issues.push(...prefixIssues(key, valueResult.issues)); - } else { - final.issues.push({ - origin: "map", - code: 
"invalid_element", - input, - inst, - key, - issues: valueResult.issues.map((iss) => finalizeIssue(iss, ctx, config())) - }); - } - } - final.value.set(keyResult.value, valueResult.value); -} -var $ZodSet = /* @__PURE__ */ $constructor("$ZodSet", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - const input = payload.value; - if (!(input instanceof Set)) { - payload.issues.push({ - input, - inst, - expected: "set", - code: "invalid_type" - }); - return payload; - } - const proms = []; - payload.value = /* @__PURE__ */ new Set(); - for (const item of input) { - const result = def.valueType._zod.run({ value: item, issues: [] }, ctx); - if (result instanceof Promise) { - proms.push(result.then((result2) => handleSetResult(result2, payload))); - } else - handleSetResult(result, payload); - } - if (proms.length) - return Promise.all(proms).then(() => payload); - return payload; - }; -}); -function handleSetResult(result, final) { - if (result.issues.length) { - final.issues.push(...result.issues); - } - final.value.add(result.value); -} -var $ZodEnum = /* @__PURE__ */ $constructor("$ZodEnum", (inst, def) => { - $ZodType.init(inst, def); - const values = getEnumValues(def.entries); - const valuesSet = new Set(values); - inst._zod.values = valuesSet; - inst._zod.pattern = new RegExp(`^(${values.filter((k) => propertyKeyTypes.has(typeof k)).map((o) => typeof o === "string" ? 
escapeRegex(o) : o.toString()).join("|")})$`); - inst._zod.parse = (payload, _ctx) => { - const input = payload.value; - if (valuesSet.has(input)) { - return payload; - } - payload.issues.push({ - code: "invalid_value", - values, - input, - inst - }); - return payload; - }; -}); -var $ZodLiteral = /* @__PURE__ */ $constructor("$ZodLiteral", (inst, def) => { - $ZodType.init(inst, def); - if (def.values.length === 0) { - throw new Error("Cannot create literal schema with no valid values"); - } - const values = new Set(def.values); - inst._zod.values = values; - inst._zod.pattern = new RegExp(`^(${def.values.map((o) => typeof o === "string" ? escapeRegex(o) : o ? escapeRegex(o.toString()) : String(o)).join("|")})$`); - inst._zod.parse = (payload, _ctx) => { - const input = payload.value; - if (values.has(input)) { - return payload; - } - payload.issues.push({ - code: "invalid_value", - values: def.values, - input, - inst - }); - return payload; - }; -}); -var $ZodFile = /* @__PURE__ */ $constructor("$ZodFile", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, _ctx) => { - const input = payload.value; - if (input instanceof File) - return payload; - payload.issues.push({ - expected: "file", - code: "invalid_type", - input, - inst - }); - return payload; - }; -}); -var $ZodTransform = /* @__PURE__ */ $constructor("$ZodTransform", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - throw new $ZodEncodeError(inst.constructor.name); - } - const _out = def.transform(payload.value, payload); - if (ctx.async) { - const output = _out instanceof Promise ? 
_out : Promise.resolve(_out); - return output.then((output2) => { - payload.value = output2; - return payload; - }); - } - if (_out instanceof Promise) { - throw new $ZodAsyncError(); - } - payload.value = _out; - return payload; - }; -}); -function handleOptionalResult(result, input) { - if (result.issues.length && input === void 0) { - return { issues: [], value: void 0 }; - } - return result; -} -var $ZodOptional = /* @__PURE__ */ $constructor("$ZodOptional", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.optin = "optional"; - inst._zod.optout = "optional"; - defineLazy(inst._zod, "values", () => { - return def.innerType._zod.values ? /* @__PURE__ */ new Set([...def.innerType._zod.values, void 0]) : void 0; - }); - defineLazy(inst._zod, "pattern", () => { - const pattern = def.innerType._zod.pattern; - return pattern ? new RegExp(`^(${cleanRegex(pattern.source)})?$`) : void 0; - }); - inst._zod.parse = (payload, ctx) => { - if (def.innerType._zod.optin === "optional") { - const result = def.innerType._zod.run(payload, ctx); - if (result instanceof Promise) - return result.then((r) => handleOptionalResult(r, payload.value)); - return handleOptionalResult(result, payload.value); - } - if (payload.value === void 0) { - return payload; - } - return def.innerType._zod.run(payload, ctx); - }; -}); -var $ZodExactOptional = /* @__PURE__ */ $constructor("$ZodExactOptional", (inst, def) => { - $ZodOptional.init(inst, def); - defineLazy(inst._zod, "values", () => def.innerType._zod.values); - defineLazy(inst._zod, "pattern", () => def.innerType._zod.pattern); - inst._zod.parse = (payload, ctx) => { - return def.innerType._zod.run(payload, ctx); - }; -}); -var $ZodNullable = /* @__PURE__ */ $constructor("$ZodNullable", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "optin", () => def.innerType._zod.optin); - defineLazy(inst._zod, "optout", () => def.innerType._zod.optout); - defineLazy(inst._zod, "pattern", () => { - const pattern = 
def.innerType._zod.pattern; - return pattern ? new RegExp(`^(${cleanRegex(pattern.source)}|null)$`) : void 0; - }); - defineLazy(inst._zod, "values", () => { - return def.innerType._zod.values ? /* @__PURE__ */ new Set([...def.innerType._zod.values, null]) : void 0; - }); - inst._zod.parse = (payload, ctx) => { - if (payload.value === null) - return payload; - return def.innerType._zod.run(payload, ctx); - }; -}); -var $ZodDefault = /* @__PURE__ */ $constructor("$ZodDefault", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.optin = "optional"; - defineLazy(inst._zod, "values", () => def.innerType._zod.values); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - return def.innerType._zod.run(payload, ctx); - } - if (payload.value === void 0) { - payload.value = def.defaultValue; - return payload; - } - const result = def.innerType._zod.run(payload, ctx); - if (result instanceof Promise) { - return result.then((result2) => handleDefaultResult(result2, def)); - } - return handleDefaultResult(result, def); - }; -}); -function handleDefaultResult(payload, def) { - if (payload.value === void 0) { - payload.value = def.defaultValue; - } - return payload; -} -var $ZodPrefault = /* @__PURE__ */ $constructor("$ZodPrefault", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.optin = "optional"; - defineLazy(inst._zod, "values", () => def.innerType._zod.values); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - return def.innerType._zod.run(payload, ctx); - } - if (payload.value === void 0) { - payload.value = def.defaultValue; - } - return def.innerType._zod.run(payload, ctx); - }; -}); -var $ZodNonOptional = /* @__PURE__ */ $constructor("$ZodNonOptional", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "values", () => { - const v = def.innerType._zod.values; - return v ? 
new Set([...v].filter((x) => x !== void 0)) : void 0; - }); - inst._zod.parse = (payload, ctx) => { - const result = def.innerType._zod.run(payload, ctx); - if (result instanceof Promise) { - return result.then((result2) => handleNonOptionalResult(result2, inst)); - } - return handleNonOptionalResult(result, inst); - }; -}); -function handleNonOptionalResult(payload, inst) { - if (!payload.issues.length && payload.value === void 0) { - payload.issues.push({ - code: "invalid_type", - expected: "nonoptional", - input: payload.value, - inst - }); - } - return payload; -} -var $ZodSuccess = /* @__PURE__ */ $constructor("$ZodSuccess", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - throw new $ZodEncodeError("ZodSuccess"); - } - const result = def.innerType._zod.run(payload, ctx); - if (result instanceof Promise) { - return result.then((result2) => { - payload.value = result2.issues.length === 0; - return payload; - }); - } - payload.value = result.issues.length === 0; - return payload; - }; -}); -var $ZodCatch = /* @__PURE__ */ $constructor("$ZodCatch", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "optin", () => def.innerType._zod.optin); - defineLazy(inst._zod, "optout", () => def.innerType._zod.optout); - defineLazy(inst._zod, "values", () => def.innerType._zod.values); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - return def.innerType._zod.run(payload, ctx); - } - const result = def.innerType._zod.run(payload, ctx); - if (result instanceof Promise) { - return result.then((result2) => { - payload.value = result2.value; - if (result2.issues.length) { - payload.value = def.catchValue({ - ...payload, - error: { - issues: result2.issues.map((iss) => finalizeIssue(iss, ctx, config())) - }, - input: payload.value - }); - payload.issues = []; - } - return payload; - }); - } - payload.value = result.value; - if (result.issues.length) { - 
payload.value = def.catchValue({ - ...payload, - error: { - issues: result.issues.map((iss) => finalizeIssue(iss, ctx, config())) - }, - input: payload.value - }); - payload.issues = []; - } - return payload; - }; -}); -var $ZodNaN = /* @__PURE__ */ $constructor("$ZodNaN", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, _ctx) => { - if (typeof payload.value !== "number" || !Number.isNaN(payload.value)) { - payload.issues.push({ - input: payload.value, - inst, - expected: "nan", - code: "invalid_type" - }); - return payload; - } - return payload; - }; -}); -var $ZodPipe = /* @__PURE__ */ $constructor("$ZodPipe", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "values", () => def.in._zod.values); - defineLazy(inst._zod, "optin", () => def.in._zod.optin); - defineLazy(inst._zod, "optout", () => def.out._zod.optout); - defineLazy(inst._zod, "propValues", () => def.in._zod.propValues); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - const right = def.out._zod.run(payload, ctx); - if (right instanceof Promise) { - return right.then((right2) => handlePipeResult(right2, def.in, ctx)); - } - return handlePipeResult(right, def.in, ctx); - } - const left = def.in._zod.run(payload, ctx); - if (left instanceof Promise) { - return left.then((left2) => handlePipeResult(left2, def.out, ctx)); - } - return handlePipeResult(left, def.out, ctx); - }; -}); -function handlePipeResult(left, next, ctx) { - if (left.issues.length) { - left.aborted = true; - return left; - } - return next._zod.run({ value: left.value, issues: left.issues }, ctx); -} -var $ZodCodec = /* @__PURE__ */ $constructor("$ZodCodec", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "values", () => def.in._zod.values); - defineLazy(inst._zod, "optin", () => def.in._zod.optin); - defineLazy(inst._zod, "optout", () => def.out._zod.optout); - defineLazy(inst._zod, "propValues", () => def.in._zod.propValues); - 
inst._zod.parse = (payload, ctx) => { - const direction = ctx.direction || "forward"; - if (direction === "forward") { - const left = def.in._zod.run(payload, ctx); - if (left instanceof Promise) { - return left.then((left2) => handleCodecAResult(left2, def, ctx)); - } - return handleCodecAResult(left, def, ctx); - } else { - const right = def.out._zod.run(payload, ctx); - if (right instanceof Promise) { - return right.then((right2) => handleCodecAResult(right2, def, ctx)); - } - return handleCodecAResult(right, def, ctx); - } - }; -}); -function handleCodecAResult(result, def, ctx) { - if (result.issues.length) { - result.aborted = true; - return result; - } - const direction = ctx.direction || "forward"; - if (direction === "forward") { - const transformed = def.transform(result.value, result); - if (transformed instanceof Promise) { - return transformed.then((value) => handleCodecTxResult(result, value, def.out, ctx)); - } - return handleCodecTxResult(result, transformed, def.out, ctx); - } else { - const transformed = def.reverseTransform(result.value, result); - if (transformed instanceof Promise) { - return transformed.then((value) => handleCodecTxResult(result, value, def.in, ctx)); - } - return handleCodecTxResult(result, transformed, def.in, ctx); - } -} -function handleCodecTxResult(left, value, nextSchema, ctx) { - if (left.issues.length) { - left.aborted = true; - return left; - } - return nextSchema._zod.run({ value, issues: left.issues }, ctx); -} -var $ZodReadonly = /* @__PURE__ */ $constructor("$ZodReadonly", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "propValues", () => def.innerType._zod.propValues); - defineLazy(inst._zod, "values", () => def.innerType._zod.values); - defineLazy(inst._zod, "optin", () => def.innerType?._zod?.optin); - defineLazy(inst._zod, "optout", () => def.innerType?._zod?.optout); - inst._zod.parse = (payload, ctx) => { - if (ctx.direction === "backward") { - return def.innerType._zod.run(payload, 
ctx); - } - const result = def.innerType._zod.run(payload, ctx); - if (result instanceof Promise) { - return result.then(handleReadonlyResult); - } - return handleReadonlyResult(result); - }; -}); -function handleReadonlyResult(payload) { - payload.value = Object.freeze(payload.value); - return payload; -} -var $ZodTemplateLiteral = /* @__PURE__ */ $constructor("$ZodTemplateLiteral", (inst, def) => { - $ZodType.init(inst, def); - const regexParts = []; - for (const part of def.parts) { - if (typeof part === "object" && part !== null) { - if (!part._zod.pattern) { - throw new Error(`Invalid template literal part, no pattern found: ${[...part._zod.traits].shift()}`); - } - const source = part._zod.pattern instanceof RegExp ? part._zod.pattern.source : part._zod.pattern; - if (!source) - throw new Error(`Invalid template literal part: ${part._zod.traits}`); - const start = source.startsWith("^") ? 1 : 0; - const end = source.endsWith("$") ? source.length - 1 : source.length; - regexParts.push(source.slice(start, end)); - } else if (part === null || primitiveTypes.has(typeof part)) { - regexParts.push(escapeRegex(`${part}`)); - } else { - throw new Error(`Invalid template literal part: ${part}`); - } - } - inst._zod.pattern = new RegExp(`^${regexParts.join("")}$`); - inst._zod.parse = (payload, _ctx) => { - if (typeof payload.value !== "string") { - payload.issues.push({ - input: payload.value, - inst, - expected: "string", - code: "invalid_type" - }); - return payload; - } - inst._zod.pattern.lastIndex = 0; - if (!inst._zod.pattern.test(payload.value)) { - payload.issues.push({ - input: payload.value, - inst, - code: "invalid_format", - format: def.format ?? 
"template_literal", - pattern: inst._zod.pattern.source - }); - return payload; - } - return payload; - }; -}); -var $ZodFunction = /* @__PURE__ */ $constructor("$ZodFunction", (inst, def) => { - $ZodType.init(inst, def); - inst._def = def; - inst._zod.def = def; - inst.implement = (func) => { - if (typeof func !== "function") { - throw new Error("implement() must be called with a function"); - } - return function(...args) { - const parsedArgs = inst._def.input ? parse(inst._def.input, args) : args; - const result = Reflect.apply(func, this, parsedArgs); - if (inst._def.output) { - return parse(inst._def.output, result); - } - return result; - }; - }; - inst.implementAsync = (func) => { - if (typeof func !== "function") { - throw new Error("implementAsync() must be called with a function"); - } - return async function(...args) { - const parsedArgs = inst._def.input ? await parseAsync(inst._def.input, args) : args; - const result = await Reflect.apply(func, this, parsedArgs); - if (inst._def.output) { - return await parseAsync(inst._def.output, result); - } - return result; - }; - }; - inst._zod.parse = (payload, _ctx) => { - if (typeof payload.value !== "function") { - payload.issues.push({ - code: "invalid_type", - expected: "function", - input: payload.value, - inst - }); - return payload; - } - const hasPromiseOutput = inst._def.output && inst._def.output._zod.def.type === "promise"; - if (hasPromiseOutput) { - payload.value = inst.implementAsync(payload.value); - } else { - payload.value = inst.implement(payload.value); - } - return payload; - }; - inst.input = (...args) => { - const F = inst.constructor; - if (Array.isArray(args[0])) { - return new F({ - type: "function", - input: new $ZodTuple({ - type: "tuple", - items: args[0], - rest: args[1] - }), - output: inst._def.output - }); - } - return new F({ - type: "function", - input: args[0], - output: inst._def.output - }); - }; - inst.output = (output) => { - const F = inst.constructor; - return new F({ - 
type: "function", - input: inst._def.input, - output - }); - }; - return inst; -}); -var $ZodPromise = /* @__PURE__ */ $constructor("$ZodPromise", (inst, def) => { - $ZodType.init(inst, def); - inst._zod.parse = (payload, ctx) => { - return Promise.resolve(payload.value).then((inner) => def.innerType._zod.run({ value: inner, issues: [] }, ctx)); - }; -}); -var $ZodLazy = /* @__PURE__ */ $constructor("$ZodLazy", (inst, def) => { - $ZodType.init(inst, def); - defineLazy(inst._zod, "innerType", () => def.getter()); - defineLazy(inst._zod, "pattern", () => inst._zod.innerType?._zod?.pattern); - defineLazy(inst._zod, "propValues", () => inst._zod.innerType?._zod?.propValues); - defineLazy(inst._zod, "optin", () => inst._zod.innerType?._zod?.optin ?? void 0); - defineLazy(inst._zod, "optout", () => inst._zod.innerType?._zod?.optout ?? void 0); - inst._zod.parse = (payload, ctx) => { - const inner = inst._zod.innerType; - return inner._zod.run(payload, ctx); - }; -}); -var $ZodCustom = /* @__PURE__ */ $constructor("$ZodCustom", (inst, def) => { - $ZodCheck.init(inst, def); - $ZodType.init(inst, def); - inst._zod.parse = (payload, _) => { - return payload; - }; - inst._zod.check = (payload) => { - const input = payload.value; - const r = def.fn(input); - if (r instanceof Promise) { - return r.then((r2) => handleRefineResult(r2, payload, input, inst)); - } - handleRefineResult(r, payload, input, inst); - return; - }; -}); -function handleRefineResult(result, payload, input, inst) { - if (!result) { - const _iss = { - code: "custom", - input, - inst, - // incorporates params.error into issue reporting - path: [...inst._zod.def.path ?? 
[]], - // incorporates params.error into issue reporting - continue: !inst._zod.def.abort - // params: inst._zod.def.params, - }; - if (inst._zod.def.params) - _iss.params = inst._zod.def.params; - payload.issues.push(issue(_iss)); - } -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/index.js -var locales_exports = {}; -__export(locales_exports, { - ar: () => ar_default, - az: () => az_default, - be: () => be_default, - bg: () => bg_default, - ca: () => ca_default, - cs: () => cs_default, - da: () => da_default, - de: () => de_default, - en: () => en_default, - eo: () => eo_default, - es: () => es_default, - fa: () => fa_default, - fi: () => fi_default, - fr: () => fr_default, - frCA: () => fr_CA_default, - he: () => he_default, - hu: () => hu_default, - hy: () => hy_default, - id: () => id_default, - is: () => is_default, - it: () => it_default, - ja: () => ja_default, - ka: () => ka_default, - kh: () => kh_default, - km: () => km_default, - ko: () => ko_default, - lt: () => lt_default, - mk: () => mk_default, - ms: () => ms_default, - nl: () => nl_default, - no: () => no_default, - ota: () => ota_default, - pl: () => pl_default, - ps: () => ps_default, - pt: () => pt_default, - ru: () => ru_default, - sl: () => sl_default, - sv: () => sv_default, - ta: () => ta_default, - th: () => th_default, - tr: () => tr_default, - ua: () => ua_default, - uk: () => uk_default, - ur: () => ur_default, - uz: () => uz_default, - vi: () => vi_default, - yo: () => yo_default, - zhCN: () => zh_CN_default, - zhTW: () => zh_TW_default -}); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ar.js -var error = () => { - const Sizable = { - string: { unit: "\u062D\u0631\u0641", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" }, - file: { unit: "\u0628\u0627\u064A\u062A", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" }, - array: { unit: "\u0639\u0646\u0635\u0631", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" }, - set: { unit: 
"\u0639\u0646\u0635\u0631", verb: "\u0623\u0646 \u064A\u062D\u0648\u064A" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u0645\u062F\u062E\u0644", - email: "\u0628\u0631\u064A\u062F \u0625\u0644\u0643\u062A\u0631\u0648\u0646\u064A", - url: "\u0631\u0627\u0628\u0637", - emoji: "\u0625\u064A\u0645\u0648\u062C\u064A", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u062A\u0627\u0631\u064A\u062E \u0648\u0648\u0642\u062A \u0628\u0645\u0639\u064A\u0627\u0631 ISO", - date: "\u062A\u0627\u0631\u064A\u062E \u0628\u0645\u0639\u064A\u0627\u0631 ISO", - time: "\u0648\u0642\u062A \u0628\u0645\u0639\u064A\u0627\u0631 ISO", - duration: "\u0645\u062F\u0629 \u0628\u0645\u0639\u064A\u0627\u0631 ISO", - ipv4: "\u0639\u0646\u0648\u0627\u0646 IPv4", - ipv6: "\u0639\u0646\u0648\u0627\u0646 IPv6", - cidrv4: "\u0645\u062F\u0649 \u0639\u0646\u0627\u0648\u064A\u0646 \u0628\u0635\u064A\u063A\u0629 IPv4", - cidrv6: "\u0645\u062F\u0649 \u0639\u0646\u0627\u0648\u064A\u0646 \u0628\u0635\u064A\u063A\u0629 IPv6", - base64: "\u0646\u064E\u0635 \u0628\u062A\u0631\u0645\u064A\u0632 base64-encoded", - base64url: "\u0646\u064E\u0635 \u0628\u062A\u0631\u0645\u064A\u0632 base64url-encoded", - json_string: "\u0646\u064E\u0635 \u0639\u0644\u0649 \u0647\u064A\u0626\u0629 JSON", - e164: "\u0631\u0642\u0645 \u0647\u0627\u062A\u0641 \u0628\u0645\u0639\u064A\u0627\u0631 E.164", - jwt: "JWT", - template_literal: "\u0645\u062F\u062E\u0644" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0645\u062F\u062E\u0644\u0627\u062A \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644\u0629: \u064A\u0641\u062A\u0631\u0636 \u0625\u062F\u062E\u0627\u0644 instanceof ${issue2.expected}\u060C \u0648\u0644\u0643\u0646 \u062A\u0645 \u0625\u062F\u062E\u0627\u0644 ${received}`; - } - return `\u0645\u062F\u062E\u0644\u0627\u062A \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644\u0629: \u064A\u0641\u062A\u0631\u0636 \u0625\u062F\u062E\u0627\u0644 ${expected}\u060C \u0648\u0644\u0643\u0646 \u062A\u0645 \u0625\u062F\u062E\u0627\u0644 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u0645\u062F\u062E\u0644\u0627\u062A \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644\u0629: \u064A\u0641\u062A\u0631\u0636 \u0625\u062F\u062E\u0627\u0644 ${stringifyPrimitive(issue2.values[0])}`; - return `\u0627\u062E\u062A\u064A\u0627\u0631 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062A\u0648\u0642\u0639 \u0627\u0646\u062A\u0642\u0627\u0621 \u0623\u062D\u062F \u0647\u0630\u0647 \u0627\u0644\u062E\u064A\u0627\u0631\u0627\u062A: ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return ` \u0623\u0643\u0628\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0623\u0646 \u062A\u0643\u0648\u0646 ${issue2.origin ?? "\u0627\u0644\u0642\u064A\u0645\u0629"} ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0635\u0631"}`; - return `\u0623\u0643\u0628\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0623\u0646 \u062A\u0643\u0648\u0646 ${issue2.origin ?? "\u0627\u0644\u0642\u064A\u0645\u0629"} ${adj} ${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0623\u0635\u063A\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0644\u0640 ${issue2.origin} \u0623\u0646 \u064A\u0643\u0648\u0646 ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u0623\u0635\u063A\u0631 \u0645\u0646 \u0627\u0644\u0644\u0627\u0632\u0645: \u064A\u0641\u062A\u0631\u0636 \u0644\u0640 ${issue2.origin} \u0623\u0646 \u064A\u0643\u0648\u0646 ${adj} ${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0628\u062F\u0623 \u0628\u0640 "${issue2.prefix}"`; - if (_issue.format === "ends_with") - return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0646\u062A\u0647\u064A \u0628\u0640 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u062A\u0636\u0645\u0651\u064E\u0646 "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u0646\u064E\u0635 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0637\u0627\u0628\u0642 \u0627\u0644\u0646\u0645\u0637 ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? issue2.format} \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644`; - } - case "not_multiple_of": - return `\u0631\u0642\u0645 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644: \u064A\u062C\u0628 \u0623\u0646 \u064A\u0643\u0648\u0646 \u0645\u0646 \u0645\u0636\u0627\u0639\u0641\u0627\u062A ${issue2.divisor}`; - case "unrecognized_keys": - return `\u0645\u0639\u0631\u0641${issue2.keys.length > 1 ? "\u0627\u062A" : ""} \u063A\u0631\u064A\u0628${issue2.keys.length > 1 ? 
"\u0629" : ""}: ${joinValues(issue2.keys, "\u060C ")}`; - case "invalid_key": - return `\u0645\u0639\u0631\u0641 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644 \u0641\u064A ${issue2.origin}`; - case "invalid_union": - return "\u0645\u062F\u062E\u0644 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644"; - case "invalid_element": - return `\u0645\u062F\u062E\u0644 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644 \u0641\u064A ${issue2.origin}`; - default: - return "\u0645\u062F\u062E\u0644 \u063A\u064A\u0631 \u0645\u0642\u0628\u0648\u0644"; - } - }; -}; -function ar_default() { - return { - localeError: error() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/az.js -var error2 = () => { - const Sizable = { - string: { unit: "simvol", verb: "olmal\u0131d\u0131r" }, - file: { unit: "bayt", verb: "olmal\u0131d\u0131r" }, - array: { unit: "element", verb: "olmal\u0131d\u0131r" }, - set: { unit: "element", verb: "olmal\u0131d\u0131r" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "input", - email: "email address", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO datetime", - date: "ISO date", - time: "ISO time", - duration: "ISO duration", - ipv4: "IPv4 address", - ipv6: "IPv6 address", - cidrv4: "IPv4 range", - cidrv6: "IPv6 range", - base64: "base64-encoded string", - base64url: "base64url-encoded string", - json_string: "JSON string", - e164: "E.164 number", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Yanl\u0131\u015F d\u0259y\u0259r: g\xF6zl\u0259nil\u0259n instanceof ${issue2.expected}, daxil olan ${received}`; - } - return `Yanl\u0131\u015F d\u0259y\u0259r: g\xF6zl\u0259nil\u0259n ${expected}, daxil olan ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Yanl\u0131\u015F d\u0259y\u0259r: g\xF6zl\u0259nil\u0259n ${stringifyPrimitive(issue2.values[0])}`; - return `Yanl\u0131\u015F se\xE7im: a\u015Fa\u011F\u0131dak\u0131lardan biri olmal\u0131d\u0131r: ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\xC7ox b\xF6y\xFCk: g\xF6zl\u0259nil\u0259n ${issue2.origin ?? "d\u0259y\u0259r"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "element"}`; - return `\xC7ox b\xF6y\xFCk: g\xF6zl\u0259nil\u0259n ${issue2.origin ?? "d\u0259y\u0259r"} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\xC7ox ki\xE7ik: g\xF6zl\u0259nil\u0259n ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - return `\xC7ox ki\xE7ik: g\xF6zl\u0259nil\u0259n ${issue2.origin} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Yanl\u0131\u015F m\u0259tn: "${_issue.prefix}" il\u0259 ba\u015Flamal\u0131d\u0131r`; - if (_issue.format === "ends_with") - return `Yanl\u0131\u015F m\u0259tn: "${_issue.suffix}" il\u0259 bitm\u0259lidir`; - if (_issue.format === "includes") - return `Yanl\u0131\u015F m\u0259tn: "${_issue.includes}" daxil olmal\u0131d\u0131r`; - if (_issue.format === "regex") - return `Yanl\u0131\u015F m\u0259tn: ${_issue.pattern} \u015Fablonuna uy\u011Fun olmal\u0131d\u0131r`; - return `Yanl\u0131\u015F ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `Yanl\u0131\u015F \u0259d\u0259d: ${issue2.divisor} il\u0259 b\xF6l\xFCn\u0259 bil\u0259n olmal\u0131d\u0131r`; - case "unrecognized_keys": - return `Tan\u0131nmayan a\xE7ar${issue2.keys.length > 1 ? "lar" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `${issue2.origin} daxilind\u0259 yanl\u0131\u015F a\xE7ar`; - case "invalid_union": - return "Yanl\u0131\u015F d\u0259y\u0259r"; - case "invalid_element": - return `${issue2.origin} daxilind\u0259 yanl\u0131\u015F d\u0259y\u0259r`; - default: - return `Yanl\u0131\u015F d\u0259y\u0259r`; - } - }; -}; -function az_default() { - return { - localeError: error2() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/be.js -function getBelarusianPlural(count, one, few, many) { - const absCount = Math.abs(count); - const lastDigit = absCount % 10; - const lastTwoDigits = absCount % 100; - if (lastTwoDigits >= 11 && lastTwoDigits <= 19) { - return many; - } - if (lastDigit === 1) { - return one; - } - if (lastDigit >= 2 && lastDigit <= 4) { - return few; - } - return many; -} -var error3 = () => { - const Sizable = { - string: { - unit: { - one: "\u0441\u0456\u043C\u0432\u0430\u043B", - few: "\u0441\u0456\u043C\u0432\u0430\u043B\u044B", - many: "\u0441\u0456\u043C\u0432\u0430\u043B\u0430\u045E" - }, - verb: "\u043C\u0435\u0446\u044C" - }, - array: { - unit: { - one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", - few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u044B", - many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430\u045E" - }, - verb: "\u043C\u0435\u0446\u044C" - }, - set: { - unit: { - one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", - few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u044B", - many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430\u045E" - }, - verb: "\u043C\u0435\u0446\u044C" - }, - file: { - unit: { - one: "\u0431\u0430\u0439\u0442", - few: "\u0431\u0430\u0439\u0442\u044B", - many: 
"\u0431\u0430\u0439\u0442\u0430\u045E" - }, - verb: "\u043C\u0435\u0446\u044C" - } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u0443\u0432\u043E\u0434", - email: "email \u0430\u0434\u0440\u0430\u0441", - url: "URL", - emoji: "\u044D\u043C\u043E\u0434\u0437\u0456", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u0434\u0430\u0442\u0430 \u0456 \u0447\u0430\u0441", - date: "ISO \u0434\u0430\u0442\u0430", - time: "ISO \u0447\u0430\u0441", - duration: "ISO \u043F\u0440\u0430\u0446\u044F\u0433\u043B\u0430\u0441\u0446\u044C", - ipv4: "IPv4 \u0430\u0434\u0440\u0430\u0441", - ipv6: "IPv6 \u0430\u0434\u0440\u0430\u0441", - cidrv4: "IPv4 \u0434\u044B\u044F\u043F\u0430\u0437\u043E\u043D", - cidrv6: "IPv6 \u0434\u044B\u044F\u043F\u0430\u0437\u043E\u043D", - base64: "\u0440\u0430\u0434\u043E\u043A \u0443 \u0444\u0430\u0440\u043C\u0430\u0446\u0435 base64", - base64url: "\u0440\u0430\u0434\u043E\u043A \u0443 \u0444\u0430\u0440\u043C\u0430\u0446\u0435 base64url", - json_string: "JSON \u0440\u0430\u0434\u043E\u043A", - e164: "\u043D\u0443\u043C\u0430\u0440 E.164", - jwt: "JWT", - template_literal: "\u0443\u0432\u043E\u0434" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u043B\u0456\u043A", - array: "\u043C\u0430\u0441\u0456\u045E" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434: \u0447\u0430\u043A\u0430\u045E\u0441\u044F instanceof ${issue2.expected}, \u0430\u0442\u0440\u044B\u043C\u0430\u043D\u0430 ${received}`; - } - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434: \u0447\u0430\u043A\u0430\u045E\u0441\u044F ${expected}, \u0430\u0442\u0440\u044B\u043C\u0430\u043D\u0430 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F ${stringifyPrimitive(issue2.values[0])}`; - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0432\u0430\u0440\u044B\u044F\u043D\u0442: \u0447\u0430\u043A\u0430\u045E\u0441\u044F \u0430\u0434\u0437\u0456\u043D \u0437 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - const maxValue = Number(issue2.maximum); - const unit = getBelarusianPlural(maxValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); - return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u0432\u044F\u043B\u0456\u043A\u0456: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin ?? "\u0437\u043D\u0430\u0447\u044D\u043D\u043D\u0435"} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 ${sizing.verb} ${adj}${issue2.maximum.toString()} ${unit}`; - } - return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u0432\u044F\u043B\u0456\u043A\u0456: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin ?? 
"\u0437\u043D\u0430\u0447\u044D\u043D\u043D\u0435"} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 \u0431\u044B\u0446\u044C ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - const minValue = Number(issue2.minimum); - const unit = getBelarusianPlural(minValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); - return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u043C\u0430\u043B\u044B: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 ${sizing.verb} ${adj}${issue2.minimum.toString()} ${unit}`; - } - return `\u0417\u0430\u043D\u0430\u0434\u0442\u0430 \u043C\u0430\u043B\u044B: \u0447\u0430\u043A\u0430\u043B\u0430\u0441\u044F, \u0448\u0442\u043E ${issue2.origin} \u043F\u0430\u0432\u0456\u043D\u043D\u0430 \u0431\u044B\u0446\u044C ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u043F\u0430\u0447\u044B\u043D\u0430\u0446\u0446\u0430 \u0437 "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0437\u0430\u043A\u0430\u043D\u0447\u0432\u0430\u0446\u0446\u0430 \u043D\u0430 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0437\u043C\u044F\u0448\u0447\u0430\u0446\u044C "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u0440\u0430\u0434\u043E\u043A: 
\u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0430\u0434\u043F\u0430\u0432\u044F\u0434\u0430\u0446\u044C \u0448\u0430\u0431\u043B\u043E\u043D\u0443 ${_issue.pattern}`; - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u043B\u0456\u043A: \u043F\u0430\u0432\u0456\u043D\u0435\u043D \u0431\u044B\u0446\u044C \u043A\u0440\u0430\u0442\u043D\u044B\u043C ${issue2.divisor}`; - case "unrecognized_keys": - return `\u041D\u0435\u0440\u0430\u0441\u043F\u0430\u0437\u043D\u0430\u043D\u044B ${issue2.keys.length > 1 ? "\u043A\u043B\u044E\u0447\u044B" : "\u043A\u043B\u044E\u0447"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u043A\u043B\u044E\u0447 \u0443 ${issue2.origin}`; - case "invalid_union": - return "\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434"; - case "invalid_element": - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u0430\u0435 \u0437\u043D\u0430\u0447\u044D\u043D\u043D\u0435 \u045E ${issue2.origin}`; - default: - return `\u041D\u044F\u043F\u0440\u0430\u0432\u0456\u043B\u044C\u043D\u044B \u045E\u0432\u043E\u0434`; - } - }; -}; -function be_default() { - return { - localeError: error3() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/bg.js -var error4 = () => { - const Sizable = { - string: { unit: "\u0441\u0438\u043C\u0432\u043E\u043B\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" }, - file: { unit: "\u0431\u0430\u0439\u0442\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" }, - array: { unit: "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" }, - set: { unit: 
"\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430", verb: "\u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u0432\u0445\u043E\u0434", - email: "\u0438\u043C\u0435\u0439\u043B \u0430\u0434\u0440\u0435\u0441", - url: "URL", - emoji: "\u0435\u043C\u043E\u0434\u0436\u0438", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u0432\u0440\u0435\u043C\u0435", - date: "ISO \u0434\u0430\u0442\u0430", - time: "ISO \u0432\u0440\u0435\u043C\u0435", - duration: "ISO \u043F\u0440\u043E\u0434\u044A\u043B\u0436\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442", - ipv4: "IPv4 \u0430\u0434\u0440\u0435\u0441", - ipv6: "IPv6 \u0430\u0434\u0440\u0435\u0441", - cidrv4: "IPv4 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", - cidrv6: "IPv6 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", - base64: "base64-\u043A\u043E\u0434\u0438\u0440\u0430\u043D \u043D\u0438\u0437", - base64url: "base64url-\u043A\u043E\u0434\u0438\u0440\u0430\u043D \u043D\u0438\u0437", - json_string: "JSON \u043D\u0438\u0437", - e164: "E.164 \u043D\u043E\u043C\u0435\u0440", - jwt: "JWT", - template_literal: "\u0432\u0445\u043E\u0434" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0447\u0438\u0441\u043B\u043E", - array: "\u043C\u0430\u0441\u0438\u0432" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434: \u043E\u0447\u0430\u043A\u0432\u0430\u043D instanceof ${issue2.expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D ${received}`; - } - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434: \u043E\u0447\u0430\u043A\u0432\u0430\u043D ${expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434: \u043E\u0447\u0430\u043A\u0432\u0430\u043D ${stringifyPrimitive(issue2.values[0])}`; - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430 \u043E\u043F\u0446\u0438\u044F: \u043E\u0447\u0430\u043A\u0432\u0430\u043D\u043E \u0435\u0434\u043D\u043E \u043E\u0442 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u0422\u0432\u044A\u0440\u0434\u0435 \u0433\u043E\u043B\u044F\u043C\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin ?? "\u0441\u0442\u043E\u0439\u043D\u043E\u0441\u0442"} \u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430"}`; - return `\u0422\u0432\u044A\u0440\u0434\u0435 \u0433\u043E\u043B\u044F\u043C\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin ?? "\u0441\u0442\u043E\u0439\u043D\u043E\u0441\u0442"} \u0434\u0430 \u0431\u044A\u0434\u0435 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0422\u0432\u044A\u0440\u0434\u0435 \u043C\u0430\u043B\u043A\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin} \u0434\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430 ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u0422\u0432\u044A\u0440\u0434\u0435 \u043C\u0430\u043B\u043A\u043E: \u043E\u0447\u0430\u043A\u0432\u0430 \u0441\u0435 ${issue2.origin} \u0434\u0430 \u0431\u044A\u0434\u0435 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0437\u0430\u043F\u043E\u0447\u0432\u0430 \u0441 "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0437\u0430\u0432\u044A\u0440\u0448\u0432\u0430 \u0441 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0432\u043A\u043B\u044E\u0447\u0432\u0430 "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043D\u0438\u0437: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0441\u044A\u0432\u043F\u0430\u0434\u0430 \u0441 ${_issue.pattern}`; - let invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D"; - if (_issue.format === "emoji") - invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E"; - if (_issue.format === "datetime") - invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E"; - if (_issue.format === "date") - invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430"; - if 
(_issue.format === "time") - invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E"; - if (_issue.format === "duration") - invalid_adj = "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430"; - return `${invalid_adj} ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u043E \u0447\u0438\u0441\u043B\u043E: \u0442\u0440\u044F\u0431\u0432\u0430 \u0434\u0430 \u0431\u044A\u0434\u0435 \u043A\u0440\u0430\u0442\u043D\u043E \u043D\u0430 ${issue2.divisor}`; - case "unrecognized_keys": - return `\u041D\u0435\u0440\u0430\u0437\u043F\u043E\u0437\u043D\u0430\u0442${issue2.keys.length > 1 ? "\u0438" : ""} \u043A\u043B\u044E\u0447${issue2.keys.length > 1 ? "\u043E\u0432\u0435" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u043A\u043B\u044E\u0447 \u0432 ${issue2.origin}`; - case "invalid_union": - return "\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434"; - case "invalid_element": - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u043D\u0430 \u0441\u0442\u043E\u0439\u043D\u043E\u0441\u0442 \u0432 ${issue2.origin}`; - default: - return `\u041D\u0435\u0432\u0430\u043B\u0438\u0434\u0435\u043D \u0432\u0445\u043E\u0434`; - } - }; -}; -function bg_default() { - return { - localeError: error4() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ca.js -var error5 = () => { - const Sizable = { - string: { unit: "car\xE0cters", verb: "contenir" }, - file: { unit: "bytes", verb: "contenir" }, - array: { unit: "elements", verb: "contenir" }, - set: { unit: "elements", verb: "contenir" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "entrada", - email: "adre\xE7a electr\xF2nica", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "data i hora ISO", - date: "data ISO", - time: "hora ISO", - duration: "durada ISO", - ipv4: "adre\xE7a IPv4", - ipv6: "adre\xE7a IPv6", - cidrv4: "rang IPv4", - cidrv6: "rang IPv6", - base64: "cadena codificada en base64", - base64url: "cadena codificada en base64url", - json_string: "cadena JSON", - e164: "n\xFAmero E.164", - jwt: "JWT", - template_literal: "entrada" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Tipus inv\xE0lid: s'esperava instanceof ${issue2.expected}, s'ha rebut ${received}`; - } - return `Tipus inv\xE0lid: s'esperava ${expected}, s'ha rebut ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Valor inv\xE0lid: s'esperava ${stringifyPrimitive(issue2.values[0])}`; - return `Opci\xF3 inv\xE0lida: s'esperava una de ${joinValues(issue2.values, " o ")}`; - case "too_big": { - const adj = issue2.inclusive ? "com a m\xE0xim" : "menys de"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Massa gran: s'esperava que ${issue2.origin ?? "el valor"} contingu\xE9s ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "elements"}`; - return `Massa gran: s'esperava que ${issue2.origin ?? "el valor"} fos ${adj} ${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
"com a m\xEDnim" : "m\xE9s de"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Massa petit: s'esperava que ${issue2.origin} contingu\xE9s ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Massa petit: s'esperava que ${issue2.origin} fos ${adj} ${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Format inv\xE0lid: ha de comen\xE7ar amb "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `Format inv\xE0lid: ha d'acabar amb "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Format inv\xE0lid: ha d'incloure "${_issue.includes}"`; - if (_issue.format === "regex") - return `Format inv\xE0lid: ha de coincidir amb el patr\xF3 ${_issue.pattern}`; - return `Format inv\xE0lid per a ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `N\xFAmero inv\xE0lid: ha de ser m\xFAltiple de ${issue2.divisor}`; - case "unrecognized_keys": - return `Clau${issue2.keys.length > 1 ? "s" : ""} no reconeguda${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Clau inv\xE0lida a ${issue2.origin}`; - case "invalid_union": - return "Entrada inv\xE0lida"; - // Could also be "Tipus d'unió invàlid" but "Entrada invàlida" is more general - case "invalid_element": - return `Element inv\xE0lid a ${issue2.origin}`; - default: - return `Entrada inv\xE0lida`; - } - }; -}; -function ca_default() { - return { - localeError: error5() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/cs.js -var error6 = () => { - const Sizable = { - string: { unit: "znak\u016F", verb: "m\xEDt" }, - file: { unit: "bajt\u016F", verb: "m\xEDt" }, - array: { unit: "prvk\u016F", verb: "m\xEDt" }, - set: { unit: "prvk\u016F", verb: "m\xEDt" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "regul\xE1rn\xED v\xFDraz", - email: "e-mailov\xE1 adresa", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "datum a \u010Das ve form\xE1tu ISO", - date: "datum ve form\xE1tu ISO", - time: "\u010Das ve form\xE1tu ISO", - duration: "doba trv\xE1n\xED ISO", - ipv4: "IPv4 adresa", - ipv6: "IPv6 adresa", - cidrv4: "rozsah IPv4", - cidrv6: "rozsah IPv6", - base64: "\u0159et\u011Bzec zak\xF3dovan\xFD ve form\xE1tu base64", - base64url: "\u0159et\u011Bzec zak\xF3dovan\xFD ve form\xE1tu base64url", - json_string: "\u0159et\u011Bzec ve form\xE1tu JSON", - e164: "\u010D\xEDslo E.164", - jwt: "JWT", - template_literal: "vstup" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u010D\xEDslo", - string: "\u0159et\u011Bzec", - function: "funkce", - array: "pole" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Neplatn\xFD vstup: o\u010Dek\xE1v\xE1no instanceof ${issue2.expected}, obdr\u017Eeno ${received}`; - } - return `Neplatn\xFD vstup: o\u010Dek\xE1v\xE1no ${expected}, obdr\u017Eeno ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Neplatn\xFD vstup: o\u010Dek\xE1v\xE1no ${stringifyPrimitive(issue2.values[0])}`; - return `Neplatn\xE1 mo\u017Enost: o\u010Dek\xE1v\xE1na jedna z hodnot ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Hodnota je p\u0159\xEDli\u0161 velk\xE1: ${issue2.origin ?? 
"hodnota"} mus\xED m\xEDt ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "prvk\u016F"}`; - } - return `Hodnota je p\u0159\xEDli\u0161 velk\xE1: ${issue2.origin ?? "hodnota"} mus\xED b\xFDt ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Hodnota je p\u0159\xEDli\u0161 mal\xE1: ${issue2.origin ?? "hodnota"} mus\xED m\xEDt ${adj}${issue2.minimum.toString()} ${sizing.unit ?? "prvk\u016F"}`; - } - return `Hodnota je p\u0159\xEDli\u0161 mal\xE1: ${issue2.origin ?? "hodnota"} mus\xED b\xFDt ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Neplatn\xFD \u0159et\u011Bzec: mus\xED za\u010D\xEDnat na "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Neplatn\xFD \u0159et\u011Bzec: mus\xED kon\u010Dit na "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Neplatn\xFD \u0159et\u011Bzec: mus\xED obsahovat "${_issue.includes}"`; - if (_issue.format === "regex") - return `Neplatn\xFD \u0159et\u011Bzec: mus\xED odpov\xEDdat vzoru ${_issue.pattern}`; - return `Neplatn\xFD form\xE1t ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `Neplatn\xE9 \u010D\xEDslo: mus\xED b\xFDt n\xE1sobkem ${issue2.divisor}`; - case "unrecognized_keys": - return `Nezn\xE1m\xE9 kl\xED\u010De: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Neplatn\xFD kl\xED\u010D v ${issue2.origin}`; - case "invalid_union": - return "Neplatn\xFD vstup"; - case "invalid_element": - return `Neplatn\xE1 hodnota v ${issue2.origin}`; - default: - return `Neplatn\xFD vstup`; - } - }; -}; -function cs_default() { - return { - localeError: error6() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/da.js -var error7 = () => { - const Sizable = { - string: { unit: "tegn", verb: "havde" }, - file: { unit: "bytes", verb: "havde" }, - array: { unit: "elementer", verb: "indeholdt" }, - set: { unit: "elementer", verb: "indeholdt" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "input", - email: "e-mailadresse", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO dato- og klokkesl\xE6t", - date: "ISO-dato", - time: "ISO-klokkesl\xE6t", - duration: "ISO-varighed", - ipv4: "IPv4-omr\xE5de", - ipv6: "IPv6-omr\xE5de", - cidrv4: "IPv4-spektrum", - cidrv6: "IPv6-spektrum", - base64: "base64-kodet streng", - base64url: "base64url-kodet streng", - json_string: "JSON-streng", - e164: "E.164-nummer", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN", - string: "streng", - number: "tal", - boolean: "boolean", - array: "liste", - object: "objekt", - set: "s\xE6t", - file: "fil" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? 
issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Ugyldigt input: forventede instanceof ${issue2.expected}, fik ${received}`; - } - return `Ugyldigt input: forventede ${expected}, fik ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Ugyldig v\xE6rdi: forventede ${stringifyPrimitive(issue2.values[0])}`; - return `Ugyldigt valg: forventede en af f\xF8lgende ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - const origin = TypeDictionary[issue2.origin] ?? issue2.origin; - if (sizing) - return `For stor: forventede ${origin ?? "value"} ${sizing.verb} ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "elementer"}`; - return `For stor: forventede ${origin ?? "value"} havde ${adj} ${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - const origin = TypeDictionary[issue2.origin] ?? issue2.origin; - if (sizing) { - return `For lille: forventede ${origin} ${sizing.verb} ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; - } - return `For lille: forventede ${origin} havde ${adj} ${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Ugyldig streng: skal starte med "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Ugyldig streng: skal ende med "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Ugyldig streng: skal indeholde "${_issue.includes}"`; - if (_issue.format === "regex") - return `Ugyldig streng: skal matche m\xF8nsteret ${_issue.pattern}`; - return `Ugyldig ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `Ugyldigt tal: skal v\xE6re deleligt med ${issue2.divisor}`; - case "unrecognized_keys": - return `${issue2.keys.length > 1 ? "Ukendte n\xF8gler" : "Ukendt n\xF8gle"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Ugyldig n\xF8gle i ${issue2.origin}`; - case "invalid_union": - return "Ugyldigt input: matcher ingen af de tilladte typer"; - case "invalid_element": - return `Ugyldig v\xE6rdi i ${issue2.origin}`; - default: - return `Ugyldigt input`; - } - }; -}; -function da_default() { - return { - localeError: error7() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/de.js -var error8 = () => { - const Sizable = { - string: { unit: "Zeichen", verb: "zu haben" }, - file: { unit: "Bytes", verb: "zu haben" }, - array: { unit: "Elemente", verb: "zu haben" }, - set: { unit: "Elemente", verb: "zu haben" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "Eingabe", - email: "E-Mail-Adresse", - url: "URL", - emoji: "Emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO-Datum und -Uhrzeit", - date: "ISO-Datum", - time: "ISO-Uhrzeit", - duration: "ISO-Dauer", - ipv4: "IPv4-Adresse", - ipv6: "IPv6-Adresse", - cidrv4: "IPv4-Bereich", - cidrv6: "IPv6-Bereich", - base64: "Base64-codierter String", - base64url: "Base64-URL-codierter String", - json_string: "JSON-String", - e164: "E.164-Nummer", - jwt: "JWT", - template_literal: "Eingabe" - }; - const TypeDictionary = { - nan: "NaN", - number: "Zahl", - array: "Array" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Ung\xFCltige Eingabe: erwartet instanceof ${issue2.expected}, erhalten ${received}`; - } - return `Ung\xFCltige Eingabe: erwartet ${expected}, erhalten ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Ung\xFCltige Eingabe: erwartet ${stringifyPrimitive(issue2.values[0])}`; - return `Ung\xFCltige Option: erwartet eine von ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Zu gro\xDF: erwartet, dass ${issue2.origin ?? "Wert"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "Elemente"} hat`; - return `Zu gro\xDF: erwartet, dass ${issue2.origin ?? "Wert"} ${adj}${issue2.maximum.toString()} ist`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Zu klein: erwartet, dass ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} hat`; - } - return `Zu klein: erwartet, dass ${issue2.origin} ${adj}${issue2.minimum.toString()} ist`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Ung\xFCltiger String: muss mit "${_issue.prefix}" beginnen`; - if (_issue.format === "ends_with") - return `Ung\xFCltiger String: muss mit "${_issue.suffix}" enden`; - if (_issue.format === "includes") - return `Ung\xFCltiger String: muss "${_issue.includes}" enthalten`; - if (_issue.format === "regex") - return `Ung\xFCltiger String: muss dem Muster ${_issue.pattern} entsprechen`; - return `Ung\xFCltig: ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Ung\xFCltige Zahl: muss ein Vielfaches von ${issue2.divisor} sein`; - case "unrecognized_keys": - return `${issue2.keys.length > 1 ? 
"Unbekannte Schl\xFCssel" : "Unbekannter Schl\xFCssel"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Ung\xFCltiger Schl\xFCssel in ${issue2.origin}`; - case "invalid_union": - return "Ung\xFCltige Eingabe"; - case "invalid_element": - return `Ung\xFCltiger Wert in ${issue2.origin}`; - default: - return `Ung\xFCltige Eingabe`; - } - }; -}; -function de_default() { - return { - localeError: error8() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/en.js -var error9 = () => { - const Sizable = { - string: { unit: "characters", verb: "to have" }, - file: { unit: "bytes", verb: "to have" }, - array: { unit: "items", verb: "to have" }, - set: { unit: "items", verb: "to have" }, - map: { unit: "entries", verb: "to have" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "input", - email: "email address", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO datetime", - date: "ISO date", - time: "ISO time", - duration: "ISO duration", - ipv4: "IPv4 address", - ipv6: "IPv6 address", - mac: "MAC address", - cidrv4: "IPv4 range", - cidrv6: "IPv6 range", - base64: "base64-encoded string", - base64url: "base64url-encoded string", - json_string: "JSON string", - e164: "E.164 number", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - // Compatibility: "nan" -> "NaN" for display - nan: "NaN" - // All other type names omitted - they fall back to raw values via ?? operator - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - return `Invalid input: expected ${expected}, received ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Invalid input: expected ${stringifyPrimitive(issue2.values[0])}`; - return `Invalid option: expected one of ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Too big: expected ${issue2.origin ?? "value"} to have ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elements"}`; - return `Too big: expected ${issue2.origin ?? "value"} to be ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Too small: expected ${issue2.origin} to have ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Too small: expected ${issue2.origin} to be ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Invalid string: must start with "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `Invalid string: must end with "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Invalid string: must include "${_issue.includes}"`; - if (_issue.format === "regex") - return `Invalid string: must match pattern ${_issue.pattern}`; - return `Invalid ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Invalid number: must be a multiple of ${issue2.divisor}`; - case "unrecognized_keys": - return `Unrecognized key${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Invalid key in ${issue2.origin}`; - case "invalid_union": - return "Invalid input"; - case "invalid_element": - return `Invalid value in ${issue2.origin}`; - default: - return `Invalid input`; - } - }; -}; -function en_default() { - return { - localeError: error9() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/eo.js -var error10 = () => { - const Sizable = { - string: { unit: "karaktrojn", verb: "havi" }, - file: { unit: "bajtojn", verb: "havi" }, - array: { unit: "elementojn", verb: "havi" }, - set: { unit: "elementojn", verb: "havi" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "enigo", - email: "retadreso", - url: "URL", - emoji: "emo\u011Dio", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO-datotempo", - date: "ISO-dato", - time: "ISO-tempo", - duration: "ISO-da\u016Dro", - ipv4: "IPv4-adreso", - ipv6: "IPv6-adreso", - cidrv4: "IPv4-rango", - cidrv6: "IPv6-rango", - base64: "64-ume kodita karaktraro", - base64url: "URL-64-ume kodita karaktraro", - json_string: "JSON-karaktraro", - e164: "E.164-nombro", - jwt: "JWT", - template_literal: "enigo" - }; - const TypeDictionary = { - nan: "NaN", - number: "nombro", - array: "tabelo", - null: "senvalora" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Nevalida enigo: atendi\u011Dis instanceof ${issue2.expected}, ricevi\u011Dis ${received}`; - } - return `Nevalida enigo: atendi\u011Dis ${expected}, ricevi\u011Dis ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Nevalida enigo: atendi\u011Dis ${stringifyPrimitive(issue2.values[0])}`; - return `Nevalida opcio: atendi\u011Dis unu el ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Tro granda: atendi\u011Dis ke ${issue2.origin ?? "valoro"} havu ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementojn"}`; - return `Tro granda: atendi\u011Dis ke ${issue2.origin ?? "valoro"} havu ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Tro malgranda: atendi\u011Dis ke ${issue2.origin} havu ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Tro malgranda: atendi\u011Dis ke ${issue2.origin} estu ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Nevalida karaktraro: devas komenci\u011Di per "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Nevalida karaktraro: devas fini\u011Di per "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Nevalida karaktraro: devas inkluzivi "${_issue.includes}"`; - if (_issue.format === "regex") - return `Nevalida karaktraro: devas kongrui kun la modelo ${_issue.pattern}`; - return `Nevalida ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Nevalida nombro: devas esti oblo de ${issue2.divisor}`; - case "unrecognized_keys": - return `Nekonata${issue2.keys.length > 1 ? "j" : ""} \u015Dlosilo${issue2.keys.length > 1 ? 
"j" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Nevalida \u015Dlosilo en ${issue2.origin}`; - case "invalid_union": - return "Nevalida enigo"; - case "invalid_element": - return `Nevalida valoro en ${issue2.origin}`; - default: - return `Nevalida enigo`; - } - }; -}; -function eo_default() { - return { - localeError: error10() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/es.js -var error11 = () => { - const Sizable = { - string: { unit: "caracteres", verb: "tener" }, - file: { unit: "bytes", verb: "tener" }, - array: { unit: "elementos", verb: "tener" }, - set: { unit: "elementos", verb: "tener" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "entrada", - email: "direcci\xF3n de correo electr\xF3nico", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "fecha y hora ISO", - date: "fecha ISO", - time: "hora ISO", - duration: "duraci\xF3n ISO", - ipv4: "direcci\xF3n IPv4", - ipv6: "direcci\xF3n IPv6", - cidrv4: "rango IPv4", - cidrv6: "rango IPv6", - base64: "cadena codificada en base64", - base64url: "URL codificada en base64", - json_string: "cadena JSON", - e164: "n\xFAmero E.164", - jwt: "JWT", - template_literal: "entrada" - }; - const TypeDictionary = { - nan: "NaN", - string: "texto", - number: "n\xFAmero", - boolean: "booleano", - array: "arreglo", - object: "objeto", - set: "conjunto", - file: "archivo", - date: "fecha", - bigint: "n\xFAmero grande", - symbol: "s\xEDmbolo", - undefined: "indefinido", - null: "nulo", - function: "funci\xF3n", - map: "mapa", - record: "registro", - tuple: "tupla", - enum: "enumeraci\xF3n", - union: "uni\xF3n", - literal: "literal", - promise: "promesa", - void: "vac\xEDo", - never: "nunca", - unknown: "desconocido", - any: "cualquiera" - 
}; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Entrada inv\xE1lida: se esperaba instanceof ${issue2.expected}, recibido ${received}`; - } - return `Entrada inv\xE1lida: se esperaba ${expected}, recibido ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Entrada inv\xE1lida: se esperaba ${stringifyPrimitive(issue2.values[0])}`; - return `Opci\xF3n inv\xE1lida: se esperaba una de ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - const origin = TypeDictionary[issue2.origin] ?? issue2.origin; - if (sizing) - return `Demasiado grande: se esperaba que ${origin ?? "valor"} tuviera ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementos"}`; - return `Demasiado grande: se esperaba que ${origin ?? "valor"} fuera ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - const origin = TypeDictionary[issue2.origin] ?? 
issue2.origin; - if (sizing) { - return `Demasiado peque\xF1o: se esperaba que ${origin} tuviera ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Demasiado peque\xF1o: se esperaba que ${origin} fuera ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Cadena inv\xE1lida: debe comenzar con "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Cadena inv\xE1lida: debe terminar en "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Cadena inv\xE1lida: debe incluir "${_issue.includes}"`; - if (_issue.format === "regex") - return `Cadena inv\xE1lida: debe coincidir con el patr\xF3n ${_issue.pattern}`; - return `Inv\xE1lido ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `N\xFAmero inv\xE1lido: debe ser m\xFAltiplo de ${issue2.divisor}`; - case "unrecognized_keys": - return `Llave${issue2.keys.length > 1 ? "s" : ""} desconocida${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Llave inv\xE1lida en ${TypeDictionary[issue2.origin] ?? issue2.origin}`; - case "invalid_union": - return "Entrada inv\xE1lida"; - case "invalid_element": - return `Valor inv\xE1lido en ${TypeDictionary[issue2.origin] ?? 
issue2.origin}`; - default: - return `Entrada inv\xE1lida`; - } - }; -}; -function es_default() { - return { - localeError: error11() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fa.js -var error12 = () => { - const Sizable = { - string: { unit: "\u06A9\u0627\u0631\u0627\u06A9\u062A\u0631", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" }, - file: { unit: "\u0628\u0627\u06CC\u062A", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" }, - array: { unit: "\u0622\u06CC\u062A\u0645", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" }, - set: { unit: "\u0622\u06CC\u062A\u0645", verb: "\u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u0648\u0631\u0648\u062F\u06CC", - email: "\u0622\u062F\u0631\u0633 \u0627\u06CC\u0645\u06CC\u0644", - url: "URL", - emoji: "\u0627\u06CC\u0645\u0648\u062C\u06CC", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u062A\u0627\u0631\u06CC\u062E \u0648 \u0632\u0645\u0627\u0646 \u0627\u06CC\u0632\u0648", - date: "\u062A\u0627\u0631\u06CC\u062E \u0627\u06CC\u0632\u0648", - time: "\u0632\u0645\u0627\u0646 \u0627\u06CC\u0632\u0648", - duration: "\u0645\u062F\u062A \u0632\u0645\u0627\u0646 \u0627\u06CC\u0632\u0648", - ipv4: "IPv4 \u0622\u062F\u0631\u0633", - ipv6: "IPv6 \u0622\u062F\u0631\u0633", - cidrv4: "IPv4 \u062F\u0627\u0645\u0646\u0647", - cidrv6: "IPv6 \u062F\u0627\u0645\u0646\u0647", - base64: "base64-encoded \u0631\u0634\u062A\u0647", - base64url: "base64url-encoded \u0631\u0634\u062A\u0647", - json_string: "JSON \u0631\u0634\u062A\u0647", - e164: "E.164 \u0639\u062F\u062F", - jwt: "JWT", - template_literal: "\u0648\u0631\u0648\u062F\u06CC" - }; - const TypeDictionary = { - nan: "NaN", - number: 
"\u0639\u062F\u062F", - array: "\u0622\u0631\u0627\u06CC\u0647" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A instanceof ${issue2.expected} \u0645\u06CC\u200C\u0628\u0648\u062F\u060C ${received} \u062F\u0631\u06CC\u0627\u0641\u062A \u0634\u062F`; - } - return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A ${expected} \u0645\u06CC\u200C\u0628\u0648\u062F\u060C ${received} \u062F\u0631\u06CC\u0627\u0641\u062A \u0634\u062F`; - } - case "invalid_value": - if (issue2.values.length === 1) { - return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A ${stringifyPrimitive(issue2.values[0])} \u0645\u06CC\u200C\u0628\u0648\u062F`; - } - return `\u06AF\u0632\u06CC\u0646\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0645\u06CC\u200C\u0628\u0627\u06CC\u0633\u062A \u06CC\u06A9\u06CC \u0627\u0632 ${joinValues(issue2.values, "|")} \u0645\u06CC\u200C\u0628\u0648\u062F`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u062E\u06CC\u0644\u06CC \u0628\u0632\u0631\u06AF: ${issue2.origin ?? "\u0645\u0642\u062F\u0627\u0631"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0635\u0631"} \u0628\u0627\u0634\u062F`; - } - return `\u062E\u06CC\u0644\u06CC \u0628\u0632\u0631\u06AF: ${issue2.origin ?? 
"\u0645\u0642\u062F\u0627\u0631"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} \u0628\u0627\u0634\u062F`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u062E\u06CC\u0644\u06CC \u06A9\u0648\u0686\u06A9: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} ${sizing.unit} \u0628\u0627\u0634\u062F`; - } - return `\u062E\u06CC\u0644\u06CC \u06A9\u0648\u0686\u06A9: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} \u0628\u0627\u0634\u062F`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0628\u0627 "${_issue.prefix}" \u0634\u0631\u0648\u0639 \u0634\u0648\u062F`; - } - if (_issue.format === "ends_with") { - return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0628\u0627 "${_issue.suffix}" \u062A\u0645\u0627\u0645 \u0634\u0648\u062F`; - } - if (_issue.format === "includes") { - return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0634\u0627\u0645\u0644 "${_issue.includes}" \u0628\u0627\u0634\u062F`; - } - if (_issue.format === "regex") { - return `\u0631\u0634\u062A\u0647 \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0628\u0627 \u0627\u0644\u06AF\u0648\u06CC ${_issue.pattern} \u0645\u0637\u0627\u0628\u0642\u062A \u062F\u0627\u0634\u062A\u0647 \u0628\u0627\u0634\u062F`; - } - return `${FormatDictionary[_issue.format] ?? 
issue2.format} \u0646\u0627\u0645\u0639\u062A\u0628\u0631`; - } - case "not_multiple_of": - return `\u0639\u062F\u062F \u0646\u0627\u0645\u0639\u062A\u0628\u0631: \u0628\u0627\u06CC\u062F \u0645\u0636\u0631\u0628 ${issue2.divisor} \u0628\u0627\u0634\u062F`; - case "unrecognized_keys": - return `\u06A9\u0644\u06CC\u062F${issue2.keys.length > 1 ? "\u0647\u0627\u06CC" : ""} \u0646\u0627\u0634\u0646\u0627\u0633: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u06A9\u0644\u06CC\u062F \u0646\u0627\u0634\u0646\u0627\u0633 \u062F\u0631 ${issue2.origin}`; - case "invalid_union": - return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631`; - case "invalid_element": - return `\u0645\u0642\u062F\u0627\u0631 \u0646\u0627\u0645\u0639\u062A\u0628\u0631 \u062F\u0631 ${issue2.origin}`; - default: - return `\u0648\u0631\u0648\u062F\u06CC \u0646\u0627\u0645\u0639\u062A\u0628\u0631`; - } - }; -}; -function fa_default() { - return { - localeError: error12() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fi.js -var error13 = () => { - const Sizable = { - string: { unit: "merkki\xE4", subject: "merkkijonon" }, - file: { unit: "tavua", subject: "tiedoston" }, - array: { unit: "alkiota", subject: "listan" }, - set: { unit: "alkiota", subject: "joukon" }, - number: { unit: "", subject: "luvun" }, - bigint: { unit: "", subject: "suuren kokonaisluvun" }, - int: { unit: "", subject: "kokonaisluvun" }, - date: { unit: "", subject: "p\xE4iv\xE4m\xE4\xE4r\xE4n" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "s\xE4\xE4nn\xF6llinen lauseke", - email: "s\xE4hk\xF6postiosoite", - url: "URL-osoite", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO-aikaleima", - date: "ISO-p\xE4iv\xE4m\xE4\xE4r\xE4", - time: "ISO-aika", - duration: "ISO-kesto", - ipv4: "IPv4-osoite", - ipv6: "IPv6-osoite", - cidrv4: "IPv4-alue", - cidrv6: "IPv6-alue", - base64: "base64-koodattu merkkijono", - base64url: "base64url-koodattu merkkijono", - json_string: "JSON-merkkijono", - e164: "E.164-luku", - jwt: "JWT", - template_literal: "templaattimerkkijono" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Virheellinen tyyppi: odotettiin instanceof ${issue2.expected}, oli ${received}`; - } - return `Virheellinen tyyppi: odotettiin ${expected}, oli ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Virheellinen sy\xF6te: t\xE4ytyy olla ${stringifyPrimitive(issue2.values[0])}`; - return `Virheellinen valinta: t\xE4ytyy olla yksi seuraavista: ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Liian suuri: ${sizing.subject} t\xE4ytyy olla ${adj}${issue2.maximum.toString()} ${sizing.unit}`.trim(); - } - return `Liian suuri: arvon t\xE4ytyy olla ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Liian pieni: ${sizing.subject} t\xE4ytyy olla ${adj}${issue2.minimum.toString()} ${sizing.unit}`.trim(); - } - return `Liian pieni: arvon t\xE4ytyy olla ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Virheellinen sy\xF6te: t\xE4ytyy alkaa "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Virheellinen sy\xF6te: t\xE4ytyy loppua "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Virheellinen sy\xF6te: t\xE4ytyy sis\xE4lt\xE4\xE4 "${_issue.includes}"`; - if (_issue.format === "regex") { - return `Virheellinen sy\xF6te: t\xE4ytyy vastata s\xE4\xE4nn\xF6llist\xE4 lauseketta ${_issue.pattern}`; - } - return `Virheellinen ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Virheellinen luku: t\xE4ytyy olla luvun ${issue2.divisor} monikerta`; - case "unrecognized_keys": - return `${issue2.keys.length > 1 ? "Tuntemattomat avaimet" : "Tuntematon avain"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return "Virheellinen avain tietueessa"; - case "invalid_union": - return "Virheellinen unioni"; - case "invalid_element": - return "Virheellinen arvo joukossa"; - default: - return `Virheellinen sy\xF6te`; - } - }; -}; -function fi_default() { - return { - localeError: error13() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fr.js -var error14 = () => { - const Sizable = { - string: { unit: "caract\xE8res", verb: "avoir" }, - file: { unit: "octets", verb: "avoir" }, - array: { unit: "\xE9l\xE9ments", verb: "avoir" }, - set: { unit: "\xE9l\xE9ments", verb: "avoir" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "entr\xE9e", - email: "adresse e-mail", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "date et heure ISO", - date: "date ISO", - time: "heure ISO", - duration: "dur\xE9e ISO", - ipv4: "adresse IPv4", - ipv6: "adresse IPv6", - cidrv4: "plage IPv4", - cidrv6: "plage IPv6", - base64: "cha\xEEne encod\xE9e en base64", - base64url: "cha\xEEne encod\xE9e en base64url", - json_string: "cha\xEEne JSON", - e164: "num\xE9ro E.164", - jwt: "JWT", - template_literal: "entr\xE9e" - }; - const TypeDictionary = { - nan: "NaN", - number: "nombre", - array: "tableau" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Entr\xE9e invalide : instanceof ${issue2.expected} attendu, ${received} re\xE7u`; - } - return `Entr\xE9e invalide : ${expected} attendu, ${received} re\xE7u`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Entr\xE9e invalide : ${stringifyPrimitive(issue2.values[0])} attendu`; - return `Option invalide : une valeur parmi ${joinValues(issue2.values, "|")} attendue`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Trop grand : ${issue2.origin ?? "valeur"} doit ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\xE9l\xE9ment(s)"}`; - return `Trop grand : ${issue2.origin ?? "valeur"} doit \xEAtre ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Trop petit : ${issue2.origin} doit ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Trop petit : ${issue2.origin} doit \xEAtre ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Cha\xEEne invalide : doit commencer par "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Cha\xEEne invalide : doit se terminer par "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Cha\xEEne invalide : doit inclure "${_issue.includes}"`; - if (_issue.format === "regex") - return `Cha\xEEne invalide : doit correspondre au mod\xE8le ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? issue2.format} invalide`; - } - case "not_multiple_of": - return `Nombre invalide : doit \xEAtre un multiple de ${issue2.divisor}`; - case "unrecognized_keys": - return `Cl\xE9${issue2.keys.length > 1 ? "s" : ""} non reconnue${issue2.keys.length > 1 ? "s" : ""} : ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Cl\xE9 invalide dans ${issue2.origin}`; - case "invalid_union": - return "Entr\xE9e invalide"; - case "invalid_element": - return `Valeur invalide dans ${issue2.origin}`; - default: - return `Entr\xE9e invalide`; - } - }; -}; -function fr_default() { - return { - localeError: error14() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/fr-CA.js -var error15 = () => { - const Sizable = { - string: { unit: "caract\xE8res", verb: "avoir" }, - file: { unit: "octets", verb: "avoir" }, - array: { unit: "\xE9l\xE9ments", verb: "avoir" }, - set: { unit: "\xE9l\xE9ments", verb: "avoir" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "entr\xE9e", - email: "adresse courriel", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "date-heure ISO", - date: "date ISO", - time: "heure ISO", - duration: "dur\xE9e ISO", - ipv4: "adresse IPv4", - ipv6: "adresse IPv6", - cidrv4: "plage IPv4", - cidrv6: "plage IPv6", - base64: "cha\xEEne encod\xE9e en base64", - base64url: "cha\xEEne encod\xE9e en base64url", - json_string: "cha\xEEne JSON", - e164: "num\xE9ro E.164", - jwt: "JWT", - template_literal: "entr\xE9e" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Entr\xE9e invalide : attendu instanceof ${issue2.expected}, re\xE7u ${received}`; - } - return `Entr\xE9e invalide : attendu ${expected}, re\xE7u ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Entr\xE9e invalide : attendu ${stringifyPrimitive(issue2.values[0])}`; - return `Option invalide : attendu l'une des valeurs suivantes ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "\u2264" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Trop grand : attendu que ${issue2.origin ?? "la valeur"} ait ${adj}${issue2.maximum.toString()} ${sizing.unit}`; - return `Trop grand : attendu que ${issue2.origin ?? "la valeur"} soit ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
"\u2265" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Trop petit : attendu que ${issue2.origin} ait ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Trop petit : attendu que ${issue2.origin} soit ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Cha\xEEne invalide : doit commencer par "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `Cha\xEEne invalide : doit se terminer par "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Cha\xEEne invalide : doit inclure "${_issue.includes}"`; - if (_issue.format === "regex") - return `Cha\xEEne invalide : doit correspondre au motif ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? issue2.format} invalide`; - } - case "not_multiple_of": - return `Nombre invalide : doit \xEAtre un multiple de ${issue2.divisor}`; - case "unrecognized_keys": - return `Cl\xE9${issue2.keys.length > 1 ? "s" : ""} non reconnue${issue2.keys.length > 1 ? 
"s" : ""} : ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Cl\xE9 invalide dans ${issue2.origin}`; - case "invalid_union": - return "Entr\xE9e invalide"; - case "invalid_element": - return `Valeur invalide dans ${issue2.origin}`; - default: - return `Entr\xE9e invalide`; - } - }; -}; -function fr_CA_default() { - return { - localeError: error15() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/he.js -var error16 = () => { - const TypeNames = { - string: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA", gender: "f" }, - number: { label: "\u05DE\u05E1\u05E4\u05E8", gender: "m" }, - boolean: { label: "\u05E2\u05E8\u05DA \u05D1\u05D5\u05DC\u05D9\u05D0\u05E0\u05D9", gender: "m" }, - bigint: { label: "BigInt", gender: "m" }, - date: { label: "\u05EA\u05D0\u05E8\u05D9\u05DA", gender: "m" }, - array: { label: "\u05DE\u05E2\u05E8\u05DA", gender: "m" }, - object: { label: "\u05D0\u05D5\u05D1\u05D9\u05D9\u05E7\u05D8", gender: "m" }, - null: { label: "\u05E2\u05E8\u05DA \u05E8\u05D9\u05E7 (null)", gender: "m" }, - undefined: { label: "\u05E2\u05E8\u05DA \u05DC\u05D0 \u05DE\u05D5\u05D2\u05D3\u05E8 (undefined)", gender: "m" }, - symbol: { label: "\u05E1\u05D9\u05DE\u05D1\u05D5\u05DC (Symbol)", gender: "m" }, - function: { label: "\u05E4\u05D5\u05E0\u05E7\u05E6\u05D9\u05D4", gender: "f" }, - map: { label: "\u05DE\u05E4\u05D4 (Map)", gender: "f" }, - set: { label: "\u05E7\u05D1\u05D5\u05E6\u05D4 (Set)", gender: "f" }, - file: { label: "\u05E7\u05D5\u05D1\u05E5", gender: "m" }, - promise: { label: "Promise", gender: "m" }, - NaN: { label: "NaN", gender: "m" }, - unknown: { label: "\u05E2\u05E8\u05DA \u05DC\u05D0 \u05D9\u05D3\u05D5\u05E2", gender: "m" }, - value: { label: "\u05E2\u05E8\u05DA", gender: "m" } - }; - const Sizable = { - string: { unit: "\u05EA\u05D5\u05D5\u05D9\u05DD", shortLabel: "\u05E7\u05E6\u05E8", longLabel: "\u05D0\u05E8\u05D5\u05DA" }, - file: { unit: "\u05D1\u05D9\u05D9\u05D8\u05D9\u05DD", shortLabel: 
"\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" }, - array: { unit: "\u05E4\u05E8\u05D9\u05D8\u05D9\u05DD", shortLabel: "\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" }, - set: { unit: "\u05E4\u05E8\u05D9\u05D8\u05D9\u05DD", shortLabel: "\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" }, - number: { unit: "", shortLabel: "\u05E7\u05D8\u05DF", longLabel: "\u05D2\u05D3\u05D5\u05DC" } - // no unit - }; - const typeEntry = (t) => t ? TypeNames[t] : void 0; - const typeLabel = (t) => { - const e = typeEntry(t); - if (e) - return e.label; - return t ?? TypeNames.unknown.label; - }; - const withDefinite = (t) => `\u05D4${typeLabel(t)}`; - const verbFor = (t) => { - const e = typeEntry(t); - const gender = e?.gender ?? "m"; - return gender === "f" ? "\u05E6\u05E8\u05D9\u05DB\u05D4 \u05DC\u05D4\u05D9\u05D5\u05EA" : "\u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA"; - }; - const getSizing = (origin) => { - if (!origin) - return null; - return Sizable[origin] ?? 
null; - }; - const FormatDictionary = { - regex: { label: "\u05E7\u05DC\u05D8", gender: "m" }, - email: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA \u05D0\u05D9\u05DE\u05D9\u05D9\u05DC", gender: "f" }, - url: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA \u05E8\u05E9\u05EA", gender: "f" }, - emoji: { label: "\u05D0\u05D9\u05DE\u05D5\u05D2'\u05D9", gender: "m" }, - uuid: { label: "UUID", gender: "m" }, - nanoid: { label: "nanoid", gender: "m" }, - guid: { label: "GUID", gender: "m" }, - cuid: { label: "cuid", gender: "m" }, - cuid2: { label: "cuid2", gender: "m" }, - ulid: { label: "ULID", gender: "m" }, - xid: { label: "XID", gender: "m" }, - ksuid: { label: "KSUID", gender: "m" }, - datetime: { label: "\u05EA\u05D0\u05E8\u05D9\u05DA \u05D5\u05D6\u05DE\u05DF ISO", gender: "m" }, - date: { label: "\u05EA\u05D0\u05E8\u05D9\u05DA ISO", gender: "m" }, - time: { label: "\u05D6\u05DE\u05DF ISO", gender: "m" }, - duration: { label: "\u05DE\u05E9\u05DA \u05D6\u05DE\u05DF ISO", gender: "m" }, - ipv4: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA IPv4", gender: "f" }, - ipv6: { label: "\u05DB\u05EA\u05D5\u05D1\u05EA IPv6", gender: "f" }, - cidrv4: { label: "\u05D8\u05D5\u05D5\u05D7 IPv4", gender: "m" }, - cidrv6: { label: "\u05D8\u05D5\u05D5\u05D7 IPv6", gender: "m" }, - base64: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D1\u05D1\u05E1\u05D9\u05E1 64", gender: "f" }, - base64url: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D1\u05D1\u05E1\u05D9\u05E1 64 \u05DC\u05DB\u05EA\u05D5\u05D1\u05D5\u05EA \u05E8\u05E9\u05EA", gender: "f" }, - json_string: { label: "\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA JSON", gender: "f" }, - e164: { label: "\u05DE\u05E1\u05E4\u05E8 E.164", gender: "m" }, - jwt: { label: "JWT", gender: "m" }, - ends_with: { label: "\u05E7\u05DC\u05D8", gender: "m" }, - includes: { label: "\u05E7\u05DC\u05D8", gender: "m" }, - lowercase: { label: "\u05E7\u05DC\u05D8", gender: "m" }, - starts_with: { label: "\u05E7\u05DC\u05D8", gender: "m" }, - uppercase: { label: 
"\u05E7\u05DC\u05D8", gender: "m" } - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expectedKey = issue2.expected; - const expected = TypeDictionary[expectedKey ?? ""] ?? typeLabel(expectedKey); - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? TypeNames[receivedType]?.label ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA instanceof ${issue2.expected}, \u05D4\u05EA\u05E7\u05D1\u05DC ${received}`; - } - return `\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA ${expected}, \u05D4\u05EA\u05E7\u05D1\u05DC ${received}`; - } - case "invalid_value": { - if (issue2.values.length === 1) { - return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D4\u05E2\u05E8\u05DA \u05D7\u05D9\u05D9\u05D1 \u05DC\u05D4\u05D9\u05D5\u05EA ${stringifyPrimitive(issue2.values[0])}`; - } - const stringified = issue2.values.map((v) => stringifyPrimitive(v)); - if (issue2.values.length === 2) { - return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D4\u05D0\u05E4\u05E9\u05E8\u05D5\u05D9\u05D5\u05EA \u05D4\u05DE\u05EA\u05D0\u05D9\u05DE\u05D5\u05EA \u05D4\u05DF ${stringified[0]} \u05D0\u05D5 ${stringified[1]}`; - } - const lastValue = stringified[stringified.length - 1]; - const restValues = stringified.slice(0, -1).join(", "); - return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D4\u05D0\u05E4\u05E9\u05E8\u05D5\u05D9\u05D5\u05EA \u05D4\u05DE\u05EA\u05D0\u05D9\u05DE\u05D5\u05EA \u05D4\u05DF ${restValues} \u05D0\u05D5 ${lastValue}`; - } - case "too_big": { - const sizing = getSizing(issue2.origin); - const subject = withDefinite(issue2.origin ?? "value"); - if (issue2.origin === "string") { - return `${sizing?.longLabel ?? 
"\u05D0\u05E8\u05D5\u05DA"} \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DB\u05D4 \u05DC\u05D4\u05DB\u05D9\u05DC ${issue2.maximum.toString()} ${sizing?.unit ?? ""} ${issue2.inclusive ? "\u05D0\u05D5 \u05E4\u05D7\u05D5\u05EA" : "\u05DC\u05DB\u05DC \u05D4\u05D9\u05D5\u05EA\u05E8"}`.trim(); - } - if (issue2.origin === "number") { - const comparison = issue2.inclusive ? `\u05E7\u05D8\u05DF \u05D0\u05D5 \u05E9\u05D5\u05D5\u05D4 \u05DC-${issue2.maximum}` : `\u05E7\u05D8\u05DF \u05DE-${issue2.maximum}`; - return `\u05D2\u05D3\u05D5\u05DC \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA ${comparison}`; - } - if (issue2.origin === "array" || issue2.origin === "set") { - const verb = issue2.origin === "set" ? "\u05E6\u05E8\u05D9\u05DB\u05D4" : "\u05E6\u05E8\u05D9\u05DA"; - const comparison = issue2.inclusive ? `${issue2.maximum} ${sizing?.unit ?? ""} \u05D0\u05D5 \u05E4\u05D7\u05D5\u05EA` : `\u05E4\u05D7\u05D5\u05EA \u05DE-${issue2.maximum} ${sizing?.unit ?? ""}`; - return `\u05D2\u05D3\u05D5\u05DC \u05DE\u05D3\u05D9: ${subject} ${verb} \u05DC\u05D4\u05DB\u05D9\u05DC ${comparison}`.trim(); - } - const adj = issue2.inclusive ? "<=" : "<"; - const be = verbFor(issue2.origin ?? "value"); - if (sizing?.unit) { - return `${sizing.longLabel} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.maximum.toString()} ${sizing.unit}`; - } - return `${sizing?.longLabel ?? "\u05D2\u05D3\u05D5\u05DC"} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const sizing = getSizing(issue2.origin); - const subject = withDefinite(issue2.origin ?? "value"); - if (issue2.origin === "string") { - return `${sizing?.shortLabel ?? "\u05E7\u05E6\u05E8"} \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DB\u05D4 \u05DC\u05D4\u05DB\u05D9\u05DC ${issue2.minimum.toString()} ${sizing?.unit ?? ""} ${issue2.inclusive ? 
"\u05D0\u05D5 \u05D9\u05D5\u05EA\u05E8" : "\u05DC\u05E4\u05D7\u05D5\u05EA"}`.trim(); - } - if (issue2.origin === "number") { - const comparison = issue2.inclusive ? `\u05D2\u05D3\u05D5\u05DC \u05D0\u05D5 \u05E9\u05D5\u05D5\u05D4 \u05DC-${issue2.minimum}` : `\u05D2\u05D3\u05D5\u05DC \u05DE-${issue2.minimum}`; - return `\u05E7\u05D8\u05DF \u05DE\u05D3\u05D9: ${subject} \u05E6\u05E8\u05D9\u05DA \u05DC\u05D4\u05D9\u05D5\u05EA ${comparison}`; - } - if (issue2.origin === "array" || issue2.origin === "set") { - const verb = issue2.origin === "set" ? "\u05E6\u05E8\u05D9\u05DB\u05D4" : "\u05E6\u05E8\u05D9\u05DA"; - if (issue2.minimum === 1 && issue2.inclusive) { - const singularPhrase = issue2.origin === "set" ? "\u05DC\u05E4\u05D7\u05D5\u05EA \u05E4\u05E8\u05D9\u05D8 \u05D0\u05D7\u05D3" : "\u05DC\u05E4\u05D7\u05D5\u05EA \u05E4\u05E8\u05D9\u05D8 \u05D0\u05D7\u05D3"; - return `\u05E7\u05D8\u05DF \u05DE\u05D3\u05D9: ${subject} ${verb} \u05DC\u05D4\u05DB\u05D9\u05DC ${singularPhrase}`; - } - const comparison = issue2.inclusive ? `${issue2.minimum} ${sizing?.unit ?? ""} \u05D0\u05D5 \u05D9\u05D5\u05EA\u05E8` : `\u05D9\u05D5\u05EA\u05E8 \u05DE-${issue2.minimum} ${sizing?.unit ?? ""}`; - return `\u05E7\u05D8\u05DF \u05DE\u05D3\u05D9: ${subject} ${verb} \u05DC\u05D4\u05DB\u05D9\u05DC ${comparison}`.trim(); - } - const adj = issue2.inclusive ? ">=" : ">"; - const be = verbFor(issue2.origin ?? "value"); - if (sizing?.unit) { - return `${sizing.shortLabel} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `${sizing?.shortLabel ?? 
"\u05E7\u05D8\u05DF"} \u05DE\u05D3\u05D9: ${subject} ${be} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05D4\u05EA\u05D7\u05D9\u05DC \u05D1 "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05D4\u05E1\u05EA\u05D9\u05D9\u05DD \u05D1 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05DB\u05DC\u05D5\u05DC "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u05D4\u05DE\u05D7\u05E8\u05D5\u05D6\u05EA \u05D7\u05D9\u05D9\u05D1\u05EA \u05DC\u05D4\u05EA\u05D0\u05D9\u05DD \u05DC\u05EA\u05D1\u05E0\u05D9\u05EA ${_issue.pattern}`; - const nounEntry = FormatDictionary[_issue.format]; - const noun = nounEntry?.label ?? _issue.format; - const gender = nounEntry?.gender ?? "m"; - const adjective = gender === "f" ? "\u05EA\u05E7\u05D9\u05E0\u05D4" : "\u05EA\u05E7\u05D9\u05DF"; - return `${noun} \u05DC\u05D0 ${adjective}`; - } - case "not_multiple_of": - return `\u05DE\u05E1\u05E4\u05E8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF: \u05D7\u05D9\u05D9\u05D1 \u05DC\u05D4\u05D9\u05D5\u05EA \u05DE\u05DB\u05E4\u05DC\u05D4 \u05E9\u05DC ${issue2.divisor}`; - case "unrecognized_keys": - return `\u05DE\u05E4\u05EA\u05D7${issue2.keys.length > 1 ? "\u05D5\u05EA" : ""} \u05DC\u05D0 \u05DE\u05D6\u05D5\u05D4${issue2.keys.length > 1 ? "\u05D9\u05DD" : "\u05D4"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": { - return `\u05E9\u05D3\u05D4 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF \u05D1\u05D0\u05D5\u05D1\u05D9\u05D9\u05E7\u05D8`; - } - case "invalid_union": - return "\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF"; - case "invalid_element": { - const place = withDefinite(issue2.origin ?? 
"array"); - return `\u05E2\u05E8\u05DA \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF \u05D1${place}`; - } - default: - return `\u05E7\u05DC\u05D8 \u05DC\u05D0 \u05EA\u05E7\u05D9\u05DF`; - } - }; -}; -function he_default() { - return { - localeError: error16() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/hu.js -var error17 = () => { - const Sizable = { - string: { unit: "karakter", verb: "legyen" }, - file: { unit: "byte", verb: "legyen" }, - array: { unit: "elem", verb: "legyen" }, - set: { unit: "elem", verb: "legyen" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "bemenet", - email: "email c\xEDm", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO id\u0151b\xE9lyeg", - date: "ISO d\xE1tum", - time: "ISO id\u0151", - duration: "ISO id\u0151intervallum", - ipv4: "IPv4 c\xEDm", - ipv6: "IPv6 c\xEDm", - cidrv4: "IPv4 tartom\xE1ny", - cidrv6: "IPv6 tartom\xE1ny", - base64: "base64-k\xF3dolt string", - base64url: "base64url-k\xF3dolt string", - json_string: "JSON string", - e164: "E.164 sz\xE1m", - jwt: "JWT", - template_literal: "bemenet" - }; - const TypeDictionary = { - nan: "NaN", - number: "sz\xE1m", - array: "t\xF6mb" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\xC9rv\xE9nytelen bemenet: a v\xE1rt \xE9rt\xE9k instanceof ${issue2.expected}, a kapott \xE9rt\xE9k ${received}`; - } - return `\xC9rv\xE9nytelen bemenet: a v\xE1rt \xE9rt\xE9k ${expected}, a kapott \xE9rt\xE9k ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\xC9rv\xE9nytelen bemenet: a v\xE1rt \xE9rt\xE9k ${stringifyPrimitive(issue2.values[0])}`; - return `\xC9rv\xE9nytelen opci\xF3: valamelyik \xE9rt\xE9k v\xE1rt ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `T\xFAl nagy: ${issue2.origin ?? "\xE9rt\xE9k"} m\xE9rete t\xFAl nagy ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elem"}`; - return `T\xFAl nagy: a bemeneti \xE9rt\xE9k ${issue2.origin ?? "\xE9rt\xE9k"} t\xFAl nagy: ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `T\xFAl kicsi: a bemeneti \xE9rt\xE9k ${issue2.origin} m\xE9rete t\xFAl kicsi ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `T\xFAl kicsi: a bemeneti \xE9rt\xE9k ${issue2.origin} t\xFAl kicsi ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\xC9rv\xE9nytelen string: "${_issue.prefix}" \xE9rt\xE9kkel kell kezd\u0151dnie`; - if (_issue.format === "ends_with") - return `\xC9rv\xE9nytelen string: "${_issue.suffix}" \xE9rt\xE9kkel kell v\xE9gz\u0151dnie`; - if (_issue.format === "includes") - return `\xC9rv\xE9nytelen string: "${_issue.includes}" \xE9rt\xE9ket kell tartalmaznia`; - if (_issue.format === "regex") - return `\xC9rv\xE9nytelen string: ${_issue.pattern} mint\xE1nak kell megfelelnie`; - return `\xC9rv\xE9nytelen ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\xC9rv\xE9nytelen sz\xE1m: ${issue2.divisor} t\xF6bbsz\xF6r\xF6s\xE9nek kell lennie`; - case "unrecognized_keys": - return `Ismeretlen kulcs${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\xC9rv\xE9nytelen kulcs ${issue2.origin}`; - case "invalid_union": - return "\xC9rv\xE9nytelen bemenet"; - case "invalid_element": - return `\xC9rv\xE9nytelen \xE9rt\xE9k: ${issue2.origin}`; - default: - return `\xC9rv\xE9nytelen bemenet`; - } - }; -}; -function hu_default() { - return { - localeError: error17() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/hy.js -function getArmenianPlural(count, one, many) { - return Math.abs(count) === 1 ? one : many; -} -function withDefiniteArticle(word) { - if (!word) - return ""; - const vowels = ["\u0561", "\u0565", "\u0568", "\u056B", "\u0578", "\u0578\u0582", "\u0585"]; - const lastChar = word[word.length - 1]; - return word + (vowels.includes(lastChar) ? "\u0576" : "\u0568"); -} -var error18 = () => { - const Sizable = { - string: { - unit: { - one: "\u0576\u0577\u0561\u0576", - many: "\u0576\u0577\u0561\u0576\u0576\u0565\u0580" - }, - verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" - }, - file: { - unit: { - one: "\u0562\u0561\u0575\u0569", - many: "\u0562\u0561\u0575\u0569\u0565\u0580" - }, - verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" - }, - array: { - unit: { - one: "\u057F\u0561\u0580\u0580", - many: "\u057F\u0561\u0580\u0580\u0565\u0580" - }, - verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" - }, - set: { - unit: { - one: "\u057F\u0561\u0580\u0580", - many: "\u057F\u0561\u0580\u0580\u0565\u0580" - }, - verb: "\u0578\u0582\u0576\u0565\u0576\u0561\u056C" - } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u0574\u0578\u0582\u057F\u0584", - email: "\u0567\u056C. 
\u0570\u0561\u057D\u0581\u0565", - url: "URL", - emoji: "\u0567\u0574\u0578\u057B\u056B", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u0561\u0574\u057D\u0561\u0569\u056B\u057E \u0587 \u056A\u0561\u0574", - date: "ISO \u0561\u0574\u057D\u0561\u0569\u056B\u057E", - time: "ISO \u056A\u0561\u0574", - duration: "ISO \u057F\u0587\u0578\u0572\u0578\u0582\u0569\u0575\u0578\u0582\u0576", - ipv4: "IPv4 \u0570\u0561\u057D\u0581\u0565", - ipv6: "IPv6 \u0570\u0561\u057D\u0581\u0565", - cidrv4: "IPv4 \u0574\u056B\u057B\u0561\u056F\u0561\u0575\u0584", - cidrv6: "IPv6 \u0574\u056B\u057B\u0561\u056F\u0561\u0575\u0584", - base64: "base64 \u0571\u0587\u0561\u0579\u0561\u0583\u0578\u057E \u057F\u0578\u0572", - base64url: "base64url \u0571\u0587\u0561\u0579\u0561\u0583\u0578\u057E \u057F\u0578\u0572", - json_string: "JSON \u057F\u0578\u0572", - e164: "E.164 \u0570\u0561\u0574\u0561\u0580", - jwt: "JWT", - template_literal: "\u0574\u0578\u0582\u057F\u0584" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0569\u056B\u057E", - array: "\u0566\u0561\u0576\u0563\u057E\u0561\u056E" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 instanceof ${issue2.expected}, \u057D\u057F\u0561\u0581\u057E\u0565\u056C \u0567 ${received}`; - } - return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 ${expected}, \u057D\u057F\u0561\u0581\u057E\u0565\u056C \u0567 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 ${stringifyPrimitive(issue2.values[1])}`; - return `\u054D\u056D\u0561\u056C \u057F\u0561\u0580\u0562\u0565\u0580\u0561\u056F\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567\u0580 \u0570\u0565\u057F\u0587\u0575\u0561\u056C\u0576\u0565\u0580\u056B\u0581 \u0574\u0565\u056F\u0568\u055D ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - const maxValue = Number(issue2.maximum); - const unit = getArmenianPlural(maxValue, sizing.unit.one, sizing.unit.many); - return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0574\u0565\u056E \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin ?? "\u0561\u0580\u056A\u0565\u0584")} \u056F\u0578\u0582\u0576\u0565\u0576\u0561 ${adj}${issue2.maximum.toString()} ${unit}`; - } - return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0574\u0565\u056E \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin ?? 
"\u0561\u0580\u056A\u0565\u0584")} \u056C\u056B\u0576\u056B ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - const minValue = Number(issue2.minimum); - const unit = getArmenianPlural(minValue, sizing.unit.one, sizing.unit.many); - return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0583\u0578\u0584\u0580 \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin)} \u056F\u0578\u0582\u0576\u0565\u0576\u0561 ${adj}${issue2.minimum.toString()} ${unit}`; - } - return `\u0549\u0561\u0583\u0561\u0566\u0561\u0576\u0581 \u0583\u0578\u0584\u0580 \u0561\u0580\u056A\u0565\u0584\u2024 \u057D\u057A\u0561\u057D\u057E\u0578\u0582\u0574 \u0567, \u0578\u0580 ${withDefiniteArticle(issue2.origin)} \u056C\u056B\u0576\u056B ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u057D\u056F\u057D\u057E\u056B "${_issue.prefix}"-\u0578\u057E`; - if (_issue.format === "ends_with") - return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u0561\u057E\u0561\u0580\u057F\u057E\u056B "${_issue.suffix}"-\u0578\u057E`; - if (_issue.format === "includes") - return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u057A\u0561\u0580\u0578\u0582\u0576\u0561\u056F\u056B "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u054D\u056D\u0561\u056C \u057F\u0578\u0572\u2024 \u057A\u0565\u057F\u0584 \u0567 \u0570\u0561\u0574\u0561\u057A\u0561\u057F\u0561\u057D\u056D\u0561\u0576\u056B ${_issue.pattern} \u0571\u0587\u0561\u0579\u0561\u0583\u056B\u0576`; - return `\u054D\u056D\u0561\u056C ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\u054D\u056D\u0561\u056C \u0569\u056B\u057E\u2024 \u057A\u0565\u057F\u0584 \u0567 \u0562\u0561\u0566\u0574\u0561\u057A\u0561\u057F\u056B\u056F \u056C\u056B\u0576\u056B ${issue2.divisor}-\u056B`; - case "unrecognized_keys": - return `\u0549\u0573\u0561\u0576\u0561\u0579\u057E\u0561\u056E \u0562\u0561\u0576\u0561\u056C\u056B${issue2.keys.length > 1 ? "\u0576\u0565\u0580" : ""}. ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u054D\u056D\u0561\u056C \u0562\u0561\u0576\u0561\u056C\u056B ${withDefiniteArticle(issue2.origin)}-\u0578\u0582\u0574`; - case "invalid_union": - return "\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574"; - case "invalid_element": - return `\u054D\u056D\u0561\u056C \u0561\u0580\u056A\u0565\u0584 ${withDefiniteArticle(issue2.origin)}-\u0578\u0582\u0574`; - default: - return `\u054D\u056D\u0561\u056C \u0574\u0578\u0582\u057F\u0584\u0561\u0563\u0580\u0578\u0582\u0574`; - } - }; -}; -function hy_default() { - return { - localeError: error18() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/id.js -var error19 = () => { - const Sizable = { - string: { unit: "karakter", verb: "memiliki" }, - file: { unit: "byte", verb: "memiliki" }, - array: { unit: "item", verb: "memiliki" }, - set: { unit: "item", verb: "memiliki" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "input", - email: "alamat email", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "tanggal dan waktu format ISO", - date: "tanggal format ISO", - time: "jam format ISO", - duration: "durasi format ISO", - ipv4: "alamat IPv4", - ipv6: "alamat IPv6", - cidrv4: "rentang alamat IPv4", - cidrv6: "rentang alamat IPv6", - base64: "string dengan enkode base64", - base64url: "string dengan enkode base64url", - json_string: "string JSON", - e164: "angka E.164", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Input tidak valid: diharapkan instanceof ${issue2.expected}, diterima ${received}`; - } - return `Input tidak valid: diharapkan ${expected}, diterima ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Input tidak valid: diharapkan ${stringifyPrimitive(issue2.values[0])}`; - return `Pilihan tidak valid: diharapkan salah satu dari ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Terlalu besar: diharapkan ${issue2.origin ?? "value"} memiliki ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elemen"}`; - return `Terlalu besar: diharapkan ${issue2.origin ?? "value"} menjadi ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Terlalu kecil: diharapkan ${issue2.origin} memiliki ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Terlalu kecil: diharapkan ${issue2.origin} menjadi ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `String tidak valid: harus dimulai dengan "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `String tidak valid: harus berakhir dengan "${_issue.suffix}"`; - if (_issue.format === "includes") - return `String tidak valid: harus menyertakan "${_issue.includes}"`; - if (_issue.format === "regex") - return `String tidak valid: harus sesuai pola ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? issue2.format} tidak valid`; - } - case "not_multiple_of": - return `Angka tidak valid: harus kelipatan dari ${issue2.divisor}`; - case "unrecognized_keys": - return `Kunci tidak dikenali ${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Kunci tidak valid di ${issue2.origin}`; - case "invalid_union": - return "Input tidak valid"; - case "invalid_element": - return `Nilai tidak valid di ${issue2.origin}`; - default: - return `Input tidak valid`; - } - }; -}; -function id_default() { - return { - localeError: error19() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/is.js -var error20 = () => { - const Sizable = { - string: { unit: "stafi", verb: "a\xF0 hafa" }, - file: { unit: "b\xE6ti", verb: "a\xF0 hafa" }, - array: { unit: "hluti", verb: "a\xF0 hafa" }, - set: { unit: "hluti", verb: "a\xF0 hafa" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "gildi", - email: "netfang", - url: "vefsl\xF3\xF0", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO dagsetning og t\xEDmi", - date: "ISO dagsetning", - time: "ISO t\xEDmi", - duration: "ISO t\xEDmalengd", - ipv4: "IPv4 address", - ipv6: "IPv6 address", - cidrv4: "IPv4 range", - cidrv6: "IPv6 range", - base64: "base64-encoded strengur", - base64url: "base64url-encoded strengur", - json_string: "JSON strengur", - e164: "E.164 t\xF6lugildi", - jwt: "JWT", - template_literal: "gildi" - }; - const TypeDictionary = { - nan: "NaN", - number: "n\xFAmer", - array: "fylki" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Rangt gildi: \xDE\xFA sl\xF3st inn ${received} \xFEar sem \xE1 a\xF0 vera instanceof ${issue2.expected}`; - } - return `Rangt gildi: \xDE\xFA sl\xF3st inn ${received} \xFEar sem \xE1 a\xF0 vera ${expected}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Rangt gildi: gert r\xE1\xF0 fyrir ${stringifyPrimitive(issue2.values[0])}`; - return `\xD3gilt val: m\xE1 vera eitt af eftirfarandi ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Of st\xF3rt: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin ?? "gildi"} hafi ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "hluti"}`; - return `Of st\xF3rt: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin ?? "gildi"} s\xE9 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Of l\xEDti\xF0: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin} hafi ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Of l\xEDti\xF0: gert er r\xE1\xF0 fyrir a\xF0 ${issue2.origin} s\xE9 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\xD3gildur strengur: ver\xF0ur a\xF0 byrja \xE1 "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `\xD3gildur strengur: ver\xF0ur a\xF0 enda \xE1 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\xD3gildur strengur: ver\xF0ur a\xF0 innihalda "${_issue.includes}"`; - if (_issue.format === "regex") - return `\xD3gildur strengur: ver\xF0ur a\xF0 fylgja mynstri ${_issue.pattern}`; - return `Rangt ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `R\xF6ng tala: ver\xF0ur a\xF0 vera margfeldi af ${issue2.divisor}`; - case "unrecognized_keys": - return `\xD3\xFEekkt ${issue2.keys.length > 1 ? "ir lyklar" : "ur lykill"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Rangur lykill \xED ${issue2.origin}`; - case "invalid_union": - return "Rangt gildi"; - case "invalid_element": - return `Rangt gildi \xED ${issue2.origin}`; - default: - return `Rangt gildi`; - } - }; -}; -function is_default() { - return { - localeError: error20() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/it.js -var error21 = () => { - const Sizable = { - string: { unit: "caratteri", verb: "avere" }, - file: { unit: "byte", verb: "avere" }, - array: { unit: "elementi", verb: "avere" }, - set: { unit: "elementi", verb: "avere" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "input", - email: "indirizzo email", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "data e ora ISO", - date: "data ISO", - time: "ora ISO", - duration: "durata ISO", - ipv4: "indirizzo IPv4", - ipv6: "indirizzo IPv6", - cidrv4: "intervallo IPv4", - cidrv6: "intervallo IPv6", - base64: "stringa codificata in base64", - base64url: "URL codificata in base64", - json_string: "stringa JSON", - e164: "numero E.164", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN", - number: "numero", - array: "vettore" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Input non valido: atteso instanceof ${issue2.expected}, ricevuto ${received}`; - } - return `Input non valido: atteso ${expected}, ricevuto ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Input non valido: atteso ${stringifyPrimitive(issue2.values[0])}`; - return `Opzione non valida: atteso uno tra ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Troppo grande: ${issue2.origin ?? "valore"} deve avere ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementi"}`; - return `Troppo grande: ${issue2.origin ?? "valore"} deve essere ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Troppo piccolo: ${issue2.origin} deve avere ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Troppo piccolo: ${issue2.origin} deve essere ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Stringa non valida: deve iniziare con "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Stringa non valida: deve terminare con "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Stringa non valida: deve includere "${_issue.includes}"`; - if (_issue.format === "regex") - return `Stringa non valida: deve corrispondere al pattern ${_issue.pattern}`; - return `Invalid ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Numero non valido: deve essere un multiplo di ${issue2.divisor}`; - case "unrecognized_keys": - return `Chiav${issue2.keys.length > 1 ? "i" : "e"} non riconosciut${issue2.keys.length > 1 ? "e" : "a"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Chiave non valida in ${issue2.origin}`; - case "invalid_union": - return "Input non valido"; - case "invalid_element": - return `Valore non valido in ${issue2.origin}`; - default: - return `Input non valido`; - } - }; -}; -function it_default() { - return { - localeError: error21() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ja.js -var error22 = () => { - const Sizable = { - string: { unit: "\u6587\u5B57", verb: "\u3067\u3042\u308B" }, - file: { unit: "\u30D0\u30A4\u30C8", verb: "\u3067\u3042\u308B" }, - array: { unit: "\u8981\u7D20", verb: "\u3067\u3042\u308B" }, - set: { unit: "\u8981\u7D20", verb: "\u3067\u3042\u308B" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u5165\u529B\u5024", - email: "\u30E1\u30FC\u30EB\u30A2\u30C9\u30EC\u30B9", - url: "URL", - emoji: "\u7D75\u6587\u5B57", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO\u65E5\u6642", - date: "ISO\u65E5\u4ED8", - time: "ISO\u6642\u523B", - duration: "ISO\u671F\u9593", - ipv4: "IPv4\u30A2\u30C9\u30EC\u30B9", - ipv6: "IPv6\u30A2\u30C9\u30EC\u30B9", - cidrv4: "IPv4\u7BC4\u56F2", - cidrv6: "IPv6\u7BC4\u56F2", - base64: "base64\u30A8\u30F3\u30B3\u30FC\u30C9\u6587\u5B57\u5217", - base64url: "base64url\u30A8\u30F3\u30B3\u30FC\u30C9\u6587\u5B57\u5217", - json_string: "JSON\u6587\u5B57\u5217", - e164: "E.164\u756A\u53F7", - jwt: "JWT", - template_literal: "\u5165\u529B\u5024" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u6570\u5024", - array: "\u914D\u5217" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u7121\u52B9\u306A\u5165\u529B: instanceof ${issue2.expected}\u304C\u671F\u5F85\u3055\u308C\u307E\u3057\u305F\u304C\u3001${received}\u304C\u5165\u529B\u3055\u308C\u307E\u3057\u305F`; - } - return `\u7121\u52B9\u306A\u5165\u529B: ${expected}\u304C\u671F\u5F85\u3055\u308C\u307E\u3057\u305F\u304C\u3001${received}\u304C\u5165\u529B\u3055\u308C\u307E\u3057\u305F`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u7121\u52B9\u306A\u5165\u529B: ${stringifyPrimitive(issue2.values[0])}\u304C\u671F\u5F85\u3055\u308C\u307E\u3057\u305F`; - return `\u7121\u52B9\u306A\u9078\u629E: ${joinValues(issue2.values, "\u3001")}\u306E\u3044\u305A\u308C\u304B\u3067\u3042\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - case "too_big": { - const adj = issue2.inclusive ? "\u4EE5\u4E0B\u3067\u3042\u308B" : "\u3088\u308A\u5C0F\u3055\u3044"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u5927\u304D\u3059\u304E\u308B\u5024: ${issue2.origin ?? "\u5024"}\u306F${issue2.maximum.toString()}${sizing.unit ?? "\u8981\u7D20"}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - return `\u5927\u304D\u3059\u304E\u308B\u5024: ${issue2.origin ?? "\u5024"}\u306F${issue2.maximum.toString()}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - } - case "too_small": { - const adj = issue2.inclusive ? 
"\u4EE5\u4E0A\u3067\u3042\u308B" : "\u3088\u308A\u5927\u304D\u3044"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u5C0F\u3055\u3059\u304E\u308B\u5024: ${issue2.origin}\u306F${issue2.minimum.toString()}${sizing.unit}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - return `\u5C0F\u3055\u3059\u304E\u308B\u5024: ${issue2.origin}\u306F${issue2.minimum.toString()}${adj}\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u7121\u52B9\u306A\u6587\u5B57\u5217: "${_issue.prefix}"\u3067\u59CB\u307E\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - if (_issue.format === "ends_with") - return `\u7121\u52B9\u306A\u6587\u5B57\u5217: "${_issue.suffix}"\u3067\u7D42\u308F\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - if (_issue.format === "includes") - return `\u7121\u52B9\u306A\u6587\u5B57\u5217: "${_issue.includes}"\u3092\u542B\u3080\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - if (_issue.format === "regex") - return `\u7121\u52B9\u306A\u6587\u5B57\u5217: \u30D1\u30BF\u30FC\u30F3${_issue.pattern}\u306B\u4E00\u81F4\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - return `\u7121\u52B9\u306A${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u7121\u52B9\u306A\u6570\u5024: ${issue2.divisor}\u306E\u500D\u6570\u3067\u3042\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059`; - case "unrecognized_keys": - return `\u8A8D\u8B58\u3055\u308C\u3066\u3044\u306A\u3044\u30AD\u30FC${issue2.keys.length > 1 ? 
"\u7FA4" : ""}: ${joinValues(issue2.keys, "\u3001")}`; - case "invalid_key": - return `${issue2.origin}\u5185\u306E\u7121\u52B9\u306A\u30AD\u30FC`; - case "invalid_union": - return "\u7121\u52B9\u306A\u5165\u529B"; - case "invalid_element": - return `${issue2.origin}\u5185\u306E\u7121\u52B9\u306A\u5024`; - default: - return `\u7121\u52B9\u306A\u5165\u529B`; - } - }; -}; -function ja_default() { - return { - localeError: error22() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ka.js -var error23 = () => { - const Sizable = { - string: { unit: "\u10E1\u10D8\u10DB\u10D1\u10DD\u10DA\u10DD", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" }, - file: { unit: "\u10D1\u10D0\u10D8\u10E2\u10D8", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" }, - array: { unit: "\u10D4\u10DA\u10D4\u10DB\u10D4\u10DC\u10E2\u10D8", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" }, - set: { unit: "\u10D4\u10DA\u10D4\u10DB\u10D4\u10DC\u10E2\u10D8", verb: "\u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0", - email: "\u10D4\u10DA-\u10E4\u10DD\u10E1\u10E2\u10D8\u10E1 \u10DB\u10D8\u10E1\u10D0\u10DB\u10D0\u10E0\u10D7\u10D8", - url: "URL", - emoji: "\u10D4\u10DB\u10DD\u10EF\u10D8", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u10D7\u10D0\u10E0\u10D8\u10E6\u10D8-\u10D3\u10E0\u10DD", - date: "\u10D7\u10D0\u10E0\u10D8\u10E6\u10D8", - time: "\u10D3\u10E0\u10DD", - duration: "\u10EE\u10D0\u10DC\u10D2\u10E0\u10EB\u10DA\u10D8\u10D5\u10DD\u10D1\u10D0", - ipv4: "IPv4 \u10DB\u10D8\u10E1\u10D0\u10DB\u10D0\u10E0\u10D7\u10D8", - ipv6: "IPv6 \u10DB\u10D8\u10E1\u10D0\u10DB\u10D0\u10E0\u10D7\u10D8", - cidrv4: "IPv4 \u10D3\u10D8\u10D0\u10DE\u10D0\u10D6\u10DD\u10DC\u10D8", - cidrv6: "IPv6 \u10D3\u10D8\u10D0\u10DE\u10D0\u10D6\u10DD\u10DC\u10D8", - base64: "base64-\u10D9\u10DD\u10D3\u10D8\u10E0\u10D4\u10D1\u10E3\u10DA\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", - base64url: "base64url-\u10D9\u10DD\u10D3\u10D8\u10E0\u10D4\u10D1\u10E3\u10DA\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", - json_string: "JSON \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", - e164: "E.164 \u10DC\u10DD\u10DB\u10D4\u10E0\u10D8", - jwt: "JWT", - template_literal: "\u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u10E0\u10D8\u10EA\u10EE\u10D5\u10D8", - string: "\u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8", - boolean: "\u10D1\u10E3\u10DA\u10D4\u10D0\u10DC\u10D8", - function: "\u10E4\u10E3\u10DC\u10E5\u10EA\u10D8\u10D0", - array: "\u10DB\u10D0\u10E1\u10D8\u10D5\u10D8" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 instanceof ${issue2.expected}, \u10DB\u10D8\u10E6\u10D4\u10D1\u10E3\u10DA\u10D8 ${received}`; - } - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${expected}, \u10DB\u10D8\u10E6\u10D4\u10D1\u10E3\u10DA\u10D8 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${stringifyPrimitive(issue2.values[0])}`; - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10D5\u10D0\u10E0\u10D8\u10D0\u10DC\u10E2\u10D8: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8\u10D0 \u10D4\u10E0\u10D7-\u10D4\u10E0\u10D7\u10D8 ${joinValues(issue2.values, "|")}-\u10D3\u10D0\u10DC`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10D3\u10D8\u10D3\u10D8: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin ?? "\u10DB\u10DC\u10D8\u10E8\u10D5\u10DC\u10D4\u10DA\u10DD\u10D1\u10D0"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit}`; - return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10D3\u10D8\u10D3\u10D8: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin ?? "\u10DB\u10DC\u10D8\u10E8\u10D5\u10DC\u10D4\u10DA\u10DD\u10D1\u10D0"} \u10D8\u10E7\u10DD\u10E1 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10DE\u10D0\u10E2\u10D0\u10E0\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u10D6\u10D4\u10D3\u10DB\u10D4\u10E2\u10D0\u10D3 \u10DE\u10D0\u10E2\u10D0\u10E0\u10D0: \u10DB\u10DD\u10E1\u10D0\u10DA\u10DD\u10D3\u10DC\u10D4\u10DA\u10D8 ${issue2.origin} \u10D8\u10E7\u10DD\u10E1 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10D8\u10EC\u10E7\u10D4\u10D1\u10DD\u10D3\u10D4\u10E1 "${_issue.prefix}"-\u10D8\u10D7`; - } - if (_issue.format === "ends_with") - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10DB\u10D7\u10D0\u10D5\u10E0\u10D3\u10D4\u10D1\u10DD\u10D3\u10D4\u10E1 "${_issue.suffix}"-\u10D8\u10D7`; - if (_issue.format === "includes") - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D8\u10EA\u10D0\u10D5\u10D3\u10D4\u10E1 "${_issue.includes}"-\u10E1`; - if (_issue.format === "regex") - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E1\u10E2\u10E0\u10D8\u10DC\u10D2\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10E8\u10D4\u10D4\u10E1\u10D0\u10D1\u10D0\u10DB\u10D4\u10D1\u10DD\u10D3\u10D4\u10E1 \u10E8\u10D0\u10D1\u10DA\u10DD\u10DC\u10E1 ${_issue.pattern}`; - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E0\u10D8\u10EA\u10EE\u10D5\u10D8: \u10E3\u10DC\u10D3\u10D0 \u10D8\u10E7\u10DD\u10E1 ${issue2.divisor}-\u10D8\u10E1 \u10EF\u10D4\u10E0\u10D0\u10D3\u10D8`; - case "unrecognized_keys": - return `\u10E3\u10EA\u10DC\u10DD\u10D1\u10D8 \u10D2\u10D0\u10E1\u10D0\u10E6\u10D4\u10D1${issue2.keys.length > 1 ? "\u10D4\u10D1\u10D8" : "\u10D8"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10D2\u10D0\u10E1\u10D0\u10E6\u10D4\u10D1\u10D8 ${issue2.origin}-\u10E8\u10D8`; - case "invalid_union": - return "\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0"; - case "invalid_element": - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10DB\u10DC\u10D8\u10E8\u10D5\u10DC\u10D4\u10DA\u10DD\u10D1\u10D0 ${issue2.origin}-\u10E8\u10D8`; - default: - return `\u10D0\u10E0\u10D0\u10E1\u10EC\u10DD\u10E0\u10D8 \u10E8\u10D4\u10E7\u10D5\u10D0\u10DC\u10D0`; - } - }; -}; -function ka_default() { - return { - localeError: error23() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/km.js -var error24 = () => { - const Sizable = { - string: { unit: "\u178F\u17BD\u17A2\u1780\u17D2\u179F\u179A", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" }, - file: { unit: "\u1794\u17C3", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" }, - array: { unit: "\u1792\u17B6\u178F\u17BB", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" }, - set: { unit: "\u1792\u17B6\u178F\u17BB", verb: "\u1782\u17BD\u179A\u1798\u17B6\u1793" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B", - email: "\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793\u17A2\u17CA\u17B8\u1798\u17C2\u179B", - url: "URL", - emoji: "\u179F\u1789\u17D2\u1789\u17B6\u17A2\u17B6\u179A\u1798\u17D2\u1798\u178E\u17CD", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u1780\u17B6\u179B\u1794\u179A\u17B7\u1785\u17D2\u1786\u17C1\u1791 \u1793\u17B7\u1784\u1798\u17C9\u17C4\u1784 ISO", - date: "\u1780\u17B6\u179B\u1794\u179A\u17B7\u1785\u17D2\u1786\u17C1\u1791 ISO", - time: "\u1798\u17C9\u17C4\u1784 ISO", - duration: "\u179A\u1799\u17C8\u1796\u17C1\u179B ISO", - ipv4: "\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv4", - ipv6: "\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv6", - cidrv4: "\u178A\u17C2\u1793\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv4", - cidrv6: "\u178A\u17C2\u1793\u17A2\u17B6\u179F\u1799\u178A\u17D2\u178B\u17B6\u1793 IPv6", - base64: "\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u17A2\u17CA\u17B7\u1780\u17BC\u178A base64", - base64url: "\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u17A2\u17CA\u17B7\u1780\u17BC\u178A base64url", - json_string: "\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A JSON", - e164: "\u179B\u17C1\u1781 E.164", - jwt: "JWT", - template_literal: "\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u179B\u17C1\u1781", - array: "\u17A2\u17B6\u179A\u17C1 (Array)", - null: "\u1782\u17D2\u1798\u17B6\u1793\u178F\u1798\u17D2\u179B\u17C3 (null)" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? 
issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A instanceof ${issue2.expected} \u1794\u17C9\u17BB\u1793\u17D2\u178F\u17C2\u1791\u1791\u17BD\u179B\u1794\u17B6\u1793 ${received}`; - } - return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${expected} \u1794\u17C9\u17BB\u1793\u17D2\u178F\u17C2\u1791\u1791\u17BD\u179B\u1794\u17B6\u1793 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1794\u1789\u17D2\u1785\u17BC\u179B\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${stringifyPrimitive(issue2.values[0])}`; - return `\u1787\u1798\u17D2\u179A\u17BE\u179F\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1787\u17B6\u1798\u17BD\u1799\u1780\u17D2\u1793\u17BB\u1784\u1785\u17C6\u178E\u17C4\u1798 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u1792\u17C6\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin ?? "\u178F\u1798\u17D2\u179B\u17C3"} ${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "\u1792\u17B6\u178F\u17BB"}`; - return `\u1792\u17C6\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin ?? 
"\u178F\u1798\u17D2\u179B\u17C3"} ${adj} ${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u178F\u17BC\u1785\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin} ${adj} ${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u178F\u17BC\u1785\u1796\u17C1\u1780\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1780\u17B6\u179A ${issue2.origin} ${adj} ${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1785\u17B6\u1794\u17CB\u1795\u17D2\u178F\u17BE\u1798\u178A\u17C4\u1799 "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1794\u1789\u17D2\u1785\u1794\u17CB\u178A\u17C4\u1799 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u1798\u17B6\u1793 "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u1781\u17D2\u179F\u17C2\u17A2\u1780\u17D2\u179F\u179A\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u178F\u17C2\u1795\u17D2\u1782\u17BC\u1795\u17D2\u1782\u1784\u1793\u17B9\u1784\u1791\u1798\u17D2\u179A\u1784\u17CB\u178A\u17C2\u179B\u1794\u17B6\u1793\u1780\u17C6\u178E\u178F\u17CB ${_issue.pattern}`; - return `\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 ${FormatDictionary[_issue.format] 
?? issue2.format}`; - } - case "not_multiple_of": - return `\u179B\u17C1\u1781\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u17D6 \u178F\u17D2\u179A\u17BC\u179C\u178F\u17C2\u1787\u17B6\u1796\u17A0\u17BB\u1782\u17BB\u178E\u1793\u17C3 ${issue2.divisor}`; - case "unrecognized_keys": - return `\u179A\u1780\u1783\u17BE\u1789\u179F\u17C4\u1798\u17B7\u1793\u179F\u17D2\u1782\u17B6\u179B\u17CB\u17D6 ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u179F\u17C4\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u1793\u17C5\u1780\u17D2\u1793\u17BB\u1784 ${issue2.origin}`; - case "invalid_union": - return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C`; - case "invalid_element": - return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C\u1793\u17C5\u1780\u17D2\u1793\u17BB\u1784 ${issue2.origin}`; - default: - return `\u1791\u17B7\u1793\u17D2\u1793\u1793\u17D0\u1799\u1798\u17B7\u1793\u178F\u17D2\u179A\u17B9\u1798\u178F\u17D2\u179A\u17BC\u179C`; - } - }; -}; -function km_default() { - return { - localeError: error24() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/kh.js -function kh_default() { - return km_default(); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ko.js -var error25 = () => { - const Sizable = { - string: { unit: "\uBB38\uC790", verb: "to have" }, - file: { unit: "\uBC14\uC774\uD2B8", verb: "to have" }, - array: { unit: "\uAC1C", verb: "to have" }, - set: { unit: "\uAC1C", verb: "to have" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\uC785\uB825", - email: "\uC774\uBA54\uC77C \uC8FC\uC18C", - url: "URL", - emoji: "\uC774\uBAA8\uC9C0", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \uB0A0\uC9DC\uC2DC\uAC04", - date: "ISO \uB0A0\uC9DC", - time: "ISO \uC2DC\uAC04", - duration: "ISO \uAE30\uAC04", - ipv4: "IPv4 \uC8FC\uC18C", - ipv6: "IPv6 \uC8FC\uC18C", - cidrv4: "IPv4 \uBC94\uC704", - cidrv6: "IPv6 \uBC94\uC704", - base64: "base64 \uC778\uCF54\uB529 \uBB38\uC790\uC5F4", - base64url: "base64url \uC778\uCF54\uB529 \uBB38\uC790\uC5F4", - json_string: "JSON \uBB38\uC790\uC5F4", - e164: "E.164 \uBC88\uD638", - jwt: "JWT", - template_literal: "\uC785\uB825" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\uC798\uBABB\uB41C \uC785\uB825: \uC608\uC0C1 \uD0C0\uC785\uC740 instanceof ${issue2.expected}, \uBC1B\uC740 \uD0C0\uC785\uC740 ${received}\uC785\uB2C8\uB2E4`; - } - return `\uC798\uBABB\uB41C \uC785\uB825: \uC608\uC0C1 \uD0C0\uC785\uC740 ${expected}, \uBC1B\uC740 \uD0C0\uC785\uC740 ${received}\uC785\uB2C8\uB2E4`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\uC798\uBABB\uB41C \uC785\uB825: \uAC12\uC740 ${stringifyPrimitive(issue2.values[0])} \uC774\uC5B4\uC57C \uD569\uB2C8\uB2E4`; - return `\uC798\uBABB\uB41C \uC635\uC158: ${joinValues(issue2.values, "\uB610\uB294 ")} \uC911 \uD558\uB098\uC5EC\uC57C \uD569\uB2C8\uB2E4`; - case "too_big": { - const adj = issue2.inclusive ? "\uC774\uD558" : "\uBBF8\uB9CC"; - const suffix = adj === "\uBBF8\uB9CC" ? 
"\uC774\uC5B4\uC57C \uD569\uB2C8\uB2E4" : "\uC5EC\uC57C \uD569\uB2C8\uB2E4"; - const sizing = getSizing(issue2.origin); - const unit = sizing?.unit ?? "\uC694\uC18C"; - if (sizing) - return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uD07D\uB2C8\uB2E4: ${issue2.maximum.toString()}${unit} ${adj}${suffix}`; - return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uD07D\uB2C8\uB2E4: ${issue2.maximum.toString()} ${adj}${suffix}`; - } - case "too_small": { - const adj = issue2.inclusive ? "\uC774\uC0C1" : "\uCD08\uACFC"; - const suffix = adj === "\uC774\uC0C1" ? "\uC774\uC5B4\uC57C \uD569\uB2C8\uB2E4" : "\uC5EC\uC57C \uD569\uB2C8\uB2E4"; - const sizing = getSizing(issue2.origin); - const unit = sizing?.unit ?? "\uC694\uC18C"; - if (sizing) { - return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uC791\uC2B5\uB2C8\uB2E4: ${issue2.minimum.toString()}${unit} ${adj}${suffix}`; - } - return `${issue2.origin ?? "\uAC12"}\uC774 \uB108\uBB34 \uC791\uC2B5\uB2C8\uB2E4: ${issue2.minimum.toString()} ${adj}${suffix}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: "${_issue.prefix}"(\uC73C)\uB85C \uC2DC\uC791\uD574\uC57C \uD569\uB2C8\uB2E4`; - } - if (_issue.format === "ends_with") - return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: "${_issue.suffix}"(\uC73C)\uB85C \uB05D\uB098\uC57C \uD569\uB2C8\uB2E4`; - if (_issue.format === "includes") - return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: "${_issue.includes}"\uC744(\uB97C) \uD3EC\uD568\uD574\uC57C \uD569\uB2C8\uB2E4`; - if (_issue.format === "regex") - return `\uC798\uBABB\uB41C \uBB38\uC790\uC5F4: \uC815\uADDC\uC2DD ${_issue.pattern} \uD328\uD134\uACFC \uC77C\uCE58\uD574\uC57C \uD569\uB2C8\uB2E4`; - return `\uC798\uBABB\uB41C ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\uC798\uBABB\uB41C \uC22B\uC790: ${issue2.divisor}\uC758 \uBC30\uC218\uC5EC\uC57C \uD569\uB2C8\uB2E4`; - case "unrecognized_keys": - return `\uC778\uC2DD\uD560 \uC218 \uC5C6\uB294 \uD0A4: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\uC798\uBABB\uB41C \uD0A4: ${issue2.origin}`; - case "invalid_union": - return `\uC798\uBABB\uB41C \uC785\uB825`; - case "invalid_element": - return `\uC798\uBABB\uB41C \uAC12: ${issue2.origin}`; - default: - return `\uC798\uBABB\uB41C \uC785\uB825`; - } - }; -}; -function ko_default() { - return { - localeError: error25() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/lt.js -var capitalizeFirstCharacter = (text) => { - return text.charAt(0).toUpperCase() + text.slice(1); -}; -function getUnitTypeFromNumber(number4) { - const abs = Math.abs(number4); - const last = abs % 10; - const last2 = abs % 100; - if (last2 >= 11 && last2 <= 19 || last === 0) - return "many"; - if (last === 1) - return "one"; - return "few"; -} -var error26 = () => { - const Sizable = { - string: { - unit: { - one: "simbolis", - few: "simboliai", - many: "simboli\u0173" - }, - verb: { - smaller: { - inclusive: "turi b\u016Bti ne ilgesn\u0117 kaip", - notInclusive: "turi b\u016Bti trumpesn\u0117 kaip" - }, - bigger: { - inclusive: "turi b\u016Bti ne trumpesn\u0117 kaip", - notInclusive: "turi b\u016Bti ilgesn\u0117 kaip" - } - } - }, - file: { - unit: { - one: "baitas", - few: "baitai", - many: "bait\u0173" - }, - verb: { - smaller: { - inclusive: "turi b\u016Bti ne didesnis kaip", - notInclusive: "turi b\u016Bti ma\u017Eesnis kaip" - }, - bigger: { - inclusive: "turi b\u016Bti ne ma\u017Eesnis kaip", - notInclusive: "turi b\u016Bti didesnis kaip" - } - } - }, - array: { - unit: { - one: "element\u0105", - few: "elementus", - many: "element\u0173" - }, - verb: { - smaller: { - inclusive: "turi tur\u0117ti ne daugiau kaip", - notInclusive: "turi tur\u0117ti 
ma\u017Eiau kaip" - }, - bigger: { - inclusive: "turi tur\u0117ti ne ma\u017Eiau kaip", - notInclusive: "turi tur\u0117ti daugiau kaip" - } - } - }, - set: { - unit: { - one: "element\u0105", - few: "elementus", - many: "element\u0173" - }, - verb: { - smaller: { - inclusive: "turi tur\u0117ti ne daugiau kaip", - notInclusive: "turi tur\u0117ti ma\u017Eiau kaip" - }, - bigger: { - inclusive: "turi tur\u0117ti ne ma\u017Eiau kaip", - notInclusive: "turi tur\u0117ti daugiau kaip" - } - } - } - }; - function getSizing(origin, unitType, inclusive, targetShouldBe) { - const result = Sizable[origin] ?? null; - if (result === null) - return result; - return { - unit: result.unit[unitType], - verb: result.verb[targetShouldBe][inclusive ? "inclusive" : "notInclusive"] - }; - } - const FormatDictionary = { - regex: "\u012Fvestis", - email: "el. pa\u0161to adresas", - url: "URL", - emoji: "jaustukas", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO data ir laikas", - date: "ISO data", - time: "ISO laikas", - duration: "ISO trukm\u0117", - ipv4: "IPv4 adresas", - ipv6: "IPv6 adresas", - cidrv4: "IPv4 tinklo prefiksas (CIDR)", - cidrv6: "IPv6 tinklo prefiksas (CIDR)", - base64: "base64 u\u017Ekoduota eilut\u0117", - base64url: "base64url u\u017Ekoduota eilut\u0117", - json_string: "JSON eilut\u0117", - e164: "E.164 numeris", - jwt: "JWT", - template_literal: "\u012Fvestis" - }; - const TypeDictionary = { - nan: "NaN", - number: "skai\u010Dius", - bigint: "sveikasis skai\u010Dius", - string: "eilut\u0117", - boolean: "login\u0117 reik\u0161m\u0117", - undefined: "neapibr\u0117\u017Eta reik\u0161m\u0117", - function: "funkcija", - symbol: "simbolis", - array: "masyvas", - object: "objektas", - null: "nulin\u0117 reik\u0161m\u0117" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = 
TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Gautas tipas ${received}, o tik\u0117tasi - instanceof ${issue2.expected}`; - } - return `Gautas tipas ${received}, o tik\u0117tasi - ${expected}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Privalo b\u016Bti ${stringifyPrimitive(issue2.values[0])}`; - return `Privalo b\u016Bti vienas i\u0161 ${joinValues(issue2.values, "|")} pasirinkim\u0173`; - case "too_big": { - const origin = TypeDictionary[issue2.origin] ?? issue2.origin; - const sizing = getSizing(issue2.origin, getUnitTypeFromNumber(Number(issue2.maximum)), issue2.inclusive ?? false, "smaller"); - if (sizing?.verb) - return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} ${sizing.verb} ${issue2.maximum.toString()} ${sizing.unit ?? "element\u0173"}`; - const adj = issue2.inclusive ? "ne didesnis kaip" : "ma\u017Eesnis kaip"; - return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} turi b\u016Bti ${adj} ${issue2.maximum.toString()} ${sizing?.unit}`; - } - case "too_small": { - const origin = TypeDictionary[issue2.origin] ?? issue2.origin; - const sizing = getSizing(issue2.origin, getUnitTypeFromNumber(Number(issue2.minimum)), issue2.inclusive ?? false, "bigger"); - if (sizing?.verb) - return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} ${sizing.verb} ${issue2.minimum.toString()} ${sizing.unit ?? "element\u0173"}`; - const adj = issue2.inclusive ? "ne ma\u017Eesnis kaip" : "didesnis kaip"; - return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? 
"reik\u0161m\u0117")} turi b\u016Bti ${adj} ${issue2.minimum.toString()} ${sizing?.unit}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Eilut\u0117 privalo prasid\u0117ti "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `Eilut\u0117 privalo pasibaigti "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Eilut\u0117 privalo \u012Ftraukti "${_issue.includes}"`; - if (_issue.format === "regex") - return `Eilut\u0117 privalo atitikti ${_issue.pattern}`; - return `Neteisingas ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Skai\u010Dius privalo b\u016Bti ${issue2.divisor} kartotinis.`; - case "unrecognized_keys": - return `Neatpa\u017Eint${issue2.keys.length > 1 ? "i" : "as"} rakt${issue2.keys.length > 1 ? "ai" : "as"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return "Rastas klaidingas raktas"; - case "invalid_union": - return "Klaidinga \u012Fvestis"; - case "invalid_element": { - const origin = TypeDictionary[issue2.origin] ?? issue2.origin; - return `${capitalizeFirstCharacter(origin ?? issue2.origin ?? "reik\u0161m\u0117")} turi klaiding\u0105 \u012Fvest\u012F`; - } - default: - return "Klaidinga \u012Fvestis"; - } - }; -}; -function lt_default() { - return { - localeError: error26() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/mk.js -var error27 = () => { - const Sizable = { - string: { unit: "\u0437\u043D\u0430\u0446\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" }, - file: { unit: "\u0431\u0430\u0458\u0442\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" }, - array: { unit: "\u0441\u0442\u0430\u0432\u043A\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" }, - set: { unit: "\u0441\u0442\u0430\u0432\u043A\u0438", verb: "\u0434\u0430 \u0438\u043C\u0430\u0430\u0442" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0432\u043D\u0435\u0441", - email: "\u0430\u0434\u0440\u0435\u0441\u0430 \u043D\u0430 \u0435-\u043F\u043E\u0448\u0442\u0430", - url: "URL", - emoji: "\u0435\u043C\u043E\u045F\u0438", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u0434\u0430\u0442\u0443\u043C \u0438 \u0432\u0440\u0435\u043C\u0435", - date: "ISO \u0434\u0430\u0442\u0443\u043C", - time: "ISO \u0432\u0440\u0435\u043C\u0435", - duration: "ISO \u0432\u0440\u0435\u043C\u0435\u0442\u0440\u0430\u0435\u045A\u0435", - ipv4: "IPv4 \u0430\u0434\u0440\u0435\u0441\u0430", - ipv6: "IPv6 \u0430\u0434\u0440\u0435\u0441\u0430", - cidrv4: "IPv4 \u043E\u043F\u0441\u0435\u0433", - cidrv6: "IPv6 \u043E\u043F\u0441\u0435\u0433", - base64: "base64-\u0435\u043D\u043A\u043E\u0434\u0438\u0440\u0430\u043D\u0430 \u043D\u0438\u0437\u0430", - base64url: "base64url-\u0435\u043D\u043A\u043E\u0434\u0438\u0440\u0430\u043D\u0430 \u043D\u0438\u0437\u0430", - json_string: "JSON \u043D\u0438\u0437\u0430", - e164: "E.164 \u0431\u0440\u043E\u0458", - jwt: "JWT", - template_literal: "\u0432\u043D\u0435\u0441" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0431\u0440\u043E\u0458", - array: "\u043D\u0438\u0437\u0430" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 instanceof ${issue2.expected}, \u043F\u0440\u0438\u043C\u0435\u043D\u043E ${received}`; - } - return `\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${expected}, \u043F\u0440\u0438\u043C\u0435\u043D\u043E ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Invalid input: expected ${stringifyPrimitive(issue2.values[0])}`; - return `\u0413\u0440\u0435\u0448\u0430\u043D\u0430 \u043E\u043F\u0446\u0438\u0458\u0430: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 \u0435\u0434\u043D\u0430 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u0433\u043E\u043B\u0435\u043C: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin ?? "\u0432\u0440\u0435\u0434\u043D\u043E\u0441\u0442\u0430"} \u0434\u0430 \u0438\u043C\u0430 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0438"}`; - return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u0433\u043E\u043B\u0435\u043C: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin ?? "\u0432\u0440\u0435\u0434\u043D\u043E\u0441\u0442\u0430"} \u0434\u0430 \u0431\u0438\u0434\u0435 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u043C\u0430\u043B: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin} \u0434\u0430 \u0438\u043C\u0430 ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u041F\u0440\u0435\u043C\u043D\u043E\u0433\u0443 \u043C\u0430\u043B: \u0441\u0435 \u043E\u0447\u0435\u043A\u0443\u0432\u0430 ${issue2.origin} \u0434\u0430 \u0431\u0438\u0434\u0435 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0437\u0430\u043F\u043E\u0447\u043D\u0443\u0432\u0430 \u0441\u043E "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0437\u0430\u0432\u0440\u0448\u0443\u0432\u0430 \u0441\u043E "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0432\u043A\u043B\u0443\u0447\u0443\u0432\u0430 "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u041D\u0435\u0432\u0430\u0436\u0435\u0447\u043A\u0430 \u043D\u0438\u0437\u0430: \u043C\u043E\u0440\u0430 \u0434\u0430 \u043E\u0434\u0433\u043E\u0430\u0440\u0430 \u043D\u0430 \u043F\u0430\u0442\u0435\u0440\u043D\u043E\u0442 ${_issue.pattern}`; - return `Invalid ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u0413\u0440\u0435\u0448\u0435\u043D \u0431\u0440\u043E\u0458: \u043C\u043E\u0440\u0430 \u0434\u0430 \u0431\u0438\u0434\u0435 \u0434\u0435\u043B\u0438\u0432 \u0441\u043E ${issue2.divisor}`; - case "unrecognized_keys": - return `${issue2.keys.length > 1 ? 
"\u041D\u0435\u043F\u0440\u0435\u043F\u043E\u0437\u043D\u0430\u0435\u043D\u0438 \u043A\u043B\u0443\u0447\u0435\u0432\u0438" : "\u041D\u0435\u043F\u0440\u0435\u043F\u043E\u0437\u043D\u0430\u0435\u043D \u043A\u043B\u0443\u0447"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u0413\u0440\u0435\u0448\u0435\u043D \u043A\u043B\u0443\u0447 \u0432\u043E ${issue2.origin}`; - case "invalid_union": - return "\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441"; - case "invalid_element": - return `\u0413\u0440\u0435\u0448\u043D\u0430 \u0432\u0440\u0435\u0434\u043D\u043E\u0441\u0442 \u0432\u043E ${issue2.origin}`; - default: - return `\u0413\u0440\u0435\u0448\u0435\u043D \u0432\u043D\u0435\u0441`; - } - }; -}; -function mk_default() { - return { - localeError: error27() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ms.js -var error28 = () => { - const Sizable = { - string: { unit: "aksara", verb: "mempunyai" }, - file: { unit: "bait", verb: "mempunyai" }, - array: { unit: "elemen", verb: "mempunyai" }, - set: { unit: "elemen", verb: "mempunyai" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "input", - email: "alamat e-mel", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "tarikh masa ISO", - date: "tarikh ISO", - time: "masa ISO", - duration: "tempoh ISO", - ipv4: "alamat IPv4", - ipv6: "alamat IPv6", - cidrv4: "julat IPv4", - cidrv6: "julat IPv6", - base64: "string dikodkan base64", - base64url: "string dikodkan base64url", - json_string: "string JSON", - e164: "nombor E.164", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN", - number: "nombor" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Input tidak sah: dijangka instanceof ${issue2.expected}, diterima ${received}`; - } - return `Input tidak sah: dijangka ${expected}, diterima ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Input tidak sah: dijangka ${stringifyPrimitive(issue2.values[0])}`; - return `Pilihan tidak sah: dijangka salah satu daripada ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Terlalu besar: dijangka ${issue2.origin ?? "nilai"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elemen"}`; - return `Terlalu besar: dijangka ${issue2.origin ?? "nilai"} adalah ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Terlalu kecil: dijangka ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Terlalu kecil: dijangka ${issue2.origin} adalah ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `String tidak sah: mesti bermula dengan "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `String tidak sah: mesti berakhir dengan "${_issue.suffix}"`; - if (_issue.format === "includes") - return `String tidak sah: mesti mengandungi "${_issue.includes}"`; - if (_issue.format === "regex") - return `String tidak sah: mesti sepadan dengan corak ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? issue2.format} tidak sah`; - } - case "not_multiple_of": - return `Nombor tidak sah: perlu gandaan ${issue2.divisor}`; - case "unrecognized_keys": - return `Kunci tidak dikenali: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Kunci tidak sah dalam ${issue2.origin}`; - case "invalid_union": - return "Input tidak sah"; - case "invalid_element": - return `Nilai tidak sah dalam ${issue2.origin}`; - default: - return `Input tidak sah`; - } - }; -}; -function ms_default() { - return { - localeError: error28() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/nl.js -var error29 = () => { - const Sizable = { - string: { unit: "tekens", verb: "heeft" }, - file: { unit: "bytes", verb: "heeft" }, - array: { unit: "elementen", verb: "heeft" }, - set: { unit: "elementen", verb: "heeft" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "invoer", - email: "emailadres", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO datum en tijd", - date: "ISO datum", - time: "ISO tijd", - duration: "ISO duur", - ipv4: "IPv4-adres", - ipv6: "IPv6-adres", - cidrv4: "IPv4-bereik", - cidrv6: "IPv6-bereik", - base64: "base64-gecodeerde tekst", - base64url: "base64 URL-gecodeerde tekst", - json_string: "JSON string", - e164: "E.164-nummer", - jwt: "JWT", - template_literal: "invoer" - }; - const TypeDictionary = { - nan: "NaN", - number: "getal" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Ongeldige invoer: verwacht instanceof ${issue2.expected}, ontving ${received}`; - } - return `Ongeldige invoer: verwacht ${expected}, ontving ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Ongeldige invoer: verwacht ${stringifyPrimitive(issue2.values[0])}`; - return `Ongeldige optie: verwacht \xE9\xE9n van ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - const longName = issue2.origin === "date" ? "laat" : issue2.origin === "string" ? "lang" : "groot"; - if (sizing) - return `Te ${longName}: verwacht dat ${issue2.origin ?? "waarde"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementen"} ${sizing.verb}`; - return `Te ${longName}: verwacht dat ${issue2.origin ?? "waarde"} ${adj}${issue2.maximum.toString()} is`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - const shortName = issue2.origin === "date" ? "vroeg" : issue2.origin === "string" ? "kort" : "klein"; - if (sizing) { - return `Te ${shortName}: verwacht dat ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} ${sizing.verb}`; - } - return `Te ${shortName}: verwacht dat ${issue2.origin} ${adj}${issue2.minimum.toString()} is`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Ongeldige tekst: moet met "${_issue.prefix}" beginnen`; - } - if (_issue.format === "ends_with") - return `Ongeldige tekst: moet op "${_issue.suffix}" eindigen`; - if (_issue.format === "includes") - return `Ongeldige tekst: moet "${_issue.includes}" bevatten`; - if (_issue.format === "regex") - return `Ongeldige tekst: moet overeenkomen met patroon ${_issue.pattern}`; - return `Ongeldig: ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Ongeldig getal: moet een veelvoud van ${issue2.divisor} zijn`; - case "unrecognized_keys": - return `Onbekende key${issue2.keys.length > 1 ? "s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Ongeldige key in ${issue2.origin}`; - case "invalid_union": - return "Ongeldige invoer"; - case "invalid_element": - return `Ongeldige waarde in ${issue2.origin}`; - default: - return `Ongeldige invoer`; - } - }; -}; -function nl_default() { - return { - localeError: error29() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/no.js -var error30 = () => { - const Sizable = { - string: { unit: "tegn", verb: "\xE5 ha" }, - file: { unit: "bytes", verb: "\xE5 ha" }, - array: { unit: "elementer", verb: "\xE5 inneholde" }, - set: { unit: "elementer", verb: "\xE5 inneholde" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "input", - email: "e-postadresse", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO dato- og klokkeslett", - date: "ISO-dato", - time: "ISO-klokkeslett", - duration: "ISO-varighet", - ipv4: "IPv4-omr\xE5de", - ipv6: "IPv6-omr\xE5de", - cidrv4: "IPv4-spekter", - cidrv6: "IPv6-spekter", - base64: "base64-enkodet streng", - base64url: "base64url-enkodet streng", - json_string: "JSON-streng", - e164: "E.164-nummer", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN", - number: "tall", - array: "liste" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Ugyldig input: forventet instanceof ${issue2.expected}, fikk ${received}`; - } - return `Ugyldig input: forventet ${expected}, fikk ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Ugyldig verdi: forventet ${stringifyPrimitive(issue2.values[0])}`; - return `Ugyldig valg: forventet en av ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `For stor(t): forventet ${issue2.origin ?? "value"} til \xE5 ha ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementer"}`; - return `For stor(t): forventet ${issue2.origin ?? "value"} til \xE5 ha ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `For lite(n): forventet ${issue2.origin} til \xE5 ha ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `For lite(n): forventet ${issue2.origin} til \xE5 ha ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Ugyldig streng: m\xE5 starte med "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Ugyldig streng: m\xE5 ende med "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Ugyldig streng: m\xE5 inneholde "${_issue.includes}"`; - if (_issue.format === "regex") - return `Ugyldig streng: m\xE5 matche m\xF8nsteret ${_issue.pattern}`; - return `Ugyldig ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Ugyldig tall: m\xE5 v\xE6re et multiplum av ${issue2.divisor}`; - case "unrecognized_keys": - return `${issue2.keys.length > 1 ? "Ukjente n\xF8kler" : "Ukjent n\xF8kkel"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Ugyldig n\xF8kkel i ${issue2.origin}`; - case "invalid_union": - return "Ugyldig input"; - case "invalid_element": - return `Ugyldig verdi i ${issue2.origin}`; - default: - return `Ugyldig input`; - } - }; -}; -function no_default() { - return { - localeError: error30() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ota.js -var error31 = () => { - const Sizable = { - string: { unit: "harf", verb: "olmal\u0131d\u0131r" }, - file: { unit: "bayt", verb: "olmal\u0131d\u0131r" }, - array: { unit: "unsur", verb: "olmal\u0131d\u0131r" }, - set: { unit: "unsur", verb: "olmal\u0131d\u0131r" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "giren", - email: "epostag\xE2h", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO heng\xE2m\u0131", - date: "ISO tarihi", - time: "ISO zaman\u0131", - duration: "ISO m\xFCddeti", - ipv4: "IPv4 ni\u015F\xE2n\u0131", - ipv6: "IPv6 ni\u015F\xE2n\u0131", - cidrv4: "IPv4 menzili", - cidrv6: "IPv6 menzili", - base64: "base64-\u015Fifreli metin", - base64url: "base64url-\u015Fifreli metin", - json_string: "JSON metin", - e164: "E.164 say\u0131s\u0131", - jwt: "JWT", - template_literal: "giren" - }; - const TypeDictionary = { - nan: "NaN", - number: "numara", - array: "saf", - null: "gayb" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `F\xE2sit giren: umulan instanceof ${issue2.expected}, al\u0131nan ${received}`; - } - return `F\xE2sit giren: umulan ${expected}, al\u0131nan ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `F\xE2sit giren: umulan ${stringifyPrimitive(issue2.values[0])}`; - return `F\xE2sit tercih: m\xFBteberler ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Fazla b\xFCy\xFCk: ${issue2.origin ?? "value"}, ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elements"} sahip olmal\u0131yd\u0131.`; - return `Fazla b\xFCy\xFCk: ${issue2.origin ?? "value"}, ${adj}${issue2.maximum.toString()} olmal\u0131yd\u0131.`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Fazla k\xFC\xE7\xFCk: ${issue2.origin}, ${adj}${issue2.minimum.toString()} ${sizing.unit} sahip olmal\u0131yd\u0131.`; - } - return `Fazla k\xFC\xE7\xFCk: ${issue2.origin}, ${adj}${issue2.minimum.toString()} olmal\u0131yd\u0131.`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `F\xE2sit metin: "${_issue.prefix}" ile ba\u015Flamal\u0131.`; - if (_issue.format === "ends_with") - return `F\xE2sit metin: "${_issue.suffix}" ile bitmeli.`; - if (_issue.format === "includes") - return `F\xE2sit metin: "${_issue.includes}" ihtiv\xE2 etmeli.`; - if (_issue.format === "regex") - return `F\xE2sit metin: ${_issue.pattern} nak\u015F\u0131na uymal\u0131.`; - return `F\xE2sit ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `F\xE2sit say\u0131: ${issue2.divisor} kat\u0131 olmal\u0131yd\u0131.`; - case "unrecognized_keys": - return `Tan\u0131nmayan anahtar ${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `${issue2.origin} i\xE7in tan\u0131nmayan anahtar var.`; - case "invalid_union": - return "Giren tan\u0131namad\u0131."; - case "invalid_element": - return `${issue2.origin} i\xE7in tan\u0131nmayan k\u0131ymet var.`; - default: - return `K\u0131ymet tan\u0131namad\u0131.`; - } - }; -}; -function ota_default() { - return { - localeError: error31() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ps.js -var error32 = () => { - const Sizable = { - string: { unit: "\u062A\u0648\u06A9\u064A", verb: "\u0648\u0644\u0631\u064A" }, - file: { unit: "\u0628\u0627\u06CC\u067C\u0633", verb: "\u0648\u0644\u0631\u064A" }, - array: { unit: "\u062A\u0648\u06A9\u064A", verb: "\u0648\u0644\u0631\u064A" }, - set: { unit: "\u062A\u0648\u06A9\u064A", verb: "\u0648\u0644\u0631\u064A" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u0648\u0631\u0648\u062F\u064A", - email: "\u0628\u0631\u06CC\u069A\u0646\u0627\u0644\u06CC\u06A9", - url: "\u06CC\u0648 \u0622\u0631 \u0627\u0644", - emoji: "\u0627\u06CC\u0645\u0648\u062C\u064A", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u0646\u06CC\u067C\u0647 \u0627\u0648 \u0648\u062E\u062A", - date: "\u0646\u06D0\u067C\u0647", - time: "\u0648\u062E\u062A", - duration: "\u0645\u0648\u062F\u0647", - ipv4: "\u062F IPv4 \u067E\u062A\u0647", - ipv6: "\u062F IPv6 \u067E\u062A\u0647", - cidrv4: "\u062F IPv4 \u0633\u0627\u062D\u0647", - cidrv6: "\u062F IPv6 \u0633\u0627\u062D\u0647", - base64: "base64-encoded \u0645\u062A\u0646", - base64url: "base64url-encoded \u0645\u062A\u0646", - json_string: "JSON \u0645\u062A\u0646", - e164: "\u062F E.164 \u0634\u0645\u06D0\u0631\u0647", - jwt: "JWT", - template_literal: "\u0648\u0631\u0648\u062F\u064A" - }; - 
const TypeDictionary = { - nan: "NaN", - number: "\u0639\u062F\u062F", - array: "\u0627\u0631\u06D0" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0646\u0627\u0633\u0645 \u0648\u0631\u0648\u062F\u064A: \u0628\u0627\u06CC\u062F instanceof ${issue2.expected} \u0648\u0627\u06CC, \u0645\u06AB\u0631 ${received} \u062A\u0631\u0644\u0627\u0633\u0647 \u0634\u0648`; - } - return `\u0646\u0627\u0633\u0645 \u0648\u0631\u0648\u062F\u064A: \u0628\u0627\u06CC\u062F ${expected} \u0648\u0627\u06CC, \u0645\u06AB\u0631 ${received} \u062A\u0631\u0644\u0627\u0633\u0647 \u0634\u0648`; - } - case "invalid_value": - if (issue2.values.length === 1) { - return `\u0646\u0627\u0633\u0645 \u0648\u0631\u0648\u062F\u064A: \u0628\u0627\u06CC\u062F ${stringifyPrimitive(issue2.values[0])} \u0648\u0627\u06CC`; - } - return `\u0646\u0627\u0633\u0645 \u0627\u0646\u062A\u062E\u0627\u0628: \u0628\u0627\u06CC\u062F \u06CC\u0648 \u0644\u0647 ${joinValues(issue2.values, "|")} \u0685\u062E\u0647 \u0648\u0627\u06CC`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0689\u06CC\u0631 \u0644\u0648\u06CC: ${issue2.origin ?? "\u0627\u0631\u0632\u069A\u062A"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0635\u0631\u0648\u0646\u0647"} \u0648\u0644\u0631\u064A`; - } - return `\u0689\u06CC\u0631 \u0644\u0648\u06CC: ${issue2.origin ?? "\u0627\u0631\u0632\u069A\u062A"} \u0628\u0627\u06CC\u062F ${adj}${issue2.maximum.toString()} \u0648\u064A`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0689\u06CC\u0631 \u06A9\u0648\u0686\u0646\u06CC: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} ${sizing.unit} \u0648\u0644\u0631\u064A`; - } - return `\u0689\u06CC\u0631 \u06A9\u0648\u0686\u0646\u06CC: ${issue2.origin} \u0628\u0627\u06CC\u062F ${adj}${issue2.minimum.toString()} \u0648\u064A`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F \u062F "${_issue.prefix}" \u0633\u0631\u0647 \u067E\u06CC\u0644 \u0634\u064A`; - } - if (_issue.format === "ends_with") { - return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F \u062F "${_issue.suffix}" \u0633\u0631\u0647 \u067E\u0627\u06CC \u062A\u0647 \u0648\u0631\u0633\u064A\u0696\u064A`; - } - if (_issue.format === "includes") { - return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F "${_issue.includes}" \u0648\u0644\u0631\u064A`; - } - if (_issue.format === "regex") { - return `\u0646\u0627\u0633\u0645 \u0645\u062A\u0646: \u0628\u0627\u06CC\u062F \u062F ${_issue.pattern} \u0633\u0631\u0647 \u0645\u0637\u0627\u0628\u0642\u062A \u0648\u0644\u0631\u064A`; - } - return `${FormatDictionary[_issue.format] ?? issue2.format} \u0646\u0627\u0633\u0645 \u062F\u06CC`; - } - case "not_multiple_of": - return `\u0646\u0627\u0633\u0645 \u0639\u062F\u062F: \u0628\u0627\u06CC\u062F \u062F ${issue2.divisor} \u0645\u0636\u0631\u0628 \u0648\u064A`; - case "unrecognized_keys": - return `\u0646\u0627\u0633\u0645 ${issue2.keys.length > 1 ? 
"\u06A9\u0644\u06CC\u0689\u0648\u0646\u0647" : "\u06A9\u0644\u06CC\u0689"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u0646\u0627\u0633\u0645 \u06A9\u0644\u06CC\u0689 \u067E\u0647 ${issue2.origin} \u06A9\u06D0`; - case "invalid_union": - return `\u0646\u0627\u0633\u0645\u0647 \u0648\u0631\u0648\u062F\u064A`; - case "invalid_element": - return `\u0646\u0627\u0633\u0645 \u0639\u0646\u0635\u0631 \u067E\u0647 ${issue2.origin} \u06A9\u06D0`; - default: - return `\u0646\u0627\u0633\u0645\u0647 \u0648\u0631\u0648\u062F\u064A`; - } - }; -}; -function ps_default() { - return { - localeError: error32() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/pl.js -var error33 = () => { - const Sizable = { - string: { unit: "znak\xF3w", verb: "mie\u0107" }, - file: { unit: "bajt\xF3w", verb: "mie\u0107" }, - array: { unit: "element\xF3w", verb: "mie\u0107" }, - set: { unit: "element\xF3w", verb: "mie\u0107" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "wyra\u017Cenie", - email: "adres email", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "data i godzina w formacie ISO", - date: "data w formacie ISO", - time: "godzina w formacie ISO", - duration: "czas trwania ISO", - ipv4: "adres IPv4", - ipv6: "adres IPv6", - cidrv4: "zakres IPv4", - cidrv6: "zakres IPv6", - base64: "ci\u0105g znak\xF3w zakodowany w formacie base64", - base64url: "ci\u0105g znak\xF3w zakodowany w formacie base64url", - json_string: "ci\u0105g znak\xF3w w formacie JSON", - e164: "liczba E.164", - jwt: "JWT", - template_literal: "wej\u015Bcie" - }; - const TypeDictionary = { - nan: "NaN", - number: "liczba", - array: "tablica" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Nieprawid\u0142owe dane wej\u015Bciowe: oczekiwano instanceof ${issue2.expected}, otrzymano ${received}`; - } - return `Nieprawid\u0142owe dane wej\u015Bciowe: oczekiwano ${expected}, otrzymano ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Nieprawid\u0142owe dane wej\u015Bciowe: oczekiwano ${stringifyPrimitive(issue2.values[0])}`; - return `Nieprawid\u0142owa opcja: oczekiwano jednej z warto\u015Bci ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Za du\u017Ca warto\u015B\u0107: oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie mie\u0107 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"element\xF3w"}`; - } - return `Zbyt du\u017C(y/a/e): oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie wynosi\u0107 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Za ma\u0142a warto\u015B\u0107: oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie mie\u0107 ${adj}${issue2.minimum.toString()} ${sizing.unit ?? "element\xF3w"}`; - } - return `Zbyt ma\u0142(y/a/e): oczekiwano, \u017Ce ${issue2.origin ?? "warto\u015B\u0107"} b\u0119dzie wynosi\u0107 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi zaczyna\u0107 si\u0119 od "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi ko\u0144czy\u0107 si\u0119 na "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi zawiera\u0107 "${_issue.includes}"`; - if (_issue.format === "regex") - return `Nieprawid\u0142owy ci\u0105g znak\xF3w: musi odpowiada\u0107 wzorcowi ${_issue.pattern}`; - return `Nieprawid\u0142ow(y/a/e) ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Nieprawid\u0142owa liczba: musi by\u0107 wielokrotno\u015Bci\u0105 ${issue2.divisor}`; - case "unrecognized_keys": - return `Nierozpoznane klucze${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Nieprawid\u0142owy klucz w ${issue2.origin}`; - case "invalid_union": - return "Nieprawid\u0142owe dane wej\u015Bciowe"; - case "invalid_element": - return `Nieprawid\u0142owa warto\u015B\u0107 w ${issue2.origin}`; - default: - return `Nieprawid\u0142owe dane wej\u015Bciowe`; - } - }; -}; -function pl_default() { - return { - localeError: error33() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/pt.js -var error34 = () => { - const Sizable = { - string: { unit: "caracteres", verb: "ter" }, - file: { unit: "bytes", verb: "ter" }, - array: { unit: "itens", verb: "ter" }, - set: { unit: "itens", verb: "ter" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "padr\xE3o", - email: "endere\xE7o de e-mail", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "data e hora ISO", - date: "data ISO", - time: "hora ISO", - duration: "dura\xE7\xE3o ISO", - ipv4: "endere\xE7o IPv4", - ipv6: "endere\xE7o IPv6", - cidrv4: "faixa de IPv4", - cidrv6: "faixa de IPv6", - base64: "texto codificado em base64", - base64url: "URL codificada em base64", - json_string: "texto JSON", - e164: "n\xFAmero E.164", - jwt: "JWT", - template_literal: "entrada" - }; - const TypeDictionary = { - nan: "NaN", - number: "n\xFAmero", - null: "nulo" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Tipo inv\xE1lido: esperado instanceof ${issue2.expected}, recebido ${received}`; - } - return `Tipo inv\xE1lido: esperado ${expected}, recebido ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Entrada inv\xE1lida: esperado ${stringifyPrimitive(issue2.values[0])}`; - return `Op\xE7\xE3o inv\xE1lida: esperada uma das ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Muito grande: esperado que ${issue2.origin ?? "valor"} tivesse ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementos"}`; - return `Muito grande: esperado que ${issue2.origin ?? "valor"} fosse ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Muito pequeno: esperado que ${issue2.origin} tivesse ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Muito pequeno: esperado que ${issue2.origin} fosse ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Texto inv\xE1lido: deve come\xE7ar com "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Texto inv\xE1lido: deve terminar com "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Texto inv\xE1lido: deve incluir "${_issue.includes}"`; - if (_issue.format === "regex") - return `Texto inv\xE1lido: deve corresponder ao padr\xE3o ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? issue2.format} inv\xE1lido`; - } - case "not_multiple_of": - return `N\xFAmero inv\xE1lido: deve ser m\xFAltiplo de ${issue2.divisor}`; - case "unrecognized_keys": - return `Chave${issue2.keys.length > 1 ? "s" : ""} desconhecida${issue2.keys.length > 1 ? 
"s" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Chave inv\xE1lida em ${issue2.origin}`; - case "invalid_union": - return "Entrada inv\xE1lida"; - case "invalid_element": - return `Valor inv\xE1lido em ${issue2.origin}`; - default: - return `Campo inv\xE1lido`; - } - }; -}; -function pt_default() { - return { - localeError: error34() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ru.js -function getRussianPlural(count, one, few, many) { - const absCount = Math.abs(count); - const lastDigit = absCount % 10; - const lastTwoDigits = absCount % 100; - if (lastTwoDigits >= 11 && lastTwoDigits <= 19) { - return many; - } - if (lastDigit === 1) { - return one; - } - if (lastDigit >= 2 && lastDigit <= 4) { - return few; - } - return many; -} -var error35 = () => { - const Sizable = { - string: { - unit: { - one: "\u0441\u0438\u043C\u0432\u043E\u043B", - few: "\u0441\u0438\u043C\u0432\u043E\u043B\u0430", - many: "\u0441\u0438\u043C\u0432\u043E\u043B\u043E\u0432" - }, - verb: "\u0438\u043C\u0435\u0442\u044C" - }, - file: { - unit: { - one: "\u0431\u0430\u0439\u0442", - few: "\u0431\u0430\u0439\u0442\u0430", - many: "\u0431\u0430\u0439\u0442" - }, - verb: "\u0438\u043C\u0435\u0442\u044C" - }, - array: { - unit: { - one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", - few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430", - many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u043E\u0432" - }, - verb: "\u0438\u043C\u0435\u0442\u044C" - }, - set: { - unit: { - one: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442", - few: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u0430", - many: "\u044D\u043B\u0435\u043C\u0435\u043D\u0442\u043E\u0432" - }, - verb: "\u0438\u043C\u0435\u0442\u044C" - } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0432\u0432\u043E\u0434", - email: "email \u0430\u0434\u0440\u0435\u0441", - url: "URL", - emoji: "\u044D\u043C\u043E\u0434\u0437\u0438", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u0434\u0430\u0442\u0430 \u0438 \u0432\u0440\u0435\u043C\u044F", - date: "ISO \u0434\u0430\u0442\u0430", - time: "ISO \u0432\u0440\u0435\u043C\u044F", - duration: "ISO \u0434\u043B\u0438\u0442\u0435\u043B\u044C\u043D\u043E\u0441\u0442\u044C", - ipv4: "IPv4 \u0430\u0434\u0440\u0435\u0441", - ipv6: "IPv6 \u0430\u0434\u0440\u0435\u0441", - cidrv4: "IPv4 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", - cidrv6: "IPv6 \u0434\u0438\u0430\u043F\u0430\u0437\u043E\u043D", - base64: "\u0441\u0442\u0440\u043E\u043A\u0430 \u0432 \u0444\u043E\u0440\u043C\u0430\u0442\u0435 base64", - base64url: "\u0441\u0442\u0440\u043E\u043A\u0430 \u0432 \u0444\u043E\u0440\u043C\u0430\u0442\u0435 base64url", - json_string: "JSON \u0441\u0442\u0440\u043E\u043A\u0430", - e164: "\u043D\u043E\u043C\u0435\u0440 E.164", - jwt: "JWT", - template_literal: "\u0432\u0432\u043E\u0434" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0447\u0438\u0441\u043B\u043E", - array: "\u043C\u0430\u0441\u0441\u0438\u0432" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0432\u043E\u0434: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C instanceof ${issue2.expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D\u043E ${received}`; - } - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0432\u043E\u0434: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C ${expected}, \u043F\u043E\u043B\u0443\u0447\u0435\u043D\u043E ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0432\u043E\u0434: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C ${stringifyPrimitive(issue2.values[0])}`; - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u0432\u0430\u0440\u0438\u0430\u043D\u0442: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C \u043E\u0434\u043D\u043E \u0438\u0437 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - const maxValue = Number(issue2.maximum); - const unit = getRussianPlural(maxValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); - return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u0431\u043E\u043B\u044C\u0448\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin ?? "\u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435"} \u0431\u0443\u0434\u0435\u0442 \u0438\u043C\u0435\u0442\u044C ${adj}${issue2.maximum.toString()} ${unit}`; - } - return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u0431\u043E\u043B\u044C\u0448\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin ?? 
"\u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435"} \u0431\u0443\u0434\u0435\u0442 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - const minValue = Number(issue2.minimum); - const unit = getRussianPlural(minValue, sizing.unit.one, sizing.unit.few, sizing.unit.many); - return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u043C\u0430\u043B\u0435\u043D\u044C\u043A\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin} \u0431\u0443\u0434\u0435\u0442 \u0438\u043C\u0435\u0442\u044C ${adj}${issue2.minimum.toString()} ${unit}`; - } - return `\u0421\u043B\u0438\u0448\u043A\u043E\u043C \u043C\u0430\u043B\u0435\u043D\u044C\u043A\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435: \u043E\u0436\u0438\u0434\u0430\u043B\u043E\u0441\u044C, \u0447\u0442\u043E ${issue2.origin} \u0431\u0443\u0434\u0435\u0442 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u043D\u0430\u0447\u0438\u043D\u0430\u0442\u044C\u0441\u044F \u0441 "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u0437\u0430\u043A\u0430\u043D\u0447\u0438\u0432\u0430\u0442\u044C\u0441\u044F \u043D\u0430 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u0441\u043E\u0434\u0435\u0440\u0436\u0430\u0442\u044C "${_issue.includes}"`; - if (_issue.format === "regex") - return 
`\u041D\u0435\u0432\u0435\u0440\u043D\u0430\u044F \u0441\u0442\u0440\u043E\u043A\u0430: \u0434\u043E\u043B\u0436\u043D\u0430 \u0441\u043E\u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u043E\u0432\u0430\u0442\u044C \u0448\u0430\u0431\u043B\u043E\u043D\u0443 ${_issue.pattern}`; - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u041D\u0435\u0432\u0435\u0440\u043D\u043E\u0435 \u0447\u0438\u0441\u043B\u043E: \u0434\u043E\u043B\u0436\u043D\u043E \u0431\u044B\u0442\u044C \u043A\u0440\u0430\u0442\u043D\u044B\u043C ${issue2.divisor}`; - case "unrecognized_keys": - return `\u041D\u0435\u0440\u0430\u0441\u043F\u043E\u0437\u043D\u0430\u043D\u043D${issue2.keys.length > 1 ? "\u044B\u0435" : "\u044B\u0439"} \u043A\u043B\u044E\u0447${issue2.keys.length > 1 ? "\u0438" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0439 \u043A\u043B\u044E\u0447 \u0432 ${issue2.origin}`; - case "invalid_union": - return "\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0435 \u0432\u0445\u043E\u0434\u043D\u044B\u0435 \u0434\u0430\u043D\u043D\u044B\u0435"; - case "invalid_element": - return `\u041D\u0435\u0432\u0435\u0440\u043D\u043E\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435 \u0432 ${issue2.origin}`; - default: - return `\u041D\u0435\u0432\u0435\u0440\u043D\u044B\u0435 \u0432\u0445\u043E\u0434\u043D\u044B\u0435 \u0434\u0430\u043D\u043D\u044B\u0435`; - } - }; -}; -function ru_default() { - return { - localeError: error35() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/sl.js -var error36 = () => { - const Sizable = { - string: { unit: "znakov", verb: "imeti" }, - file: { unit: "bajtov", verb: "imeti" }, - array: { unit: "elementov", verb: "imeti" }, - set: { unit: "elementov", verb: "imeti" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "vnos", - email: "e-po\u0161tni naslov", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO datum in \u010Das", - date: "ISO datum", - time: "ISO \u010Das", - duration: "ISO trajanje", - ipv4: "IPv4 naslov", - ipv6: "IPv6 naslov", - cidrv4: "obseg IPv4", - cidrv6: "obseg IPv6", - base64: "base64 kodiran niz", - base64url: "base64url kodiran niz", - json_string: "JSON niz", - e164: "E.164 \u0161tevilka", - jwt: "JWT", - template_literal: "vnos" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0161tevilo", - array: "tabela" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Neveljaven vnos: pri\u010Dakovano instanceof ${issue2.expected}, prejeto ${received}`; - } - return `Neveljaven vnos: pri\u010Dakovano ${expected}, prejeto ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Neveljaven vnos: pri\u010Dakovano ${stringifyPrimitive(issue2.values[0])}`; - return `Neveljavna mo\u017Enost: pri\u010Dakovano eno izmed ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Preveliko: pri\u010Dakovano, da bo ${issue2.origin ?? "vrednost"} imelo ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "elementov"}`; - return `Preveliko: pri\u010Dakovano, da bo ${issue2.origin ?? "vrednost"} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Premajhno: pri\u010Dakovano, da bo ${issue2.origin} imelo ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Premajhno: pri\u010Dakovano, da bo ${issue2.origin} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Neveljaven niz: mora se za\u010Deti z "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `Neveljaven niz: mora se kon\u010Dati z "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Neveljaven niz: mora vsebovati "${_issue.includes}"`; - if (_issue.format === "regex") - return `Neveljaven niz: mora ustrezati vzorcu ${_issue.pattern}`; - return `Neveljaven ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Neveljavno \u0161tevilo: mora biti ve\u010Dkratnik ${issue2.divisor}`; - case "unrecognized_keys": - return `Neprepoznan${issue2.keys.length > 1 ? "i klju\u010Di" : " klju\u010D"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Neveljaven klju\u010D v ${issue2.origin}`; - case "invalid_union": - return "Neveljaven vnos"; - case "invalid_element": - return `Neveljavna vrednost v ${issue2.origin}`; - default: - return "Neveljaven vnos"; - } - }; -}; -function sl_default() { - return { - localeError: error36() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/sv.js -var error37 = () => { - const Sizable = { - string: { unit: "tecken", verb: "att ha" }, - file: { unit: "bytes", verb: "att ha" }, - array: { unit: "objekt", verb: "att inneh\xE5lla" }, - set: { unit: "objekt", verb: "att inneh\xE5lla" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "regulj\xE4rt uttryck", - email: "e-postadress", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO-datum och tid", - date: "ISO-datum", - time: "ISO-tid", - duration: "ISO-varaktighet", - ipv4: "IPv4-intervall", - ipv6: "IPv6-intervall", - cidrv4: "IPv4-spektrum", - cidrv6: "IPv6-spektrum", - base64: "base64-kodad str\xE4ng", - base64url: "base64url-kodad str\xE4ng", - json_string: "JSON-str\xE4ng", - e164: "E.164-nummer", - jwt: "JWT", - template_literal: "mall-literal" - }; - const TypeDictionary = { - nan: "NaN", - number: "antal", - array: "lista" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Ogiltig inmatning: f\xF6rv\xE4ntat instanceof ${issue2.expected}, fick ${received}`; - } - return `Ogiltig inmatning: f\xF6rv\xE4ntat ${expected}, fick ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Ogiltig inmatning: f\xF6rv\xE4ntat ${stringifyPrimitive(issue2.values[0])}`; - return `Ogiltigt val: f\xF6rv\xE4ntade en av ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `F\xF6r stor(t): f\xF6rv\xE4ntade ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "element"}`; - } - return `F\xF6r stor(t): f\xF6rv\xE4ntat ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `F\xF6r lite(t): f\xF6rv\xE4ntade ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `F\xF6r lite(t): f\xF6rv\xE4ntade ${issue2.origin ?? "v\xE4rdet"} att ha ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `Ogiltig str\xE4ng: m\xE5ste b\xF6rja med "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `Ogiltig str\xE4ng: m\xE5ste sluta med "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Ogiltig str\xE4ng: m\xE5ste inneh\xE5lla "${_issue.includes}"`; - if (_issue.format === "regex") - return `Ogiltig str\xE4ng: m\xE5ste matcha m\xF6nstret "${_issue.pattern}"`; - return `Ogiltig(t) ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Ogiltigt tal: m\xE5ste vara en multipel av ${issue2.divisor}`; - case "unrecognized_keys": - return `${issue2.keys.length > 1 ? "Ok\xE4nda nycklar" : "Ok\xE4nd nyckel"}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Ogiltig nyckel i ${issue2.origin ?? "v\xE4rdet"}`; - case "invalid_union": - return "Ogiltig input"; - case "invalid_element": - return `Ogiltigt v\xE4rde i ${issue2.origin ?? 
"v\xE4rdet"}`; - default: - return `Ogiltig input`; - } - }; -}; -function sv_default() { - return { - localeError: error37() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ta.js -var error38 = () => { - const Sizable = { - string: { unit: "\u0B8E\u0BB4\u0BC1\u0BA4\u0BCD\u0BA4\u0BC1\u0B95\u0BCD\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" }, - file: { unit: "\u0BAA\u0BC8\u0B9F\u0BCD\u0B9F\u0BC1\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" }, - array: { unit: "\u0B89\u0BB1\u0BC1\u0BAA\u0BCD\u0BAA\u0BC1\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" }, - set: { unit: "\u0B89\u0BB1\u0BC1\u0BAA\u0BCD\u0BAA\u0BC1\u0B95\u0BB3\u0BCD", verb: "\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1", - email: "\u0BAE\u0BBF\u0BA9\u0BCD\u0BA9\u0B9E\u0BCD\u0B9A\u0BB2\u0BCD \u0BAE\u0BC1\u0B95\u0BB5\u0BB0\u0BBF", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u0BA4\u0BC7\u0BA4\u0BBF \u0BA8\u0BC7\u0BB0\u0BAE\u0BCD", - date: "ISO \u0BA4\u0BC7\u0BA4\u0BBF", - time: "ISO \u0BA8\u0BC7\u0BB0\u0BAE\u0BCD", - duration: "ISO \u0B95\u0BBE\u0BB2 \u0B85\u0BB3\u0BB5\u0BC1", - ipv4: "IPv4 \u0BAE\u0BC1\u0B95\u0BB5\u0BB0\u0BBF", - ipv6: "IPv6 \u0BAE\u0BC1\u0B95\u0BB5\u0BB0\u0BBF", - cidrv4: "IPv4 \u0BB5\u0BB0\u0BAE\u0BCD\u0BAA\u0BC1", - cidrv6: "IPv6 \u0BB5\u0BB0\u0BAE\u0BCD\u0BAA\u0BC1", - base64: "base64-encoded \u0B9A\u0BB0\u0BAE\u0BCD", - base64url: "base64url-encoded \u0B9A\u0BB0\u0BAE\u0BCD", - json_string: "JSON \u0B9A\u0BB0\u0BAE\u0BCD", - e164: "E.164 \u0B8E\u0BA3\u0BCD", - jwt: "JWT", - template_literal: "input" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0B8E\u0BA3\u0BCD", - array: "\u0B85\u0BA3\u0BBF", - null: "\u0BB5\u0BC6\u0BB1\u0BC1\u0BAE\u0BC8" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 instanceof ${issue2.expected}, \u0BAA\u0BC6\u0BB1\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${received}`; - } - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${expected}, \u0BAA\u0BC6\u0BB1\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${stringifyPrimitive(issue2.values[0])}`; - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0BB5\u0BBF\u0BB0\u0BC1\u0BAA\u0BCD\u0BAA\u0BAE\u0BCD: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${joinValues(issue2.values, "|")} \u0B87\u0BB2\u0BCD \u0B92\u0BA9\u0BCD\u0BB1\u0BC1`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0BAE\u0BBF\u0B95 \u0BAA\u0BC6\u0BB0\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin ?? "\u0BAE\u0BA4\u0BBF\u0BAA\u0BCD\u0BAA\u0BC1"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"\u0B89\u0BB1\u0BC1\u0BAA\u0BCD\u0BAA\u0BC1\u0B95\u0BB3\u0BCD"} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - } - return `\u0BAE\u0BBF\u0B95 \u0BAA\u0BC6\u0BB0\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin ?? "\u0BAE\u0BA4\u0BBF\u0BAA\u0BCD\u0BAA\u0BC1"} ${adj}${issue2.maximum.toString()} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0BAE\u0BBF\u0B95\u0B9A\u0BCD \u0B9A\u0BBF\u0BB1\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - } - return `\u0BAE\u0BBF\u0B95\u0B9A\u0BCD \u0B9A\u0BBF\u0BB1\u0BBF\u0BAF\u0BA4\u0BC1: \u0B8E\u0BA4\u0BBF\u0BB0\u0BCD\u0BAA\u0BBE\u0BB0\u0BCD\u0B95\u0BCD\u0B95\u0BAA\u0BCD\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1 ${issue2.origin} ${adj}${issue2.minimum.toString()} \u0B86\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: "${_issue.prefix}" \u0B87\u0BB2\u0BCD \u0BA4\u0BCA\u0B9F\u0B99\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - if (_issue.format === "ends_with") - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: "${_issue.suffix}" \u0B87\u0BB2\u0BCD \u0BAE\u0BC1\u0B9F\u0BBF\u0BB5\u0B9F\u0BC8\u0BAF \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - if (_issue.format === "includes") - 
return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: "${_issue.includes}" \u0B90 \u0B89\u0BB3\u0BCD\u0BB3\u0B9F\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - if (_issue.format === "regex") - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B9A\u0BB0\u0BAE\u0BCD: ${_issue.pattern} \u0BAE\u0BC1\u0BB1\u0BC8\u0BAA\u0BBE\u0B9F\u0BCD\u0B9F\u0BC1\u0B9F\u0BA9\u0BCD \u0BAA\u0BCA\u0BB0\u0BC1\u0BA8\u0BCD\u0BA4 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B8E\u0BA3\u0BCD: ${issue2.divisor} \u0B87\u0BA9\u0BCD \u0BAA\u0BB2\u0BAE\u0BBE\u0B95 \u0B87\u0BB0\u0BC1\u0B95\u0BCD\u0B95 \u0BB5\u0BC7\u0BA3\u0BCD\u0B9F\u0BC1\u0BAE\u0BCD`; - case "unrecognized_keys": - return `\u0B85\u0B9F\u0BC8\u0BAF\u0BBE\u0BB3\u0BAE\u0BCD \u0BA4\u0BC6\u0BB0\u0BBF\u0BAF\u0BBE\u0BA4 \u0BB5\u0BBF\u0B9A\u0BC8${issue2.keys.length > 1 ? 
"\u0B95\u0BB3\u0BCD" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `${issue2.origin} \u0B87\u0BB2\u0BCD \u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0BB5\u0BBF\u0B9A\u0BC8`; - case "invalid_union": - return "\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1"; - case "invalid_element": - return `${issue2.origin} \u0B87\u0BB2\u0BCD \u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0BAE\u0BA4\u0BBF\u0BAA\u0BCD\u0BAA\u0BC1`; - default: - return `\u0BA4\u0BB5\u0BB1\u0BBE\u0BA9 \u0B89\u0BB3\u0BCD\u0BB3\u0BC0\u0B9F\u0BC1`; - } - }; -}; -function ta_default() { - return { - localeError: error38() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/th.js -var error39 = () => { - const Sizable = { - string: { unit: "\u0E15\u0E31\u0E27\u0E2D\u0E31\u0E01\u0E29\u0E23", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" }, - file: { unit: "\u0E44\u0E1A\u0E15\u0E4C", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" }, - array: { unit: "\u0E23\u0E32\u0E22\u0E01\u0E32\u0E23", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" }, - set: { unit: "\u0E23\u0E32\u0E22\u0E01\u0E32\u0E23", verb: "\u0E04\u0E27\u0E23\u0E21\u0E35" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E17\u0E35\u0E48\u0E1B\u0E49\u0E2D\u0E19", - email: "\u0E17\u0E35\u0E48\u0E2D\u0E22\u0E39\u0E48\u0E2D\u0E35\u0E40\u0E21\u0E25", - url: "URL", - emoji: "\u0E2D\u0E34\u0E42\u0E21\u0E08\u0E34", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u0E27\u0E31\u0E19\u0E17\u0E35\u0E48\u0E40\u0E27\u0E25\u0E32\u0E41\u0E1A\u0E1A ISO", - date: "\u0E27\u0E31\u0E19\u0E17\u0E35\u0E48\u0E41\u0E1A\u0E1A ISO", - time: "\u0E40\u0E27\u0E25\u0E32\u0E41\u0E1A\u0E1A ISO", - duration: "\u0E0A\u0E48\u0E27\u0E07\u0E40\u0E27\u0E25\u0E32\u0E41\u0E1A\u0E1A ISO", - ipv4: "\u0E17\u0E35\u0E48\u0E2D\u0E22\u0E39\u0E48 IPv4", - ipv6: "\u0E17\u0E35\u0E48\u0E2D\u0E22\u0E39\u0E48 IPv6", - cidrv4: "\u0E0A\u0E48\u0E27\u0E07 IP \u0E41\u0E1A\u0E1A IPv4", - cidrv6: "\u0E0A\u0E48\u0E27\u0E07 IP \u0E41\u0E1A\u0E1A IPv6", - base64: "\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E41\u0E1A\u0E1A Base64", - base64url: "\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E41\u0E1A\u0E1A Base64 \u0E2A\u0E33\u0E2B\u0E23\u0E31\u0E1A URL", - json_string: "\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E41\u0E1A\u0E1A JSON", - e164: "\u0E40\u0E1A\u0E2D\u0E23\u0E4C\u0E42\u0E17\u0E23\u0E28\u0E31\u0E1E\u0E17\u0E4C\u0E23\u0E30\u0E2B\u0E27\u0E48\u0E32\u0E07\u0E1B\u0E23\u0E30\u0E40\u0E17\u0E28 (E.164)", - jwt: "\u0E42\u0E17\u0E40\u0E04\u0E19 JWT", - template_literal: "\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E17\u0E35\u0E48\u0E1B\u0E49\u0E2D\u0E19" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0E15\u0E31\u0E27\u0E40\u0E25\u0E02", - array: "\u0E2D\u0E32\u0E23\u0E4C\u0E40\u0E23\u0E22\u0E4C (Array)", - null: "\u0E44\u0E21\u0E48\u0E21\u0E35\u0E04\u0E48\u0E32 (null)" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? 
issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0E1B\u0E23\u0E30\u0E40\u0E20\u0E17\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19 instanceof ${issue2.expected} \u0E41\u0E15\u0E48\u0E44\u0E14\u0E49\u0E23\u0E31\u0E1A ${received}`; - } - return `\u0E1B\u0E23\u0E30\u0E40\u0E20\u0E17\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19 ${expected} \u0E41\u0E15\u0E48\u0E44\u0E14\u0E49\u0E23\u0E31\u0E1A ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u0E04\u0E48\u0E32\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19 ${stringifyPrimitive(issue2.values[0])}`; - return `\u0E15\u0E31\u0E27\u0E40\u0E25\u0E37\u0E2D\u0E01\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E04\u0E27\u0E23\u0E40\u0E1B\u0E47\u0E19\u0E2B\u0E19\u0E36\u0E48\u0E07\u0E43\u0E19 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "\u0E44\u0E21\u0E48\u0E40\u0E01\u0E34\u0E19" : "\u0E19\u0E49\u0E2D\u0E22\u0E01\u0E27\u0E48\u0E32"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u0E40\u0E01\u0E34\u0E19\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin ?? "\u0E04\u0E48\u0E32"} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.maximum.toString()} ${sizing.unit ?? "\u0E23\u0E32\u0E22\u0E01\u0E32\u0E23"}`; - return `\u0E40\u0E01\u0E34\u0E19\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin ?? "\u0E04\u0E48\u0E32"} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
"\u0E2D\u0E22\u0E48\u0E32\u0E07\u0E19\u0E49\u0E2D\u0E22" : "\u0E21\u0E32\u0E01\u0E01\u0E27\u0E48\u0E32"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0E19\u0E49\u0E2D\u0E22\u0E01\u0E27\u0E48\u0E32\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u0E19\u0E49\u0E2D\u0E22\u0E01\u0E27\u0E48\u0E32\u0E01\u0E33\u0E2B\u0E19\u0E14: ${issue2.origin} \u0E04\u0E27\u0E23\u0E21\u0E35${adj} ${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E15\u0E49\u0E2D\u0E07\u0E02\u0E36\u0E49\u0E19\u0E15\u0E49\u0E19\u0E14\u0E49\u0E27\u0E22 "${_issue.prefix}"`; - } - if (_issue.format === "ends_with") - return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E15\u0E49\u0E2D\u0E07\u0E25\u0E07\u0E17\u0E49\u0E32\u0E22\u0E14\u0E49\u0E27\u0E22 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21\u0E15\u0E49\u0E2D\u0E07\u0E21\u0E35 "${_issue.includes}" \u0E2D\u0E22\u0E39\u0E48\u0E43\u0E19\u0E02\u0E49\u0E2D\u0E04\u0E27\u0E32\u0E21`; - if (_issue.format === "regex") - return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E15\u0E49\u0E2D\u0E07\u0E15\u0E23\u0E07\u0E01\u0E31\u0E1A\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E17\u0E35\u0E48\u0E01\u0E33\u0E2B\u0E19\u0E14 ${_issue.pattern}`; - return `\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\u0E15\u0E31\u0E27\u0E40\u0E25\u0E02\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E15\u0E49\u0E2D\u0E07\u0E40\u0E1B\u0E47\u0E19\u0E08\u0E33\u0E19\u0E27\u0E19\u0E17\u0E35\u0E48\u0E2B\u0E32\u0E23\u0E14\u0E49\u0E27\u0E22 ${issue2.divisor} \u0E44\u0E14\u0E49\u0E25\u0E07\u0E15\u0E31\u0E27`; - case "unrecognized_keys": - return `\u0E1E\u0E1A\u0E04\u0E35\u0E22\u0E4C\u0E17\u0E35\u0E48\u0E44\u0E21\u0E48\u0E23\u0E39\u0E49\u0E08\u0E31\u0E01: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u0E04\u0E35\u0E22\u0E4C\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07\u0E43\u0E19 ${issue2.origin}`; - case "invalid_union": - return "\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07: \u0E44\u0E21\u0E48\u0E15\u0E23\u0E07\u0E01\u0E31\u0E1A\u0E23\u0E39\u0E1B\u0E41\u0E1A\u0E1A\u0E22\u0E39\u0E40\u0E19\u0E35\u0E22\u0E19\u0E17\u0E35\u0E48\u0E01\u0E33\u0E2B\u0E19\u0E14\u0E44\u0E27\u0E49"; - case "invalid_element": - return `\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07\u0E43\u0E19 ${issue2.origin}`; - default: - return `\u0E02\u0E49\u0E2D\u0E21\u0E39\u0E25\u0E44\u0E21\u0E48\u0E16\u0E39\u0E01\u0E15\u0E49\u0E2D\u0E07`; - } - }; -}; -function th_default() { - return { - localeError: error39() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/tr.js -var error40 = () => { - const Sizable = { - string: { unit: "karakter", verb: "olmal\u0131" }, - file: { unit: "bayt", verb: "olmal\u0131" }, - array: { unit: "\xF6\u011Fe", verb: "olmal\u0131" }, - set: { unit: "\xF6\u011Fe", verb: "olmal\u0131" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "girdi", - email: "e-posta adresi", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO tarih ve saat", - date: "ISO tarih", - time: "ISO saat", - duration: "ISO s\xFCre", - ipv4: "IPv4 adresi", - ipv6: "IPv6 adresi", - cidrv4: "IPv4 aral\u0131\u011F\u0131", - cidrv6: "IPv6 aral\u0131\u011F\u0131", - base64: "base64 ile \u015Fifrelenmi\u015F metin", - base64url: "base64url ile \u015Fifrelenmi\u015F metin", - json_string: "JSON dizesi", - e164: "E.164 say\u0131s\u0131", - jwt: "JWT", - template_literal: "\u015Eablon dizesi" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Ge\xE7ersiz de\u011Fer: beklenen instanceof ${issue2.expected}, al\u0131nan ${received}`; - } - return `Ge\xE7ersiz de\u011Fer: beklenen ${expected}, al\u0131nan ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Ge\xE7ersiz de\u011Fer: beklenen ${stringifyPrimitive(issue2.values[0])}`; - return `Ge\xE7ersiz se\xE7enek: a\u015Fa\u011F\u0131dakilerden biri olmal\u0131: ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\xC7ok b\xFCy\xFCk: beklenen ${issue2.origin ?? "de\u011Fer"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\xF6\u011Fe"}`; - return `\xC7ok b\xFCy\xFCk: beklenen ${issue2.origin ?? "de\u011Fer"} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\xC7ok k\xFC\xE7\xFCk: beklenen ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - return `\xC7ok k\xFC\xE7\xFCk: beklenen ${issue2.origin} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Ge\xE7ersiz metin: "${_issue.prefix}" ile ba\u015Flamal\u0131`; - if (_issue.format === "ends_with") - return `Ge\xE7ersiz metin: "${_issue.suffix}" ile bitmeli`; - if (_issue.format === "includes") - return `Ge\xE7ersiz metin: "${_issue.includes}" i\xE7ermeli`; - if (_issue.format === "regex") - return `Ge\xE7ersiz metin: ${_issue.pattern} desenine uymal\u0131`; - return `Ge\xE7ersiz ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Ge\xE7ersiz say\u0131: ${issue2.divisor} ile tam b\xF6l\xFCnebilmeli`; - case "unrecognized_keys": - return `Tan\u0131nmayan anahtar${issue2.keys.length > 1 ? 
"lar" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `${issue2.origin} i\xE7inde ge\xE7ersiz anahtar`; - case "invalid_union": - return "Ge\xE7ersiz de\u011Fer"; - case "invalid_element": - return `${issue2.origin} i\xE7inde ge\xE7ersiz de\u011Fer`; - default: - return `Ge\xE7ersiz de\u011Fer`; - } - }; -}; -function tr_default() { - return { - localeError: error40() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/uk.js -var error41 = () => { - const Sizable = { - string: { unit: "\u0441\u0438\u043C\u0432\u043E\u043B\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" }, - file: { unit: "\u0431\u0430\u0439\u0442\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" }, - array: { unit: "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" }, - set: { unit: "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0456\u0432", verb: "\u043C\u0430\u0442\u0438\u043C\u0435" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456", - email: "\u0430\u0434\u0440\u0435\u0441\u0430 \u0435\u043B\u0435\u043A\u0442\u0440\u043E\u043D\u043D\u043E\u0457 \u043F\u043E\u0448\u0442\u0438", - url: "URL", - emoji: "\u0435\u043C\u043E\u0434\u0437\u0456", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\u0434\u0430\u0442\u0430 \u0442\u0430 \u0447\u0430\u0441 ISO", - date: "\u0434\u0430\u0442\u0430 ISO", - time: "\u0447\u0430\u0441 ISO", - duration: "\u0442\u0440\u0438\u0432\u0430\u043B\u0456\u0441\u0442\u044C ISO", - ipv4: "\u0430\u0434\u0440\u0435\u0441\u0430 IPv4", - ipv6: "\u0430\u0434\u0440\u0435\u0441\u0430 IPv6", - cidrv4: "\u0434\u0456\u0430\u043F\u0430\u0437\u043E\u043D IPv4", - cidrv6: "\u0434\u0456\u0430\u043F\u0430\u0437\u043E\u043D IPv6", - base64: "\u0440\u044F\u0434\u043E\u043A \u0443 \u043A\u043E\u0434\u0443\u0432\u0430\u043D\u043D\u0456 base64", - base64url: "\u0440\u044F\u0434\u043E\u043A \u0443 \u043A\u043E\u0434\u0443\u0432\u0430\u043D\u043D\u0456 base64url", - json_string: "\u0440\u044F\u0434\u043E\u043A JSON", - e164: "\u043D\u043E\u043C\u0435\u0440 E.164", - jwt: "JWT", - template_literal: "\u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0447\u0438\u0441\u043B\u043E", - array: "\u043C\u0430\u0441\u0438\u0432" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? 
receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F instanceof ${issue2.expected}, \u043E\u0442\u0440\u0438\u043C\u0430\u043D\u043E ${received}`; - } - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F ${expected}, \u043E\u0442\u0440\u0438\u043C\u0430\u043D\u043E ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F ${stringifyPrimitive(issue2.values[0])}`; - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0430 \u043E\u043F\u0446\u0456\u044F: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F \u043E\u0434\u043D\u0435 \u0437 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u0432\u0435\u043B\u0438\u043A\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin ?? "\u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0456\u0432"}`; - return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u0432\u0435\u043B\u0438\u043A\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin ?? 
"\u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F"} \u0431\u0443\u0434\u0435 ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u043C\u0430\u043B\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u0417\u0430\u043D\u0430\u0434\u0442\u043E \u043C\u0430\u043B\u0435: \u043E\u0447\u0456\u043A\u0443\u0454\u0442\u044C\u0441\u044F, \u0449\u043E ${issue2.origin} \u0431\u0443\u0434\u0435 ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u043F\u043E\u0447\u0438\u043D\u0430\u0442\u0438\u0441\u044F \u0437 "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u0437\u0430\u043A\u0456\u043D\u0447\u0443\u0432\u0430\u0442\u0438\u0441\u044F \u043D\u0430 "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u043C\u0456\u0441\u0442\u0438\u0442\u0438 "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u0440\u044F\u0434\u043E\u043A: \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u0432\u0456\u0434\u043F\u043E\u0432\u0456\u0434\u0430\u0442\u0438 \u0448\u0430\u0431\u043B\u043E\u043D\u0443 ${_issue.pattern}`; - return 
`\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0435 \u0447\u0438\u0441\u043B\u043E: \u043F\u043E\u0432\u0438\u043D\u043D\u043E \u0431\u0443\u0442\u0438 \u043A\u0440\u0430\u0442\u043D\u0438\u043C ${issue2.divisor}`; - case "unrecognized_keys": - return `\u041D\u0435\u0440\u043E\u0437\u043F\u0456\u0437\u043D\u0430\u043D\u0438\u0439 \u043A\u043B\u044E\u0447${issue2.keys.length > 1 ? "\u0456" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0438\u0439 \u043A\u043B\u044E\u0447 \u0443 ${issue2.origin}`; - case "invalid_union": - return "\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456"; - case "invalid_element": - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0435 \u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F \u0443 ${issue2.origin}`; - default: - return `\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0456 \u0432\u0445\u0456\u0434\u043D\u0456 \u0434\u0430\u043D\u0456`; - } - }; -}; -function uk_default() { - return { - localeError: error41() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ua.js -function ua_default() { - return uk_default(); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/ur.js -var error42 = () => { - const Sizable = { - string: { unit: "\u062D\u0631\u0648\u0641", verb: "\u06C1\u0648\u0646\u0627" }, - file: { unit: "\u0628\u0627\u0626\u0679\u0633", verb: "\u06C1\u0648\u0646\u0627" }, - array: { unit: "\u0622\u0626\u0679\u0645\u0632", verb: "\u06C1\u0648\u0646\u0627" }, - set: { unit: "\u0622\u0626\u0679\u0645\u0632", verb: "\u06C1\u0648\u0646\u0627" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0627\u0646 \u067E\u0679", - email: "\u0627\u06CC \u0645\u06CC\u0644 \u0627\u06CC\u0688\u0631\u06CC\u0633", - url: "\u06CC\u0648 \u0622\u0631 \u0627\u06CC\u0644", - emoji: "\u0627\u06CC\u0645\u0648\u062C\u06CC", - uuid: "\u06CC\u0648 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", - uuidv4: "\u06CC\u0648 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC \u0648\u06CC 4", - uuidv6: "\u06CC\u0648 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC \u0648\u06CC 6", - nanoid: "\u0646\u06CC\u0646\u0648 \u0622\u0626\u06CC \u0688\u06CC", - guid: "\u062C\u06CC \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", - cuid: "\u0633\u06CC \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", - cuid2: "\u0633\u06CC \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC 2", - ulid: "\u06CC\u0648 \u0627\u06CC\u0644 \u0622\u0626\u06CC \u0688\u06CC", - xid: "\u0627\u06CC\u06A9\u0633 \u0622\u0626\u06CC \u0688\u06CC", - ksuid: "\u06A9\u06D2 \u0627\u06CC\u0633 \u06CC\u0648 \u0622\u0626\u06CC \u0688\u06CC", - datetime: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u0688\u06CC\u0679 \u0679\u0627\u0626\u0645", - date: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u062A\u0627\u0631\u06CC\u062E", - time: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u0648\u0642\u062A", - duration: "\u0622\u0626\u06CC \u0627\u06CC\u0633 \u0627\u0648 \u0645\u062F\u062A", - ipv4: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 4 \u0627\u06CC\u0688\u0631\u06CC\u0633", - ipv6: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 6 \u0627\u06CC\u0688\u0631\u06CC\u0633", - cidrv4: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 4 \u0631\u06CC\u0646\u062C", - cidrv6: "\u0622\u0626\u06CC \u067E\u06CC \u0648\u06CC 6 \u0631\u06CC\u0646\u062C", - base64: "\u0628\u06CC\u0633 64 \u0627\u0646 \u06A9\u0648\u0688\u0688 \u0633\u0679\u0631\u0646\u06AF", - base64url: "\u0628\u06CC\u0633 64 \u06CC\u0648 \u0622\u0631 \u0627\u06CC\u0644 \u0627\u0646 \u06A9\u0648\u0688\u0688 \u0633\u0679\u0631\u0646\u06AF", 
- json_string: "\u062C\u06D2 \u0627\u06CC\u0633 \u0627\u0648 \u0627\u06CC\u0646 \u0633\u0679\u0631\u0646\u06AF", - e164: "\u0627\u06CC 164 \u0646\u0645\u0628\u0631", - jwt: "\u062C\u06D2 \u0688\u0628\u0644\u06CC\u0648 \u0679\u06CC", - template_literal: "\u0627\u0646 \u067E\u0679" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u0646\u0645\u0628\u0631", - array: "\u0622\u0631\u06D2", - null: "\u0646\u0644" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679: instanceof ${issue2.expected} \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627\u060C ${received} \u0645\u0648\u0635\u0648\u0644 \u06C1\u0648\u0627`; - } - return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679: ${expected} \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627\u060C ${received} \u0645\u0648\u0635\u0648\u0644 \u06C1\u0648\u0627`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679: ${stringifyPrimitive(issue2.values[0])} \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; - return `\u063A\u0644\u0637 \u0622\u067E\u0634\u0646: ${joinValues(issue2.values, "|")} \u0645\u06CC\u06BA \u0633\u06D2 \u0627\u06CC\u06A9 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u0628\u06C1\u062A \u0628\u0691\u0627: ${issue2.origin ?? "\u0648\u06CC\u0644\u06CC\u0648"} \u06A9\u06D2 ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "\u0639\u0646\u0627\u0635\u0631"} \u06C1\u0648\u0646\u06D2 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u06D2`; - return `\u0628\u06C1\u062A \u0628\u0691\u0627: ${issue2.origin ?? 
"\u0648\u06CC\u0644\u06CC\u0648"} \u06A9\u0627 ${adj}${issue2.maximum.toString()} \u06C1\u0648\u0646\u0627 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u0628\u06C1\u062A \u0686\u06BE\u0648\u0679\u0627: ${issue2.origin} \u06A9\u06D2 ${adj}${issue2.minimum.toString()} ${sizing.unit} \u06C1\u0648\u0646\u06D2 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u06D2`; - } - return `\u0628\u06C1\u062A \u0686\u06BE\u0648\u0679\u0627: ${issue2.origin} \u06A9\u0627 ${adj}${issue2.minimum.toString()} \u06C1\u0648\u0646\u0627 \u0645\u062A\u0648\u0642\u0639 \u062A\u06BE\u0627`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: "${_issue.prefix}" \u0633\u06D2 \u0634\u0631\u0648\u0639 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; - } - if (_issue.format === "ends_with") - return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: "${_issue.suffix}" \u067E\u0631 \u062E\u062A\u0645 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; - if (_issue.format === "includes") - return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: "${_issue.includes}" \u0634\u0627\u0645\u0644 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; - if (_issue.format === "regex") - return `\u063A\u0644\u0637 \u0633\u0679\u0631\u0646\u06AF: \u067E\u06CC\u0679\u0631\u0646 ${_issue.pattern} \u0633\u06D2 \u0645\u06CC\u0686 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; - return `\u063A\u0644\u0637 ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\u063A\u0644\u0637 \u0646\u0645\u0628\u0631: ${issue2.divisor} \u06A9\u0627 \u0645\u0636\u0627\u0639\u0641 \u06C1\u0648\u0646\u0627 \u0686\u0627\u06C1\u06CC\u06D2`; - case "unrecognized_keys": - return `\u063A\u06CC\u0631 \u062A\u0633\u0644\u06CC\u0645 \u0634\u062F\u06C1 \u06A9\u06CC${issue2.keys.length > 1 ? "\u0632" : ""}: ${joinValues(issue2.keys, "\u060C ")}`; - case "invalid_key": - return `${issue2.origin} \u0645\u06CC\u06BA \u063A\u0644\u0637 \u06A9\u06CC`; - case "invalid_union": - return "\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679"; - case "invalid_element": - return `${issue2.origin} \u0645\u06CC\u06BA \u063A\u0644\u0637 \u0648\u06CC\u0644\u06CC\u0648`; - default: - return `\u063A\u0644\u0637 \u0627\u0646 \u067E\u0679`; - } - }; -}; -function ur_default() { - return { - localeError: error42() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/uz.js -var error43 = () => { - const Sizable = { - string: { unit: "belgi", verb: "bo\u2018lishi kerak" }, - file: { unit: "bayt", verb: "bo\u2018lishi kerak" }, - array: { unit: "element", verb: "bo\u2018lishi kerak" }, - set: { unit: "element", verb: "bo\u2018lishi kerak" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "kirish", - email: "elektron pochta manzili", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO sana va vaqti", - date: "ISO sana", - time: "ISO vaqt", - duration: "ISO davomiylik", - ipv4: "IPv4 manzil", - ipv6: "IPv6 manzil", - mac: "MAC manzil", - cidrv4: "IPv4 diapazon", - cidrv6: "IPv6 diapazon", - base64: "base64 kodlangan satr", - base64url: "base64url kodlangan satr", - json_string: "JSON satr", - e164: "E.164 raqam", - jwt: "JWT", - template_literal: "kirish" - }; - const TypeDictionary = { - nan: "NaN", - number: "raqam", - array: "massiv" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `Noto\u2018g\u2018ri kirish: kutilgan instanceof ${issue2.expected}, qabul qilingan ${received}`; - } - return `Noto\u2018g\u2018ri kirish: kutilgan ${expected}, qabul qilingan ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `Noto\u2018g\u2018ri kirish: kutilgan ${stringifyPrimitive(issue2.values[0])}`; - return `Noto\u2018g\u2018ri variant: quyidagilardan biri kutilgan ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Juda katta: kutilgan ${issue2.origin ?? "qiymat"} ${adj}${issue2.maximum.toString()} ${sizing.unit} ${sizing.verb}`; - return `Juda katta: kutilgan ${issue2.origin ?? "qiymat"} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Juda kichik: kutilgan ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit} ${sizing.verb}`; - } - return `Juda kichik: kutilgan ${issue2.origin} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Noto\u2018g\u2018ri satr: "${_issue.prefix}" bilan boshlanishi kerak`; - if (_issue.format === "ends_with") - return `Noto\u2018g\u2018ri satr: "${_issue.suffix}" bilan tugashi kerak`; - if (_issue.format === "includes") - return `Noto\u2018g\u2018ri satr: "${_issue.includes}" ni o\u2018z ichiga olishi kerak`; - if (_issue.format === "regex") - return `Noto\u2018g\u2018ri satr: ${_issue.pattern} shabloniga mos kelishi kerak`; - return `Noto\u2018g\u2018ri ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `Noto\u2018g\u2018ri raqam: ${issue2.divisor} ning karralisi bo\u2018lishi kerak`; - case "unrecognized_keys": - return `Noma\u2019lum kalit${issue2.keys.length > 1 ? "lar" : ""}: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `${issue2.origin} dagi kalit noto\u2018g\u2018ri`; - case "invalid_union": - return "Noto\u2018g\u2018ri kirish"; - case "invalid_element": - return `${issue2.origin} da noto\u2018g\u2018ri qiymat`; - default: - return `Noto\u2018g\u2018ri kirish`; - } - }; -}; -function uz_default() { - return { - localeError: error43() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/vi.js -var error44 = () => { - const Sizable = { - string: { unit: "k\xFD t\u1EF1", verb: "c\xF3" }, - file: { unit: "byte", verb: "c\xF3" }, - array: { unit: "ph\u1EA7n t\u1EED", verb: "c\xF3" }, - set: { unit: "ph\u1EA7n t\u1EED", verb: "c\xF3" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u0111\u1EA7u v\xE0o", - email: "\u0111\u1ECBa ch\u1EC9 email", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ng\xE0y gi\u1EDD ISO", - date: "ng\xE0y ISO", - time: "gi\u1EDD ISO", - duration: "kho\u1EA3ng th\u1EDDi gian ISO", - ipv4: "\u0111\u1ECBa ch\u1EC9 IPv4", - ipv6: "\u0111\u1ECBa ch\u1EC9 IPv6", - cidrv4: "d\u1EA3i IPv4", - cidrv6: "d\u1EA3i IPv6", - base64: "chu\u1ED7i m\xE3 h\xF3a base64", - base64url: "chu\u1ED7i m\xE3 h\xF3a base64url", - json_string: "chu\u1ED7i JSON", - e164: "s\u1ED1 E.164", - jwt: "JWT", - template_literal: "\u0111\u1EA7u v\xE0o" - }; - const TypeDictionary = { - nan: "NaN", - number: "s\u1ED1", - array: "m\u1EA3ng" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i instanceof ${issue2.expected}, nh\u1EADn \u0111\u01B0\u1EE3c ${received}`; - } - return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i ${expected}, nh\u1EADn \u0111\u01B0\u1EE3c ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i ${stringifyPrimitive(issue2.values[0])}`; - return `T\xF9y ch\u1ECDn kh\xF4ng h\u1EE3p l\u1EC7: mong \u0111\u1EE3i m\u1ED9t trong c\xE1c gi\xE1 tr\u1ECB ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `Qu\xE1 l\u1EDBn: mong \u0111\u1EE3i ${issue2.origin ?? 
"gi\xE1 tr\u1ECB"} ${sizing.verb} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? "ph\u1EA7n t\u1EED"}`; - return `Qu\xE1 l\u1EDBn: mong \u0111\u1EE3i ${issue2.origin ?? "gi\xE1 tr\u1ECB"} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `Qu\xE1 nh\u1ECF: mong \u0111\u1EE3i ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `Qu\xE1 nh\u1ECF: mong \u0111\u1EE3i ${issue2.origin} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i b\u1EAFt \u0111\u1EA7u b\u1EB1ng "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i k\u1EBFt th\xFAc b\u1EB1ng "${_issue.suffix}"`; - if (_issue.format === "includes") - return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i bao g\u1ED3m "${_issue.includes}"`; - if (_issue.format === "regex") - return `Chu\u1ED7i kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i kh\u1EDBp v\u1EDBi m\u1EABu ${_issue.pattern}`; - return `${FormatDictionary[_issue.format] ?? 
issue2.format} kh\xF4ng h\u1EE3p l\u1EC7`; - } - case "not_multiple_of": - return `S\u1ED1 kh\xF4ng h\u1EE3p l\u1EC7: ph\u1EA3i l\xE0 b\u1ED9i s\u1ED1 c\u1EE7a ${issue2.divisor}`; - case "unrecognized_keys": - return `Kh\xF3a kh\xF4ng \u0111\u01B0\u1EE3c nh\u1EADn d\u1EA1ng: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `Kh\xF3a kh\xF4ng h\u1EE3p l\u1EC7 trong ${issue2.origin}`; - case "invalid_union": - return "\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7"; - case "invalid_element": - return `Gi\xE1 tr\u1ECB kh\xF4ng h\u1EE3p l\u1EC7 trong ${issue2.origin}`; - default: - return `\u0110\u1EA7u v\xE0o kh\xF4ng h\u1EE3p l\u1EC7`; - } - }; -}; -function vi_default() { - return { - localeError: error44() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/zh-CN.js -var error45 = () => { - const Sizable = { - string: { unit: "\u5B57\u7B26", verb: "\u5305\u542B" }, - file: { unit: "\u5B57\u8282", verb: "\u5305\u542B" }, - array: { unit: "\u9879", verb: "\u5305\u542B" }, - set: { unit: "\u9879", verb: "\u5305\u542B" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u8F93\u5165", - email: "\u7535\u5B50\u90AE\u4EF6", - url: "URL", - emoji: "\u8868\u60C5\u7B26\u53F7", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO\u65E5\u671F\u65F6\u95F4", - date: "ISO\u65E5\u671F", - time: "ISO\u65F6\u95F4", - duration: "ISO\u65F6\u957F", - ipv4: "IPv4\u5730\u5740", - ipv6: "IPv6\u5730\u5740", - cidrv4: "IPv4\u7F51\u6BB5", - cidrv6: "IPv6\u7F51\u6BB5", - base64: "base64\u7F16\u7801\u5B57\u7B26\u4E32", - base64url: "base64url\u7F16\u7801\u5B57\u7B26\u4E32", - json_string: "JSON\u5B57\u7B26\u4E32", - e164: "E.164\u53F7\u7801", - jwt: "JWT", - template_literal: "\u8F93\u5165" - }; - const TypeDictionary = { - nan: "NaN", - number: "\u6570\u5B57", - array: "\u6570\u7EC4", - null: "\u7A7A\u503C(null)" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u65E0\u6548\u8F93\u5165\uFF1A\u671F\u671B instanceof ${issue2.expected}\uFF0C\u5B9E\u9645\u63A5\u6536 ${received}`; - } - return `\u65E0\u6548\u8F93\u5165\uFF1A\u671F\u671B ${expected}\uFF0C\u5B9E\u9645\u63A5\u6536 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u65E0\u6548\u8F93\u5165\uFF1A\u671F\u671B ${stringifyPrimitive(issue2.values[0])}`; - return `\u65E0\u6548\u9009\u9879\uFF1A\u671F\u671B\u4EE5\u4E0B\u4E4B\u4E00 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u6570\u503C\u8FC7\u5927\uFF1A\u671F\u671B ${issue2.origin ?? "\u503C"} ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"\u4E2A\u5143\u7D20"}`; - return `\u6570\u503C\u8FC7\u5927\uFF1A\u671F\u671B ${issue2.origin ?? "\u503C"} ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u6570\u503C\u8FC7\u5C0F\uFF1A\u671F\u671B ${issue2.origin} ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u6570\u503C\u8FC7\u5C0F\uFF1A\u671F\u671B ${issue2.origin} ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u4EE5 "${_issue.prefix}" \u5F00\u5934`; - if (_issue.format === "ends_with") - return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u4EE5 "${_issue.suffix}" \u7ED3\u5C3E`; - if (_issue.format === "includes") - return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u5305\u542B "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u65E0\u6548\u5B57\u7B26\u4E32\uFF1A\u5FC5\u987B\u6EE1\u8DB3\u6B63\u5219\u8868\u8FBE\u5F0F ${_issue.pattern}`; - return `\u65E0\u6548${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `\u65E0\u6548\u6570\u5B57\uFF1A\u5FC5\u987B\u662F ${issue2.divisor} \u7684\u500D\u6570`; - case "unrecognized_keys": - return `\u51FA\u73B0\u672A\u77E5\u7684\u952E(key): ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `${issue2.origin} \u4E2D\u7684\u952E(key)\u65E0\u6548`; - case "invalid_union": - return "\u65E0\u6548\u8F93\u5165"; - case "invalid_element": - return `${issue2.origin} \u4E2D\u5305\u542B\u65E0\u6548\u503C(value)`; - default: - return `\u65E0\u6548\u8F93\u5165`; - } - }; -}; -function zh_CN_default() { - return { - localeError: error45() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/zh-TW.js -var error46 = () => { - const Sizable = { - string: { unit: "\u5B57\u5143", verb: "\u64C1\u6709" }, - file: { unit: "\u4F4D\u5143\u7D44", verb: "\u64C1\u6709" }, - array: { unit: "\u9805\u76EE", verb: "\u64C1\u6709" }, - set: { unit: "\u9805\u76EE", verb: "\u64C1\u6709" } - }; - function getSizing(origin) { - return Sizable[origin] ?? 
null; - } - const FormatDictionary = { - regex: "\u8F38\u5165", - email: "\u90F5\u4EF6\u5730\u5740", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "ISO \u65E5\u671F\u6642\u9593", - date: "ISO \u65E5\u671F", - time: "ISO \u6642\u9593", - duration: "ISO \u671F\u9593", - ipv4: "IPv4 \u4F4D\u5740", - ipv6: "IPv6 \u4F4D\u5740", - cidrv4: "IPv4 \u7BC4\u570D", - cidrv6: "IPv6 \u7BC4\u570D", - base64: "base64 \u7DE8\u78BC\u5B57\u4E32", - base64url: "base64url \u7DE8\u78BC\u5B57\u4E32", - json_string: "JSON \u5B57\u4E32", - e164: "E.164 \u6578\u503C", - jwt: "JWT", - template_literal: "\u8F38\u5165" - }; - const TypeDictionary = { - nan: "NaN" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] ?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\u7121\u6548\u7684\u8F38\u5165\u503C\uFF1A\u9810\u671F\u70BA instanceof ${issue2.expected}\uFF0C\u4F46\u6536\u5230 ${received}`; - } - return `\u7121\u6548\u7684\u8F38\u5165\u503C\uFF1A\u9810\u671F\u70BA ${expected}\uFF0C\u4F46\u6536\u5230 ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\u7121\u6548\u7684\u8F38\u5165\u503C\uFF1A\u9810\u671F\u70BA ${stringifyPrimitive(issue2.values[0])}`; - return `\u7121\u6548\u7684\u9078\u9805\uFF1A\u9810\u671F\u70BA\u4EE5\u4E0B\u5176\u4E2D\u4E4B\u4E00 ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `\u6578\u503C\u904E\u5927\uFF1A\u9810\u671F ${issue2.origin ?? "\u503C"} \u61C9\u70BA ${adj}${issue2.maximum.toString()} ${sizing.unit ?? 
"\u500B\u5143\u7D20"}`; - return `\u6578\u503C\u904E\u5927\uFF1A\u9810\u671F ${issue2.origin ?? "\u503C"} \u61C9\u70BA ${adj}${issue2.maximum.toString()}`; - } - case "too_small": { - const adj = issue2.inclusive ? ">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) { - return `\u6578\u503C\u904E\u5C0F\uFF1A\u9810\u671F ${issue2.origin} \u61C9\u70BA ${adj}${issue2.minimum.toString()} ${sizing.unit}`; - } - return `\u6578\u503C\u904E\u5C0F\uFF1A\u9810\u671F ${issue2.origin} \u61C9\u70BA ${adj}${issue2.minimum.toString()}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") { - return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u4EE5 "${_issue.prefix}" \u958B\u982D`; - } - if (_issue.format === "ends_with") - return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u4EE5 "${_issue.suffix}" \u7D50\u5C3E`; - if (_issue.format === "includes") - return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u5305\u542B "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u7121\u6548\u7684\u5B57\u4E32\uFF1A\u5FC5\u9808\u7B26\u5408\u683C\u5F0F ${_issue.pattern}`; - return `\u7121\u6548\u7684 ${FormatDictionary[_issue.format] ?? issue2.format}`; - } - case "not_multiple_of": - return `\u7121\u6548\u7684\u6578\u5B57\uFF1A\u5FC5\u9808\u70BA ${issue2.divisor} \u7684\u500D\u6578`; - case "unrecognized_keys": - return `\u7121\u6CD5\u8B58\u5225\u7684\u9375\u503C${issue2.keys.length > 1 ? 
"\u5011" : ""}\uFF1A${joinValues(issue2.keys, "\u3001")}`; - case "invalid_key": - return `${issue2.origin} \u4E2D\u6709\u7121\u6548\u7684\u9375\u503C`; - case "invalid_union": - return "\u7121\u6548\u7684\u8F38\u5165\u503C"; - case "invalid_element": - return `${issue2.origin} \u4E2D\u6709\u7121\u6548\u7684\u503C`; - default: - return `\u7121\u6548\u7684\u8F38\u5165\u503C`; - } - }; -}; -function zh_TW_default() { - return { - localeError: error46() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/locales/yo.js -var error47 = () => { - const Sizable = { - string: { unit: "\xE0mi", verb: "n\xED" }, - file: { unit: "bytes", verb: "n\xED" }, - array: { unit: "nkan", verb: "n\xED" }, - set: { unit: "nkan", verb: "n\xED" } - }; - function getSizing(origin) { - return Sizable[origin] ?? null; - } - const FormatDictionary = { - regex: "\u1EB9\u0300r\u1ECD \xECb\xE1w\u1ECDl\xE9", - email: "\xE0d\xEDr\u1EB9\u0301s\xEC \xECm\u1EB9\u0301l\xEC", - url: "URL", - emoji: "emoji", - uuid: "UUID", - uuidv4: "UUIDv4", - uuidv6: "UUIDv6", - nanoid: "nanoid", - guid: "GUID", - cuid: "cuid", - cuid2: "cuid2", - ulid: "ULID", - xid: "XID", - ksuid: "KSUID", - datetime: "\xE0k\xF3k\xF2 ISO", - date: "\u1ECDj\u1ECD\u0301 ISO", - time: "\xE0k\xF3k\xF2 ISO", - duration: "\xE0k\xF3k\xF2 t\xF3 p\xE9 ISO", - ipv4: "\xE0d\xEDr\u1EB9\u0301s\xEC IPv4", - ipv6: "\xE0d\xEDr\u1EB9\u0301s\xEC IPv6", - cidrv4: "\xE0gb\xE8gb\xE8 IPv4", - cidrv6: "\xE0gb\xE8gb\xE8 IPv6", - base64: "\u1ECD\u0300r\u1ECD\u0300 t\xED a k\u1ECD\u0301 n\xED base64", - base64url: "\u1ECD\u0300r\u1ECD\u0300 base64url", - json_string: "\u1ECD\u0300r\u1ECD\u0300 JSON", - e164: "n\u1ECD\u0301mb\xE0 E.164", - jwt: "JWT", - template_literal: "\u1EB9\u0300r\u1ECD \xECb\xE1w\u1ECDl\xE9" - }; - const TypeDictionary = { - nan: "NaN", - number: "n\u1ECD\u0301mb\xE0", - array: "akop\u1ECD" - }; - return (issue2) => { - switch (issue2.code) { - case "invalid_type": { - const expected = TypeDictionary[issue2.expected] 
?? issue2.expected; - const receivedType = parsedType(issue2.input); - const received = TypeDictionary[receivedType] ?? receivedType; - if (/^[A-Z]/.test(issue2.expected)) { - return `\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e: a n\xED l\xE1ti fi instanceof ${issue2.expected}, \xE0m\u1ECD\u0300 a r\xED ${received}`; - } - return `\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e: a n\xED l\xE1ti fi ${expected}, \xE0m\u1ECD\u0300 a r\xED ${received}`; - } - case "invalid_value": - if (issue2.values.length === 1) - return `\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e: a n\xED l\xE1ti fi ${stringifyPrimitive(issue2.values[0])}`; - return `\xC0\u1E63\xE0y\xE0n a\u1E63\xEC\u1E63e: yan \u1ECD\u0300kan l\xE1ra ${joinValues(issue2.values, "|")}`; - case "too_big": { - const adj = issue2.inclusive ? "<=" : "<"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `T\xF3 p\u1ECD\u0300 j\xF9: a n\xED l\xE1ti j\u1EB9\u0301 p\xE9 ${issue2.origin ?? "iye"} ${sizing.verb} ${adj}${issue2.maximum} ${sizing.unit}`; - return `T\xF3 p\u1ECD\u0300 j\xF9: a n\xED l\xE1ti j\u1EB9\u0301 ${adj}${issue2.maximum}`; - } - case "too_small": { - const adj = issue2.inclusive ? 
">=" : ">"; - const sizing = getSizing(issue2.origin); - if (sizing) - return `K\xE9r\xE9 ju: a n\xED l\xE1ti j\u1EB9\u0301 p\xE9 ${issue2.origin} ${sizing.verb} ${adj}${issue2.minimum} ${sizing.unit}`; - return `K\xE9r\xE9 ju: a n\xED l\xE1ti j\u1EB9\u0301 ${adj}${issue2.minimum}`; - } - case "invalid_format": { - const _issue = issue2; - if (_issue.format === "starts_with") - return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 b\u1EB9\u0300r\u1EB9\u0300 p\u1EB9\u0300l\xFA "${_issue.prefix}"`; - if (_issue.format === "ends_with") - return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 par\xED p\u1EB9\u0300l\xFA "${_issue.suffix}"`; - if (_issue.format === "includes") - return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 n\xED "${_issue.includes}"`; - if (_issue.format === "regex") - return `\u1ECC\u0300r\u1ECD\u0300 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 b\xE1 \xE0p\u1EB9\u1EB9r\u1EB9 mu ${_issue.pattern}`; - return `A\u1E63\xEC\u1E63e: ${FormatDictionary[_issue.format] ?? 
issue2.format}`; - } - case "not_multiple_of": - return `N\u1ECD\u0301mb\xE0 a\u1E63\xEC\u1E63e: gb\u1ECD\u0301d\u1ECD\u0300 j\u1EB9\u0301 \xE8y\xE0 p\xEDp\xEDn ti ${issue2.divisor}`; - case "unrecognized_keys": - return `B\u1ECDt\xECn\xEC \xE0\xECm\u1ECD\u0300: ${joinValues(issue2.keys, ", ")}`; - case "invalid_key": - return `B\u1ECDt\xECn\xEC a\u1E63\xEC\u1E63e n\xEDn\xFA ${issue2.origin}`; - case "invalid_union": - return "\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e"; - case "invalid_element": - return `Iye a\u1E63\xEC\u1E63e n\xEDn\xFA ${issue2.origin}`; - default: - return "\xCCb\xE1w\u1ECDl\xE9 a\u1E63\xEC\u1E63e"; - } - }; -}; -function yo_default() { - return { - localeError: error47() - }; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/registries.js -var _a; -var $output = /* @__PURE__ */ Symbol("ZodOutput"); -var $input = /* @__PURE__ */ Symbol("ZodInput"); -var $ZodRegistry = class { - constructor() { - this._map = /* @__PURE__ */ new WeakMap(); - this._idmap = /* @__PURE__ */ new Map(); - } - add(schema, ..._meta) { - const meta3 = _meta[0]; - this._map.set(schema, meta3); - if (meta3 && typeof meta3 === "object" && "id" in meta3) { - this._idmap.set(meta3.id, schema); - } - return this; - } - clear() { - this._map = /* @__PURE__ */ new WeakMap(); - this._idmap = /* @__PURE__ */ new Map(); - return this; - } - remove(schema) { - const meta3 = this._map.get(schema); - if (meta3 && typeof meta3 === "object" && "id" in meta3) { - this._idmap.delete(meta3.id); - } - this._map.delete(schema); - return this; - } - get(schema) { - const p = schema._zod.parent; - if (p) { - const pm = { ...this.get(p) ?? {} }; - delete pm.id; - const f = { ...pm, ...this._map.get(schema) }; - return Object.keys(f).length ? f : void 0; - } - return this._map.get(schema); - } - has(schema) { - return this._map.has(schema); - } -}; -function registry() { - return new $ZodRegistry(); -} -(_a = globalThis).__zod_globalRegistry ?? 
(_a.__zod_globalRegistry = registry()); -var globalRegistry = globalThis.__zod_globalRegistry; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/api.js -// @__NO_SIDE_EFFECTS__ -function _string(Class2, params) { - return new Class2({ - type: "string", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _coercedString(Class2, params) { - return new Class2({ - type: "string", - coerce: true, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _email(Class2, params) { - return new Class2({ - type: "string", - format: "email", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _guid(Class2, params) { - return new Class2({ - type: "string", - format: "guid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uuid(Class2, params) { - return new Class2({ - type: "string", - format: "uuid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uuidv4(Class2, params) { - return new Class2({ - type: "string", - format: "uuid", - check: "string_format", - abort: false, - version: "v4", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uuidv6(Class2, params) { - return new Class2({ - type: "string", - format: "uuid", - check: "string_format", - abort: false, - version: "v6", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uuidv7(Class2, params) { - return new Class2({ - type: "string", - format: "uuid", - check: "string_format", - abort: false, - version: "v7", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _url(Class2, params) { - return new Class2({ - type: "string", - format: "url", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _emoji2(Class2, params) { - return new Class2({ - type: 
"string", - format: "emoji", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _nanoid(Class2, params) { - return new Class2({ - type: "string", - format: "nanoid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _cuid(Class2, params) { - return new Class2({ - type: "string", - format: "cuid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _cuid2(Class2, params) { - return new Class2({ - type: "string", - format: "cuid2", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _ulid(Class2, params) { - return new Class2({ - type: "string", - format: "ulid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _xid(Class2, params) { - return new Class2({ - type: "string", - format: "xid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _ksuid(Class2, params) { - return new Class2({ - type: "string", - format: "ksuid", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _ipv4(Class2, params) { - return new Class2({ - type: "string", - format: "ipv4", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _ipv6(Class2, params) { - return new Class2({ - type: "string", - format: "ipv6", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _mac(Class2, params) { - return new Class2({ - type: "string", - format: "mac", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _cidrv4(Class2, params) { - return new Class2({ - type: "string", - format: 
"cidrv4", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _cidrv6(Class2, params) { - return new Class2({ - type: "string", - format: "cidrv6", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _base64(Class2, params) { - return new Class2({ - type: "string", - format: "base64", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _base64url(Class2, params) { - return new Class2({ - type: "string", - format: "base64url", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _e164(Class2, params) { - return new Class2({ - type: "string", - format: "e164", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _jwt(Class2, params) { - return new Class2({ - type: "string", - format: "jwt", - check: "string_format", - abort: false, - ...normalizeParams(params) - }); -} -var TimePrecision = { - Any: null, - Minute: -1, - Second: 0, - Millisecond: 3, - Microsecond: 6 -}; -// @__NO_SIDE_EFFECTS__ -function _isoDateTime(Class2, params) { - return new Class2({ - type: "string", - format: "datetime", - check: "string_format", - offset: false, - local: false, - precision: null, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _isoDate(Class2, params) { - return new Class2({ - type: "string", - format: "date", - check: "string_format", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _isoTime(Class2, params) { - return new Class2({ - type: "string", - format: "time", - check: "string_format", - precision: null, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _isoDuration(Class2, params) { - return new Class2({ - type: "string", - format: "duration", - check: "string_format", - 
...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _number(Class2, params) { - return new Class2({ - type: "number", - checks: [], - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _coercedNumber(Class2, params) { - return new Class2({ - type: "number", - coerce: true, - checks: [], - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _int(Class2, params) { - return new Class2({ - type: "number", - check: "number_format", - abort: false, - format: "safeint", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _float32(Class2, params) { - return new Class2({ - type: "number", - check: "number_format", - abort: false, - format: "float32", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _float64(Class2, params) { - return new Class2({ - type: "number", - check: "number_format", - abort: false, - format: "float64", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _int32(Class2, params) { - return new Class2({ - type: "number", - check: "number_format", - abort: false, - format: "int32", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uint32(Class2, params) { - return new Class2({ - type: "number", - check: "number_format", - abort: false, - format: "uint32", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _boolean(Class2, params) { - return new Class2({ - type: "boolean", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _coercedBoolean(Class2, params) { - return new Class2({ - type: "boolean", - coerce: true, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _bigint(Class2, params) { - return new Class2({ - type: "bigint", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _coercedBigint(Class2, params) { - return new Class2({ - type: "bigint", - coerce: true, - ...normalizeParams(params) - }); -} -// 
@__NO_SIDE_EFFECTS__ -function _int64(Class2, params) { - return new Class2({ - type: "bigint", - check: "bigint_format", - abort: false, - format: "int64", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uint64(Class2, params) { - return new Class2({ - type: "bigint", - check: "bigint_format", - abort: false, - format: "uint64", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _symbol(Class2, params) { - return new Class2({ - type: "symbol", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _undefined2(Class2, params) { - return new Class2({ - type: "undefined", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _null2(Class2, params) { - return new Class2({ - type: "null", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _any(Class2) { - return new Class2({ - type: "any" - }); -} -// @__NO_SIDE_EFFECTS__ -function _unknown(Class2) { - return new Class2({ - type: "unknown" - }); -} -// @__NO_SIDE_EFFECTS__ -function _never(Class2, params) { - return new Class2({ - type: "never", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _void(Class2, params) { - return new Class2({ - type: "void", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _date(Class2, params) { - return new Class2({ - type: "date", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _coercedDate(Class2, params) { - return new Class2({ - type: "date", - coerce: true, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _nan(Class2, params) { - return new Class2({ - type: "nan", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _lt(value, params) { - return new $ZodCheckLessThan({ - check: "less_than", - ...normalizeParams(params), - value, - inclusive: false - }); -} -// @__NO_SIDE_EFFECTS__ -function _lte(value, params) { - return new $ZodCheckLessThan({ - check: 
"less_than", - ...normalizeParams(params), - value, - inclusive: true - }); -} -// @__NO_SIDE_EFFECTS__ -function _gt(value, params) { - return new $ZodCheckGreaterThan({ - check: "greater_than", - ...normalizeParams(params), - value, - inclusive: false - }); -} -// @__NO_SIDE_EFFECTS__ -function _gte(value, params) { - return new $ZodCheckGreaterThan({ - check: "greater_than", - ...normalizeParams(params), - value, - inclusive: true - }); -} -// @__NO_SIDE_EFFECTS__ -function _positive(params) { - return /* @__PURE__ */ _gt(0, params); -} -// @__NO_SIDE_EFFECTS__ -function _negative(params) { - return /* @__PURE__ */ _lt(0, params); -} -// @__NO_SIDE_EFFECTS__ -function _nonpositive(params) { - return /* @__PURE__ */ _lte(0, params); -} -// @__NO_SIDE_EFFECTS__ -function _nonnegative(params) { - return /* @__PURE__ */ _gte(0, params); -} -// @__NO_SIDE_EFFECTS__ -function _multipleOf(value, params) { - return new $ZodCheckMultipleOf({ - check: "multiple_of", - ...normalizeParams(params), - value - }); -} -// @__NO_SIDE_EFFECTS__ -function _maxSize(maximum, params) { - return new $ZodCheckMaxSize({ - check: "max_size", - ...normalizeParams(params), - maximum - }); -} -// @__NO_SIDE_EFFECTS__ -function _minSize(minimum, params) { - return new $ZodCheckMinSize({ - check: "min_size", - ...normalizeParams(params), - minimum - }); -} -// @__NO_SIDE_EFFECTS__ -function _size(size, params) { - return new $ZodCheckSizeEquals({ - check: "size_equals", - ...normalizeParams(params), - size - }); -} -// @__NO_SIDE_EFFECTS__ -function _maxLength(maximum, params) { - const ch = new $ZodCheckMaxLength({ - check: "max_length", - ...normalizeParams(params), - maximum - }); - return ch; -} -// @__NO_SIDE_EFFECTS__ -function _minLength(minimum, params) { - return new $ZodCheckMinLength({ - check: "min_length", - ...normalizeParams(params), - minimum - }); -} -// @__NO_SIDE_EFFECTS__ -function _length(length, params) { - return new $ZodCheckLengthEquals({ - check: "length_equals", - 
...normalizeParams(params), - length - }); -} -// @__NO_SIDE_EFFECTS__ -function _regex(pattern, params) { - return new $ZodCheckRegex({ - check: "string_format", - format: "regex", - ...normalizeParams(params), - pattern - }); -} -// @__NO_SIDE_EFFECTS__ -function _lowercase(params) { - return new $ZodCheckLowerCase({ - check: "string_format", - format: "lowercase", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _uppercase(params) { - return new $ZodCheckUpperCase({ - check: "string_format", - format: "uppercase", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _includes(includes, params) { - return new $ZodCheckIncludes({ - check: "string_format", - format: "includes", - ...normalizeParams(params), - includes - }); -} -// @__NO_SIDE_EFFECTS__ -function _startsWith(prefix, params) { - return new $ZodCheckStartsWith({ - check: "string_format", - format: "starts_with", - ...normalizeParams(params), - prefix - }); -} -// @__NO_SIDE_EFFECTS__ -function _endsWith(suffix, params) { - return new $ZodCheckEndsWith({ - check: "string_format", - format: "ends_with", - ...normalizeParams(params), - suffix - }); -} -// @__NO_SIDE_EFFECTS__ -function _property(property, schema, params) { - return new $ZodCheckProperty({ - check: "property", - property, - schema, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _mime(types, params) { - return new $ZodCheckMimeType({ - check: "mime_type", - mime: types, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _overwrite(tx) { - return new $ZodCheckOverwrite({ - check: "overwrite", - tx - }); -} -// @__NO_SIDE_EFFECTS__ -function _normalize(form) { - return /* @__PURE__ */ _overwrite((input) => input.normalize(form)); -} -// @__NO_SIDE_EFFECTS__ -function _trim() { - return /* @__PURE__ */ _overwrite((input) => input.trim()); -} -// @__NO_SIDE_EFFECTS__ -function _toLowerCase() { - return /* @__PURE__ */ _overwrite((input) => 
input.toLowerCase()); -} -// @__NO_SIDE_EFFECTS__ -function _toUpperCase() { - return /* @__PURE__ */ _overwrite((input) => input.toUpperCase()); -} -// @__NO_SIDE_EFFECTS__ -function _slugify() { - return /* @__PURE__ */ _overwrite((input) => slugify(input)); -} -// @__NO_SIDE_EFFECTS__ -function _array(Class2, element, params) { - return new Class2({ - type: "array", - element, - // get element() { - // return element; - // }, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _union(Class2, options, params) { - return new Class2({ - type: "union", - options, - ...normalizeParams(params) - }); -} -function _xor(Class2, options, params) { - return new Class2({ - type: "union", - options, - inclusive: false, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _discriminatedUnion(Class2, discriminator, options, params) { - return new Class2({ - type: "union", - options, - discriminator, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _intersection(Class2, left, right) { - return new Class2({ - type: "intersection", - left, - right - }); -} -// @__NO_SIDE_EFFECTS__ -function _tuple(Class2, items, _paramsOrRest, _params) { - const hasRest = _paramsOrRest instanceof $ZodType; - const params = hasRest ? _params : _paramsOrRest; - const rest = hasRest ? 
_paramsOrRest : null; - return new Class2({ - type: "tuple", - items, - rest, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _record(Class2, keyType, valueType, params) { - return new Class2({ - type: "record", - keyType, - valueType, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _map(Class2, keyType, valueType, params) { - return new Class2({ - type: "map", - keyType, - valueType, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _set(Class2, valueType, params) { - return new Class2({ - type: "set", - valueType, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _enum(Class2, values, params) { - const entries = Array.isArray(values) ? Object.fromEntries(values.map((v) => [v, v])) : values; - return new Class2({ - type: "enum", - entries, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _nativeEnum(Class2, entries, params) { - return new Class2({ - type: "enum", - entries, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _literal(Class2, value, params) { - return new Class2({ - type: "literal", - values: Array.isArray(value) ? value : [value], - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _file(Class2, params) { - return new Class2({ - type: "file", - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _transform(Class2, fn) { - return new Class2({ - type: "transform", - transform: fn - }); -} -// @__NO_SIDE_EFFECTS__ -function _optional(Class2, innerType) { - return new Class2({ - type: "optional", - innerType - }); -} -// @__NO_SIDE_EFFECTS__ -function _nullable(Class2, innerType) { - return new Class2({ - type: "nullable", - innerType - }); -} -// @__NO_SIDE_EFFECTS__ -function _default(Class2, innerType, defaultValue) { - return new Class2({ - type: "default", - innerType, - get defaultValue() { - return typeof defaultValue === "function" ? 
defaultValue() : shallowClone(defaultValue); - } - }); -} -// @__NO_SIDE_EFFECTS__ -function _nonoptional(Class2, innerType, params) { - return new Class2({ - type: "nonoptional", - innerType, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _success(Class2, innerType) { - return new Class2({ - type: "success", - innerType - }); -} -// @__NO_SIDE_EFFECTS__ -function _catch(Class2, innerType, catchValue) { - return new Class2({ - type: "catch", - innerType, - catchValue: typeof catchValue === "function" ? catchValue : () => catchValue - }); -} -// @__NO_SIDE_EFFECTS__ -function _pipe(Class2, in_, out) { - return new Class2({ - type: "pipe", - in: in_, - out - }); -} -// @__NO_SIDE_EFFECTS__ -function _readonly(Class2, innerType) { - return new Class2({ - type: "readonly", - innerType - }); -} -// @__NO_SIDE_EFFECTS__ -function _templateLiteral(Class2, parts, params) { - return new Class2({ - type: "template_literal", - parts, - ...normalizeParams(params) - }); -} -// @__NO_SIDE_EFFECTS__ -function _lazy(Class2, getter) { - return new Class2({ - type: "lazy", - getter - }); -} -// @__NO_SIDE_EFFECTS__ -function _promise(Class2, innerType) { - return new Class2({ - type: "promise", - innerType - }); -} -// @__NO_SIDE_EFFECTS__ -function _custom(Class2, fn, _params) { - const norm = normalizeParams(_params); - norm.abort ?? 
(norm.abort = true); - const schema = new Class2({ - type: "custom", - check: "custom", - fn, - ...norm - }); - return schema; -} -// @__NO_SIDE_EFFECTS__ -function _refine(Class2, fn, _params) { - const schema = new Class2({ - type: "custom", - check: "custom", - fn, - ...normalizeParams(_params) - }); - return schema; -} -// @__NO_SIDE_EFFECTS__ -function _superRefine(fn) { - const ch = /* @__PURE__ */ _check((payload) => { - payload.addIssue = (issue2) => { - if (typeof issue2 === "string") { - payload.issues.push(issue(issue2, payload.value, ch._zod.def)); - } else { - const _issue = issue2; - if (_issue.fatal) - _issue.continue = false; - _issue.code ?? (_issue.code = "custom"); - _issue.input ?? (_issue.input = payload.value); - _issue.inst ?? (_issue.inst = ch); - _issue.continue ?? (_issue.continue = !ch._zod.def.abort); - payload.issues.push(issue(_issue)); - } - }; - return fn(payload.value, payload); - }); - return ch; -} -// @__NO_SIDE_EFFECTS__ -function _check(fn, params) { - const ch = new $ZodCheck({ - check: "custom", - ...normalizeParams(params) - }); - ch._zod.check = fn; - return ch; -} -// @__NO_SIDE_EFFECTS__ -function describe(description) { - const ch = new $ZodCheck({ check: "describe" }); - ch._zod.onattach = [ - (inst) => { - const existing = globalRegistry.get(inst) ?? {}; - globalRegistry.add(inst, { ...existing, description }); - } - ]; - ch._zod.check = () => { - }; - return ch; -} -// @__NO_SIDE_EFFECTS__ -function meta(metadata) { - const ch = new $ZodCheck({ check: "meta" }); - ch._zod.onattach = [ - (inst) => { - const existing = globalRegistry.get(inst) ?? {}; - globalRegistry.add(inst, { ...existing, ...metadata }); - } - ]; - ch._zod.check = () => { - }; - return ch; -} -// @__NO_SIDE_EFFECTS__ -function _stringbool(Classes, _params) { - const params = normalizeParams(_params); - let truthyArray = params.truthy ?? ["true", "1", "yes", "on", "y", "enabled"]; - let falsyArray = params.falsy ?? 
["false", "0", "no", "off", "n", "disabled"]; - if (params.case !== "sensitive") { - truthyArray = truthyArray.map((v) => typeof v === "string" ? v.toLowerCase() : v); - falsyArray = falsyArray.map((v) => typeof v === "string" ? v.toLowerCase() : v); - } - const truthySet = new Set(truthyArray); - const falsySet = new Set(falsyArray); - const _Codec = Classes.Codec ?? $ZodCodec; - const _Boolean = Classes.Boolean ?? $ZodBoolean; - const _String = Classes.String ?? $ZodString; - const stringSchema = new _String({ type: "string", error: params.error }); - const booleanSchema = new _Boolean({ type: "boolean", error: params.error }); - const codec2 = new _Codec({ - type: "pipe", - in: stringSchema, - out: booleanSchema, - transform: ((input, payload) => { - let data = input; - if (params.case !== "sensitive") - data = data.toLowerCase(); - if (truthySet.has(data)) { - return true; - } else if (falsySet.has(data)) { - return false; - } else { - payload.issues.push({ - code: "invalid_value", - expected: "stringbool", - values: [...truthySet, ...falsySet], - input: payload.value, - inst: codec2, - continue: false - }); - return {}; - } - }), - reverseTransform: ((input, _payload) => { - if (input === true) { - return truthyArray[0] || "true"; - } else { - return falsyArray[0] || "false"; - } - }), - error: params.error - }); - return codec2; -} -// @__NO_SIDE_EFFECTS__ -function _stringFormat(Class2, format, fnOrRegex, _params = {}) { - const params = normalizeParams(_params); - const def = { - ...normalizeParams(_params), - check: "string_format", - type: "string", - format, - fn: typeof fnOrRegex === "function" ? fnOrRegex : (val) => fnOrRegex.test(val), - ...params - }; - if (fnOrRegex instanceof RegExp) { - def.pattern = fnOrRegex; - } - const inst = new Class2(def); - return inst; -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/to-json-schema.js -function initializeContext(params) { - let target = params?.target ?? 
"draft-2020-12"; - if (target === "draft-4") - target = "draft-04"; - if (target === "draft-7") - target = "draft-07"; - return { - processors: params.processors ?? {}, - metadataRegistry: params?.metadata ?? globalRegistry, - target, - unrepresentable: params?.unrepresentable ?? "throw", - override: params?.override ?? (() => { - }), - io: params?.io ?? "output", - counter: 0, - seen: /* @__PURE__ */ new Map(), - cycles: params?.cycles ?? "ref", - reused: params?.reused ?? "inline", - external: params?.external ?? void 0 - }; -} -function process2(schema, ctx, _params = { path: [], schemaPath: [] }) { - var _a3; - const def = schema._zod.def; - const seen = ctx.seen.get(schema); - if (seen) { - seen.count++; - const isCycle = _params.schemaPath.includes(schema); - if (isCycle) { - seen.cycle = _params.path; - } - return seen.schema; - } - const result = { schema: {}, count: 1, cycle: void 0, path: _params.path }; - ctx.seen.set(schema, result); - const overrideSchema = schema._zod.toJSONSchema?.(); - if (overrideSchema) { - result.schema = overrideSchema; - } else { - const params = { - ..._params, - schemaPath: [..._params.schemaPath, schema], - path: _params.path - }; - if (schema._zod.processJSONSchema) { - schema._zod.processJSONSchema(ctx, result.schema, params); - } else { - const _json = result.schema; - const processor = ctx.processors[def.type]; - if (!processor) { - throw new Error(`[toJSONSchema]: Non-representable type encountered: ${def.type}`); - } - processor(schema, ctx, _json, params); - } - const parent = schema._zod.parent; - if (parent) { - if (!result.ref) - result.ref = parent; - process2(parent, ctx, params); - ctx.seen.get(parent).isParent = true; - } - } - const meta3 = ctx.metadataRegistry.get(schema); - if (meta3) - Object.assign(result.schema, meta3); - if (ctx.io === "input" && isTransforming(schema)) { - delete result.schema.examples; - delete result.schema.default; - } - if (ctx.io === "input" && result.schema._prefault) - (_a3 = 
result.schema).default ?? (_a3.default = result.schema._prefault); - delete result.schema._prefault; - const _result = ctx.seen.get(schema); - return _result.schema; -} -function extractDefs(ctx, schema) { - const root = ctx.seen.get(schema); - if (!root) - throw new Error("Unprocessed schema. This is a bug in Zod."); - const idToSchema = /* @__PURE__ */ new Map(); - for (const entry of ctx.seen.entries()) { - const id = ctx.metadataRegistry.get(entry[0])?.id; - if (id) { - const existing = idToSchema.get(id); - if (existing && existing !== entry[0]) { - throw new Error(`Duplicate schema id "${id}" detected during JSON Schema conversion. Two different schemas cannot share the same id when converted together.`); - } - idToSchema.set(id, entry[0]); - } - } - const makeURI = (entry) => { - const defsSegment = ctx.target === "draft-2020-12" ? "$defs" : "definitions"; - if (ctx.external) { - const externalId = ctx.external.registry.get(entry[0])?.id; - const uriGenerator = ctx.external.uri ?? ((id2) => id2); - if (externalId) { - return { ref: uriGenerator(externalId) }; - } - const id = entry[1].defId ?? entry[1].schema.id ?? `schema${ctx.counter++}`; - entry[1].defId = id; - return { defId: id, ref: `${uriGenerator("__shared")}#/${defsSegment}/${id}` }; - } - if (entry[1] === root) { - return { ref: "#" }; - } - const uriPrefix = `#`; - const defUriPrefix = `${uriPrefix}/${defsSegment}/`; - const defId = entry[1].schema.id ?? 
`__schema${ctx.counter++}`; - return { defId, ref: defUriPrefix + defId }; - }; - const extractToDef = (entry) => { - if (entry[1].schema.$ref) { - return; - } - const seen = entry[1]; - const { ref, defId } = makeURI(entry); - seen.def = { ...seen.schema }; - if (defId) - seen.defId = defId; - const schema2 = seen.schema; - for (const key in schema2) { - delete schema2[key]; - } - schema2.$ref = ref; - }; - if (ctx.cycles === "throw") { - for (const entry of ctx.seen.entries()) { - const seen = entry[1]; - if (seen.cycle) { - throw new Error(`Cycle detected: #/${seen.cycle?.join("/")}/ - -Set the \`cycles\` parameter to \`"ref"\` to resolve cyclical schemas with defs.`); - } - } - } - for (const entry of ctx.seen.entries()) { - const seen = entry[1]; - if (schema === entry[0]) { - extractToDef(entry); - continue; - } - if (ctx.external) { - const ext = ctx.external.registry.get(entry[0])?.id; - if (schema !== entry[0] && ext) { - extractToDef(entry); - continue; - } - } - const id = ctx.metadataRegistry.get(entry[0])?.id; - if (id) { - extractToDef(entry); - continue; - } - if (seen.cycle) { - extractToDef(entry); - continue; - } - if (seen.count > 1) { - if (ctx.reused === "ref") { - extractToDef(entry); - continue; - } - } - } -} -function finalize(ctx, schema) { - const root = ctx.seen.get(schema); - if (!root) - throw new Error("Unprocessed schema. This is a bug in Zod."); - const flattenRef = (zodSchema) => { - const seen = ctx.seen.get(zodSchema); - if (seen.ref === null) - return; - const schema2 = seen.def ?? seen.schema; - const _cached = { ...schema2 }; - const ref = seen.ref; - seen.ref = null; - if (ref) { - flattenRef(ref); - const refSeen = ctx.seen.get(ref); - const refSchema = refSeen.schema; - if (refSchema.$ref && (ctx.target === "draft-07" || ctx.target === "draft-04" || ctx.target === "openapi-3.0")) { - schema2.allOf = schema2.allOf ?? 
[]; - schema2.allOf.push(refSchema); - } else { - Object.assign(schema2, refSchema); - } - Object.assign(schema2, _cached); - const isParentRef = zodSchema._zod.parent === ref; - if (isParentRef) { - for (const key in schema2) { - if (key === "$ref" || key === "allOf") - continue; - if (!(key in _cached)) { - delete schema2[key]; - } - } - } - if (refSchema.$ref && refSeen.def) { - for (const key in schema2) { - if (key === "$ref" || key === "allOf") - continue; - if (key in refSeen.def && JSON.stringify(schema2[key]) === JSON.stringify(refSeen.def[key])) { - delete schema2[key]; - } - } - } - } - const parent = zodSchema._zod.parent; - if (parent && parent !== ref) { - flattenRef(parent); - const parentSeen = ctx.seen.get(parent); - if (parentSeen?.schema.$ref) { - schema2.$ref = parentSeen.schema.$ref; - if (parentSeen.def) { - for (const key in schema2) { - if (key === "$ref" || key === "allOf") - continue; - if (key in parentSeen.def && JSON.stringify(schema2[key]) === JSON.stringify(parentSeen.def[key])) { - delete schema2[key]; - } - } - } - } - } - ctx.override({ - zodSchema, - jsonSchema: schema2, - path: seen.path ?? [] - }); - }; - for (const entry of [...ctx.seen.entries()].reverse()) { - flattenRef(entry[0]); - } - const result = {}; - if (ctx.target === "draft-2020-12") { - result.$schema = "https://json-schema.org/draft/2020-12/schema"; - } else if (ctx.target === "draft-07") { - result.$schema = "http://json-schema.org/draft-07/schema#"; - } else if (ctx.target === "draft-04") { - result.$schema = "http://json-schema.org/draft-04/schema#"; - } else if (ctx.target === "openapi-3.0") { - } else { - } - if (ctx.external?.uri) { - const id = ctx.external.registry.get(schema)?.id; - if (!id) - throw new Error("Schema is missing an `id` property"); - result.$id = ctx.external.uri(id); - } - Object.assign(result, root.def ?? root.schema); - const defs = ctx.external?.defs ?? 
{}; - for (const entry of ctx.seen.entries()) { - const seen = entry[1]; - if (seen.def && seen.defId) { - defs[seen.defId] = seen.def; - } - } - if (ctx.external) { - } else { - if (Object.keys(defs).length > 0) { - if (ctx.target === "draft-2020-12") { - result.$defs = defs; - } else { - result.definitions = defs; - } - } - } - try { - const finalized = JSON.parse(JSON.stringify(result)); - Object.defineProperty(finalized, "~standard", { - value: { - ...schema["~standard"], - jsonSchema: { - input: createStandardJSONSchemaMethod(schema, "input", ctx.processors), - output: createStandardJSONSchemaMethod(schema, "output", ctx.processors) - } - }, - enumerable: false, - writable: false - }); - return finalized; - } catch (_err) { - throw new Error("Error converting schema to JSON."); - } -} -function isTransforming(_schema, _ctx) { - const ctx = _ctx ?? { seen: /* @__PURE__ */ new Set() }; - if (ctx.seen.has(_schema)) - return false; - ctx.seen.add(_schema); - const def = _schema._zod.def; - if (def.type === "transform") - return true; - if (def.type === "array") - return isTransforming(def.element, ctx); - if (def.type === "set") - return isTransforming(def.valueType, ctx); - if (def.type === "lazy") - return isTransforming(def.getter(), ctx); - if (def.type === "promise" || def.type === "optional" || def.type === "nonoptional" || def.type === "nullable" || def.type === "readonly" || def.type === "default" || def.type === "prefault") { - return isTransforming(def.innerType, ctx); - } - if (def.type === "intersection") { - return isTransforming(def.left, ctx) || isTransforming(def.right, ctx); - } - if (def.type === "record" || def.type === "map") { - return isTransforming(def.keyType, ctx) || isTransforming(def.valueType, ctx); - } - if (def.type === "pipe") { - return isTransforming(def.in, ctx) || isTransforming(def.out, ctx); - } - if (def.type === "object") { - for (const key in def.shape) { - if (isTransforming(def.shape[key], ctx)) - return true; - } - return 
false; - } - if (def.type === "union") { - for (const option of def.options) { - if (isTransforming(option, ctx)) - return true; - } - return false; - } - if (def.type === "tuple") { - for (const item of def.items) { - if (isTransforming(item, ctx)) - return true; - } - if (def.rest && isTransforming(def.rest, ctx)) - return true; - return false; - } - return false; -} -var createToJSONSchemaMethod = (schema, processors = {}) => (params) => { - const ctx = initializeContext({ ...params, processors }); - process2(schema, ctx); - extractDefs(ctx, schema); - return finalize(ctx, schema); -}; -var createStandardJSONSchemaMethod = (schema, io, processors = {}) => (params) => { - const { libraryOptions, target } = params ?? {}; - const ctx = initializeContext({ ...libraryOptions ?? {}, target, io, processors }); - process2(schema, ctx); - extractDefs(ctx, schema); - return finalize(ctx, schema); -}; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/json-schema-processors.js -var formatMap = { - guid: "uuid", - url: "uri", - datetime: "date-time", - json_string: "json-string", - regex: "" - // do not set -}; -var stringProcessor = (schema, ctx, _json, _params) => { - const json2 = _json; - json2.type = "string"; - const { minimum, maximum, format, patterns, contentEncoding } = schema._zod.bag; - if (typeof minimum === "number") - json2.minLength = minimum; - if (typeof maximum === "number") - json2.maxLength = maximum; - if (format) { - json2.format = formatMap[format] ?? 
format; - if (json2.format === "") - delete json2.format; - if (format === "time") { - delete json2.format; - } - } - if (contentEncoding) - json2.contentEncoding = contentEncoding; - if (patterns && patterns.size > 0) { - const regexes = [...patterns]; - if (regexes.length === 1) - json2.pattern = regexes[0].source; - else if (regexes.length > 1) { - json2.allOf = [ - ...regexes.map((regex) => ({ - ...ctx.target === "draft-07" || ctx.target === "draft-04" || ctx.target === "openapi-3.0" ? { type: "string" } : {}, - pattern: regex.source - })) - ]; - } - } -}; -var numberProcessor = (schema, ctx, _json, _params) => { - const json2 = _json; - const { minimum, maximum, format, multipleOf, exclusiveMaximum, exclusiveMinimum } = schema._zod.bag; - if (typeof format === "string" && format.includes("int")) - json2.type = "integer"; - else - json2.type = "number"; - if (typeof exclusiveMinimum === "number") { - if (ctx.target === "draft-04" || ctx.target === "openapi-3.0") { - json2.minimum = exclusiveMinimum; - json2.exclusiveMinimum = true; - } else { - json2.exclusiveMinimum = exclusiveMinimum; - } - } - if (typeof minimum === "number") { - json2.minimum = minimum; - if (typeof exclusiveMinimum === "number" && ctx.target !== "draft-04") { - if (exclusiveMinimum >= minimum) - delete json2.minimum; - else - delete json2.exclusiveMinimum; - } - } - if (typeof exclusiveMaximum === "number") { - if (ctx.target === "draft-04" || ctx.target === "openapi-3.0") { - json2.maximum = exclusiveMaximum; - json2.exclusiveMaximum = true; - } else { - json2.exclusiveMaximum = exclusiveMaximum; - } - } - if (typeof maximum === "number") { - json2.maximum = maximum; - if (typeof exclusiveMaximum === "number" && ctx.target !== "draft-04") { - if (exclusiveMaximum <= maximum) - delete json2.maximum; - else - delete json2.exclusiveMaximum; - } - } - if (typeof multipleOf === "number") - json2.multipleOf = multipleOf; -}; -var booleanProcessor = (_schema, _ctx, json2, _params) => { - 
json2.type = "boolean"; -}; -var bigintProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("BigInt cannot be represented in JSON Schema"); - } -}; -var symbolProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Symbols cannot be represented in JSON Schema"); - } -}; -var nullProcessor = (_schema, ctx, json2, _params) => { - if (ctx.target === "openapi-3.0") { - json2.type = "string"; - json2.nullable = true; - json2.enum = [null]; - } else { - json2.type = "null"; - } -}; -var undefinedProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Undefined cannot be represented in JSON Schema"); - } -}; -var voidProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Void cannot be represented in JSON Schema"); - } -}; -var neverProcessor = (_schema, _ctx, json2, _params) => { - json2.not = {}; -}; -var anyProcessor = (_schema, _ctx, _json, _params) => { -}; -var unknownProcessor = (_schema, _ctx, _json, _params) => { -}; -var dateProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Date cannot be represented in JSON Schema"); - } -}; -var enumProcessor = (schema, _ctx, json2, _params) => { - const def = schema._zod.def; - const values = getEnumValues(def.entries); - if (values.every((v) => typeof v === "number")) - json2.type = "number"; - if (values.every((v) => typeof v === "string")) - json2.type = "string"; - json2.enum = values; -}; -var literalProcessor = (schema, ctx, json2, _params) => { - const def = schema._zod.def; - const vals = []; - for (const val of def.values) { - if (val === void 0) { - if (ctx.unrepresentable === "throw") { - throw new Error("Literal `undefined` cannot be represented in JSON Schema"); - } else { - } - } else if (typeof val === "bigint") { - if (ctx.unrepresentable === 
"throw") { - throw new Error("BigInt literals cannot be represented in JSON Schema"); - } else { - vals.push(Number(val)); - } - } else { - vals.push(val); - } - } - if (vals.length === 0) { - } else if (vals.length === 1) { - const val = vals[0]; - json2.type = val === null ? "null" : typeof val; - if (ctx.target === "draft-04" || ctx.target === "openapi-3.0") { - json2.enum = [val]; - } else { - json2.const = val; - } - } else { - if (vals.every((v) => typeof v === "number")) - json2.type = "number"; - if (vals.every((v) => typeof v === "string")) - json2.type = "string"; - if (vals.every((v) => typeof v === "boolean")) - json2.type = "boolean"; - if (vals.every((v) => v === null)) - json2.type = "null"; - json2.enum = vals; - } -}; -var nanProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("NaN cannot be represented in JSON Schema"); - } -}; -var templateLiteralProcessor = (schema, _ctx, json2, _params) => { - const _json = json2; - const pattern = schema._zod.pattern; - if (!pattern) - throw new Error("Pattern not found in template literal"); - _json.type = "string"; - _json.pattern = pattern.source; -}; -var fileProcessor = (schema, _ctx, json2, _params) => { - const _json = json2; - const file2 = { - type: "string", - format: "binary", - contentEncoding: "binary" - }; - const { minimum, maximum, mime } = schema._zod.bag; - if (minimum !== void 0) - file2.minLength = minimum; - if (maximum !== void 0) - file2.maxLength = maximum; - if (mime) { - if (mime.length === 1) { - file2.contentMediaType = mime[0]; - Object.assign(_json, file2); - } else { - Object.assign(_json, file2); - _json.anyOf = mime.map((m) => ({ contentMediaType: m })); - } - } else { - Object.assign(_json, file2); - } -}; -var successProcessor = (_schema, _ctx, json2, _params) => { - json2.type = "boolean"; -}; -var customProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Custom 
types cannot be represented in JSON Schema"); - } -}; -var functionProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Function types cannot be represented in JSON Schema"); - } -}; -var transformProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Transforms cannot be represented in JSON Schema"); - } -}; -var mapProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Map cannot be represented in JSON Schema"); - } -}; -var setProcessor = (_schema, ctx, _json, _params) => { - if (ctx.unrepresentable === "throw") { - throw new Error("Set cannot be represented in JSON Schema"); - } -}; -var arrayProcessor = (schema, ctx, _json, params) => { - const json2 = _json; - const def = schema._zod.def; - const { minimum, maximum } = schema._zod.bag; - if (typeof minimum === "number") - json2.minItems = minimum; - if (typeof maximum === "number") - json2.maxItems = maximum; - json2.type = "array"; - json2.items = process2(def.element, ctx, { ...params, path: [...params.path, "items"] }); -}; -var objectProcessor = (schema, ctx, _json, params) => { - const json2 = _json; - const def = schema._zod.def; - json2.type = "object"; - json2.properties = {}; - const shape = def.shape; - for (const key in shape) { - json2.properties[key] = process2(shape[key], ctx, { - ...params, - path: [...params.path, "properties", key] - }); - } - const allKeys = new Set(Object.keys(shape)); - const requiredKeys = new Set([...allKeys].filter((key) => { - const v = def.shape[key]._zod; - if (ctx.io === "input") { - return v.optin === void 0; - } else { - return v.optout === void 0; - } - })); - if (requiredKeys.size > 0) { - json2.required = Array.from(requiredKeys); - } - if (def.catchall?._zod.def.type === "never") { - json2.additionalProperties = false; - } else if (!def.catchall) { - if (ctx.io === "output") - 
json2.additionalProperties = false; - } else if (def.catchall) { - json2.additionalProperties = process2(def.catchall, ctx, { - ...params, - path: [...params.path, "additionalProperties"] - }); - } -}; -var unionProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - const isExclusive = def.inclusive === false; - const options = def.options.map((x, i) => process2(x, ctx, { - ...params, - path: [...params.path, isExclusive ? "oneOf" : "anyOf", i] - })); - if (isExclusive) { - json2.oneOf = options; - } else { - json2.anyOf = options; - } -}; -var intersectionProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - const a = process2(def.left, ctx, { - ...params, - path: [...params.path, "allOf", 0] - }); - const b = process2(def.right, ctx, { - ...params, - path: [...params.path, "allOf", 1] - }); - const isSimpleIntersection = (val) => "allOf" in val && Object.keys(val).length === 1; - const allOf = [ - ...isSimpleIntersection(a) ? a.allOf : [a], - ...isSimpleIntersection(b) ? b.allOf : [b] - ]; - json2.allOf = allOf; -}; -var tupleProcessor = (schema, ctx, _json, params) => { - const json2 = _json; - const def = schema._zod.def; - json2.type = "array"; - const prefixPath = ctx.target === "draft-2020-12" ? "prefixItems" : "items"; - const restPath = ctx.target === "draft-2020-12" ? "items" : ctx.target === "openapi-3.0" ? "items" : "additionalItems"; - const prefixItems = def.items.map((x, i) => process2(x, ctx, { - ...params, - path: [...params.path, prefixPath, i] - })); - const rest = def.rest ? process2(def.rest, ctx, { - ...params, - path: [...params.path, restPath, ...ctx.target === "openapi-3.0" ? 
[def.items.length] : []] - }) : null; - if (ctx.target === "draft-2020-12") { - json2.prefixItems = prefixItems; - if (rest) { - json2.items = rest; - } - } else if (ctx.target === "openapi-3.0") { - json2.items = { - anyOf: prefixItems - }; - if (rest) { - json2.items.anyOf.push(rest); - } - json2.minItems = prefixItems.length; - if (!rest) { - json2.maxItems = prefixItems.length; - } - } else { - json2.items = prefixItems; - if (rest) { - json2.additionalItems = rest; - } - } - const { minimum, maximum } = schema._zod.bag; - if (typeof minimum === "number") - json2.minItems = minimum; - if (typeof maximum === "number") - json2.maxItems = maximum; -}; -var recordProcessor = (schema, ctx, _json, params) => { - const json2 = _json; - const def = schema._zod.def; - json2.type = "object"; - const keyType = def.keyType; - const keyBag = keyType._zod.bag; - const patterns = keyBag?.patterns; - if (def.mode === "loose" && patterns && patterns.size > 0) { - const valueSchema = process2(def.valueType, ctx, { - ...params, - path: [...params.path, "patternProperties", "*"] - }); - json2.patternProperties = {}; - for (const pattern of patterns) { - json2.patternProperties[pattern.source] = valueSchema; - } - } else { - if (ctx.target === "draft-07" || ctx.target === "draft-2020-12") { - json2.propertyNames = process2(def.keyType, ctx, { - ...params, - path: [...params.path, "propertyNames"] - }); - } - json2.additionalProperties = process2(def.valueType, ctx, { - ...params, - path: [...params.path, "additionalProperties"] - }); - } - const keyValues = keyType._zod.values; - if (keyValues) { - const validKeyValues = [...keyValues].filter((v) => typeof v === "string" || typeof v === "number"); - if (validKeyValues.length > 0) { - json2.required = validKeyValues; - } - } -}; -var nullableProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - const inner = process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - if (ctx.target === 
"openapi-3.0") { - seen.ref = def.innerType; - json2.nullable = true; - } else { - json2.anyOf = [inner, { type: "null" }]; - } -}; -var nonoptionalProcessor = (schema, ctx, _json, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; -}; -var defaultProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; - json2.default = JSON.parse(JSON.stringify(def.defaultValue)); -}; -var prefaultProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; - if (ctx.io === "input") - json2._prefault = JSON.parse(JSON.stringify(def.defaultValue)); -}; -var catchProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; - let catchValue; - try { - catchValue = def.catchValue(void 0); - } catch { - throw new Error("Dynamic catch values are not supported in JSON Schema"); - } - json2.default = catchValue; -}; -var pipeProcessor = (schema, ctx, _json, params) => { - const def = schema._zod.def; - const innerType = ctx.io === "input" ? def.in._zod.def.type === "transform" ? 
def.out : def.in : def.out; - process2(innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = innerType; -}; -var readonlyProcessor = (schema, ctx, json2, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; - json2.readOnly = true; -}; -var promiseProcessor = (schema, ctx, _json, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; -}; -var optionalProcessor = (schema, ctx, _json, params) => { - const def = schema._zod.def; - process2(def.innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = def.innerType; -}; -var lazyProcessor = (schema, ctx, _json, params) => { - const innerType = schema._zod.innerType; - process2(innerType, ctx, params); - const seen = ctx.seen.get(schema); - seen.ref = innerType; -}; -var allProcessors = { - string: stringProcessor, - number: numberProcessor, - boolean: booleanProcessor, - bigint: bigintProcessor, - symbol: symbolProcessor, - null: nullProcessor, - undefined: undefinedProcessor, - void: voidProcessor, - never: neverProcessor, - any: anyProcessor, - unknown: unknownProcessor, - date: dateProcessor, - enum: enumProcessor, - literal: literalProcessor, - nan: nanProcessor, - template_literal: templateLiteralProcessor, - file: fileProcessor, - success: successProcessor, - custom: customProcessor, - function: functionProcessor, - transform: transformProcessor, - map: mapProcessor, - set: setProcessor, - array: arrayProcessor, - object: objectProcessor, - union: unionProcessor, - intersection: intersectionProcessor, - tuple: tupleProcessor, - record: recordProcessor, - nullable: nullableProcessor, - nonoptional: nonoptionalProcessor, - default: defaultProcessor, - prefault: prefaultProcessor, - catch: catchProcessor, - pipe: pipeProcessor, - readonly: readonlyProcessor, - promise: promiseProcessor, - 
optional: optionalProcessor, - lazy: lazyProcessor -}; -function toJSONSchema(input, params) { - if ("_idmap" in input) { - const registry2 = input; - const ctx2 = initializeContext({ ...params, processors: allProcessors }); - const defs = {}; - for (const entry of registry2._idmap.entries()) { - const [_, schema] = entry; - process2(schema, ctx2); - } - const schemas = {}; - const external = { - registry: registry2, - uri: params?.uri, - defs - }; - ctx2.external = external; - for (const entry of registry2._idmap.entries()) { - const [key, schema] = entry; - extractDefs(ctx2, schema); - schemas[key] = finalize(ctx2, schema); - } - if (Object.keys(defs).length > 0) { - const defsSegment = ctx2.target === "draft-2020-12" ? "$defs" : "definitions"; - schemas.__shared = { - [defsSegment]: defs - }; - } - return { schemas }; - } - const ctx = initializeContext({ ...params, processors: allProcessors }); - process2(input, ctx); - extractDefs(ctx, input); - return finalize(ctx, input); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/json-schema-generator.js -var JSONSchemaGenerator = class { - /** @deprecated Access via ctx instead */ - get metadataRegistry() { - return this.ctx.metadataRegistry; - } - /** @deprecated Access via ctx instead */ - get target() { - return this.ctx.target; - } - /** @deprecated Access via ctx instead */ - get unrepresentable() { - return this.ctx.unrepresentable; - } - /** @deprecated Access via ctx instead */ - get override() { - return this.ctx.override; - } - /** @deprecated Access via ctx instead */ - get io() { - return this.ctx.io; - } - /** @deprecated Access via ctx instead */ - get counter() { - return this.ctx.counter; - } - set counter(value) { - this.ctx.counter = value; - } - /** @deprecated Access via ctx instead */ - get seen() { - return this.ctx.seen; - } - constructor(params) { - let normalizedTarget = params?.target ?? 
"draft-2020-12"; - if (normalizedTarget === "draft-4") - normalizedTarget = "draft-04"; - if (normalizedTarget === "draft-7") - normalizedTarget = "draft-07"; - this.ctx = initializeContext({ - processors: allProcessors, - target: normalizedTarget, - ...params?.metadata && { metadata: params.metadata }, - ...params?.unrepresentable && { unrepresentable: params.unrepresentable }, - ...params?.override && { override: params.override }, - ...params?.io && { io: params.io } - }); - } - /** - * Process a schema to prepare it for JSON Schema generation. - * This must be called before emit(). - */ - process(schema, _params = { path: [], schemaPath: [] }) { - return process2(schema, this.ctx, _params); - } - /** - * Emit the final JSON Schema after processing. - * Must call process() first. - */ - emit(schema, _params) { - if (_params) { - if (_params.cycles) - this.ctx.cycles = _params.cycles; - if (_params.reused) - this.ctx.reused = _params.reused; - if (_params.external) - this.ctx.external = _params.external; - } - extractDefs(this.ctx, schema); - const result = finalize(this.ctx, schema); - const { "~standard": _, ...plainResult } = result; - return plainResult; - } -}; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/core/json-schema.js -var json_schema_exports = {}; - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/schemas.js -var schemas_exports2 = {}; -__export(schemas_exports2, { - ZodAny: () => ZodAny, - ZodArray: () => ZodArray, - ZodBase64: () => ZodBase64, - ZodBase64URL: () => ZodBase64URL, - ZodBigInt: () => ZodBigInt, - ZodBigIntFormat: () => ZodBigIntFormat, - ZodBoolean: () => ZodBoolean, - ZodCIDRv4: () => ZodCIDRv4, - ZodCIDRv6: () => ZodCIDRv6, - ZodCUID: () => ZodCUID, - ZodCUID2: () => ZodCUID2, - ZodCatch: () => ZodCatch, - ZodCodec: () => ZodCodec, - ZodCustom: () => ZodCustom, - ZodCustomStringFormat: () => ZodCustomStringFormat, - ZodDate: () => ZodDate, - ZodDefault: () => ZodDefault, - ZodDiscriminatedUnion: () 
=> ZodDiscriminatedUnion, - ZodE164: () => ZodE164, - ZodEmail: () => ZodEmail, - ZodEmoji: () => ZodEmoji, - ZodEnum: () => ZodEnum, - ZodExactOptional: () => ZodExactOptional, - ZodFile: () => ZodFile, - ZodFunction: () => ZodFunction, - ZodGUID: () => ZodGUID, - ZodIPv4: () => ZodIPv4, - ZodIPv6: () => ZodIPv6, - ZodIntersection: () => ZodIntersection, - ZodJWT: () => ZodJWT, - ZodKSUID: () => ZodKSUID, - ZodLazy: () => ZodLazy, - ZodLiteral: () => ZodLiteral, - ZodMAC: () => ZodMAC, - ZodMap: () => ZodMap, - ZodNaN: () => ZodNaN, - ZodNanoID: () => ZodNanoID, - ZodNever: () => ZodNever, - ZodNonOptional: () => ZodNonOptional, - ZodNull: () => ZodNull, - ZodNullable: () => ZodNullable, - ZodNumber: () => ZodNumber, - ZodNumberFormat: () => ZodNumberFormat, - ZodObject: () => ZodObject, - ZodOptional: () => ZodOptional, - ZodPipe: () => ZodPipe, - ZodPrefault: () => ZodPrefault, - ZodPromise: () => ZodPromise, - ZodReadonly: () => ZodReadonly, - ZodRecord: () => ZodRecord, - ZodSet: () => ZodSet, - ZodString: () => ZodString, - ZodStringFormat: () => ZodStringFormat, - ZodSuccess: () => ZodSuccess, - ZodSymbol: () => ZodSymbol, - ZodTemplateLiteral: () => ZodTemplateLiteral, - ZodTransform: () => ZodTransform, - ZodTuple: () => ZodTuple, - ZodType: () => ZodType, - ZodULID: () => ZodULID, - ZodURL: () => ZodURL, - ZodUUID: () => ZodUUID, - ZodUndefined: () => ZodUndefined, - ZodUnion: () => ZodUnion, - ZodUnknown: () => ZodUnknown, - ZodVoid: () => ZodVoid, - ZodXID: () => ZodXID, - ZodXor: () => ZodXor, - _ZodString: () => _ZodString, - _default: () => _default2, - _function: () => _function, - any: () => any, - array: () => array, - base64: () => base642, - base64url: () => base64url2, - bigint: () => bigint2, - boolean: () => boolean2, - catch: () => _catch2, - check: () => check, - cidrv4: () => cidrv42, - cidrv6: () => cidrv62, - codec: () => codec, - cuid: () => cuid3, - cuid2: () => cuid22, - custom: () => custom, - date: () => date3, - describe: () => 
describe2, - discriminatedUnion: () => discriminatedUnion, - e164: () => e1642, - email: () => email2, - emoji: () => emoji2, - enum: () => _enum2, - exactOptional: () => exactOptional, - file: () => file, - float32: () => float32, - float64: () => float64, - function: () => _function, - guid: () => guid2, - hash: () => hash, - hex: () => hex2, - hostname: () => hostname2, - httpUrl: () => httpUrl, - instanceof: () => _instanceof, - int: () => int, - int32: () => int32, - int64: () => int64, - intersection: () => intersection, - ipv4: () => ipv42, - ipv6: () => ipv62, - json: () => json, - jwt: () => jwt, - keyof: () => keyof, - ksuid: () => ksuid2, - lazy: () => lazy, - literal: () => literal, - looseObject: () => looseObject, - looseRecord: () => looseRecord, - mac: () => mac2, - map: () => map, - meta: () => meta2, - nan: () => nan, - nanoid: () => nanoid2, - nativeEnum: () => nativeEnum, - never: () => never, - nonoptional: () => nonoptional, - null: () => _null3, - nullable: () => nullable, - nullish: () => nullish2, - number: () => number2, - object: () => object, - optional: () => optional, - partialRecord: () => partialRecord, - pipe: () => pipe, - prefault: () => prefault, - preprocess: () => preprocess, - promise: () => promise, - readonly: () => readonly, - record: () => record, - refine: () => refine, - set: () => set, - strictObject: () => strictObject, - string: () => string2, - stringFormat: () => stringFormat, - stringbool: () => stringbool, - success: () => success, - superRefine: () => superRefine, - symbol: () => symbol, - templateLiteral: () => templateLiteral, - transform: () => transform, - tuple: () => tuple, - uint32: () => uint32, - uint64: () => uint64, - ulid: () => ulid2, - undefined: () => _undefined3, - union: () => union, - unknown: () => unknown, - url: () => url, - uuid: () => uuid2, - uuidv4: () => uuidv4, - uuidv6: () => uuidv6, - uuidv7: () => uuidv7, - void: () => _void2, - xid: () => xid2, - xor: () => xor -}); - -// 
../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/checks.js -var checks_exports2 = {}; -__export(checks_exports2, { - endsWith: () => _endsWith, - gt: () => _gt, - gte: () => _gte, - includes: () => _includes, - length: () => _length, - lowercase: () => _lowercase, - lt: () => _lt, - lte: () => _lte, - maxLength: () => _maxLength, - maxSize: () => _maxSize, - mime: () => _mime, - minLength: () => _minLength, - minSize: () => _minSize, - multipleOf: () => _multipleOf, - negative: () => _negative, - nonnegative: () => _nonnegative, - nonpositive: () => _nonpositive, - normalize: () => _normalize, - overwrite: () => _overwrite, - positive: () => _positive, - property: () => _property, - regex: () => _regex, - size: () => _size, - slugify: () => _slugify, - startsWith: () => _startsWith, - toLowerCase: () => _toLowerCase, - toUpperCase: () => _toUpperCase, - trim: () => _trim, - uppercase: () => _uppercase -}); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/iso.js -var iso_exports = {}; -__export(iso_exports, { - ZodISODate: () => ZodISODate, - ZodISODateTime: () => ZodISODateTime, - ZodISODuration: () => ZodISODuration, - ZodISOTime: () => ZodISOTime, - date: () => date2, - datetime: () => datetime2, - duration: () => duration2, - time: () => time2 -}); -var ZodISODateTime = /* @__PURE__ */ $constructor("ZodISODateTime", (inst, def) => { - $ZodISODateTime.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function datetime2(params) { - return _isoDateTime(ZodISODateTime, params); -} -var ZodISODate = /* @__PURE__ */ $constructor("ZodISODate", (inst, def) => { - $ZodISODate.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function date2(params) { - return _isoDate(ZodISODate, params); -} -var ZodISOTime = /* @__PURE__ */ $constructor("ZodISOTime", (inst, def) => { - $ZodISOTime.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function time2(params) { - return _isoTime(ZodISOTime, params); -} -var 
ZodISODuration = /* @__PURE__ */ $constructor("ZodISODuration", (inst, def) => { - $ZodISODuration.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function duration2(params) { - return _isoDuration(ZodISODuration, params); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/errors.js -var initializer2 = (inst, issues) => { - $ZodError.init(inst, issues); - inst.name = "ZodError"; - Object.defineProperties(inst, { - format: { - value: (mapper) => formatError(inst, mapper) - // enumerable: false, - }, - flatten: { - value: (mapper) => flattenError(inst, mapper) - // enumerable: false, - }, - addIssue: { - value: (issue2) => { - inst.issues.push(issue2); - inst.message = JSON.stringify(inst.issues, jsonStringifyReplacer, 2); - } - // enumerable: false, - }, - addIssues: { - value: (issues2) => { - inst.issues.push(...issues2); - inst.message = JSON.stringify(inst.issues, jsonStringifyReplacer, 2); - } - // enumerable: false, - }, - isEmpty: { - get() { - return inst.issues.length === 0; - } - // enumerable: false, - } - }); -}; -var ZodError = $constructor("ZodError", initializer2); -var ZodRealError = $constructor("ZodError", initializer2, { - Parent: Error -}); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/parse.js -var parse2 = /* @__PURE__ */ _parse(ZodRealError); -var parseAsync2 = /* @__PURE__ */ _parseAsync(ZodRealError); -var safeParse2 = /* @__PURE__ */ _safeParse(ZodRealError); -var safeParseAsync2 = /* @__PURE__ */ _safeParseAsync(ZodRealError); -var encode2 = /* @__PURE__ */ _encode(ZodRealError); -var decode2 = /* @__PURE__ */ _decode(ZodRealError); -var encodeAsync2 = /* @__PURE__ */ _encodeAsync(ZodRealError); -var decodeAsync2 = /* @__PURE__ */ _decodeAsync(ZodRealError); -var safeEncode2 = /* @__PURE__ */ _safeEncode(ZodRealError); -var safeDecode2 = /* @__PURE__ */ _safeDecode(ZodRealError); -var safeEncodeAsync2 = /* @__PURE__ */ _safeEncodeAsync(ZodRealError); -var safeDecodeAsync2 = /* @__PURE__ 
*/ _safeDecodeAsync(ZodRealError); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/schemas.js -var ZodType = /* @__PURE__ */ $constructor("ZodType", (inst, def) => { - $ZodType.init(inst, def); - Object.assign(inst["~standard"], { - jsonSchema: { - input: createStandardJSONSchemaMethod(inst, "input"), - output: createStandardJSONSchemaMethod(inst, "output") - } - }); - inst.toJSONSchema = createToJSONSchemaMethod(inst, {}); - inst.def = def; - inst.type = def.type; - Object.defineProperty(inst, "_def", { value: def }); - inst.check = (...checks) => { - return inst.clone(util_exports.mergeDefs(def, { - checks: [ - ...def.checks ?? [], - ...checks.map((ch) => typeof ch === "function" ? { _zod: { check: ch, def: { check: "custom" }, onattach: [] } } : ch) - ] - }), { - parent: true - }); - }; - inst.with = inst.check; - inst.clone = (def2, params) => clone(inst, def2, params); - inst.brand = () => inst; - inst.register = ((reg, meta3) => { - reg.add(inst, meta3); - return inst; - }); - inst.parse = (data, params) => parse2(inst, data, params, { callee: inst.parse }); - inst.safeParse = (data, params) => safeParse2(inst, data, params); - inst.parseAsync = async (data, params) => parseAsync2(inst, data, params, { callee: inst.parseAsync }); - inst.safeParseAsync = async (data, params) => safeParseAsync2(inst, data, params); - inst.spa = inst.safeParseAsync; - inst.encode = (data, params) => encode2(inst, data, params); - inst.decode = (data, params) => decode2(inst, data, params); - inst.encodeAsync = async (data, params) => encodeAsync2(inst, data, params); - inst.decodeAsync = async (data, params) => decodeAsync2(inst, data, params); - inst.safeEncode = (data, params) => safeEncode2(inst, data, params); - inst.safeDecode = (data, params) => safeDecode2(inst, data, params); - inst.safeEncodeAsync = async (data, params) => safeEncodeAsync2(inst, data, params); - inst.safeDecodeAsync = async (data, params) => safeDecodeAsync2(inst, data, params); - 
inst.refine = (check2, params) => inst.check(refine(check2, params)); - inst.superRefine = (refinement) => inst.check(superRefine(refinement)); - inst.overwrite = (fn) => inst.check(_overwrite(fn)); - inst.optional = () => optional(inst); - inst.exactOptional = () => exactOptional(inst); - inst.nullable = () => nullable(inst); - inst.nullish = () => optional(nullable(inst)); - inst.nonoptional = (params) => nonoptional(inst, params); - inst.array = () => array(inst); - inst.or = (arg) => union([inst, arg]); - inst.and = (arg) => intersection(inst, arg); - inst.transform = (tx) => pipe(inst, transform(tx)); - inst.default = (def2) => _default2(inst, def2); - inst.prefault = (def2) => prefault(inst, def2); - inst.catch = (params) => _catch2(inst, params); - inst.pipe = (target) => pipe(inst, target); - inst.readonly = () => readonly(inst); - inst.describe = (description) => { - const cl = inst.clone(); - globalRegistry.add(cl, { description }); - return cl; - }; - Object.defineProperty(inst, "description", { - get() { - return globalRegistry.get(inst)?.description; - }, - configurable: true - }); - inst.meta = (...args) => { - if (args.length === 0) { - return globalRegistry.get(inst); - } - const cl = inst.clone(); - globalRegistry.add(cl, args[0]); - return cl; - }; - inst.isOptional = () => inst.safeParse(void 0).success; - inst.isNullable = () => inst.safeParse(null).success; - inst.apply = (fn) => fn(inst); - return inst; -}); -var _ZodString = /* @__PURE__ */ $constructor("_ZodString", (inst, def) => { - $ZodString.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => stringProcessor(inst, ctx, json2, params); - const bag = inst._zod.bag; - inst.format = bag.format ?? null; - inst.minLength = bag.minimum ?? null; - inst.maxLength = bag.maximum ?? 
null; - inst.regex = (...args) => inst.check(_regex(...args)); - inst.includes = (...args) => inst.check(_includes(...args)); - inst.startsWith = (...args) => inst.check(_startsWith(...args)); - inst.endsWith = (...args) => inst.check(_endsWith(...args)); - inst.min = (...args) => inst.check(_minLength(...args)); - inst.max = (...args) => inst.check(_maxLength(...args)); - inst.length = (...args) => inst.check(_length(...args)); - inst.nonempty = (...args) => inst.check(_minLength(1, ...args)); - inst.lowercase = (params) => inst.check(_lowercase(params)); - inst.uppercase = (params) => inst.check(_uppercase(params)); - inst.trim = () => inst.check(_trim()); - inst.normalize = (...args) => inst.check(_normalize(...args)); - inst.toLowerCase = () => inst.check(_toLowerCase()); - inst.toUpperCase = () => inst.check(_toUpperCase()); - inst.slugify = () => inst.check(_slugify()); -}); -var ZodString = /* @__PURE__ */ $constructor("ZodString", (inst, def) => { - $ZodString.init(inst, def); - _ZodString.init(inst, def); - inst.email = (params) => inst.check(_email(ZodEmail, params)); - inst.url = (params) => inst.check(_url(ZodURL, params)); - inst.jwt = (params) => inst.check(_jwt(ZodJWT, params)); - inst.emoji = (params) => inst.check(_emoji2(ZodEmoji, params)); - inst.guid = (params) => inst.check(_guid(ZodGUID, params)); - inst.uuid = (params) => inst.check(_uuid(ZodUUID, params)); - inst.uuidv4 = (params) => inst.check(_uuidv4(ZodUUID, params)); - inst.uuidv6 = (params) => inst.check(_uuidv6(ZodUUID, params)); - inst.uuidv7 = (params) => inst.check(_uuidv7(ZodUUID, params)); - inst.nanoid = (params) => inst.check(_nanoid(ZodNanoID, params)); - inst.guid = (params) => inst.check(_guid(ZodGUID, params)); - inst.cuid = (params) => inst.check(_cuid(ZodCUID, params)); - inst.cuid2 = (params) => inst.check(_cuid2(ZodCUID2, params)); - inst.ulid = (params) => inst.check(_ulid(ZodULID, params)); - inst.base64 = (params) => inst.check(_base64(ZodBase64, params)); - 
inst.base64url = (params) => inst.check(_base64url(ZodBase64URL, params)); - inst.xid = (params) => inst.check(_xid(ZodXID, params)); - inst.ksuid = (params) => inst.check(_ksuid(ZodKSUID, params)); - inst.ipv4 = (params) => inst.check(_ipv4(ZodIPv4, params)); - inst.ipv6 = (params) => inst.check(_ipv6(ZodIPv6, params)); - inst.cidrv4 = (params) => inst.check(_cidrv4(ZodCIDRv4, params)); - inst.cidrv6 = (params) => inst.check(_cidrv6(ZodCIDRv6, params)); - inst.e164 = (params) => inst.check(_e164(ZodE164, params)); - inst.datetime = (params) => inst.check(datetime2(params)); - inst.date = (params) => inst.check(date2(params)); - inst.time = (params) => inst.check(time2(params)); - inst.duration = (params) => inst.check(duration2(params)); -}); -function string2(params) { - return _string(ZodString, params); -} -var ZodStringFormat = /* @__PURE__ */ $constructor("ZodStringFormat", (inst, def) => { - $ZodStringFormat.init(inst, def); - _ZodString.init(inst, def); -}); -var ZodEmail = /* @__PURE__ */ $constructor("ZodEmail", (inst, def) => { - $ZodEmail.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function email2(params) { - return _email(ZodEmail, params); -} -var ZodGUID = /* @__PURE__ */ $constructor("ZodGUID", (inst, def) => { - $ZodGUID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function guid2(params) { - return _guid(ZodGUID, params); -} -var ZodUUID = /* @__PURE__ */ $constructor("ZodUUID", (inst, def) => { - $ZodUUID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function uuid2(params) { - return _uuid(ZodUUID, params); -} -function uuidv4(params) { - return _uuidv4(ZodUUID, params); -} -function uuidv6(params) { - return _uuidv6(ZodUUID, params); -} -function uuidv7(params) { - return _uuidv7(ZodUUID, params); -} -var ZodURL = /* @__PURE__ */ $constructor("ZodURL", (inst, def) => { - $ZodURL.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function url(params) { - return _url(ZodURL, params); -} -function 
httpUrl(params) { - return _url(ZodURL, { - protocol: /^https?$/, - hostname: regexes_exports.domain, - ...util_exports.normalizeParams(params) - }); -} -var ZodEmoji = /* @__PURE__ */ $constructor("ZodEmoji", (inst, def) => { - $ZodEmoji.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function emoji2(params) { - return _emoji2(ZodEmoji, params); -} -var ZodNanoID = /* @__PURE__ */ $constructor("ZodNanoID", (inst, def) => { - $ZodNanoID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function nanoid2(params) { - return _nanoid(ZodNanoID, params); -} -var ZodCUID = /* @__PURE__ */ $constructor("ZodCUID", (inst, def) => { - $ZodCUID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function cuid3(params) { - return _cuid(ZodCUID, params); -} -var ZodCUID2 = /* @__PURE__ */ $constructor("ZodCUID2", (inst, def) => { - $ZodCUID2.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function cuid22(params) { - return _cuid2(ZodCUID2, params); -} -var ZodULID = /* @__PURE__ */ $constructor("ZodULID", (inst, def) => { - $ZodULID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function ulid2(params) { - return _ulid(ZodULID, params); -} -var ZodXID = /* @__PURE__ */ $constructor("ZodXID", (inst, def) => { - $ZodXID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function xid2(params) { - return _xid(ZodXID, params); -} -var ZodKSUID = /* @__PURE__ */ $constructor("ZodKSUID", (inst, def) => { - $ZodKSUID.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function ksuid2(params) { - return _ksuid(ZodKSUID, params); -} -var ZodIPv4 = /* @__PURE__ */ $constructor("ZodIPv4", (inst, def) => { - $ZodIPv4.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function ipv42(params) { - return _ipv4(ZodIPv4, params); -} -var ZodMAC = /* @__PURE__ */ $constructor("ZodMAC", (inst, def) => { - $ZodMAC.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function mac2(params) { - return _mac(ZodMAC, params); -} -var 
ZodIPv6 = /* @__PURE__ */ $constructor("ZodIPv6", (inst, def) => { - $ZodIPv6.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function ipv62(params) { - return _ipv6(ZodIPv6, params); -} -var ZodCIDRv4 = /* @__PURE__ */ $constructor("ZodCIDRv4", (inst, def) => { - $ZodCIDRv4.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function cidrv42(params) { - return _cidrv4(ZodCIDRv4, params); -} -var ZodCIDRv6 = /* @__PURE__ */ $constructor("ZodCIDRv6", (inst, def) => { - $ZodCIDRv6.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function cidrv62(params) { - return _cidrv6(ZodCIDRv6, params); -} -var ZodBase64 = /* @__PURE__ */ $constructor("ZodBase64", (inst, def) => { - $ZodBase64.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function base642(params) { - return _base64(ZodBase64, params); -} -var ZodBase64URL = /* @__PURE__ */ $constructor("ZodBase64URL", (inst, def) => { - $ZodBase64URL.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function base64url2(params) { - return _base64url(ZodBase64URL, params); -} -var ZodE164 = /* @__PURE__ */ $constructor("ZodE164", (inst, def) => { - $ZodE164.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function e1642(params) { - return _e164(ZodE164, params); -} -var ZodJWT = /* @__PURE__ */ $constructor("ZodJWT", (inst, def) => { - $ZodJWT.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function jwt(params) { - return _jwt(ZodJWT, params); -} -var ZodCustomStringFormat = /* @__PURE__ */ $constructor("ZodCustomStringFormat", (inst, def) => { - $ZodCustomStringFormat.init(inst, def); - ZodStringFormat.init(inst, def); -}); -function stringFormat(format, fnOrRegex, _params = {}) { - return _stringFormat(ZodCustomStringFormat, format, fnOrRegex, _params); -} -function hostname2(_params) { - return _stringFormat(ZodCustomStringFormat, "hostname", regexes_exports.hostname, _params); -} -function hex2(_params) { - return _stringFormat(ZodCustomStringFormat, "hex", 
regexes_exports.hex, _params); -} -function hash(alg, params) { - const enc = params?.enc ?? "hex"; - const format = `${alg}_${enc}`; - const regex = regexes_exports[format]; - if (!regex) - throw new Error(`Unrecognized hash format: ${format}`); - return _stringFormat(ZodCustomStringFormat, format, regex, params); -} -var ZodNumber = /* @__PURE__ */ $constructor("ZodNumber", (inst, def) => { - $ZodNumber.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => numberProcessor(inst, ctx, json2, params); - inst.gt = (value, params) => inst.check(_gt(value, params)); - inst.gte = (value, params) => inst.check(_gte(value, params)); - inst.min = (value, params) => inst.check(_gte(value, params)); - inst.lt = (value, params) => inst.check(_lt(value, params)); - inst.lte = (value, params) => inst.check(_lte(value, params)); - inst.max = (value, params) => inst.check(_lte(value, params)); - inst.int = (params) => inst.check(int(params)); - inst.safe = (params) => inst.check(int(params)); - inst.positive = (params) => inst.check(_gt(0, params)); - inst.nonnegative = (params) => inst.check(_gte(0, params)); - inst.negative = (params) => inst.check(_lt(0, params)); - inst.nonpositive = (params) => inst.check(_lte(0, params)); - inst.multipleOf = (value, params) => inst.check(_multipleOf(value, params)); - inst.step = (value, params) => inst.check(_multipleOf(value, params)); - inst.finite = () => inst; - const bag = inst._zod.bag; - inst.minValue = Math.max(bag.minimum ?? Number.NEGATIVE_INFINITY, bag.exclusiveMinimum ?? Number.NEGATIVE_INFINITY) ?? null; - inst.maxValue = Math.min(bag.maximum ?? Number.POSITIVE_INFINITY, bag.exclusiveMaximum ?? Number.POSITIVE_INFINITY) ?? null; - inst.isInt = (bag.format ?? "").includes("int") || Number.isSafeInteger(bag.multipleOf ?? 0.5); - inst.isFinite = true; - inst.format = bag.format ?? 
null; -}); -function number2(params) { - return _number(ZodNumber, params); -} -var ZodNumberFormat = /* @__PURE__ */ $constructor("ZodNumberFormat", (inst, def) => { - $ZodNumberFormat.init(inst, def); - ZodNumber.init(inst, def); -}); -function int(params) { - return _int(ZodNumberFormat, params); -} -function float32(params) { - return _float32(ZodNumberFormat, params); -} -function float64(params) { - return _float64(ZodNumberFormat, params); -} -function int32(params) { - return _int32(ZodNumberFormat, params); -} -function uint32(params) { - return _uint32(ZodNumberFormat, params); -} -var ZodBoolean = /* @__PURE__ */ $constructor("ZodBoolean", (inst, def) => { - $ZodBoolean.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => booleanProcessor(inst, ctx, json2, params); -}); -function boolean2(params) { - return _boolean(ZodBoolean, params); -} -var ZodBigInt = /* @__PURE__ */ $constructor("ZodBigInt", (inst, def) => { - $ZodBigInt.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => bigintProcessor(inst, ctx, json2, params); - inst.gte = (value, params) => inst.check(_gte(value, params)); - inst.min = (value, params) => inst.check(_gte(value, params)); - inst.gt = (value, params) => inst.check(_gt(value, params)); - inst.gte = (value, params) => inst.check(_gte(value, params)); - inst.min = (value, params) => inst.check(_gte(value, params)); - inst.lt = (value, params) => inst.check(_lt(value, params)); - inst.lte = (value, params) => inst.check(_lte(value, params)); - inst.max = (value, params) => inst.check(_lte(value, params)); - inst.positive = (params) => inst.check(_gt(BigInt(0), params)); - inst.negative = (params) => inst.check(_lt(BigInt(0), params)); - inst.nonpositive = (params) => inst.check(_lte(BigInt(0), params)); - inst.nonnegative = (params) => inst.check(_gte(BigInt(0), params)); - inst.multipleOf = (value, params) => inst.check(_multipleOf(value, 
params)); - const bag = inst._zod.bag; - inst.minValue = bag.minimum ?? null; - inst.maxValue = bag.maximum ?? null; - inst.format = bag.format ?? null; -}); -function bigint2(params) { - return _bigint(ZodBigInt, params); -} -var ZodBigIntFormat = /* @__PURE__ */ $constructor("ZodBigIntFormat", (inst, def) => { - $ZodBigIntFormat.init(inst, def); - ZodBigInt.init(inst, def); -}); -function int64(params) { - return _int64(ZodBigIntFormat, params); -} -function uint64(params) { - return _uint64(ZodBigIntFormat, params); -} -var ZodSymbol = /* @__PURE__ */ $constructor("ZodSymbol", (inst, def) => { - $ZodSymbol.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => symbolProcessor(inst, ctx, json2, params); -}); -function symbol(params) { - return _symbol(ZodSymbol, params); -} -var ZodUndefined = /* @__PURE__ */ $constructor("ZodUndefined", (inst, def) => { - $ZodUndefined.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => undefinedProcessor(inst, ctx, json2, params); -}); -function _undefined3(params) { - return _undefined2(ZodUndefined, params); -} -var ZodNull = /* @__PURE__ */ $constructor("ZodNull", (inst, def) => { - $ZodNull.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => nullProcessor(inst, ctx, json2, params); -}); -function _null3(params) { - return _null2(ZodNull, params); -} -var ZodAny = /* @__PURE__ */ $constructor("ZodAny", (inst, def) => { - $ZodAny.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => anyProcessor(inst, ctx, json2, params); -}); -function any() { - return _any(ZodAny); -} -var ZodUnknown = /* @__PURE__ */ $constructor("ZodUnknown", (inst, def) => { - $ZodUnknown.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => unknownProcessor(inst, ctx, json2, params); -}); -function unknown() { - return 
_unknown(ZodUnknown); -} -var ZodNever = /* @__PURE__ */ $constructor("ZodNever", (inst, def) => { - $ZodNever.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => neverProcessor(inst, ctx, json2, params); -}); -function never(params) { - return _never(ZodNever, params); -} -var ZodVoid = /* @__PURE__ */ $constructor("ZodVoid", (inst, def) => { - $ZodVoid.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => voidProcessor(inst, ctx, json2, params); -}); -function _void2(params) { - return _void(ZodVoid, params); -} -var ZodDate = /* @__PURE__ */ $constructor("ZodDate", (inst, def) => { - $ZodDate.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => dateProcessor(inst, ctx, json2, params); - inst.min = (value, params) => inst.check(_gte(value, params)); - inst.max = (value, params) => inst.check(_lte(value, params)); - const c = inst._zod.bag; - inst.minDate = c.minimum ? new Date(c.minimum) : null; - inst.maxDate = c.maximum ? 
new Date(c.maximum) : null; -}); -function date3(params) { - return _date(ZodDate, params); -} -var ZodArray = /* @__PURE__ */ $constructor("ZodArray", (inst, def) => { - $ZodArray.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => arrayProcessor(inst, ctx, json2, params); - inst.element = def.element; - inst.min = (minLength, params) => inst.check(_minLength(minLength, params)); - inst.nonempty = (params) => inst.check(_minLength(1, params)); - inst.max = (maxLength, params) => inst.check(_maxLength(maxLength, params)); - inst.length = (len, params) => inst.check(_length(len, params)); - inst.unwrap = () => inst.element; -}); -function array(element, params) { - return _array(ZodArray, element, params); -} -function keyof(schema) { - const shape = schema._zod.def.shape; - return _enum2(Object.keys(shape)); -} -var ZodObject = /* @__PURE__ */ $constructor("ZodObject", (inst, def) => { - $ZodObjectJIT.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => objectProcessor(inst, ctx, json2, params); - util_exports.defineLazy(inst, "shape", () => { - return def.shape; - }); - inst.keyof = () => _enum2(Object.keys(inst._zod.def.shape)); - inst.catchall = (catchall) => inst.clone({ ...inst._zod.def, catchall }); - inst.passthrough = () => inst.clone({ ...inst._zod.def, catchall: unknown() }); - inst.loose = () => inst.clone({ ...inst._zod.def, catchall: unknown() }); - inst.strict = () => inst.clone({ ...inst._zod.def, catchall: never() }); - inst.strip = () => inst.clone({ ...inst._zod.def, catchall: void 0 }); - inst.extend = (incoming) => { - return util_exports.extend(inst, incoming); - }; - inst.safeExtend = (incoming) => { - return util_exports.safeExtend(inst, incoming); - }; - inst.merge = (other) => util_exports.merge(inst, other); - inst.pick = (mask) => util_exports.pick(inst, mask); - inst.omit = (mask) => util_exports.omit(inst, mask); - inst.partial = (...args) => 
util_exports.partial(ZodOptional, inst, args[0]); - inst.required = (...args) => util_exports.required(ZodNonOptional, inst, args[0]); -}); -function object(shape, params) { - const def = { - type: "object", - shape: shape ?? {}, - ...util_exports.normalizeParams(params) - }; - return new ZodObject(def); -} -function strictObject(shape, params) { - return new ZodObject({ - type: "object", - shape, - catchall: never(), - ...util_exports.normalizeParams(params) - }); -} -function looseObject(shape, params) { - return new ZodObject({ - type: "object", - shape, - catchall: unknown(), - ...util_exports.normalizeParams(params) - }); -} -var ZodUnion = /* @__PURE__ */ $constructor("ZodUnion", (inst, def) => { - $ZodUnion.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => unionProcessor(inst, ctx, json2, params); - inst.options = def.options; -}); -function union(options, params) { - return new ZodUnion({ - type: "union", - options, - ...util_exports.normalizeParams(params) - }); -} -var ZodXor = /* @__PURE__ */ $constructor("ZodXor", (inst, def) => { - ZodUnion.init(inst, def); - $ZodXor.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => unionProcessor(inst, ctx, json2, params); - inst.options = def.options; -}); -function xor(options, params) { - return new ZodXor({ - type: "union", - options, - inclusive: false, - ...util_exports.normalizeParams(params) - }); -} -var ZodDiscriminatedUnion = /* @__PURE__ */ $constructor("ZodDiscriminatedUnion", (inst, def) => { - ZodUnion.init(inst, def); - $ZodDiscriminatedUnion.init(inst, def); -}); -function discriminatedUnion(discriminator, options, params) { - return new ZodDiscriminatedUnion({ - type: "union", - options, - discriminator, - ...util_exports.normalizeParams(params) - }); -} -var ZodIntersection = /* @__PURE__ */ $constructor("ZodIntersection", (inst, def) => { - $ZodIntersection.init(inst, def); - ZodType.init(inst, def); - 
inst._zod.processJSONSchema = (ctx, json2, params) => intersectionProcessor(inst, ctx, json2, params); -}); -function intersection(left, right) { - return new ZodIntersection({ - type: "intersection", - left, - right - }); -} -var ZodTuple = /* @__PURE__ */ $constructor("ZodTuple", (inst, def) => { - $ZodTuple.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => tupleProcessor(inst, ctx, json2, params); - inst.rest = (rest) => inst.clone({ - ...inst._zod.def, - rest - }); -}); -function tuple(items, _paramsOrRest, _params) { - const hasRest = _paramsOrRest instanceof $ZodType; - const params = hasRest ? _params : _paramsOrRest; - const rest = hasRest ? _paramsOrRest : null; - return new ZodTuple({ - type: "tuple", - items, - rest, - ...util_exports.normalizeParams(params) - }); -} -var ZodRecord = /* @__PURE__ */ $constructor("ZodRecord", (inst, def) => { - $ZodRecord.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => recordProcessor(inst, ctx, json2, params); - inst.keyType = def.keyType; - inst.valueType = def.valueType; -}); -function record(keyType, valueType, params) { - return new ZodRecord({ - type: "record", - keyType, - valueType, - ...util_exports.normalizeParams(params) - }); -} -function partialRecord(keyType, valueType, params) { - const k = clone(keyType); - k._zod.values = void 0; - return new ZodRecord({ - type: "record", - keyType: k, - valueType, - ...util_exports.normalizeParams(params) - }); -} -function looseRecord(keyType, valueType, params) { - return new ZodRecord({ - type: "record", - keyType, - valueType, - mode: "loose", - ...util_exports.normalizeParams(params) - }); -} -var ZodMap = /* @__PURE__ */ $constructor("ZodMap", (inst, def) => { - $ZodMap.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => mapProcessor(inst, ctx, json2, params); - inst.keyType = def.keyType; - inst.valueType = 
def.valueType; - inst.min = (...args) => inst.check(_minSize(...args)); - inst.nonempty = (params) => inst.check(_minSize(1, params)); - inst.max = (...args) => inst.check(_maxSize(...args)); - inst.size = (...args) => inst.check(_size(...args)); -}); -function map(keyType, valueType, params) { - return new ZodMap({ - type: "map", - keyType, - valueType, - ...util_exports.normalizeParams(params) - }); -} -var ZodSet = /* @__PURE__ */ $constructor("ZodSet", (inst, def) => { - $ZodSet.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => setProcessor(inst, ctx, json2, params); - inst.min = (...args) => inst.check(_minSize(...args)); - inst.nonempty = (params) => inst.check(_minSize(1, params)); - inst.max = (...args) => inst.check(_maxSize(...args)); - inst.size = (...args) => inst.check(_size(...args)); -}); -function set(valueType, params) { - return new ZodSet({ - type: "set", - valueType, - ...util_exports.normalizeParams(params) - }); -} -var ZodEnum = /* @__PURE__ */ $constructor("ZodEnum", (inst, def) => { - $ZodEnum.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => enumProcessor(inst, ctx, json2, params); - inst.enum = def.entries; - inst.options = Object.values(def.entries); - const keys = new Set(Object.keys(def.entries)); - inst.extract = (values, params) => { - const newEntries = {}; - for (const value of values) { - if (keys.has(value)) { - newEntries[value] = def.entries[value]; - } else - throw new Error(`Key ${value} not found in enum`); - } - return new ZodEnum({ - ...def, - checks: [], - ...util_exports.normalizeParams(params), - entries: newEntries - }); - }; - inst.exclude = (values, params) => { - const newEntries = { ...def.entries }; - for (const value of values) { - if (keys.has(value)) { - delete newEntries[value]; - } else - throw new Error(`Key ${value} not found in enum`); - } - return new ZodEnum({ - ...def, - checks: [], - 
...util_exports.normalizeParams(params), - entries: newEntries - }); - }; -}); -function _enum2(values, params) { - const entries = Array.isArray(values) ? Object.fromEntries(values.map((v) => [v, v])) : values; - return new ZodEnum({ - type: "enum", - entries, - ...util_exports.normalizeParams(params) - }); -} -function nativeEnum(entries, params) { - return new ZodEnum({ - type: "enum", - entries, - ...util_exports.normalizeParams(params) - }); -} -var ZodLiteral = /* @__PURE__ */ $constructor("ZodLiteral", (inst, def) => { - $ZodLiteral.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => literalProcessor(inst, ctx, json2, params); - inst.values = new Set(def.values); - Object.defineProperty(inst, "value", { - get() { - if (def.values.length > 1) { - throw new Error("This schema contains multiple valid literal values. Use `.values` instead."); - } - return def.values[0]; - } - }); -}); -function literal(value, params) { - return new ZodLiteral({ - type: "literal", - values: Array.isArray(value) ? value : [value], - ...util_exports.normalizeParams(params) - }); -} -var ZodFile = /* @__PURE__ */ $constructor("ZodFile", (inst, def) => { - $ZodFile.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => fileProcessor(inst, ctx, json2, params); - inst.min = (size, params) => inst.check(_minSize(size, params)); - inst.max = (size, params) => inst.check(_maxSize(size, params)); - inst.mime = (types, params) => inst.check(_mime(Array.isArray(types) ? 
types : [types], params)); -}); -function file(params) { - return _file(ZodFile, params); -} -var ZodTransform = /* @__PURE__ */ $constructor("ZodTransform", (inst, def) => { - $ZodTransform.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => transformProcessor(inst, ctx, json2, params); - inst._zod.parse = (payload, _ctx) => { - if (_ctx.direction === "backward") { - throw new $ZodEncodeError(inst.constructor.name); - } - payload.addIssue = (issue2) => { - if (typeof issue2 === "string") { - payload.issues.push(util_exports.issue(issue2, payload.value, def)); - } else { - const _issue = issue2; - if (_issue.fatal) - _issue.continue = false; - _issue.code ?? (_issue.code = "custom"); - _issue.input ?? (_issue.input = payload.value); - _issue.inst ?? (_issue.inst = inst); - payload.issues.push(util_exports.issue(_issue)); - } - }; - const output = def.transform(payload.value, payload); - if (output instanceof Promise) { - return output.then((output2) => { - payload.value = output2; - return payload; - }); - } - payload.value = output; - return payload; - }; -}); -function transform(fn) { - return new ZodTransform({ - type: "transform", - transform: fn - }); -} -var ZodOptional = /* @__PURE__ */ $constructor("ZodOptional", (inst, def) => { - $ZodOptional.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => optionalProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function optional(innerType) { - return new ZodOptional({ - type: "optional", - innerType - }); -} -var ZodExactOptional = /* @__PURE__ */ $constructor("ZodExactOptional", (inst, def) => { - $ZodExactOptional.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => optionalProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function exactOptional(innerType) { - return new ZodExactOptional({ - 
type: "optional", - innerType - }); -} -var ZodNullable = /* @__PURE__ */ $constructor("ZodNullable", (inst, def) => { - $ZodNullable.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => nullableProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function nullable(innerType) { - return new ZodNullable({ - type: "nullable", - innerType - }); -} -function nullish2(innerType) { - return optional(nullable(innerType)); -} -var ZodDefault = /* @__PURE__ */ $constructor("ZodDefault", (inst, def) => { - $ZodDefault.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => defaultProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; - inst.removeDefault = inst.unwrap; -}); -function _default2(innerType, defaultValue) { - return new ZodDefault({ - type: "default", - innerType, - get defaultValue() { - return typeof defaultValue === "function" ? defaultValue() : util_exports.shallowClone(defaultValue); - } - }); -} -var ZodPrefault = /* @__PURE__ */ $constructor("ZodPrefault", (inst, def) => { - $ZodPrefault.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => prefaultProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function prefault(innerType, defaultValue) { - return new ZodPrefault({ - type: "prefault", - innerType, - get defaultValue() { - return typeof defaultValue === "function" ? 
defaultValue() : util_exports.shallowClone(defaultValue); - } - }); -} -var ZodNonOptional = /* @__PURE__ */ $constructor("ZodNonOptional", (inst, def) => { - $ZodNonOptional.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => nonoptionalProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function nonoptional(innerType, params) { - return new ZodNonOptional({ - type: "nonoptional", - innerType, - ...util_exports.normalizeParams(params) - }); -} -var ZodSuccess = /* @__PURE__ */ $constructor("ZodSuccess", (inst, def) => { - $ZodSuccess.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => successProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function success(innerType) { - return new ZodSuccess({ - type: "success", - innerType - }); -} -var ZodCatch = /* @__PURE__ */ $constructor("ZodCatch", (inst, def) => { - $ZodCatch.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => catchProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; - inst.removeCatch = inst.unwrap; -}); -function _catch2(innerType, catchValue) { - return new ZodCatch({ - type: "catch", - innerType, - catchValue: typeof catchValue === "function" ? 
catchValue : () => catchValue - }); -} -var ZodNaN = /* @__PURE__ */ $constructor("ZodNaN", (inst, def) => { - $ZodNaN.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => nanProcessor(inst, ctx, json2, params); -}); -function nan(params) { - return _nan(ZodNaN, params); -} -var ZodPipe = /* @__PURE__ */ $constructor("ZodPipe", (inst, def) => { - $ZodPipe.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => pipeProcessor(inst, ctx, json2, params); - inst.in = def.in; - inst.out = def.out; -}); -function pipe(in_, out) { - return new ZodPipe({ - type: "pipe", - in: in_, - out - // ...util.normalizeParams(params), - }); -} -var ZodCodec = /* @__PURE__ */ $constructor("ZodCodec", (inst, def) => { - ZodPipe.init(inst, def); - $ZodCodec.init(inst, def); -}); -function codec(in_, out, params) { - return new ZodCodec({ - type: "pipe", - in: in_, - out, - transform: params.decode, - reverseTransform: params.encode - }); -} -var ZodReadonly = /* @__PURE__ */ $constructor("ZodReadonly", (inst, def) => { - $ZodReadonly.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => readonlyProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function readonly(innerType) { - return new ZodReadonly({ - type: "readonly", - innerType - }); -} -var ZodTemplateLiteral = /* @__PURE__ */ $constructor("ZodTemplateLiteral", (inst, def) => { - $ZodTemplateLiteral.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => templateLiteralProcessor(inst, ctx, json2, params); -}); -function templateLiteral(parts, params) { - return new ZodTemplateLiteral({ - type: "template_literal", - parts, - ...util_exports.normalizeParams(params) - }); -} -var ZodLazy = /* @__PURE__ */ $constructor("ZodLazy", (inst, def) => { - $ZodLazy.init(inst, def); - ZodType.init(inst, def); - 
inst._zod.processJSONSchema = (ctx, json2, params) => lazyProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.getter(); -}); -function lazy(getter) { - return new ZodLazy({ - type: "lazy", - getter - }); -} -var ZodPromise = /* @__PURE__ */ $constructor("ZodPromise", (inst, def) => { - $ZodPromise.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => promiseProcessor(inst, ctx, json2, params); - inst.unwrap = () => inst._zod.def.innerType; -}); -function promise(innerType) { - return new ZodPromise({ - type: "promise", - innerType - }); -} -var ZodFunction = /* @__PURE__ */ $constructor("ZodFunction", (inst, def) => { - $ZodFunction.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => functionProcessor(inst, ctx, json2, params); -}); -function _function(params) { - return new ZodFunction({ - type: "function", - input: Array.isArray(params?.input) ? tuple(params?.input) : params?.input ?? array(unknown()), - output: params?.output ?? unknown() - }); -} -var ZodCustom = /* @__PURE__ */ $constructor("ZodCustom", (inst, def) => { - $ZodCustom.init(inst, def); - ZodType.init(inst, def); - inst._zod.processJSONSchema = (ctx, json2, params) => customProcessor(inst, ctx, json2, params); -}); -function check(fn) { - const ch = new $ZodCheck({ - check: "custom" - // ...util.normalizeParams(params), - }); - ch._zod.check = fn; - return ch; -} -function custom(fn, _params) { - return _custom(ZodCustom, fn ?? 
(() => true), _params); -} -function refine(fn, _params = {}) { - return _refine(ZodCustom, fn, _params); -} -function superRefine(fn) { - return _superRefine(fn); -} -var describe2 = describe; -var meta2 = meta; -function _instanceof(cls, params = {}) { - const inst = new ZodCustom({ - type: "custom", - check: "custom", - fn: (data) => data instanceof cls, - abort: true, - ...util_exports.normalizeParams(params) - }); - inst._zod.bag.Class = cls; - inst._zod.check = (payload) => { - if (!(payload.value instanceof cls)) { - payload.issues.push({ - code: "invalid_type", - expected: cls.name, - input: payload.value, - inst, - path: [...inst._zod.def.path ?? []] - }); - } - }; - return inst; -} -var stringbool = (...args) => _stringbool({ - Codec: ZodCodec, - Boolean: ZodBoolean, - String: ZodString -}, ...args); -function json(params) { - const jsonSchema = lazy(() => { - return union([string2(params), number2(), boolean2(), _null3(), array(jsonSchema), record(string2(), jsonSchema)]); - }); - return jsonSchema; -} -function preprocess(fn, schema) { - return pipe(transform(fn), schema); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/compat.js -var ZodIssueCode = { - invalid_type: "invalid_type", - too_big: "too_big", - too_small: "too_small", - invalid_format: "invalid_format", - not_multiple_of: "not_multiple_of", - unrecognized_keys: "unrecognized_keys", - invalid_union: "invalid_union", - invalid_key: "invalid_key", - invalid_element: "invalid_element", - invalid_value: "invalid_value", - custom: "custom" -}; -function setErrorMap(map2) { - config({ - customError: map2 - }); -} -function getErrorMap() { - return config().customError; -} -var ZodFirstPartyTypeKind; -/* @__PURE__ */ (function(ZodFirstPartyTypeKind2) { -})(ZodFirstPartyTypeKind || (ZodFirstPartyTypeKind = {})); - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/from-json-schema.js -var z = { - ...schemas_exports2, - ...checks_exports2, - iso: iso_exports -}; 
-var RECOGNIZED_KEYS = /* @__PURE__ */ new Set([ - // Schema identification - "$schema", - "$ref", - "$defs", - "definitions", - // Core schema keywords - "$id", - "id", - "$comment", - "$anchor", - "$vocabulary", - "$dynamicRef", - "$dynamicAnchor", - // Type - "type", - "enum", - "const", - // Composition - "anyOf", - "oneOf", - "allOf", - "not", - // Object - "properties", - "required", - "additionalProperties", - "patternProperties", - "propertyNames", - "minProperties", - "maxProperties", - // Array - "items", - "prefixItems", - "additionalItems", - "minItems", - "maxItems", - "uniqueItems", - "contains", - "minContains", - "maxContains", - // String - "minLength", - "maxLength", - "pattern", - "format", - // Number - "minimum", - "maximum", - "exclusiveMinimum", - "exclusiveMaximum", - "multipleOf", - // Already handled metadata - "description", - "default", - // Content - "contentEncoding", - "contentMediaType", - "contentSchema", - // Unsupported (error-throwing) - "unevaluatedItems", - "unevaluatedProperties", - "if", - "then", - "else", - "dependentSchemas", - "dependentRequired", - // OpenAPI - "nullable", - "readOnly" -]); -function detectVersion(schema, defaultTarget) { - const $schema = schema.$schema; - if ($schema === "https://json-schema.org/draft/2020-12/schema") { - return "draft-2020-12"; - } - if ($schema === "http://json-schema.org/draft-07/schema#") { - return "draft-7"; - } - if ($schema === "http://json-schema.org/draft-04/schema#") { - return "draft-4"; - } - return defaultTarget ?? "draft-2020-12"; -} -function resolveRef(ref, ctx) { - if (!ref.startsWith("#")) { - throw new Error("External $ref is not supported, only local refs (#/...) are allowed"); - } - const path = ref.slice(1).split("/").filter(Boolean); - if (path.length === 0) { - return ctx.rootSchema; - } - const defsKey = ctx.version === "draft-2020-12" ? 
"$defs" : "definitions"; - if (path[0] === defsKey) { - const key = path[1]; - if (!key || !ctx.defs[key]) { - throw new Error(`Reference not found: ${ref}`); - } - return ctx.defs[key]; - } - throw new Error(`Reference not found: ${ref}`); -} -function convertBaseSchema(schema, ctx) { - if (schema.not !== void 0) { - if (typeof schema.not === "object" && Object.keys(schema.not).length === 0) { - return z.never(); - } - throw new Error("not is not supported in Zod (except { not: {} } for never)"); - } - if (schema.unevaluatedItems !== void 0) { - throw new Error("unevaluatedItems is not supported"); - } - if (schema.unevaluatedProperties !== void 0) { - throw new Error("unevaluatedProperties is not supported"); - } - if (schema.if !== void 0 || schema.then !== void 0 || schema.else !== void 0) { - throw new Error("Conditional schemas (if/then/else) are not supported"); - } - if (schema.dependentSchemas !== void 0 || schema.dependentRequired !== void 0) { - throw new Error("dependentSchemas and dependentRequired are not supported"); - } - if (schema.$ref) { - const refPath = schema.$ref; - if (ctx.refs.has(refPath)) { - return ctx.refs.get(refPath); - } - if (ctx.processing.has(refPath)) { - return z.lazy(() => { - if (!ctx.refs.has(refPath)) { - throw new Error(`Circular reference not resolved: ${refPath}`); - } - return ctx.refs.get(refPath); - }); - } - ctx.processing.add(refPath); - const resolved = resolveRef(refPath, ctx); - const zodSchema2 = convertSchema(resolved, ctx); - ctx.refs.set(refPath, zodSchema2); - ctx.processing.delete(refPath); - return zodSchema2; - } - if (schema.enum !== void 0) { - const enumValues = schema.enum; - if (ctx.version === "openapi-3.0" && schema.nullable === true && enumValues.length === 1 && enumValues[0] === null) { - return z.null(); - } - if (enumValues.length === 0) { - return z.never(); - } - if (enumValues.length === 1) { - return z.literal(enumValues[0]); - } - if (enumValues.every((v) => typeof v === "string")) { - 
return z.enum(enumValues); - } - const literalSchemas = enumValues.map((v) => z.literal(v)); - if (literalSchemas.length < 2) { - return literalSchemas[0]; - } - return z.union([literalSchemas[0], literalSchemas[1], ...literalSchemas.slice(2)]); - } - if (schema.const !== void 0) { - return z.literal(schema.const); - } - const type = schema.type; - if (Array.isArray(type)) { - const typeSchemas = type.map((t) => { - const typeSchema = { ...schema, type: t }; - return convertBaseSchema(typeSchema, ctx); - }); - if (typeSchemas.length === 0) { - return z.never(); - } - if (typeSchemas.length === 1) { - return typeSchemas[0]; - } - return z.union(typeSchemas); - } - if (!type) { - return z.any(); - } - let zodSchema; - switch (type) { - case "string": { - let stringSchema = z.string(); - if (schema.format) { - const format = schema.format; - if (format === "email") { - stringSchema = stringSchema.check(z.email()); - } else if (format === "uri" || format === "uri-reference") { - stringSchema = stringSchema.check(z.url()); - } else if (format === "uuid" || format === "guid") { - stringSchema = stringSchema.check(z.uuid()); - } else if (format === "date-time") { - stringSchema = stringSchema.check(z.iso.datetime()); - } else if (format === "date") { - stringSchema = stringSchema.check(z.iso.date()); - } else if (format === "time") { - stringSchema = stringSchema.check(z.iso.time()); - } else if (format === "duration") { - stringSchema = stringSchema.check(z.iso.duration()); - } else if (format === "ipv4") { - stringSchema = stringSchema.check(z.ipv4()); - } else if (format === "ipv6") { - stringSchema = stringSchema.check(z.ipv6()); - } else if (format === "mac") { - stringSchema = stringSchema.check(z.mac()); - } else if (format === "cidr") { - stringSchema = stringSchema.check(z.cidrv4()); - } else if (format === "cidr-v6") { - stringSchema = stringSchema.check(z.cidrv6()); - } else if (format === "base64") { - stringSchema = stringSchema.check(z.base64()); - } else if 
(format === "base64url") { - stringSchema = stringSchema.check(z.base64url()); - } else if (format === "e164") { - stringSchema = stringSchema.check(z.e164()); - } else if (format === "jwt") { - stringSchema = stringSchema.check(z.jwt()); - } else if (format === "emoji") { - stringSchema = stringSchema.check(z.emoji()); - } else if (format === "nanoid") { - stringSchema = stringSchema.check(z.nanoid()); - } else if (format === "cuid") { - stringSchema = stringSchema.check(z.cuid()); - } else if (format === "cuid2") { - stringSchema = stringSchema.check(z.cuid2()); - } else if (format === "ulid") { - stringSchema = stringSchema.check(z.ulid()); - } else if (format === "xid") { - stringSchema = stringSchema.check(z.xid()); - } else if (format === "ksuid") { - stringSchema = stringSchema.check(z.ksuid()); - } - } - if (typeof schema.minLength === "number") { - stringSchema = stringSchema.min(schema.minLength); - } - if (typeof schema.maxLength === "number") { - stringSchema = stringSchema.max(schema.maxLength); - } - if (schema.pattern) { - stringSchema = stringSchema.regex(new RegExp(schema.pattern)); - } - zodSchema = stringSchema; - break; - } - case "number": - case "integer": { - let numberSchema = type === "integer" ? 
z.number().int() : z.number(); - if (typeof schema.minimum === "number") { - numberSchema = numberSchema.min(schema.minimum); - } - if (typeof schema.maximum === "number") { - numberSchema = numberSchema.max(schema.maximum); - } - if (typeof schema.exclusiveMinimum === "number") { - numberSchema = numberSchema.gt(schema.exclusiveMinimum); - } else if (schema.exclusiveMinimum === true && typeof schema.minimum === "number") { - numberSchema = numberSchema.gt(schema.minimum); - } - if (typeof schema.exclusiveMaximum === "number") { - numberSchema = numberSchema.lt(schema.exclusiveMaximum); - } else if (schema.exclusiveMaximum === true && typeof schema.maximum === "number") { - numberSchema = numberSchema.lt(schema.maximum); - } - if (typeof schema.multipleOf === "number") { - numberSchema = numberSchema.multipleOf(schema.multipleOf); - } - zodSchema = numberSchema; - break; - } - case "boolean": { - zodSchema = z.boolean(); - break; - } - case "null": { - zodSchema = z.null(); - break; - } - case "object": { - const shape = {}; - const properties = schema.properties || {}; - const requiredSet = new Set(schema.required || []); - for (const [key, propSchema] of Object.entries(properties)) { - const propZodSchema = convertSchema(propSchema, ctx); - shape[key] = requiredSet.has(key) ? propZodSchema : propZodSchema.optional(); - } - if (schema.propertyNames) { - const keySchema = convertSchema(schema.propertyNames, ctx); - const valueSchema = schema.additionalProperties && typeof schema.additionalProperties === "object" ? 
convertSchema(schema.additionalProperties, ctx) : z.any(); - if (Object.keys(shape).length === 0) { - zodSchema = z.record(keySchema, valueSchema); - break; - } - const objectSchema2 = z.object(shape).passthrough(); - const recordSchema = z.looseRecord(keySchema, valueSchema); - zodSchema = z.intersection(objectSchema2, recordSchema); - break; - } - if (schema.patternProperties) { - const patternProps = schema.patternProperties; - const patternKeys = Object.keys(patternProps); - const looseRecords = []; - for (const pattern of patternKeys) { - const patternValue = convertSchema(patternProps[pattern], ctx); - const keySchema = z.string().regex(new RegExp(pattern)); - looseRecords.push(z.looseRecord(keySchema, patternValue)); - } - const schemasToIntersect = []; - if (Object.keys(shape).length > 0) { - schemasToIntersect.push(z.object(shape).passthrough()); - } - schemasToIntersect.push(...looseRecords); - if (schemasToIntersect.length === 0) { - zodSchema = z.object({}).passthrough(); - } else if (schemasToIntersect.length === 1) { - zodSchema = schemasToIntersect[0]; - } else { - let result = z.intersection(schemasToIntersect[0], schemasToIntersect[1]); - for (let i = 2; i < schemasToIntersect.length; i++) { - result = z.intersection(result, schemasToIntersect[i]); - } - zodSchema = result; - } - break; - } - const objectSchema = z.object(shape); - if (schema.additionalProperties === false) { - zodSchema = objectSchema.strict(); - } else if (typeof schema.additionalProperties === "object") { - zodSchema = objectSchema.catchall(convertSchema(schema.additionalProperties, ctx)); - } else { - zodSchema = objectSchema.passthrough(); - } - break; - } - case "array": { - const prefixItems = schema.prefixItems; - const items = schema.items; - if (prefixItems && Array.isArray(prefixItems)) { - const tupleItems = prefixItems.map((item) => convertSchema(item, ctx)); - const rest = items && typeof items === "object" && !Array.isArray(items) ? 
convertSchema(items, ctx) : void 0; - if (rest) { - zodSchema = z.tuple(tupleItems).rest(rest); - } else { - zodSchema = z.tuple(tupleItems); - } - if (typeof schema.minItems === "number") { - zodSchema = zodSchema.check(z.minLength(schema.minItems)); - } - if (typeof schema.maxItems === "number") { - zodSchema = zodSchema.check(z.maxLength(schema.maxItems)); - } - } else if (Array.isArray(items)) { - const tupleItems = items.map((item) => convertSchema(item, ctx)); - const rest = schema.additionalItems && typeof schema.additionalItems === "object" ? convertSchema(schema.additionalItems, ctx) : void 0; - if (rest) { - zodSchema = z.tuple(tupleItems).rest(rest); - } else { - zodSchema = z.tuple(tupleItems); - } - if (typeof schema.minItems === "number") { - zodSchema = zodSchema.check(z.minLength(schema.minItems)); - } - if (typeof schema.maxItems === "number") { - zodSchema = zodSchema.check(z.maxLength(schema.maxItems)); - } - } else if (items !== void 0) { - const element = convertSchema(items, ctx); - let arraySchema = z.array(element); - if (typeof schema.minItems === "number") { - arraySchema = arraySchema.min(schema.minItems); - } - if (typeof schema.maxItems === "number") { - arraySchema = arraySchema.max(schema.maxItems); - } - zodSchema = arraySchema; - } else { - zodSchema = z.array(z.any()); - } - break; - } - default: - throw new Error(`Unsupported type: ${type}`); - } - if (schema.description) { - zodSchema = zodSchema.describe(schema.description); - } - if (schema.default !== void 0) { - zodSchema = zodSchema.default(schema.default); - } - return zodSchema; -} -function convertSchema(schema, ctx) { - if (typeof schema === "boolean") { - return schema ? 
z.any() : z.never(); - } - let baseSchema = convertBaseSchema(schema, ctx); - const hasExplicitType = schema.type || schema.enum !== void 0 || schema.const !== void 0; - if (schema.anyOf && Array.isArray(schema.anyOf)) { - const options = schema.anyOf.map((s) => convertSchema(s, ctx)); - const anyOfUnion = z.union(options); - baseSchema = hasExplicitType ? z.intersection(baseSchema, anyOfUnion) : anyOfUnion; - } - if (schema.oneOf && Array.isArray(schema.oneOf)) { - const options = schema.oneOf.map((s) => convertSchema(s, ctx)); - const oneOfUnion = z.xor(options); - baseSchema = hasExplicitType ? z.intersection(baseSchema, oneOfUnion) : oneOfUnion; - } - if (schema.allOf && Array.isArray(schema.allOf)) { - if (schema.allOf.length === 0) { - baseSchema = hasExplicitType ? baseSchema : z.any(); - } else { - let result = hasExplicitType ? baseSchema : convertSchema(schema.allOf[0], ctx); - const startIdx = hasExplicitType ? 0 : 1; - for (let i = startIdx; i < schema.allOf.length; i++) { - result = z.intersection(result, convertSchema(schema.allOf[i], ctx)); - } - baseSchema = result; - } - } - if (schema.nullable === true && ctx.version === "openapi-3.0") { - baseSchema = z.nullable(baseSchema); - } - if (schema.readOnly === true) { - baseSchema = z.readonly(baseSchema); - } - const extraMeta = {}; - const coreMetadataKeys = ["$id", "id", "$comment", "$anchor", "$vocabulary", "$dynamicRef", "$dynamicAnchor"]; - for (const key of coreMetadataKeys) { - if (key in schema) { - extraMeta[key] = schema[key]; - } - } - const contentMetadataKeys = ["contentEncoding", "contentMediaType", "contentSchema"]; - for (const key of contentMetadataKeys) { - if (key in schema) { - extraMeta[key] = schema[key]; - } - } - for (const key of Object.keys(schema)) { - if (!RECOGNIZED_KEYS.has(key)) { - extraMeta[key] = schema[key]; - } - } - if (Object.keys(extraMeta).length > 0) { - ctx.registry.add(baseSchema, extraMeta); - } - return baseSchema; -} -function fromJSONSchema(schema, 
params) { - if (typeof schema === "boolean") { - return schema ? z.any() : z.never(); - } - const version2 = detectVersion(schema, params?.defaultTarget); - const defs = schema.$defs || schema.definitions || {}; - const ctx = { - version: version2, - defs, - refs: /* @__PURE__ */ new Map(), - processing: /* @__PURE__ */ new Set(), - rootSchema: schema, - registry: params?.registry ?? globalRegistry - }; - return convertSchema(schema, ctx); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/coerce.js -var coerce_exports = {}; -__export(coerce_exports, { - bigint: () => bigint3, - boolean: () => boolean3, - date: () => date4, - number: () => number3, - string: () => string3 -}); -function string3(params) { - return _coercedString(ZodString, params); -} -function number3(params) { - return _coercedNumber(ZodNumber, params); -} -function boolean3(params) { - return _coercedBoolean(ZodBoolean, params); -} -function bigint3(params) { - return _coercedBigint(ZodBigInt, params); -} -function date4(params) { - return _coercedDate(ZodDate, params); -} - -// ../../node_modules/.pnpm/zod@4.3.6/node_modules/zod/v4/classic/external.js -config(en_default()); - -// ../../node_modules/.pnpm/@scure+base@2.0.0/node_modules/@scure/base/index.js -function isBytes(a) { - return a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; -} -function isArrayOf(isString, arr) { - if (!Array.isArray(arr)) - return false; - if (arr.length === 0) - return true; - if (isString) { - return arr.every((item) => typeof item === "string"); - } else { - return arr.every((item) => Number.isSafeInteger(item)); - } -} -function astr(label, input) { - if (typeof input !== "string") - throw new Error(`${label}: string expected`); - return true; -} -function anumber(n) { - if (!Number.isSafeInteger(n)) - throw new Error(`invalid integer: ${n}`); -} -function aArr(input) { - if (!Array.isArray(input)) - throw new Error("array expected"); -} -function 
astrArr(label, input) { - if (!isArrayOf(true, input)) - throw new Error(`${label}: array of strings expected`); -} -function anumArr(label, input) { - if (!isArrayOf(false, input)) - throw new Error(`${label}: array of numbers expected`); -} -// @__NO_SIDE_EFFECTS__ -function chain(...args) { - const id = (a) => a; - const wrap = (a, b) => (c) => a(b(c)); - const encode3 = args.map((x) => x.encode).reduceRight(wrap, id); - const decode3 = args.map((x) => x.decode).reduce(wrap, id); - return { encode: encode3, decode: decode3 }; -} -// @__NO_SIDE_EFFECTS__ -function alphabet(letters) { - const lettersA = typeof letters === "string" ? letters.split("") : letters; - const len = lettersA.length; - astrArr("alphabet", lettersA); - const indexes = new Map(lettersA.map((l, i) => [l, i])); - return { - encode: (digits) => { - aArr(digits); - return digits.map((i) => { - if (!Number.isSafeInteger(i) || i < 0 || i >= len) - throw new Error(`alphabet.encode: digit index outside alphabet "${i}". Allowed: ${letters}`); - return lettersA[i]; - }); - }, - decode: (input) => { - aArr(input); - return input.map((letter) => { - astr("alphabet.decode", letter); - const i = indexes.get(letter); - if (i === void 0) - throw new Error(`Unknown letter: "${letter}". Allowed: ${letters}`); - return i; - }); - } - }; -} -// @__NO_SIDE_EFFECTS__ -function join(separator = "") { - astr("join", separator); - return { - encode: (from) => { - astrArr("join.decode", from); - return from.join(separator); - }, - decode: (to) => { - astr("join.decode", to); - return to.split(separator); - } - }; -} -var gcd = (a, b) => b === 0 ? 
a : gcd(b, a % b); -var radix2carry = /* @__NO_SIDE_EFFECTS__ */ (from, to) => from + (to - gcd(from, to)); -var powers = /* @__PURE__ */ (() => { - let res = []; - for (let i = 0; i < 40; i++) - res.push(2 ** i); - return res; -})(); -function convertRadix2(data, from, to, padding) { - aArr(data); - if (from <= 0 || from > 32) - throw new Error(`convertRadix2: wrong from=${from}`); - if (to <= 0 || to > 32) - throw new Error(`convertRadix2: wrong to=${to}`); - if (/* @__PURE__ */ radix2carry(from, to) > 32) { - throw new Error(`convertRadix2: carry overflow from=${from} to=${to} carryBits=${/* @__PURE__ */ radix2carry(from, to)}`); - } - let carry = 0; - let pos = 0; - const max = powers[from]; - const mask = powers[to] - 1; - const res = []; - for (const n of data) { - anumber(n); - if (n >= max) - throw new Error(`convertRadix2: invalid data word=${n} from=${from}`); - carry = carry << from | n; - if (pos + from > 32) - throw new Error(`convertRadix2: carry overflow pos=${pos} from=${from}`); - pos += from; - for (; pos >= to; pos -= to) - res.push((carry >> pos - to & mask) >>> 0); - const pow = powers[pos]; - if (pow === void 0) - throw new Error("invalid carry"); - carry &= pow - 1; - } - carry = carry << to - pos & mask; - if (!padding && pos >= from) - throw new Error("Excess padding"); - if (!padding && carry > 0) - throw new Error(`Non-zero padding: ${carry}`); - if (padding && pos > 0) - res.push(carry >>> 0); - return res; -} -// @__NO_SIDE_EFFECTS__ -function radix2(bits, revPadding = false) { - anumber(bits); - if (bits <= 0 || bits > 32) - throw new Error("radix2: bits should be in (0..32]"); - if (/* @__PURE__ */ radix2carry(8, bits) > 32 || /* @__PURE__ */ radix2carry(bits, 8) > 32) - throw new Error("radix2: carry overflow"); - return { - encode: (bytes) => { - if (!isBytes(bytes)) - throw new Error("radix2.encode input should be Uint8Array"); - return convertRadix2(Array.from(bytes), 8, bits, !revPadding); - }, - decode: (digits) => { - 
anumArr("radix2.decode", digits); - return Uint8Array.from(convertRadix2(digits, bits, 8, revPadding)); - } - }; -} -var base64urlnopad = /* @__PURE__ */ chain(/* @__PURE__ */ radix2(6), /* @__PURE__ */ alphabet("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"), /* @__PURE__ */ join("")); - -// ../../packages/protocol/src/errors.ts -var ProtocolParseError = class extends Error { - code; - constructor(code, message) { - super(message); - this.name = "ProtocolParseError"; - this.code = code; - } -}; - -// ../../packages/protocol/src/base64url.ts -function invalidBase64url(input) { - return new ProtocolParseError( - "INVALID_BASE64URL", - `Invalid base64url input: ${input}` - ); -} -function encodeBase64url(input) { - return base64urlnopad.encode(input); -} -function decodeBase64url(input) { - if (input.length === 0) { - return new Uint8Array(); - } - try { - return base64urlnopad.decode(input); - } catch { - throw invalidBase64url(input); - } -} - -// ../../node_modules/.pnpm/ulid@3.0.2/node_modules/ulid/dist/node/index.js -import crypto2 from "crypto"; -var ENCODING = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"; -var ENCODING_LEN = 32; -var RANDOM_LEN = 16; -var TIME_LEN = 10; -var TIME_MAX = 281474976710655; -var ULIDErrorCode; -(function(ULIDErrorCode2) { - ULIDErrorCode2["Base32IncorrectEncoding"] = "B32_ENC_INVALID"; - ULIDErrorCode2["DecodeTimeInvalidCharacter"] = "DEC_TIME_CHAR"; - ULIDErrorCode2["DecodeTimeValueMalformed"] = "DEC_TIME_MALFORMED"; - ULIDErrorCode2["EncodeTimeNegative"] = "ENC_TIME_NEG"; - ULIDErrorCode2["EncodeTimeSizeExceeded"] = "ENC_TIME_SIZE_EXCEED"; - ULIDErrorCode2["EncodeTimeValueMalformed"] = "ENC_TIME_MALFORMED"; - ULIDErrorCode2["PRNGDetectFailure"] = "PRNG_DETECT"; - ULIDErrorCode2["ULIDInvalid"] = "ULID_INVALID"; - ULIDErrorCode2["Unexpected"] = "UNEXPECTED"; - ULIDErrorCode2["UUIDInvalid"] = "UUID_INVALID"; -})(ULIDErrorCode || (ULIDErrorCode = {})); -var ULIDError = class extends Error { - constructor(errorCode, message) 
{ - super(`${message} (${errorCode})`); - this.name = "ULIDError"; - this.code = errorCode; - } -}; -function decodeTime(id) { - if (id.length !== TIME_LEN + RANDOM_LEN) { - throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, "Malformed ULID"); - } - const time3 = id.substr(0, TIME_LEN).toUpperCase().split("").reverse().reduce((carry, char, index) => { - const encodingIndex = ENCODING.indexOf(char); - if (encodingIndex === -1) { - throw new ULIDError(ULIDErrorCode.DecodeTimeInvalidCharacter, `Time decode error: Invalid character: ${char}`); - } - return carry += encodingIndex * Math.pow(ENCODING_LEN, index); - }, 0); - if (time3 > TIME_MAX) { - throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, `Malformed ULID: timestamp too large: ${time3}`); - } - return time3; -} -function isValid(id) { - return typeof id === "string" && id.length === TIME_LEN + RANDOM_LEN && id.toUpperCase().split("").every((char) => ENCODING.indexOf(char) !== -1); -} - -// ../../packages/protocol/src/ulid.ts -var ULID_PATTERN = /^[0-9A-HJKMNP-TV-Z]{26}$/; -function invalidUlid(value) { - return new ProtocolParseError("INVALID_ULID", `Invalid ULID: ${value}`); -} -function parseUlid(value) { - if (!ULID_PATTERN.test(value) || !isValid(value)) { - throw invalidUlid(value); - } - return { - value, - timestampMs: decodeTime(value) - }; -} - -// ../../packages/protocol/src/did.ts -function invalidDid(value) { - return new ProtocolParseError("INVALID_DID", `Invalid DID: ${value}`); -} -function ensureDidUlid(value) { - try { - parseUlid(value); - } catch { - throw invalidDid(value); - } -} -function parseDid(value) { - const parts = value.split(":"); - if (parts.length !== 4) { - throw invalidDid(value); - } - const [scheme, method, rawKind, rawUlid] = parts; - if (scheme !== "did" || method !== "claw") { - throw invalidDid(value); - } - if (rawKind !== "human" && rawKind !== "agent") { - throw invalidDid(value); - } - ensureDidUlid(rawUlid); - return { - kind: rawKind, - ulid: 
rawUlid - }; -} - -// ../../packages/protocol/src/text.ts -function hasControlChars(value) { - for (const char of value) { - const code = char.charCodeAt(0); - if (code <= 31 || code === 127) { - return true; - } - } - return false; -} - -// ../../packages/protocol/src/ait.ts -var MAX_AGENT_DESCRIPTION_LENGTH = 280; -var AGENT_NAME_REGEX = /^[A-Za-z0-9._ -]{1,64}$/; -var MAX_FRAMEWORK_LENGTH = 32; -var ED25519_PUBLIC_KEY_LENGTH = 32; -function validateAgentName(name) { - return AGENT_NAME_REGEX.test(name); -} -var aitClaimsSchema = external_exports.object({ - iss: external_exports.string().min(1, "iss is required"), - sub: external_exports.string().min(1, "sub is required"), - ownerDid: external_exports.string().min(1, "ownerDid is required"), - name: external_exports.string().refine(validateAgentName, "name contains invalid characters or length"), - framework: external_exports.string().min(1, "framework is required").max(MAX_FRAMEWORK_LENGTH).refine( - (value) => !hasControlChars(value), - "framework contains control characters" - ), - description: external_exports.string().max(MAX_AGENT_DESCRIPTION_LENGTH).refine( - (value) => !hasControlChars(value), - "description contains control characters" - ).optional(), - cnf: external_exports.object({ - jwk: external_exports.object({ - kty: external_exports.literal("OKP"), - crv: external_exports.literal("Ed25519"), - x: external_exports.string().min(1) - }).strict() - }).strict(), - iat: external_exports.number().int().nonnegative(), - nbf: external_exports.number().int().nonnegative(), - exp: external_exports.number().int().nonnegative(), - jti: external_exports.string().min(1) -}).strict().superRefine((claims, ctx) => { - try { - const parsedSub = parseDid(claims.sub); - if (parsedSub.kind !== "agent") { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "sub must be an agent DID", - path: ["sub"] - }); - } - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "sub 
must be a valid DID", - path: ["sub"] - }); - } - try { - const parsedOwnerDid = parseDid(claims.ownerDid); - if (parsedOwnerDid.kind !== "human") { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "ownerDid must be a human DID", - path: ["ownerDid"] - }); - } - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "ownerDid must be a valid DID", - path: ["ownerDid"] - }); - } - try { - const decodedPublicKey = decodeBase64url(claims.cnf.jwk.x); - if (decodedPublicKey.length !== ED25519_PUBLIC_KEY_LENGTH) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "cnf.jwk.x must decode to 32-byte Ed25519 public key", - path: ["cnf", "jwk", "x"] - }); - } - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "cnf.jwk.x must be valid base64url", - path: ["cnf", "jwk", "x"] - }); - } - try { - parseUlid(claims.jti); - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "jti must be a valid ULID", - path: ["jti"] - }); - } - if (claims.exp <= claims.nbf) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "exp must be greater than nbf", - path: ["exp"] - }); - } - if (claims.exp <= claims.iat) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "exp must be greater than iat", - path: ["exp"] - }); - } -}); - -// ../../packages/protocol/src/crl.ts -var crlClaimsSchema = external_exports.object({ - iss: external_exports.string().min(1, "iss is required"), - jti: external_exports.string().min(1, "jti is required"), - iat: external_exports.number().int().nonnegative(), - exp: external_exports.number().int().nonnegative(), - revocations: external_exports.array( - external_exports.object({ - jti: external_exports.string().min(1, "revocation.jti is required"), - agentDid: external_exports.string().min(1, "agentDid is required"), - reason: external_exports.string().max(280).optional(), - 
revokedAt: external_exports.number().int().nonnegative() - }).strict().superRefine((revocation, ctx) => { - if (hasControlChars(revocation.agentDid)) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "revocation.agentDid contains control characters", - path: ["agentDid"] - }); - } - }) - ).min(1, "revocations must include at least one entry") -}).strict().superRefine((claims, ctx) => { - if (claims.exp <= claims.iat) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "exp must be greater than iat", - path: ["exp"] - }); - } - for (const [index, revocation] of claims.revocations.entries()) { - try { - const parsedAgentDid = parseDid(revocation.agentDid); - if (parsedAgentDid.kind !== "agent") { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "revocation.agentDid must refer to an agent DID", - path: ["revocations", index, "agentDid"] - }); - } - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "revocation.agentDid must be a valid DID", - path: ["revocations", index, "agentDid"] - }); - } - try { - parseUlid(revocation.jti); - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "revocation.jti must be a valid ULID", - path: ["revocations", index, "jti"] - }); - } - } -}); - -// ../../packages/protocol/src/endpoints.ts -var AGENT_AUTH_REFRESH_PATH = "/v1/agents/auth/refresh"; - -// ../../packages/protocol/src/http-signing.ts -var CLAW_PROOF_CANONICAL_VERSION = "CLAW-PROOF-V1"; -function canonicalizeRequest(input) { - return [ - CLAW_PROOF_CANONICAL_VERSION, - input.method.toUpperCase(), - input.pathWithQuery, - input.timestamp, - input.nonce, - input.bodyHash - ].join("\n"); -} - -// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/request/constants.js -var GET_MATCH_RESULT = /* @__PURE__ */ Symbol(); - -// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/utils/body.js -var parseBody = async (request, 
options = /* @__PURE__ */ Object.create(null)) => { - const { all = false, dot = false } = options; - const headers = request instanceof HonoRequest ? request.raw.headers : request.headers; - const contentType = headers.get("Content-Type"); - if (contentType?.startsWith("multipart/form-data") || contentType?.startsWith("application/x-www-form-urlencoded")) { - return parseFormData(request, { all, dot }); - } - return {}; -}; -async function parseFormData(request, options) { - const formData = await request.formData(); - if (formData) { - return convertFormDataToBodyData(formData, options); - } - return {}; -} -function convertFormDataToBodyData(formData, options) { - const form = /* @__PURE__ */ Object.create(null); - formData.forEach((value, key) => { - const shouldParseAllValues = options.all || key.endsWith("[]"); - if (!shouldParseAllValues) { - form[key] = value; - } else { - handleParsingAllValues(form, key, value); - } - }); - if (options.dot) { - Object.entries(form).forEach(([key, value]) => { - const shouldParseDotValues = key.includes("."); - if (shouldParseDotValues) { - handleParsingNestedValues(form, key, value); - delete form[key]; - } - }); - } - return form; -} -var handleParsingAllValues = (form, key, value) => { - if (form[key] !== void 0) { - if (Array.isArray(form[key])) { - ; - form[key].push(value); - } else { - form[key] = [form[key], value]; - } - } else { - if (!key.endsWith("[]")) { - form[key] = value; - } else { - form[key] = [value]; - } - } -}; -var handleParsingNestedValues = (form, key, value) => { - let nestedForm = form; - const keys = key.split("."); - keys.forEach((key2, index) => { - if (index === keys.length - 1) { - nestedForm[key2] = value; - } else { - if (!nestedForm[key2] || typeof nestedForm[key2] !== "object" || Array.isArray(nestedForm[key2]) || nestedForm[key2] instanceof File) { - nestedForm[key2] = /* @__PURE__ */ Object.create(null); - } - nestedForm = nestedForm[key2]; - } - }); -}; - -// 
../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/utils/url.js -var tryDecode = (str, decoder) => { - try { - return decoder(str); - } catch { - return str.replace(/(?:%[0-9A-Fa-f]{2})+/g, (match2) => { - try { - return decoder(match2); - } catch { - return match2; - } - }); - } -}; -var _decodeURI = (value) => { - if (!/[%+]/.test(value)) { - return value; - } - if (value.indexOf("+") !== -1) { - value = value.replace(/\+/g, " "); - } - return value.indexOf("%") !== -1 ? tryDecode(value, decodeURIComponent_) : value; -}; -var _getQueryParam = (url2, key, multiple) => { - let encoded; - if (!multiple && key && !/[%+]/.test(key)) { - let keyIndex2 = url2.indexOf("?", 8); - if (keyIndex2 === -1) { - return void 0; - } - if (!url2.startsWith(key, keyIndex2 + 1)) { - keyIndex2 = url2.indexOf(`&${key}`, keyIndex2 + 1); - } - while (keyIndex2 !== -1) { - const trailingKeyCode = url2.charCodeAt(keyIndex2 + key.length + 1); - if (trailingKeyCode === 61) { - const valueIndex = keyIndex2 + key.length + 2; - const endIndex = url2.indexOf("&", valueIndex); - return _decodeURI(url2.slice(valueIndex, endIndex === -1 ? void 0 : endIndex)); - } else if (trailingKeyCode == 38 || isNaN(trailingKeyCode)) { - return ""; - } - keyIndex2 = url2.indexOf(`&${key}`, keyIndex2 + 1); - } - encoded = /[%+]/.test(url2); - if (!encoded) { - return void 0; - } - } - const results = {}; - encoded ??= /[%+]/.test(url2); - let keyIndex = url2.indexOf("?", 8); - while (keyIndex !== -1) { - const nextKeyIndex = url2.indexOf("&", keyIndex + 1); - let valueIndex = url2.indexOf("=", keyIndex); - if (valueIndex > nextKeyIndex && nextKeyIndex !== -1) { - valueIndex = -1; - } - let name = url2.slice( - keyIndex + 1, - valueIndex === -1 ? nextKeyIndex === -1 ? 
void 0 : nextKeyIndex : valueIndex - ); - if (encoded) { - name = _decodeURI(name); - } - keyIndex = nextKeyIndex; - if (name === "") { - continue; - } - let value; - if (valueIndex === -1) { - value = ""; - } else { - value = url2.slice(valueIndex + 1, nextKeyIndex === -1 ? void 0 : nextKeyIndex); - if (encoded) { - value = _decodeURI(value); - } - } - if (multiple) { - if (!(results[name] && Array.isArray(results[name]))) { - results[name] = []; - } - ; - results[name].push(value); - } else { - results[name] ??= value; - } - } - return key ? results[key] : results; -}; -var getQueryParam = _getQueryParam; -var getQueryParams = (url2, key) => { - return _getQueryParam(url2, key, true); -}; -var decodeURIComponent_ = decodeURIComponent; - -// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/request.js -var tryDecodeURIComponent = (str) => tryDecode(str, decodeURIComponent_); -var HonoRequest = class { - /** - * `.raw` can get the raw Request object. - * - * @see {@link https://hono.dev/docs/api/request#raw} - * - * @example - * ```ts - * // For Cloudflare Workers - * app.post('/', async (c) => { - * const metadata = c.req.raw.cf?.hostMetadata? - * ... - * }) - * ``` - */ - raw; - #validatedData; - // Short name of validatedData - #matchResult; - routeIndex = 0; - /** - * `.path` can get the pathname of the request. - * - * @see {@link https://hono.dev/docs/api/request#path} - * - * @example - * ```ts - * app.get('/about/me', (c) => { - * const pathname = c.req.path // `/about/me` - * }) - * ``` - */ - path; - bodyCache = {}; - constructor(request, path = "/", matchResult = [[]]) { - this.raw = request; - this.path = path; - this.#matchResult = matchResult; - this.#validatedData = {}; - } - param(key) { - return key ? this.#getDecodedParam(key) : this.#getAllDecodedParams(); - } - #getDecodedParam(key) { - const paramKey = this.#matchResult[0][this.routeIndex][1][key]; - const param = this.#getParamValue(paramKey); - return param && /\%/.test(param) ? 
tryDecodeURIComponent(param) : param; - } - #getAllDecodedParams() { - const decoded = {}; - const keys = Object.keys(this.#matchResult[0][this.routeIndex][1]); - for (const key of keys) { - const value = this.#getParamValue(this.#matchResult[0][this.routeIndex][1][key]); - if (value !== void 0) { - decoded[key] = /\%/.test(value) ? tryDecodeURIComponent(value) : value; - } - } - return decoded; - } - #getParamValue(paramKey) { - return this.#matchResult[1] ? this.#matchResult[1][paramKey] : paramKey; - } - query(key) { - return getQueryParam(this.url, key); - } - queries(key) { - return getQueryParams(this.url, key); - } - header(name) { - if (name) { - return this.raw.headers.get(name) ?? void 0; - } - const headerData = {}; - this.raw.headers.forEach((value, key) => { - headerData[key] = value; - }); - return headerData; - } - async parseBody(options) { - return this.bodyCache.parsedBody ??= await parseBody(this, options); - } - #cachedBody = (key) => { - const { bodyCache, raw } = this; - const cachedBody = bodyCache[key]; - if (cachedBody) { - return cachedBody; - } - const anyCachedKey = Object.keys(bodyCache)[0]; - if (anyCachedKey) { - return bodyCache[anyCachedKey].then((body) => { - if (anyCachedKey === "json") { - body = JSON.stringify(body); - } - return new Response(body)[key](); - }); - } - return bodyCache[key] = raw[key](); - }; - /** - * `.json()` can parse Request body of type `application/json` - * - * @see {@link https://hono.dev/docs/api/request#json} - * - * @example - * ```ts - * app.post('/entry', async (c) => { - * const body = await c.req.json() - * }) - * ``` - */ - json() { - return this.#cachedBody("text").then((text) => JSON.parse(text)); - } - /** - * `.text()` can parse Request body of type `text/plain` - * - * @see {@link https://hono.dev/docs/api/request#text} - * - * @example - * ```ts - * app.post('/entry', async (c) => { - * const body = await c.req.text() - * }) - * ``` - */ - text() { - return this.#cachedBody("text"); - } - 
/** - * `.arrayBuffer()` parse Request body as an `ArrayBuffer` - * - * @see {@link https://hono.dev/docs/api/request#arraybuffer} - * - * @example - * ```ts - * app.post('/entry', async (c) => { - * const body = await c.req.arrayBuffer() - * }) - * ``` - */ - arrayBuffer() { - return this.#cachedBody("arrayBuffer"); - } - /** - * Parses the request body as a `Blob`. - * @example - * ```ts - * app.post('/entry', async (c) => { - * const body = await c.req.blob(); - * }); - * ``` - * @see https://hono.dev/docs/api/request#blob - */ - blob() { - return this.#cachedBody("blob"); - } - /** - * Parses the request body as `FormData`. - * @example - * ```ts - * app.post('/entry', async (c) => { - * const body = await c.req.formData(); - * }); - * ``` - * @see https://hono.dev/docs/api/request#formdata - */ - formData() { - return this.#cachedBody("formData"); - } - /** - * Adds validated data to the request. - * - * @param target - The target of the validation. - * @param data - The validated data to add. - */ - addValidatedData(target, data) { - this.#validatedData[target] = data; - } - valid(target) { - return this.#validatedData[target]; - } - /** - * `.url()` can get the request url strings. - * - * @see {@link https://hono.dev/docs/api/request#url} - * - * @example - * ```ts - * app.get('/about/me', (c) => { - * const url = c.req.url // `http://localhost:8787/about/me` - * ... - * }) - * ``` - */ - get url() { - return this.raw.url; - } - /** - * `.method()` can get the method name of the request. - * - * @see {@link https://hono.dev/docs/api/request#method} - * - * @example - * ```ts - * app.get('/about/me', (c) => { - * const method = c.req.method // `GET` - * }) - * ``` - */ - get method() { - return this.raw.method; - } - get [GET_MATCH_RESULT]() { - return this.#matchResult; - } - /** - * `.matchedRoutes()` can return a matched route in the handler - * - * @deprecated - * - * Use matchedRoutes helper defined in "hono/route" instead. 
- * - * @see {@link https://hono.dev/docs/api/request#matchedroutes} - * - * @example - * ```ts - * app.use('*', async function logger(c, next) { - * await next() - * c.req.matchedRoutes.forEach(({ handler, method, path }, i) => { - * const name = handler.name || (handler.length < 2 ? '[handler]' : '[middleware]') - * console.log( - * method, - * ' ', - * path, - * ' '.repeat(Math.max(10 - path.length, 0)), - * name, - * i === c.req.routeIndex ? '<- respond from here' : '' - * ) - * }) - * }) - * ``` - */ - get matchedRoutes() { - return this.#matchResult[0].map(([[, route]]) => route); - } - /** - * `routePath()` can retrieve the path registered within the handler - * - * @deprecated - * - * Use routePath helper defined in "hono/route" instead. - * - * @see {@link https://hono.dev/docs/api/request#routepath} - * - * @example - * ```ts - * app.get('/posts/:id', (c) => { - * return c.json({ path: c.req.routePath }) - * }) - * ``` - */ - get routePath() { - return this.#matchResult[0].map(([[, route]]) => route)[this.routeIndex].path; - } -}; - -// ../../node_modules/.pnpm/hono@4.11.9/node_modules/hono/dist/router/reg-exp-router/node.js -var regExpMetaChars = new Set(".\\+*[^]$()"); - -// ../../packages/sdk/src/exceptions.ts -var AppError = class extends Error { - code; - status; - details; - expose; - constructor(options) { - super(options.message); - this.name = "AppError"; - this.code = options.code; - this.status = options.status; - this.details = options.details; - this.expose = options.expose ?? 
options.status < 500; - } -}; - -// ../../node_modules/.pnpm/@noble+ed25519@3.0.0/node_modules/@noble/ed25519/index.js -var ed25519_CURVE = { - p: 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffedn, - n: 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3edn, - h: 8n, - a: 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffecn, - d: 0x52036cee2b6ffe738cc740797779e89800700a4d4141d8ab75eb4dca135978a3n, - Gx: 0x216936d3cd6e53fec0a4e231fdd6dc5c692cc7609525a7b2c9562d608f25d51an, - Gy: 0x6666666666666666666666666666666666666666666666666666666666666658n -}; -var { p: P, n: N, Gx, Gy, a: _a2, d: _d, h } = ed25519_CURVE; -var L = 32; -var L2 = 64; -var captureTrace = (...args) => { - if ("captureStackTrace" in Error && typeof Error.captureStackTrace === "function") { - Error.captureStackTrace(...args); - } -}; -var err = (message = "") => { - const e = new Error(message); - captureTrace(e, err); - throw e; -}; -var isBig = (n) => typeof n === "bigint"; -var isStr = (s) => typeof s === "string"; -var isBytes2 = (a) => a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; -var abytes = (value, length, title = "") => { - const bytes = isBytes2(value); - const len = value?.length; - const needsLen = length !== void 0; - if (!bytes || needsLen && len !== length) { - const prefix = title && `"${title}" `; - const ofLen = needsLen ? ` of length ${length}` : ""; - const got = bytes ? 
`length=${len}` : `type=${typeof value}`; - err(prefix + "expected Uint8Array" + ofLen + ", got " + got); - } - return value; -}; -var u8n = (len) => new Uint8Array(len); -var u8fr = (buf) => Uint8Array.from(buf); -var padh = (n, pad) => n.toString(16).padStart(pad, "0"); -var bytesToHex = (b) => Array.from(abytes(b)).map((e) => padh(e, 2)).join(""); -var C = { _0: 48, _9: 57, A: 65, F: 70, a: 97, f: 102 }; -var _ch = (ch) => { - if (ch >= C._0 && ch <= C._9) - return ch - C._0; - if (ch >= C.A && ch <= C.F) - return ch - (C.A - 10); - if (ch >= C.a && ch <= C.f) - return ch - (C.a - 10); - return; -}; -var hexToBytes = (hex3) => { - const e = "hex invalid"; - if (!isStr(hex3)) - return err(e); - const hl = hex3.length; - const al = hl / 2; - if (hl % 2) - return err(e); - const array2 = u8n(al); - for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { - const n1 = _ch(hex3.charCodeAt(hi)); - const n2 = _ch(hex3.charCodeAt(hi + 1)); - if (n1 === void 0 || n2 === void 0) - return err(e); - array2[ai] = n1 * 16 + n2; - } - return array2; -}; -var cr = () => globalThis?.crypto; -var subtle = () => cr()?.subtle ?? err("crypto.subtle must be defined, consider polyfill"); -var concatBytes = (...arrs) => { - const r = u8n(arrs.reduce((sum, a) => sum + abytes(a).length, 0)); - let pad = 0; - arrs.forEach((a) => { - r.set(a, pad); - pad += a.length; - }); - return r; -}; -var big = BigInt; -var assertRange = (n, min, max, msg = "bad number: out of range") => isBig(n) && min <= n && n < max ? n : err(msg); -var M = (a, b = P) => { - const r = a % b; - return r >= 0n ? r : b + r; -}; -var modN = (a) => M(a, N); -var invert = (num, md) => { - if (num === 0n || md <= 0n) - err("no inverse n=" + num + " mod=" + md); - let a = M(num, md), b = md, x = 0n, y = 1n, u = 1n, v = 0n; - while (a !== 0n) { - const q = b / a, r = b % a; - const m = x - u * q, n = y - v * q; - b = a, a = r, x = u, y = v, u = m, v = n; - } - return b === 1n ? 
M(x, md) : err("no inverse"); -}; -var apoint = (p) => p instanceof Point ? p : err("Point expected"); -var B256 = 2n ** 256n; -var Point = class _Point { - static BASE; - static ZERO; - X; - Y; - Z; - T; - constructor(X, Y, Z, T) { - const max = B256; - this.X = assertRange(X, 0n, max); - this.Y = assertRange(Y, 0n, max); - this.Z = assertRange(Z, 1n, max); - this.T = assertRange(T, 0n, max); - Object.freeze(this); - } - static CURVE() { - return ed25519_CURVE; - } - static fromAffine(p) { - return new _Point(p.x, p.y, 1n, M(p.x * p.y)); - } - /** RFC8032 5.1.3: Uint8Array to Point. */ - static fromBytes(hex3, zip215 = false) { - const d = _d; - const normed = u8fr(abytes(hex3, L)); - const lastByte = hex3[31]; - normed[31] = lastByte & ~128; - const y = bytesToNumLE(normed); - const max = zip215 ? B256 : P; - assertRange(y, 0n, max); - const y2 = M(y * y); - const u = M(y2 - 1n); - const v = M(d * y2 + 1n); - let { isValid: isValid2, value: x } = uvRatio(u, v); - if (!isValid2) - err("bad point: y not sqrt"); - const isXOdd = (x & 1n) === 1n; - const isLastByteOdd = (lastByte & 128) !== 0; - if (!zip215 && x === 0n && isLastByteOdd) - err("bad point: x==0, isLastByteOdd"); - if (isLastByteOdd !== isXOdd) - x = M(-x); - return new _Point(x, y, 1n, M(x * y)); - } - static fromHex(hex3, zip215) { - return _Point.fromBytes(hexToBytes(hex3), zip215); - } - get x() { - return this.toAffine().x; - } - get y() { - return this.toAffine().y; - } - /** Checks if the point is valid and on-curve. 
*/ - assertValidity() { - const a = _a2; - const d = _d; - const p = this; - if (p.is0()) - return err("bad point: ZERO"); - const { X, Y, Z, T } = p; - const X2 = M(X * X); - const Y2 = M(Y * Y); - const Z2 = M(Z * Z); - const Z4 = M(Z2 * Z2); - const aX2 = M(X2 * a); - const left = M(Z2 * M(aX2 + Y2)); - const right = M(Z4 + M(d * M(X2 * Y2))); - if (left !== right) - return err("bad point: equation left != right (1)"); - const XY = M(X * Y); - const ZT = M(Z * T); - if (XY !== ZT) - return err("bad point: equation left != right (2)"); - return this; - } - /** Equality check: compare points P&Q. */ - equals(other) { - const { X: X1, Y: Y1, Z: Z1 } = this; - const { X: X2, Y: Y2, Z: Z2 } = apoint(other); - const X1Z2 = M(X1 * Z2); - const X2Z1 = M(X2 * Z1); - const Y1Z2 = M(Y1 * Z2); - const Y2Z1 = M(Y2 * Z1); - return X1Z2 === X2Z1 && Y1Z2 === Y2Z1; - } - is0() { - return this.equals(I); - } - /** Flip point over y coordinate. */ - negate() { - return new _Point(M(-this.X), this.Y, this.Z, M(-this.T)); - } - /** Point doubling. Complete formula. Cost: `4M + 4S + 1*a + 6add + 1*2`. */ - double() { - const { X: X1, Y: Y1, Z: Z1 } = this; - const a = _a2; - const A = M(X1 * X1); - const B = M(Y1 * Y1); - const C2 = M(2n * M(Z1 * Z1)); - const D = M(a * A); - const x1y1 = X1 + Y1; - const E = M(M(x1y1 * x1y1) - A - B); - const G2 = D + B; - const F = G2 - C2; - const H = D - B; - const X3 = M(E * F); - const Y3 = M(G2 * H); - const T3 = M(E * H); - const Z3 = M(F * G2); - return new _Point(X3, Y3, Z3, T3); - } - /** Point addition. Complete formula. Cost: `8M + 1*k + 8add + 1*2`. 
*/ - add(other) { - const { X: X1, Y: Y1, Z: Z1, T: T1 } = this; - const { X: X2, Y: Y2, Z: Z2, T: T2 } = apoint(other); - const a = _a2; - const d = _d; - const A = M(X1 * X2); - const B = M(Y1 * Y2); - const C2 = M(T1 * d * T2); - const D = M(Z1 * Z2); - const E = M((X1 + Y1) * (X2 + Y2) - A - B); - const F = M(D - C2); - const G2 = M(D + C2); - const H = M(B - a * A); - const X3 = M(E * F); - const Y3 = M(G2 * H); - const T3 = M(E * H); - const Z3 = M(F * G2); - return new _Point(X3, Y3, Z3, T3); - } - subtract(other) { - return this.add(apoint(other).negate()); - } - /** - * Point-by-scalar multiplication. Scalar must be in range 1 <= n < CURVE.n. - * Uses {@link wNAF} for base point. - * Uses fake point to mitigate side-channel leakage. - * @param n scalar by which point is multiplied - * @param safe safe mode guards against timing attacks; unsafe mode is faster - */ - multiply(n, safe = true) { - if (!safe && (n === 0n || this.is0())) - return I; - assertRange(n, 1n, N); - if (n === 1n) - return this; - if (this.equals(G)) - return wNAF(n).p; - let p = I; - let f = G; - for (let d = this; n > 0n; d = d.double(), n >>= 1n) { - if (n & 1n) - p = p.add(d); - else if (safe) - f = f.add(d); - } - return p; - } - multiplyUnsafe(scalar) { - return this.multiply(scalar, false); - } - /** Convert point to 2d xy affine point. (X, Y, Z) ∋ (x=X/Z, y=Y/Z) */ - toAffine() { - const { X, Y, Z } = this; - if (this.equals(I)) - return { x: 0n, y: 1n }; - const iz = invert(Z, P); - if (M(Z * iz) !== 1n) - err("invalid inverse"); - const x = M(X * iz); - const y = M(Y * iz); - return { x, y }; - } - toBytes() { - const { x, y } = this.assertValidity().toAffine(); - const b = numTo32bLE(y); - b[31] |= x & 1n ? 
128 : 0; - return b; - } - toHex() { - return bytesToHex(this.toBytes()); - } - clearCofactor() { - return this.multiply(big(h), false); - } - isSmallOrder() { - return this.clearCofactor().is0(); - } - isTorsionFree() { - let p = this.multiply(N / 2n, false).double(); - if (N % 2n) - p = p.add(this); - return p.is0(); - } -}; -var G = new Point(Gx, Gy, 1n, M(Gx * Gy)); -var I = new Point(0n, 1n, 1n, 0n); -Point.BASE = G; -Point.ZERO = I; -var numTo32bLE = (num) => hexToBytes(padh(assertRange(num, 0n, B256), L2)).reverse(); -var bytesToNumLE = (b) => big("0x" + bytesToHex(u8fr(abytes(b)).reverse())); -var pow2 = (x, power) => { - let r = x; - while (power-- > 0n) { - r *= r; - r %= P; - } - return r; -}; -var pow_2_252_3 = (x) => { - const x2 = x * x % P; - const b2 = x2 * x % P; - const b4 = pow2(b2, 2n) * b2 % P; - const b5 = pow2(b4, 1n) * x % P; - const b10 = pow2(b5, 5n) * b5 % P; - const b20 = pow2(b10, 10n) * b10 % P; - const b40 = pow2(b20, 20n) * b20 % P; - const b80 = pow2(b40, 40n) * b40 % P; - const b160 = pow2(b80, 80n) * b80 % P; - const b240 = pow2(b160, 80n) * b80 % P; - const b250 = pow2(b240, 10n) * b10 % P; - const pow_p_5_8 = pow2(b250, 2n) * x % P; - return { pow_p_5_8, b2 }; -}; -var RM1 = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0n; -var uvRatio = (u, v) => { - const v3 = M(v * v * v); - const v7 = M(v3 * v3 * v); - const pow = pow_2_252_3(u * v7).pow_p_5_8; - let x = M(u * v3 * pow); - const vx2 = M(v * x * x); - const root1 = x; - const root2 = M(x * RM1); - const useRoot1 = vx2 === u; - const useRoot2 = vx2 === M(-u); - const noRoot = vx2 === M(-u * RM1); - if (useRoot1) - x = root1; - if (useRoot2 || noRoot) - x = root2; - if ((M(x) & 1n) === 1n) - x = M(-x); - return { isValid: useRoot1 || useRoot2, value: x }; -}; -var modL_LE = (hash2) => modN(bytesToNumLE(hash2)); -var sha512a = (...m) => hashes.sha512Async(concatBytes(...m)); -var hash2extK = (hashed) => { - const head = hashed.slice(0, L); - head[0] &= 248; - 
head[31] &= 127; - head[31] |= 64; - const prefix = hashed.slice(L, L2); - const scalar = modL_LE(head); - const point = G.multiply(scalar); - const pointBytes = point.toBytes(); - return { head, prefix, scalar, point, pointBytes }; -}; -var getExtendedPublicKeyAsync = (secretKey) => sha512a(abytes(secretKey, L)).then(hash2extK); -var hashFinishA = (res) => sha512a(res.hashable).then(res.finish); -var _sign = (e, rBytes, msg) => { - const { pointBytes: P2, scalar: s } = e; - const r = modL_LE(rBytes); - const R = G.multiply(r).toBytes(); - const hashable = concatBytes(R, P2, msg); - const finish = (hashed) => { - const S = modN(r + modL_LE(hashed) * s); - return abytes(concatBytes(R, numTo32bLE(S)), L2); - }; - return { hashable, finish }; -}; -var signAsync = async (message, secretKey) => { - const m = abytes(message); - const e = await getExtendedPublicKeyAsync(secretKey); - const rBytes = await sha512a(e.prefix, m); - return hashFinishA(_sign(e, rBytes, m)); -}; -var hashes = { - sha512Async: async (message) => { - const s = subtle(); - const m = concatBytes(message); - return u8n(await s.digest("SHA-512", m.buffer)); - }, - sha512: void 0 -}; -var W = 8; -var scalarBits = 256; -var pwindows = Math.ceil(scalarBits / W) + 1; -var pwindowSize = 2 ** (W - 1); -var precompute = () => { - const points = []; - let p = G; - let b = p; - for (let w = 0; w < pwindows; w++) { - b = p; - points.push(b); - for (let i = 1; i < pwindowSize; i++) { - b = b.add(p); - points.push(b); - } - p = b.double(); - } - return points; -}; -var Gpows = void 0; -var ctneg = (cnd, p) => { - const n = p.negate(); - return cnd ? 
n : p; -}; -var wNAF = (n) => { - const comp = Gpows || (Gpows = precompute()); - let p = I; - let f = G; - const pow_2_w = 2 ** W; - const maxNum = pow_2_w; - const mask = big(pow_2_w - 1); - const shiftBy = big(W); - for (let w = 0; w < pwindows; w++) { - let wbits = Number(n & mask); - n >>= shiftBy; - if (wbits > pwindowSize) { - wbits -= maxNum; - n += 1n; - } - const off = w * pwindowSize; - const offF = off; - const offP = off + Math.abs(wbits) - 1; - const isEven = w % 2 !== 0; - const isNeg = wbits < 0; - if (wbits === 0) { - f = f.add(ctneg(isEven, comp[offF])); - } else { - p = p.add(ctneg(isNeg, comp[offP])); - } - } - if (n !== 0n) - err("invalid wnaf"); - return { p, f }; -}; - -// ../../packages/sdk/src/crypto/ed25519.ts -async function signEd25519(message, secretKey) { - return signAsync(message, secretKey); -} -function encodeEd25519SignatureBase64url(signature) { - return encodeBase64url(signature); -} - -// ../../packages/sdk/src/http/constants.ts -var X_CLAW_TIMESTAMP = "X-Claw-Timestamp"; -var X_CLAW_NONCE = "X-Claw-Nonce"; -var X_CLAW_BODY_SHA256 = "X-Claw-Body-SHA256"; -var X_CLAW_PROOF = "X-Claw-Proof"; - -// ../../packages/sdk/src/http/utils.ts -var textEncoder = new TextEncoder(); -var ED25519_SECRET_KEY_LENGTH = 32; -function getCrypto() { - return globalThis.crypto; -} -function ensureString(value, label) { - if (typeof value !== "string" || value.trim() === "") { - throw new AppError({ - code: "HTTP_SIGNATURE_INVALID_INPUT", - message: "Input must be a non-empty string", - status: 400, - details: { - field: label - } - }); - } - return value; -} -function ensureBodyBytes(body) { - if (body === void 0) { - return new Uint8Array(); - } - if (!(body instanceof Uint8Array)) { - throw new AppError({ - code: "HTTP_SIGNATURE_INVALID_INPUT", - message: "body must be a Uint8Array when provided", - status: 400, - details: { - field: "body" - } - }); - } - return body; -} -function ensureSecretKey(key) { - if (!(key instanceof Uint8Array) || 
key.length !== ED25519_SECRET_KEY_LENGTH) { - throw new AppError({ - code: "HTTP_SIGNATURE_MISSING_SECRET", - message: "Secret key is required to sign HTTP requests", - status: 500, - details: { - keyLength: key instanceof Uint8Array ? key.length : null, - expectedKeyLength: ED25519_SECRET_KEY_LENGTH - } - }); - } -} -async function hashBodySha256Base64url(body) { - const cryptoObject = getCrypto(); - if (typeof cryptoObject !== "object" || typeof cryptoObject?.subtle !== "object" || typeof cryptoObject?.subtle?.digest !== "function") { - throw new AppError({ - code: "HTTP_SIGNATURE_CRYPTO_UNAVAILABLE", - message: "Web Crypto API is required for HTTP signing", - status: 500, - details: { - runtime: typeof cryptoObject - } - }); - } - const digest = await cryptoObject.subtle.digest("SHA-256", body); - return encodeBase64url(new Uint8Array(digest)); -} - -// ../../packages/sdk/src/http/sign.ts -async function signHttpRequest(input) { - ensureSecretKey(input.secretKey); - const method = ensureString(input.method, "method"); - const pathWithQuery = ensureString(input.pathWithQuery, "pathWithQuery"); - const timestamp = ensureString(input.timestamp, "timestamp"); - const nonce = ensureString(input.nonce, "nonce"); - const body = ensureBodyBytes(input.body); - const bodyHash = await hashBodySha256Base64url(body); - const canonicalRequest = canonicalizeRequest({ - method, - pathWithQuery, - timestamp, - nonce, - bodyHash - }); - const signature = await signEd25519( - textEncoder.encode(canonicalRequest), - input.secretKey - ); - const proof = encodeEd25519SignatureBase64url(signature); - return { - canonicalRequest, - proof, - headers: { - [X_CLAW_TIMESTAMP]: timestamp, - [X_CLAW_NONCE]: nonce, - [X_CLAW_BODY_SHA256]: bodyHash, - [X_CLAW_PROOF]: proof - } - }; -} - -// ../../packages/sdk/src/agent-auth-client.ts -var refreshSingleFlights = /* @__PURE__ */ new Map(); -var isRecord = (value) => { - return typeof value === "object" && value !== null; -}; -var 
parseNonEmptyString = (value) => { - if (typeof value !== "string") { - return ""; - } - return value.trim(); -}; -var parseJsonResponse = async (response) => { - try { - return await response.json(); - } catch { - return void 0; - } -}; -var toPathWithQuery = (requestUrl) => { - const parsed = new URL(requestUrl); - return `${parsed.pathname}${parsed.search}`; -}; -var parseRegistryErrorEnvelope = (payload) => { - if (!isRecord(payload)) { - return void 0; - } - const errorValue = payload.error; - if (!isRecord(errorValue)) { - return void 0; - } - return { - error: { - code: parseNonEmptyString(errorValue.code) || void 0, - message: parseNonEmptyString(errorValue.message) || void 0 - } - }; -}; -var parseAgentAuthBundle = (payload) => { - if (!isRecord(payload)) { - throw new AppError({ - code: "AGENT_AUTH_REFRESH_INVALID_RESPONSE", - message: "Registry returned an invalid refresh response payload", - status: 502, - expose: true - }); - } - const source = isRecord(payload.agentAuth) ? 
payload.agentAuth : payload; - const tokenType = source.tokenType; - const accessToken = source.accessToken; - const accessExpiresAt = source.accessExpiresAt; - const refreshToken = source.refreshToken; - const refreshExpiresAt = source.refreshExpiresAt; - if (tokenType !== "Bearer" || typeof accessToken !== "string" || typeof accessExpiresAt !== "string" || typeof refreshToken !== "string" || typeof refreshExpiresAt !== "string") { - throw new AppError({ - code: "AGENT_AUTH_REFRESH_INVALID_RESPONSE", - message: "Registry returned an invalid refresh response payload", - status: 502, - expose: true - }); - } - return { - tokenType, - accessToken, - accessExpiresAt, - refreshToken, - refreshExpiresAt - }; -}; -var toRefreshHttpError = (status, responseBody) => { - const parsedEnvelope = parseRegistryErrorEnvelope(responseBody); - const registryCode = parsedEnvelope?.error?.code; - const registryMessage = parsedEnvelope?.error?.message; - if (status === 400) { - return new AppError({ - code: "AGENT_AUTH_REFRESH_INVALID", - message: registryMessage ?? "Refresh request is invalid (400).", - status, - expose: true, - details: { - registryCode, - registryMessage - } - }); - } - if (status === 401) { - return new AppError({ - code: "AGENT_AUTH_REFRESH_UNAUTHORIZED", - message: registryMessage ?? "Refresh rejected (401). Agent credentials are invalid, revoked, or expired.", - status, - expose: true, - details: { - registryCode, - registryMessage - } - }); - } - if (status === 409) { - return new AppError({ - code: "AGENT_AUTH_REFRESH_CONFLICT", - message: registryMessage ?? "Refresh conflict (409). Retry request.", - status, - expose: true, - details: { - registryCode, - registryMessage - } - }); - } - if (status >= 500) { - return new AppError({ - code: "AGENT_AUTH_REFRESH_SERVER_ERROR", - message: `Registry server error (${status}). 
Try again later.`, - status: 503, - expose: true, - details: { - status - } - }); - } - return new AppError({ - code: "AGENT_AUTH_REFRESH_FAILED", - message: registryMessage ?? `Registry request failed during refresh (${status}).`, - status, - expose: true, - details: { - registryCode, - registryMessage, - status - } - }); -}; -var toRegistryAgentAuthRefreshRequestUrl = (registryUrl) => { - const normalizedBaseUrl = registryUrl.endsWith("/") ? registryUrl : `${registryUrl}/`; - return new URL( - AGENT_AUTH_REFRESH_PATH.slice(1), - normalizedBaseUrl - ).toString(); -}; -async function runRefreshSingleFlight(options) { - const existing = refreshSingleFlights.get(options.key); - if (existing) { - return existing; - } - const inFlight = options.run().finally(() => { - if (refreshSingleFlights.get(options.key) === inFlight) { - refreshSingleFlights.delete(options.key); - } - }); - refreshSingleFlights.set(options.key, inFlight); - return inFlight; -} -async function refreshAgentAuthWithClawProof(input) { - const fetchImpl = input.fetchImpl ?? globalThis.fetch; - if (typeof fetchImpl !== "function") { - throw new AppError({ - code: "AGENT_AUTH_REFRESH_NETWORK", - message: "fetch implementation is required", - status: 500, - expose: true - }); - } - const refreshUrl = toRegistryAgentAuthRefreshRequestUrl(input.registryUrl); - const refreshBody = JSON.stringify({ - refreshToken: input.refreshToken - }); - const nowMs = input.nowMs?.() ?? 
Date.now(); - const timestamp = String(Math.floor(nowMs / 1e3)); - const nonce = encodeBase64url(crypto.getRandomValues(new Uint8Array(16))); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: toPathWithQuery(refreshUrl), - timestamp, - nonce, - body: new TextEncoder().encode(refreshBody), - secretKey: input.secretKey - }); - let response; - try { - response = await fetchImpl(refreshUrl, { - method: "POST", - headers: { - authorization: `Claw ${input.ait}`, - "content-type": "application/json", - ...signed.headers - }, - body: refreshBody - }); - } catch { - throw new AppError({ - code: "AGENT_AUTH_REFRESH_NETWORK", - message: "Unable to connect to the registry. Check network access and registryUrl.", - status: 503, - expose: true - }); - } - const responseBody = await parseJsonResponse(response); - if (!response.ok) { - throw toRefreshHttpError(response.status, responseBody); - } - return parseAgentAuthBundle(responseBody); -} -function isRetryableAuthExpiryError(error48) { - if (!(error48 instanceof AppError)) { - return false; - } - return error48.status === 401; -} -async function executeWithAgentAuthRefreshRetry(input) { - const shouldRetry = input.shouldRetry ?? 
isRetryableAuthExpiryError; - const currentAuth = await input.getAuth(); - try { - return await input.perform(currentAuth); - } catch (error48) { - if (!shouldRetry(error48)) { - throw error48; - } - const refreshedAuth = await runRefreshSingleFlight({ - key: input.key, - run: async () => { - const latestAuth = await input.getAuth(); - const nextAuth = await input.refreshAuth(latestAuth); - await input.persistAuth(nextAuth); - return nextAuth; - } - }); - return input.perform(refreshedAuth); - } -} - -// ../../packages/sdk/src/runtime-environment.ts -var runtimeEnvironmentValues = [ - "development", - "production", - "test" -]; - -// ../../packages/sdk/src/config.ts -var environmentSchema = external_exports.enum(runtimeEnvironmentValues); -var registrySigningKeyStatusSchema = external_exports.enum(["active", "revoked"]); -var ED25519_PUBLIC_KEY_LENGTH2 = 32; -var registrySigningPublicKeySchema = external_exports.object({ - kid: external_exports.string().min(1), - alg: external_exports.literal("EdDSA"), - crv: external_exports.literal("Ed25519"), - x: external_exports.string().min(1), - status: registrySigningKeyStatusSchema -}).superRefine((value, ctx) => { - let decodedPublicKey; - try { - decodedPublicKey = decodeBase64url(value.x); - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - path: ["x"], - message: "x must be valid base64url" - }); - return; - } - if (decodedPublicKey.length !== ED25519_PUBLIC_KEY_LENGTH2) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - path: ["x"], - message: "x must decode to 32-byte Ed25519 public key" - }); - } -}); -var registrySigningKeysSchema = external_exports.array(registrySigningPublicKeySchema).superRefine((keys, ctx) => { - const seenKids = /* @__PURE__ */ new Set(); - for (const [index, key] of keys.entries()) { - if (seenKids.has(key.kid)) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - path: [index, "kid"], - message: `Duplicate kid "${key.kid}" is not 
allowed` - }); - } else { - seenKids.add(key.kid); - } - } -}); -var registrySigningKeysEnvSchema = external_exports.string().min(1).transform((value, ctx) => { - let parsed; - try { - parsed = JSON.parse(value); - } catch { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: "REGISTRY_SIGNING_KEYS must be valid JSON" - }); - return external_exports.NEVER; - } - const keys = registrySigningKeysSchema.safeParse(parsed); - if (!keys.success) { - for (const issue2 of keys.error.issues) { - ctx.addIssue({ - code: external_exports.ZodIssueCode.custom, - message: issue2.message, - path: issue2.path - }); - } - return external_exports.NEVER; - } - return keys.data; -}); -var registryConfigSchema = external_exports.object({ - ENVIRONMENT: environmentSchema, - APP_VERSION: external_exports.string().min(1).optional(), - BOOTSTRAP_SECRET: external_exports.string().min(1).optional(), - REGISTRY_SIGNING_KEY: external_exports.string().min(1).optional(), - REGISTRY_SIGNING_KEYS: registrySigningKeysEnvSchema.optional() -}); - -// ../../packages/sdk/src/crl/cache.ts -var DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1e3; -var DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1e3; - -// ../../packages/sdk/src/security/nonce-cache.ts -var DEFAULT_NONCE_TTL_MS = 5 * 60 * 1e3; - // src/transforms/peers-config.ts import { chmod, mkdir, readFile, writeFile } from "fs/promises"; import { homedir } from "os"; -import { dirname, join as join2 } from "path"; +import { dirname, join } from "path"; var CLAWDENTITY_DIR = ".clawdentity"; var PEERS_FILENAME = "peers.json"; var PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; -function isRecord2(value) { +function isRecord(value) { return typeof value === "object" && value !== null; } -function getErrorCode(error48) { - if (!isRecord2(error48)) { +function getErrorCode(error) { + if (!isRecord(error)) { return void 0; } - return typeof error48.code === "string" ? error48.code : void 0; + return typeof error.code === "string" ? 
error.code : void 0; } -function parseNonEmptyString2(value, label) { +function parseNonEmptyString(value, label) { if (typeof value !== "string") { throw new Error(`${label} must be a string`); } @@ -15610,7 +25,7 @@ function parseNonEmptyString2(value, label) { return trimmed; } function parsePeerAlias(value) { - const alias = parseNonEmptyString2(value, "peer alias"); + const alias = parseNonEmptyString(value, "peer alias"); if (alias.length > 128) { throw new Error("peer alias must be at most 128 characters"); } @@ -15621,15 +36,15 @@ function parsePeerAlias(value) { } return alias; } -function parseDid2(value) { - const did = parseNonEmptyString2(value, "did"); +function parseDid(value) { + const did = parseNonEmptyString(value, "did"); if (!did.startsWith("did:")) { throw new Error("did must start with 'did:'"); } return did; } function parseProxyUrl(value) { - const candidate = parseNonEmptyString2(value, "proxyUrl"); + const candidate = parseNonEmptyString(value, "proxyUrl"); try { return new URL(candidate).toString(); } catch { @@ -15640,13 +55,13 @@ function parsePeerName(value) { if (value === void 0) { return void 0; } - return parseNonEmptyString2(value, "name"); + return parseNonEmptyString(value, "name"); } function parsePeerEntry(value) { - if (!isRecord2(value)) { + if (!isRecord(value)) { throw new Error("peer entry must be an object"); } - const did = parseDid2(value.did); + const did = parseDid(value.did); const proxyUrl = parseProxyUrl(value.proxyUrl); const name = parsePeerName(value.name); if (name === void 0) { @@ -15655,7 +70,7 @@ function parsePeerEntry(value) { return { did, proxyUrl, name }; } function parsePeersConfig(value, source) { - if (!isRecord2(value)) { + if (!isRecord(value)) { throw new Error( `Peer config validation failed at ${source}: root must be an object` ); @@ -15664,7 +79,7 @@ function parsePeersConfig(value, source) { if (peersRaw === void 0) { return { peers: {} }; } - if (!isRecord2(peersRaw)) { + if 
(!isRecord(peersRaw)) { throw new Error( `Peer config validation failed at ${source}: peers must be an object` ); @@ -15674,8 +89,8 @@ function parsePeersConfig(value, source) { const normalizedAlias = parsePeerAlias(alias); try { peers[normalizedAlias] = parsePeerEntry(peerValue); - } catch (error48) { - const reason = error48 instanceof Error ? error48.message : String(error48); + } catch (error) { + const reason = error instanceof Error ? error.message : String(error); throw new Error( `Peer config validation failed at ${source}: peers.${normalizedAlias}: ${reason}` ); @@ -15688,21 +103,21 @@ function resolvePeersConfigPath(options = {}) { return options.configPath.trim(); } if (typeof options.configDir === "string" && options.configDir.trim().length > 0) { - return join2(options.configDir.trim(), PEERS_FILENAME); + return join(options.configDir.trim(), PEERS_FILENAME); } const home = typeof options.homeDir === "string" && options.homeDir.trim().length > 0 ? options.homeDir.trim() : homedir(); - return join2(home, CLAWDENTITY_DIR, PEERS_FILENAME); + return join(home, CLAWDENTITY_DIR, PEERS_FILENAME); } async function loadPeersConfig(options = {}) { const configPath = resolvePeersConfigPath(options); let rawJson; try { rawJson = await readFile(configPath, "utf8"); - } catch (error48) { - if (getErrorCode(error48) === "ENOENT") { + } catch (error) { + if (getErrorCode(error) === "ENOENT") { return { peers: {} }; } - throw error48; + throw error; } let parsed; try { @@ -15713,176 +128,12 @@ async function loadPeersConfig(options = {}) { return parsePeersConfig(parsed, configPath); } -// src/transforms/registry-auth.ts -import { - chmod as chmod2, - open, - readFile as readFile2, - rename, - stat, - unlink, - writeFile as writeFile2 -} from "fs/promises"; -import { join as join3 } from "path"; -var CLAWDENTITY_DIR2 = ".clawdentity"; -var AGENTS_DIR = "agents"; -var REGISTRY_AUTH_FILENAME = "registry-auth.json"; -var FILE_MODE = 384; -var LOCK_RETRY_DELAY_MS = 50; 
-var LOCK_MAX_ATTEMPTS = 200; -var STALE_LOCK_AGE_MS = 3e4; -function isRecord3(value) { - return typeof value === "object" && value !== null; -} -function getErrorCode2(error48) { - if (!isRecord3(error48)) { - return void 0; - } - return typeof error48.code === "string" ? error48.code : void 0; -} -function sleep(delayMs) { - return new Promise((resolve) => { - setTimeout(resolve, delayMs); - }); -} -function parseAgentAuthBundle2(payload, options) { - if (!isRecord3(payload)) { - throw new Error( - `Agent "${options.agentName}" has invalid ${REGISTRY_AUTH_FILENAME}` - ); - } - const tokenType = payload.tokenType; - const accessToken = payload.accessToken; - const accessExpiresAt = payload.accessExpiresAt; - const refreshToken = payload.refreshToken; - const refreshExpiresAt = payload.refreshExpiresAt; - if (tokenType !== "Bearer" || typeof accessToken !== "string" || typeof accessExpiresAt !== "string" || typeof refreshToken !== "string" || typeof refreshExpiresAt !== "string") { - throw new Error( - `Agent "${options.agentName}" has invalid ${REGISTRY_AUTH_FILENAME}` - ); - } - return { - tokenType, - accessToken, - accessExpiresAt, - refreshToken, - refreshExpiresAt - }; -} -function resolveAgentRegistryAuthPath(input) { - return join3( - input.homeDir, - CLAWDENTITY_DIR2, - AGENTS_DIR, - input.agentName, - REGISTRY_AUTH_FILENAME - ); -} -async function readAgentRegistryAuth(input) { - const registryAuthPath = resolveAgentRegistryAuthPath(input); - let rawRegistryAuth; - try { - rawRegistryAuth = await readFile2(registryAuthPath, "utf8"); - } catch (error48) { - if (getErrorCode2(error48) === "ENOENT") { - throw new Error( - `Agent "${input.agentName}" has no ${REGISTRY_AUTH_FILENAME}. 
Recreate agent identity or re-run auth bootstrap.` - ); - } - throw error48; - } - let parsed; - try { - parsed = JSON.parse(rawRegistryAuth); - } catch { - throw new Error( - `Agent "${input.agentName}" has invalid ${REGISTRY_AUTH_FILENAME} (must be valid JSON)` - ); - } - return parseAgentAuthBundle2(parsed, { agentName: input.agentName }); -} -async function writeAgentRegistryAuthAtomic(input) { - const registryAuthPath = resolveAgentRegistryAuthPath(input); - const tempPath = `${registryAuthPath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; - const content = `${JSON.stringify(input.auth, null, 2)} -`; - await writeFile2(tempPath, content, "utf8"); - await chmod2(tempPath, FILE_MODE); - try { - await rename(tempPath, registryAuthPath); - await chmod2(registryAuthPath, FILE_MODE); - } catch (error48) { - try { - await unlink(tempPath); - } catch { - } - throw error48; - } -} -async function withAgentRegistryAuthLock(input) { - const registryAuthPath = resolveAgentRegistryAuthPath(input); - const lockPath = `${registryAuthPath}.lock`; - let lockAcquired = false; - for (let attempt = 0; attempt < LOCK_MAX_ATTEMPTS; attempt += 1) { - try { - const lockHandle = await open(lockPath, "wx", FILE_MODE); - await lockHandle.writeFile(`${Date.now()}`); - await lockHandle.close(); - lockAcquired = true; - break; - } catch (error48) { - if (getErrorCode2(error48) !== "EEXIST") { - throw error48; - } - try { - const lockStat = await stat(lockPath); - if (Date.now() - lockStat.mtimeMs > STALE_LOCK_AGE_MS) { - await unlink(lockPath); - continue; - } - } catch (statError) { - if (getErrorCode2(statError) !== "ENOENT") { - throw statError; - } - } - await sleep(LOCK_RETRY_DELAY_MS); - } - } - if (!lockAcquired) { - throw new Error( - `Timed out waiting for ${REGISTRY_AUTH_FILENAME} lock for agent "${input.agentName}"` - ); - } - try { - return await input.operation(); - } finally { - try { - await unlink(lockPath); - } catch { - } - } -} - // 
src/transforms/relay-to-peer.ts -var CLAWDENTITY_DIR3 = ".clawdentity"; -var AGENTS_DIR2 = "agents"; -var SECRET_KEY_FILENAME = "secret.key"; -var AIT_FILENAME = "ait.jwt"; -var IDENTITY_FILENAME = "identity.json"; -var AGENT_NAME_ENV = "CLAWDENTITY_AGENT_NAME"; -var OPENCLAW_AGENT_NAME_FILENAME = "openclaw-agent-name"; -var NONCE_SIZE = 16; -var AGENT_ACCESS_HEADER = "x-claw-agent-access"; -var textEncoder2 = new TextEncoder(); -function isRecord4(value) { +var DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; +var DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; +function isRecord2(value) { return typeof value === "object" && value !== null; } -function getErrorCode3(error48) { - if (!isRecord4(error48)) { - return void 0; - } - return typeof error48.code === "string" ? error48.code : void 0; -} function parseRequiredString(value) { if (typeof value !== "string") { throw new Error("Input value must be a string"); @@ -15893,22 +144,14 @@ function parseRequiredString(value) { } return trimmed; } -function parseIdentityRegistryUrl(payload, options) { - if (!isRecord4(payload) || typeof payload.registryUrl !== "string") { - throw new Error( - `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)` - ); - } - const registryUrl = payload.registryUrl.trim(); - if (registryUrl.length === 0) { - throw new Error( - `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)` - ); +function removePeerField(payload) { + const outbound = {}; + for (const [key, value] of Object.entries(payload)) { + if (key !== "peer") { + outbound[key] = value; + } } - return registryUrl; -} -function resolvePathWithQuery(url2) { - return `${url2.pathname}${url2.search}`; + return outbound; } function resolveRelayFetch(fetchImpl) { const resolved = fetchImpl ?? 
globalThis.fetch; @@ -15917,128 +160,66 @@ function resolveRelayFetch(fetchImpl) { } return resolved; } -async function tryReadTrimmedFile(filePath, _label) { - let raw; +function parseConnectorBaseUrl(value) { + let parsed; try { - raw = await readFile3(filePath, "utf8"); - } catch (error48) { - if (getErrorCode3(error48) === "ENOENT") { - return void 0; - } - throw error48; + parsed = new URL(value); + } catch { + throw new Error("Connector base URL is invalid"); } - const trimmed = raw.trim(); - if (trimmed.length === 0) { - throw new Error("Required file content is empty"); + if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { + throw new Error("Connector base URL is invalid"); } - return trimmed; -} -async function readTrimmedFile(filePath, label) { - const content = await tryReadTrimmedFile(filePath, label); - if (content === void 0) { - throw new Error("Required file is missing"); + if (parsed.pathname === "/" && parsed.search.length === 0 && parsed.hash.length === 0) { + return parsed.origin; } - return content; + return parsed.toString(); } -async function resolveAgentName(input) { - const overridden = input.overrideName?.trim(); - if (overridden) { - return overridden; - } - const envAgentName = process.env[AGENT_NAME_ENV]?.trim(); - if (envAgentName) { - return envAgentName; - } - const selectedAgentPath = join4( - input.homeDir, - CLAWDENTITY_DIR3, - OPENCLAW_AGENT_NAME_FILENAME - ); - const selectedAgentName = await tryReadTrimmedFile( - selectedAgentPath, - OPENCLAW_AGENT_NAME_FILENAME - ); - if (selectedAgentName) { - return selectedAgentName; +function normalizeConnectorPath(value) { + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error("Connector outbound path is invalid"); } - const agentsDirectory = join4(input.homeDir, CLAWDENTITY_DIR3, AGENTS_DIR2); - let entries; - try { - entries = await readdir(agentsDirectory, { - withFileTypes: true - }); - } catch (error48) { - if (getErrorCode3(error48) === 
"ENOENT") { - throw new Error("No local agents found. Select one before relay setup."); - } - throw error48; + return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; +} +function resolveConnectorEndpoint(options) { + const baseUrlInput = options.connectorBaseUrl ?? process.env.CLAWDENTITY_CONNECTOR_BASE_URL ?? DEFAULT_CONNECTOR_BASE_URL; + const pathInput = options.connectorPath ?? process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH ?? DEFAULT_CONNECTOR_OUTBOUND_PATH; + const baseUrl = parseConnectorBaseUrl(baseUrlInput.trim()); + const path = normalizeConnectorPath(pathInput.trim()); + return new URL(path, baseUrl).toString(); +} +function mapConnectorFailure(status) { + if (status === 404) { + return new Error("Local connector outbound endpoint is unavailable"); } - const agentNames = entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name).sort(); - if (agentNames.length === 1) { - return agentNames[0]; + if (status === 409) { + return new Error("Peer alias is not configured"); } - if (agentNames.length === 0) { - throw new Error("No local agents found. Select one before relay setup."); + if (status === 400 || status === 422) { + return new Error("Local connector rejected outbound relay payload"); } - throw new Error( - "Multiple local agents found. Configure a selected relay agent first." 
- ); + return new Error("Local connector outbound relay request failed"); } -async function readAgentCredentials(input) { - const agentDir = join4( - input.homeDir, - CLAWDENTITY_DIR3, - AGENTS_DIR2, - input.agentName - ); - const secretPath = join4(agentDir, SECRET_KEY_FILENAME); - const aitPath = join4(agentDir, AIT_FILENAME); - const identityPath = join4(agentDir, IDENTITY_FILENAME); - const [encodedSecret, ait, rawIdentity] = await Promise.all([ - readTrimmedFile(secretPath, SECRET_KEY_FILENAME), - readTrimmedFile(aitPath, AIT_FILENAME), - readTrimmedFile(identityPath, IDENTITY_FILENAME) - ]); - let secretKey; - try { - secretKey = decodeBase64url(encodedSecret); - } catch { - throw new Error("Agent secret key is invalid"); - } - let parsedIdentity; +async function postToConnector(endpoint, payload, fetchImpl) { + let response; try { - parsedIdentity = JSON.parse(rawIdentity); + response = await fetchImpl(endpoint, { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify(payload) + }); } catch { - throw new Error( - `Agent "${input.agentName}" has invalid ${IDENTITY_FILENAME} (must be valid JSON)` - ); + throw new Error("Local connector outbound relay request failed"); } - const registryUrl = parseIdentityRegistryUrl(parsedIdentity, { - agentName: input.agentName - }); - return { - ait, - secretKey, - registryUrl - }; -} -function removePeerField(payload) { - const outbound = {}; - for (const [key, value] of Object.entries(payload)) { - if (key !== "peer") { - outbound[key] = value; - } + if (!response.ok) { + throw mapConnectorFailure(response.status); } - return outbound; -} -function isRetryableRelayAuthError(error48) { - return error48 instanceof AppError && error48.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && error48.status === 401; -} -function buildRefreshSingleFlightKey(input) { - return `${input.homeDir}:${input.agentName}`; } async function relayPayloadToPeer(payload, options = {}) { - if 
(!isRecord4(payload)) { + if (!isRecord2(payload)) { return payload; } const peerAliasValue = payload.peer; @@ -16051,114 +232,20 @@ async function relayPayloadToPeer(payload, options = {}) { if (!peerEntry) { throw new Error("Peer alias is not configured"); } - const home = typeof options.homeDir === "string" && options.homeDir.trim().length > 0 ? options.homeDir.trim() : homedir2(); - const agentName = await resolveAgentName({ - overrideName: options.agentName, - homeDir: home - }); - const { ait, secretKey, registryUrl } = await readAgentCredentials({ - agentName, - homeDir: home - }); - const outboundPayload = removePeerField(payload); - const body = JSON.stringify(outboundPayload); - const peerUrl = new URL(peerEntry.proxyUrl); + const connectorEndpoint = resolveConnectorEndpoint(options); const fetchImpl = resolveRelayFetch(options.fetchImpl); - const refreshSingleFlightKey = buildRefreshSingleFlightKey({ - homeDir: home, - agentName - }); - const sendRelayRequest = async (auth) => { - const unixSeconds = Math.floor( - (options.clock ?? Date.now)() / 1e3 - ).toString(); - const nonce = encodeBase64url( - (options.randomBytesImpl ?? 
randomBytes)(NONCE_SIZE) - ); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: resolvePathWithQuery(peerUrl), - timestamp: unixSeconds, - nonce, - body: textEncoder2.encode(body), - secretKey - }); - return fetchImpl(peerUrl.toString(), { - method: "POST", - headers: { - Authorization: `Claw ${ait}`, - "Content-Type": "application/json", - [AGENT_ACCESS_HEADER]: auth.accessToken, - ...signed.headers - }, - body - }); - }; - const performRelay = async (auth) => { - const response = await sendRelayRequest(auth); - if (!response.ok) { - if (response.status === 401) { - throw new AppError({ - code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", - message: "Peer relay rejected agent auth credentials", - status: 401, - expose: true - }); - } - throw new Error("Peer relay request failed"); - } - return null; - }; - const result = await executeWithAgentAuthRefreshRetry({ - key: refreshSingleFlightKey, - shouldRetry: isRetryableRelayAuthError, - getAuth: async () => readAgentRegistryAuth({ - homeDir: home, - agentName - }), - persistAuth: async () => { - }, - refreshAuth: async (currentAuth) => withAgentRegistryAuthLock({ - homeDir: home, - agentName, - operation: async () => { - const latestAuth = await readAgentRegistryAuth({ - homeDir: home, - agentName - }); - if (latestAuth.refreshToken !== currentAuth.refreshToken) { - return latestAuth; - } - let refreshedAuth; - try { - refreshedAuth = await refreshAgentAuthWithClawProof({ - registryUrl, - ait, - secretKey, - refreshToken: latestAuth.refreshToken, - fetchImpl - }); - } catch (error48) { - const afterFailureAuth = await readAgentRegistryAuth({ - homeDir: home, - agentName - }); - if (afterFailureAuth.refreshToken !== latestAuth.refreshToken) { - return afterFailureAuth; - } - throw error48; - } - await writeAgentRegistryAuthAtomic({ - homeDir: home, - agentName, - auth: refreshedAuth - }); - return refreshedAuth; - } - }), - perform: performRelay - }); - return result; + const outboundPayload = 
removePeerField(payload); + await postToConnector( + connectorEndpoint, + { + peer: peerAlias, + peerDid: peerEntry.did, + peerProxyUrl: peerEntry.proxyUrl, + payload: outboundPayload + }, + fetchImpl + ); + return null; } async function relayToPeer(ctx) { return relayPayloadToPeer(ctx?.payload); @@ -16167,11 +254,3 @@ export { relayToPeer as default, relayPayloadToPeer }; -/*! Bundled license information: - -@scure/base/index.js: - (*! scure-base - MIT License (c) 2022 Paul Miller (paulmillr.com) *) - -@noble/ed25519/index.js: - (*! noble-ed25519 - MIT License (c) 2019 Paul Miller (paulmillr.com) *) -*/ diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index b50efe5..3c87e8c 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -11,11 +11,16 @@ - Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. - Admin bootstrap must print the one-time PAT before attempting to persist it and depend on `persistBootstrapConfig` so config write failures are surfaced via CLI errors while the operator still sees the PAT. - API-key lifecycle command logic should stay in `commands/api-key.ts`; keep create/list/revoke request mapping explicit and keep token exposure limited to create output only. +- Connector runtime command logic should stay in `commands/connector.ts`; keep startup orchestration deterministic and avoid embedding connector runtime implementation details in the CLI. - Registry invite lifecycle command logic should stay in `commands/invite.ts`; keep it strictly scoped to registry onboarding invites and separate from `commands/openclaw.ts` peer-relay invite codes. - `invite redeem` must print the returned PAT once, then persist config in deterministic order (`registryUrl`, then `apiKey`) so bootstrap/onboarding state is predictable. 
- `invite` command routes must use endpoint constants from `@clawdentity/protocol` (`INVITES_PATH`, `INVITES_REDEEM_PATH`) instead of inline path literals. - Agent auth refresh state is stored per-agent at `~/.clawdentity/agents//registry-auth.json` and must be written with secure file permissions. - `agent auth refresh` must use `Authorization: Claw ` + PoP headers from local agent keys and must not require PAT config. +- `connector start ` must validate local agent material (`identity.json`, `ait.jwt`, `secret.key`, `registry-auth.json`) before starting runtime and must fail with stable CLI errors when files are missing/invalid. +- `connector start` must print the local outbound handoff endpoint so transform troubleshooting is deterministic. +- `connector service install ` must install user-scoped autostart integration (`launchd` on macOS, `systemd --user` on Linux) so connector runtime survives host restarts. +- `connector service uninstall ` must be idempotent and remove the generated service file even when the service is already stopped/unloaded. ## Skill Install Mode - Keep npm skill-install logic in shared helpers (`install-skill-mode.ts`) and invoke it from `postinstall.ts`; do not embed installer logic inside command factories. diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 83c5047..fe13f8c 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -23,6 +23,16 @@ - `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. - Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. +## Connector Command Rules +- `connector start ` is the runtime entrypoint for local relay handoff and must remain long-running when connector runtime provides a wait/closed primitive. 
+- Validate agent local state before start (`identity.json`, `ait.jwt`, `secret.key`, `registry-auth.json`) and fail early with deterministic operator-facing errors. +- Keep connector startup wiring behind dependency-injected helpers so tests can mock module loading/runtime behavior without requiring a live connector package. +- Print resolved outbound endpoint and proxy websocket URL (when provided by runtime) so operators can verify local handoff and upstream connectivity. +- Parse and forward optional `registry-auth.json` expiry metadata (`accessExpiresAt`, `refreshExpiresAt`, `tokenType`) to connector runtime so startup refresh decisions can be made without re-reading CLI-side files. +- `connector service install ` must generate deterministic user-service files and wire autostart using OS-native tooling (`launchctl` or `systemctl --user`). +- `connector service install/uninstall` must keep service names/path generation stable from agent name so support/debug commands remain predictable. +- `connector service uninstall` must be safe to re-run (ignore already-stopped service errors and still remove service file). + ## Registry Invite Command Rules - `invite create` is for registry onboarding invites only (admin-authenticated), not peer-relay invite-code generation. - `invite create` must call `INVITES_PATH` from `@clawdentity/protocol` and include PAT bearer auth from resolved CLI config. 
diff --git a/apps/cli/src/commands/connector.test.ts b/apps/cli/src/commands/connector.test.ts new file mode 100644 index 0000000..43031e9 --- /dev/null +++ b/apps/cli/src/commands/connector.test.ts @@ -0,0 +1,344 @@ +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { createConnectorCommand } from "./connector.js"; + +function createErrnoError(code: string): NodeJS.ErrnoException { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +} + +async function runConnectorCommand( + args: string[], + input: { + execFileImpl?: ( + file: string, + args?: readonly string[], + ) => Promise<{ stderr: string; stdout: string }>; + getConfigDirImpl?: () => string; + getHomeDirImpl?: () => string; + loadConnectorModule?: () => Promise<{ + startConnectorRuntime?: (input: unknown) => Promise<{ + outboundUrl?: string; + waitUntilStopped?: () => Promise; + websocketUrl?: string; + }>; + }>; + mkdirImpl?: ( + path: string, + options?: { recursive?: boolean }, + ) => Promise; + readFileImpl?: (path: string, encoding: "utf8") => Promise; + removeFileImpl?: ( + filePath: string, + options?: { force?: boolean }, + ) => Promise; + resolveCurrentModulePathImpl?: () => string; + resolveCurrentPlatformImpl?: () => NodeJS.Platform; + resolveCurrentUidImpl?: () => number; + resolveConfigImpl?: () => Promise<{ registryUrl: string }>; + resolveNodeExecPathImpl?: () => string; + writeFileImpl?: ( + filePath: string, + data: string, + encoding: "utf8", + ) => Promise; + } = {}, +) { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); 
+ + process.exitCode = undefined; + + const command = createConnectorCommand({ + execFileImpl: input.execFileImpl, + getConfigDirImpl: input.getConfigDirImpl, + getHomeDirImpl: input.getHomeDirImpl, + loadConnectorModule: input.loadConnectorModule as + | (() => Promise<{ + startConnectorRuntime?: (input: unknown) => Promise<{ + outboundUrl?: string; + waitUntilStopped?: () => Promise; + websocketUrl?: string; + }>; + }>) + | undefined, + mkdirImpl: input.mkdirImpl, + readFileImpl: input.readFileImpl, + removeFileImpl: input.removeFileImpl, + resolveCurrentModulePathImpl: input.resolveCurrentModulePathImpl, + resolveCurrentPlatformImpl: input.resolveCurrentPlatformImpl, + resolveCurrentUidImpl: input.resolveCurrentUidImpl, + resolveConfigImpl: input.resolveConfigImpl, + resolveNodeExecPathImpl: input.resolveNodeExecPathImpl, + writeFileImpl: input.writeFileImpl, + }); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "connector", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +} + +describe("connector command", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("registers connector start command", () => { + const command = createConnectorCommand(); + expect(command.name()).toBe("connector"); + expect(command.commands.map((item) => item.name())).toContain("start"); + expect(command.commands.map((item) => item.name())).toContain("service"); + }); + + it("installs launchd service for connector autostart", async () => { + const writeFileImpl = vi.fn( 
+ async (_path: string, _content: string, _encoding: BufferEncoding) => {}, + ); + const mkdirImpl = vi.fn(async (_path: string, _options: unknown) => {}); + const execFileImpl = vi.fn( + async (_file: string, _args?: readonly string[]) => ({ + stdout: "", + stderr: "", + }), + ); + + const result = await runConnectorCommand( + ["service", "install", "alpha-agent", "--platform", "launchd"], + { + execFileImpl, + getConfigDirImpl: () => "/mock-home/.clawdentity", + getHomeDirImpl: () => "/mock-home", + mkdirImpl, + resolveCurrentModulePathImpl: () => + "/mock-cli/dist/commands/connector.js", + resolveNodeExecPathImpl: () => "/mock-node/bin/node", + writeFileImpl, + }, + ); + + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(writeFileImpl.mock.calls[0]?.[0]).toBe( + "/mock-home/Library/LaunchAgents/com.clawdentity.clawdentity-connector-alpha-agent.plist", + ); + expect(writeFileImpl.mock.calls[0]?.[1]).toContain( + "ProgramArguments", + ); + expect(writeFileImpl.mock.calls[0]?.[1]).toContain( + "/mock-node/bin/node", + ); + expect(writeFileImpl.mock.calls[0]?.[1]).toContain( + "/mock-cli/dist/bin.js", + ); + expect(execFileImpl).toHaveBeenCalledWith("launchctl", [ + "load", + "-w", + "/mock-home/Library/LaunchAgents/com.clawdentity.clawdentity-connector-alpha-agent.plist", + ]); + expect(result.stdout).toContain( + "Connector service installed (launchd): com.clawdentity.clawdentity-connector-alpha-agent", + ); + expect(result.exitCode).toBeUndefined(); + }); + + it("uninstalls systemd service for connector autostart", async () => { + const execFileImpl = vi.fn( + async (_file: string, _args?: readonly string[]) => ({ + stdout: "", + stderr: "", + }), + ); + const removeFileImpl = vi.fn( + async (_path: string, _options: unknown) => {}, + ); + + const result = await runConnectorCommand( + ["service", "uninstall", "alpha-agent", "--platform", "systemd"], + { + execFileImpl, + getHomeDirImpl: () => "/mock-home", + removeFileImpl, + }, + ); + + 
expect(execFileImpl).toHaveBeenCalledWith("systemctl", [ + "--user", + "disable", + "--now", + "clawdentity-connector-alpha-agent.service", + ]); + expect(execFileImpl).toHaveBeenCalledWith("systemctl", [ + "--user", + "daemon-reload", + ]); + expect(removeFileImpl).toHaveBeenCalledWith( + "/mock-home/.config/systemd/user/clawdentity-connector-alpha-agent.service", + { force: true }, + ); + expect(result.stdout).toContain( + "Connector service uninstalled (systemd): clawdentity-connector-alpha-agent", + ); + expect(result.exitCode).toBeUndefined(); + }); + + it("starts connector runtime with local credentials and config", async () => { + const startConnectorRuntime = vi.fn(async () => ({ + outboundUrl: "http://127.0.0.1:19400/v1/outbound", + websocketUrl: "wss://proxy.example.com/v1/connector", + waitUntilStopped: async () => {}, + })); + const readFileImpl = vi.fn(async (path: string): Promise => { + if (path.endsWith("/ait.jwt")) { + return "mock.ait.jwt\n"; + } + + if (path.endsWith("/secret.key")) { + return "mock.secret.key\n"; + } + + if (path.endsWith("/identity.json")) { + return JSON.stringify({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + }); + } + + if (path.endsWith("/registry-auth.json")) { + return JSON.stringify({ + accessToken: "clw_agt_access", + refreshToken: "clw_rft_refresh", + }); + } + + throw createErrnoError("ENOENT"); + }); + + const result = await runConnectorCommand(["start", "alpha-agent"], { + getConfigDirImpl: () => "/mock-home/.clawdentity", + loadConnectorModule: async () => ({ + startConnectorRuntime, + }), + readFileImpl, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }); + + expect(startConnectorRuntime).toHaveBeenCalledTimes(1); + expect(startConnectorRuntime).toHaveBeenCalledWith( + expect.objectContaining({ + agentName: "alpha-agent", + configDir: "/mock-home/.clawdentity", + credentials: { + accessToken: "clw_agt_access", + agentDid: 
"did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + ait: "mock.ait.jwt", + refreshToken: "clw_rft_refresh", + secretKey: "mock.secret.key", + }, + outboundBaseUrl: "http://127.0.0.1:19400", + outboundPath: "/v1/outbound", + registryUrl: "https://api.clawdentity.com", + }), + ); + expect(result.stdout).toContain( + 'Starting connector runtime for agent "alpha-agent"...', + ); + expect(result.stdout).toContain( + "Connector outbound endpoint: http://127.0.0.1:19400/v1/outbound", + ); + expect(result.stdout).toContain( + "Connector proxy websocket: wss://proxy.example.com/v1/connector", + ); + expect(result.stdout).toContain("Connector runtime is active."); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when required agent credentials are missing", async () => { + const readFileImpl = vi.fn(async (_path: string): Promise => { + throw createErrnoError("ENOENT"); + }); + const startConnectorRuntime = vi.fn(async () => ({})); + + const result = await runConnectorCommand(["start", "alpha-agent"], { + getConfigDirImpl: () => "/mock-home/.clawdentity", + loadConnectorModule: async () => ({ + startConnectorRuntime, + }), + readFileImpl, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }); + + expect(startConnectorRuntime).not.toHaveBeenCalled(); + expect(result.exitCode).toBe(1); + expect(result.stderr).toContain( + "Local agent credentials are missing for connector startup", + ); + }); + + it("fails when connector package API is invalid", async () => { + const readFileImpl = vi.fn(async (path: string): Promise => { + if (path.endsWith(".json")) { + return JSON.stringify({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + refreshToken: "clw_rft_refresh", + }); + } + + return "value"; + }); + + const result = await runConnectorCommand(["start", "alpha-agent"], { + getConfigDirImpl: () => "/mock-home/.clawdentity", + loadConnectorModule: async () => ({}), + readFileImpl, + resolveConfigImpl: async () => ({ + registryUrl: 
"https://api.clawdentity.com", + }), + }); + + expect(result.exitCode).toBe(1); + expect(result.stderr).toContain( + "Connector package does not expose startConnectorRuntime", + ); + }); +}); diff --git a/apps/cli/src/commands/connector.ts b/apps/cli/src/commands/connector.ts new file mode 100644 index 0000000..e36b1f6 --- /dev/null +++ b/apps/cli/src/commands/connector.ts @@ -0,0 +1,1080 @@ +import { execFile as execFileCallback } from "node:child_process"; +import { mkdir, readFile, rm, writeFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; +import { promisify } from "node:util"; +import { AppError, createLogger } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { getConfigDir, resolveConfig } from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { assertValidAgentName } from "./agent-name.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "connector" }); +const execFile = promisify(execFileCallback); + +const AGENTS_DIR_NAME = "agents"; +const IDENTITY_FILE_NAME = "identity.json"; +const AIT_FILE_NAME = "ait.jwt"; +const SECRET_KEY_FILE_NAME = "secret.key"; +const REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; +const SERVICE_LOG_DIR_NAME = "logs"; + +const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; +const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; + +type ConnectorCredentials = { + accessToken?: string; + accessExpiresAt?: string; + agentDid: string; + ait: string; + refreshToken: string; + refreshExpiresAt?: string; + secretKey: string; + tokenType?: "Bearer"; +}; + +type ConnectorStartInput = { + agentName: string; + configDir: string; + credentials: ConnectorCredentials; + openclawBaseUrl?: string; + openclawHookPath?: string; + openclawHookToken?: string; + outboundBaseUrl: string; + outboundPath: string; + 
proxyWebsocketUrl?: string; + registryUrl: string; +}; + +type ConnectorRuntime = { + closed?: Promise; + outboundUrl?: string; + proxyWebsocketUrl?: string; + waitForStop?: () => Promise; + waitUntilStopped?: () => Promise; + websocketUrl?: string; +}; + +type ConnectorModule = { + startConnectorRuntime?: ( + input: ConnectorStartInput, + ) => Promise; +}; + +type ReadFileText = (path: string, encoding: "utf8") => Promise; +type ResolveConfigLike = () => Promise<{ registryUrl: string }>; +type ExecFileLike = ( + file: string, + args?: readonly string[], +) => Promise<{ stderr: string; stdout: string }>; +type MkdirLike = ( + path: string, + options?: { recursive?: boolean }, +) => Promise; +type WriteFileLike = ( + filePath: string, + data: string, + encoding: "utf8", +) => Promise; +type RemoveFileLike = ( + filePath: string, + options?: { force?: boolean }, +) => Promise; +type ResolveHomeDirLike = () => string; +type ResolveNodeExecPathLike = () => string; +type ResolveCurrentPlatformLike = () => NodeJS.Platform; +type ResolveCurrentModulePathLike = () => string; +type ResolveCurrentUidLike = () => number; + +type ConnectorCommandDependencies = { + execFileImpl?: ExecFileLike; + getConfigDirImpl?: typeof getConfigDir; + getHomeDirImpl?: ResolveHomeDirLike; + loadConnectorModule?: () => Promise; + mkdirImpl?: MkdirLike; + readFileImpl?: ReadFileText; + removeFileImpl?: RemoveFileLike; + resolveCurrentModulePathImpl?: ResolveCurrentModulePathLike; + resolveCurrentPlatformImpl?: ResolveCurrentPlatformLike; + resolveCurrentUidImpl?: ResolveCurrentUidLike; + resolveConfigImpl?: ResolveConfigLike; + resolveNodeExecPathImpl?: ResolveNodeExecPathLike; + writeFileImpl?: WriteFileLike; +}; + +type ConnectorStartCommandOptions = { + openclawBaseUrl?: string; + openclawHookPath?: string; + openclawHookToken?: string; + proxyWsUrl?: string; +}; + +type ConnectorServicePlatform = "launchd" | "systemd"; + +type ConnectorServiceInstallCommandOptions = 
ConnectorStartCommandOptions & { + platform?: "auto" | ConnectorServicePlatform; +}; + +type ConnectorServiceUninstallCommandOptions = { + platform?: "auto" | ConnectorServicePlatform; +}; + +export type ConnectorStartResult = { + outboundUrl: string; + proxyWebsocketUrl?: string; + runtime?: ConnectorRuntime | undefined; +}; + +export type ConnectorServiceInstallResult = { + serviceFilePath: string; + serviceName: string; + platform: ConnectorServicePlatform; +}; + +export type ConnectorServiceUninstallResult = { + serviceFilePath: string; + serviceName: string; + platform: ConnectorServicePlatform; +}; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? error.code : undefined; +} + +function createCliError( + code: string, + message: string, + details?: Record, +): AppError { + return new AppError({ + code, + message, + status: 400, + details, + }); +} + +function parseNonEmptyString(value: unknown, label: string): string { + if (typeof value !== "string") { + throw createCliError( + "CLI_CONNECTOR_INVALID_INPUT", + "Connector input is invalid", + { + label, + }, + ); + } + + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_CONNECTOR_INVALID_INPUT", + "Connector input is invalid", + { + label, + }, + ); + } + + return trimmed; +} + +function parseAgentDid(value: unknown): string { + const did = parseNonEmptyString(value, "agent did"); + if (!did.startsWith("did:claw:agent:")) { + throw createCliError( + "CLI_CONNECTOR_INVALID_AGENT_IDENTITY", + "Agent identity is invalid for connector startup", + ); + } + + return did; +} + +function parseConnectorBaseUrl(value: string): string { + let parsed: URL; + try { + parsed = new URL(value); + } catch { + throw createCliError( + "CLI_CONNECTOR_INVALID_BASE_URL", + 
"Connector base URL is invalid", + ); + } + + if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { + throw createCliError( + "CLI_CONNECTOR_INVALID_BASE_URL", + "Connector base URL is invalid", + ); + } + + if ( + parsed.pathname === "/" && + parsed.search.length === 0 && + parsed.hash.length === 0 + ) { + return parsed.origin; + } + + return parsed.toString(); +} + +function normalizeOutboundPath(pathValue: string): string { + const trimmed = pathValue.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_CONNECTOR_INVALID_OUTBOUND_PATH", + "Connector outbound path is invalid", + ); + } + + return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; +} + +function resolveConnectorBaseUrl(): string { + const value = process.env.CLAWDENTITY_CONNECTOR_BASE_URL; + if (typeof value !== "string" || value.trim().length === 0) { + return DEFAULT_CONNECTOR_BASE_URL; + } + + return parseConnectorBaseUrl(value.trim()); +} + +function resolveConnectorOutboundPath(): string { + const value = process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH; + if (typeof value !== "string" || value.trim().length === 0) { + return DEFAULT_CONNECTOR_OUTBOUND_PATH; + } + + return normalizeOutboundPath(value); +} + +function resolveOutboundUrl(baseUrl: string, path: string): string { + return new URL(path, baseUrl).toString(); +} + +async function readRequiredTrimmedFile( + filePath: string, + label: string, + readFileImpl: ReadFileText, +): Promise { + let raw: string; + try { + raw = await readFileImpl(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_CONNECTOR_MISSING_AGENT_MATERIAL", + "Local agent credentials are missing for connector startup", + { label }, + ); + } + + throw error; + } + + const trimmed = raw.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_CONNECTOR_MISSING_AGENT_MATERIAL", + "Local agent credentials are missing for connector startup", + { label }, + ); + } + + return 
trimmed; +} + +function parseJsonRecord( + value: string, + code: string, + message: string, +): Record { + let parsed: unknown; + try { + parsed = JSON.parse(value); + } catch { + throw createCliError(code, message); + } + + if (!isRecord(parsed)) { + throw createCliError(code, message); + } + + return parsed; +} + +function parseRegistryAuth( + rawRegistryAuth: string, +): Pick< + ConnectorCredentials, + | "accessToken" + | "accessExpiresAt" + | "refreshToken" + | "refreshExpiresAt" + | "tokenType" +> { + const parsed = parseJsonRecord( + rawRegistryAuth, + "CLI_CONNECTOR_INVALID_REGISTRY_AUTH", + "Agent registry auth is invalid for connector startup", + ); + + const refreshToken = parseNonEmptyString(parsed.refreshToken, "refreshToken"); + const accessToken = + typeof parsed.accessToken === "string" && + parsed.accessToken.trim().length > 0 + ? parsed.accessToken.trim() + : undefined; + const accessExpiresAt = + typeof parsed.accessExpiresAt === "string" && + parsed.accessExpiresAt.trim().length > 0 + ? parsed.accessExpiresAt.trim() + : undefined; + const refreshExpiresAt = + typeof parsed.refreshExpiresAt === "string" && + parsed.refreshExpiresAt.trim().length > 0 + ? parsed.refreshExpiresAt.trim() + : undefined; + const tokenType = parsed.tokenType === "Bearer" ? "Bearer" : undefined; + + return { + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + tokenType, + }; +} + +function parseAgentIdentity(rawIdentity: string): { did: string } { + const parsed = parseJsonRecord( + rawIdentity, + "CLI_CONNECTOR_INVALID_AGENT_IDENTITY", + "Agent identity is invalid for connector startup", + ); + + return { + did: parseAgentDid(parsed.did), + }; +} + +async function loadDefaultConnectorModule(): Promise { + const connectorModuleName: string = "@clawdentity/connector"; + + try { + return (await import(connectorModuleName)) as ConnectorModule; + } catch { + throw createCliError( + "CLI_CONNECTOR_PACKAGE_UNAVAILABLE", + "Connector package is unavailable. 
Install @clawdentity/connector and retry.", + ); + } +} + +function resolveWaitPromise( + runtime: ConnectorRuntime | undefined, +): Promise | undefined { + if (!runtime || !isRecord(runtime)) { + return undefined; + } + + if (typeof runtime.waitUntilStopped === "function") { + return runtime.waitUntilStopped(); + } + + if (typeof runtime.waitForStop === "function") { + return runtime.waitForStop(); + } + + if (runtime.closed instanceof Promise) { + return runtime.closed.then(() => undefined); + } + + return undefined; +} + +function sanitizeServiceSegment(value: string): string { + return value.replaceAll(/[^a-zA-Z0-9_.-]+/g, "-"); +} + +function parseConnectorServicePlatformOption( + value: unknown, +): "auto" | ConnectorServicePlatform { + if (value === undefined) { + return "auto"; + } + + if (value === "auto" || value === "launchd" || value === "systemd") { + return value; + } + + throw createCliError( + "CLI_CONNECTOR_SERVICE_PLATFORM_INVALID", + "Connector service platform must be one of: auto, launchd, systemd", + ); +} + +function resolveConnectorServicePlatform( + inputPlatform: "auto" | ConnectorServicePlatform | undefined, + currentPlatform: NodeJS.Platform, +): ConnectorServicePlatform { + if (inputPlatform && inputPlatform !== "auto") { + return inputPlatform; + } + + if (currentPlatform === "darwin") { + return "launchd"; + } + + if (currentPlatform === "linux") { + return "systemd"; + } + + throw createCliError( + "CLI_CONNECTOR_SERVICE_PLATFORM_UNSUPPORTED", + "Connector service install is supported only on macOS (launchd) and Linux (systemd)", + { + platform: currentPlatform, + }, + ); +} + +function buildConnectorStartArgs( + agentName: string, + commandOptions: ConnectorStartCommandOptions, +): string[] { + const args = ["connector", "start", agentName]; + + if (commandOptions.proxyWsUrl) { + args.push("--proxy-ws-url", commandOptions.proxyWsUrl); + } + + if (commandOptions.openclawBaseUrl) { + args.push("--openclaw-base-url", 
commandOptions.openclawBaseUrl); + } + + if (commandOptions.openclawHookPath) { + args.push("--openclaw-hook-path", commandOptions.openclawHookPath); + } + + if (commandOptions.openclawHookToken) { + args.push("--openclaw-hook-token", commandOptions.openclawHookToken); + } + + return args; +} + +function resolveCliEntryPath( + resolveCurrentModulePathImpl: ResolveCurrentModulePathLike | undefined, +): string { + const modulePath = + resolveCurrentModulePathImpl?.() ?? fileURLToPath(import.meta.url); + return join(dirname(modulePath), "..", "bin.js"); +} + +function escapeXml(value: string): string { + return value + .replaceAll("&", "&") + .replaceAll("<", "<") + .replaceAll(">", ">") + .replaceAll('"', """) + .replaceAll("'", "'"); +} + +function quoteSystemdArgument(value: string): string { + return `"${value.replaceAll("\\", "\\\\").replaceAll('"', '\\"')}"`; +} + +function createSystemdServiceFileContent(input: { + command: string[]; + description: string; + errorLogPath: string; + outputLogPath: string; + workingDirectory: string; +}): string { + const execStart = input.command.map(quoteSystemdArgument).join(" "); + + return [ + "[Unit]", + `Description=${input.description}`, + "After=network-online.target", + "Wants=network-online.target", + "", + "[Service]", + "Type=simple", + `ExecStart=${execStart}`, + "Restart=always", + "RestartSec=2", + `WorkingDirectory=${quoteSystemdArgument(input.workingDirectory)}`, + `StandardOutput=append:${input.outputLogPath}`, + `StandardError=append:${input.errorLogPath}`, + "", + "[Install]", + "WantedBy=default.target", + "", + ].join("\n"); +} + +function createLaunchdPlistContent(input: { + command: string[]; + label: string; + errorLogPath: string; + outputLogPath: string; + workingDirectory: string; +}): string { + const commandItems = input.command + .map((arg) => ` ${escapeXml(arg)}`) + .join("\n"); + + return [ + '', + '', + '', + "", + " Label", + ` ${escapeXml(input.label)}`, + " ProgramArguments", + " ", + 
commandItems, + " ", + " RunAtLoad", + " ", + " KeepAlive", + " ", + " WorkingDirectory", + ` ${escapeXml(input.workingDirectory)}`, + " StandardOutPath", + ` ${escapeXml(input.outputLogPath)}`, + " StandardErrorPath", + ` ${escapeXml(input.errorLogPath)}`, + "", + "", + "", + ].join("\n"); +} + +function resolveServiceDependencies( + dependencies: ConnectorCommandDependencies, +) { + const execFileImpl: ExecFileLike = + dependencies.execFileImpl ?? + (async (file, args = []) => { + const result = await execFile(file, [...args]); + return { + stdout: result.stdout ?? "", + stderr: result.stderr ?? "", + }; + }); + + return { + execFileImpl, + getConfigDirImpl: dependencies.getConfigDirImpl ?? getConfigDir, + getHomeDirImpl: dependencies.getHomeDirImpl ?? homedir, + mkdirImpl: dependencies.mkdirImpl ?? mkdir, + removeFileImpl: dependencies.removeFileImpl ?? rm, + resolveCurrentModulePathImpl: dependencies.resolveCurrentModulePathImpl, + resolveCurrentPlatformImpl: + dependencies.resolveCurrentPlatformImpl ?? (() => process.platform), + resolveCurrentUidImpl: + dependencies.resolveCurrentUidImpl ?? + (() => { + if (typeof process.getuid !== "function") { + throw createCliError( + "CLI_CONNECTOR_SERVICE_UID_UNAVAILABLE", + "Current user id is unavailable in this runtime", + ); + } + return process.getuid(); + }), + resolveNodeExecPathImpl: + dependencies.resolveNodeExecPathImpl ?? (() => process.execPath), + writeFileImpl: dependencies.writeFileImpl ?? 
writeFile, + }; +} + +export async function installConnectorServiceForAgent( + agentName: string, + commandOptions: ConnectorServiceInstallCommandOptions = {}, + dependencies: ConnectorCommandDependencies = {}, +): Promise { + const serviceDependencies = resolveServiceDependencies(dependencies); + const servicePlatform = parseConnectorServicePlatformOption( + commandOptions.platform, + ); + const platform = resolveConnectorServicePlatform( + servicePlatform, + serviceDependencies.resolveCurrentPlatformImpl(), + ); + const configDir = serviceDependencies.getConfigDirImpl(); + const homeDir = serviceDependencies.getHomeDirImpl(); + const logsDir = join(configDir, SERVICE_LOG_DIR_NAME); + const serviceName = sanitizeServiceSegment( + `clawdentity-connector-${agentName}`, + ); + const startArgs = buildConnectorStartArgs(agentName, commandOptions); + const command = [ + serviceDependencies.resolveNodeExecPathImpl(), + resolveCliEntryPath(serviceDependencies.resolveCurrentModulePathImpl), + ...startArgs, + ]; + const outputLogPath = join(logsDir, `${serviceName}.out.log`); + const errorLogPath = join(logsDir, `${serviceName}.err.log`); + + await serviceDependencies.mkdirImpl(logsDir, { recursive: true }); + + if (platform === "systemd") { + const serviceDir = join(homeDir, ".config", "systemd", "user"); + const serviceFilePath = join(serviceDir, `${serviceName}.service`); + + await serviceDependencies.mkdirImpl(serviceDir, { recursive: true }); + await serviceDependencies.writeFileImpl( + serviceFilePath, + createSystemdServiceFileContent({ + command, + description: `Clawdentity connector (${agentName})`, + outputLogPath, + errorLogPath, + workingDirectory: homeDir, + }), + "utf8", + ); + + try { + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "daemon-reload", + ]); + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "enable", + "--now", + `${serviceName}.service`, + ]); + } catch (error) { + throw createCliError( + 
"CLI_CONNECTOR_SERVICE_INSTALL_FAILED", + "Failed to install systemd connector service", + { + reason: error instanceof Error ? error.message : "unknown", + }, + ); + } + + return { + platform, + serviceName, + serviceFilePath, + }; + } + + const launchAgentsDir = join(homeDir, "Library", "LaunchAgents"); + const serviceNameWithDomain = `com.clawdentity.${serviceName}`; + const serviceFilePath = join( + launchAgentsDir, + `${serviceNameWithDomain}.plist`, + ); + + await serviceDependencies.mkdirImpl(launchAgentsDir, { recursive: true }); + await serviceDependencies.writeFileImpl( + serviceFilePath, + createLaunchdPlistContent({ + command, + label: serviceNameWithDomain, + outputLogPath, + errorLogPath, + workingDirectory: homeDir, + }), + "utf8", + ); + + try { + await serviceDependencies.execFileImpl("launchctl", [ + "unload", + "-w", + serviceFilePath, + ]); + } catch { + // Ignore unload failures for first install or already-unloaded service. + } + + try { + await serviceDependencies.execFileImpl("launchctl", [ + "load", + "-w", + serviceFilePath, + ]); + } catch (error) { + throw createCliError( + "CLI_CONNECTOR_SERVICE_INSTALL_FAILED", + "Failed to install launchd connector service", + { + reason: error instanceof Error ? 
error.message : "unknown", + }, + ); + } + + return { + platform, + serviceName: serviceNameWithDomain, + serviceFilePath, + }; +} + +export async function uninstallConnectorServiceForAgent( + agentName: string, + commandOptions: ConnectorServiceUninstallCommandOptions = {}, + dependencies: ConnectorCommandDependencies = {}, +): Promise { + const serviceDependencies = resolveServiceDependencies(dependencies); + const servicePlatform = parseConnectorServicePlatformOption( + commandOptions.platform, + ); + const platform = resolveConnectorServicePlatform( + servicePlatform, + serviceDependencies.resolveCurrentPlatformImpl(), + ); + const homeDir = serviceDependencies.getHomeDirImpl(); + const serviceName = sanitizeServiceSegment( + `clawdentity-connector-${agentName}`, + ); + + if (platform === "systemd") { + const serviceFilePath = join( + homeDir, + ".config", + "systemd", + "user", + `${serviceName}.service`, + ); + + try { + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "disable", + "--now", + `${serviceName}.service`, + ]); + } catch { + // Continue uninstall to keep command idempotent. + } + + await serviceDependencies.removeFileImpl(serviceFilePath, { force: true }); + + try { + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "daemon-reload", + ]); + } catch { + // Continue uninstall; unit file is already removed. + } + + return { + platform, + serviceName, + serviceFilePath, + }; + } + + const serviceNameWithDomain = `com.clawdentity.${serviceName}`; + const serviceFilePath = join( + homeDir, + "Library", + "LaunchAgents", + `${serviceNameWithDomain}.plist`, + ); + + try { + await serviceDependencies.execFileImpl("launchctl", [ + "unload", + "-w", + serviceFilePath, + ]); + } catch { + // Continue uninstall to keep command idempotent. 
+ } + + await serviceDependencies.removeFileImpl(serviceFilePath, { force: true }); + + return { + platform, + serviceName: serviceNameWithDomain, + serviceFilePath, + }; +} + +export async function startConnectorForAgent( + agentName: string, + commandOptions: ConnectorStartCommandOptions = {}, + dependencies: ConnectorCommandDependencies = {}, +): Promise { + const resolveConfigImpl: ResolveConfigLike = + dependencies.resolveConfigImpl ?? resolveConfig; + const getConfigDirImpl = dependencies.getConfigDirImpl ?? getConfigDir; + const readFileImpl: ReadFileText = + dependencies.readFileImpl ?? ((path, encoding) => readFile(path, encoding)); + const loadConnectorModule = + dependencies.loadConnectorModule ?? loadDefaultConnectorModule; + const configDir = getConfigDirImpl(); + const agentDirectory = join(configDir, AGENTS_DIR_NAME, agentName); + + const [ + rawAit, + rawSecretKey, + rawIdentity, + rawRegistryAuth, + config, + connectorModule, + ] = await Promise.all([ + readRequiredTrimmedFile( + join(agentDirectory, AIT_FILE_NAME), + AIT_FILE_NAME, + readFileImpl, + ), + readRequiredTrimmedFile( + join(agentDirectory, SECRET_KEY_FILE_NAME), + SECRET_KEY_FILE_NAME, + readFileImpl, + ), + readRequiredTrimmedFile( + join(agentDirectory, IDENTITY_FILE_NAME), + IDENTITY_FILE_NAME, + readFileImpl, + ), + readRequiredTrimmedFile( + join(agentDirectory, REGISTRY_AUTH_FILE_NAME), + REGISTRY_AUTH_FILE_NAME, + readFileImpl, + ), + resolveConfigImpl(), + loadConnectorModule(), + ]); + + if (typeof connectorModule.startConnectorRuntime !== "function") { + throw createCliError( + "CLI_CONNECTOR_INVALID_PACKAGE_API", + "Connector package does not expose startConnectorRuntime", + ); + } + + const identity = parseAgentIdentity(rawIdentity); + const registryAuth = parseRegistryAuth(rawRegistryAuth); + const outboundBaseUrl = resolveConnectorBaseUrl(); + const outboundPath = resolveConnectorOutboundPath(); + const runtime = await connectorModule.startConnectorRuntime({ + agentName, 
+ configDir, + registryUrl: config.registryUrl, + outboundBaseUrl, + outboundPath, + proxyWebsocketUrl: commandOptions.proxyWsUrl, + openclawBaseUrl: commandOptions.openclawBaseUrl, + openclawHookPath: commandOptions.openclawHookPath, + openclawHookToken: commandOptions.openclawHookToken, + credentials: { + agentDid: identity.did, + ait: rawAit, + secretKey: rawSecretKey, + refreshToken: registryAuth.refreshToken, + accessToken: registryAuth.accessToken, + accessExpiresAt: registryAuth.accessExpiresAt, + refreshExpiresAt: registryAuth.refreshExpiresAt, + tokenType: registryAuth.tokenType, + }, + }); + const outboundUrl = + runtime && isRecord(runtime) && typeof runtime.outboundUrl === "string" + ? runtime.outboundUrl + : resolveOutboundUrl(outboundBaseUrl, outboundPath); + const proxyWebsocketUrl = + runtime && isRecord(runtime) + ? typeof runtime.websocketUrl === "string" + ? runtime.websocketUrl + : typeof runtime.proxyWebsocketUrl === "string" + ? runtime.proxyWebsocketUrl + : undefined + : undefined; + + return { + outboundUrl, + proxyWebsocketUrl, + runtime, + }; +} + +export function createConnectorCommand( + dependencies: ConnectorCommandDependencies = {}, +): Command { + const connector = new Command("connector") + .description("Run local connector runtime for OpenClaw relay handoff") + .addCommand( + new Command("start") + .description("Start connector runtime for a local agent") + .argument("", "Local agent name") + .option( + "--proxy-ws-url ", + "Proxy websocket URL (or CLAWDENTITY_PROXY_WS_URL)", + ) + .option( + "--openclaw-base-url ", + "OpenClaw base URL (default OPENCLAW_BASE_URL or http://127.0.0.1:18789)", + ) + .option( + "--openclaw-hook-path ", + "OpenClaw hooks path (default OPENCLAW_HOOK_PATH or /hooks/agent)", + ) + .option( + "--openclaw-hook-token ", + "OpenClaw hooks token (default OPENCLAW_HOOK_TOKEN)", + ) + .action( + withErrorHandling( + "connector start", + async ( + agentNameInput: string, + commandOptions: 
ConnectorStartCommandOptions, + ) => { + const agentName = assertValidAgentName(agentNameInput); + + writeStdoutLine( + `Starting connector runtime for agent "${agentName}"...`, + ); + + const started = await startConnectorForAgent( + agentName, + { + proxyWsUrl: commandOptions.proxyWsUrl, + openclawBaseUrl: commandOptions.openclawBaseUrl, + openclawHookPath: commandOptions.openclawHookPath, + openclawHookToken: commandOptions.openclawHookToken, + }, + dependencies, + ); + + writeStdoutLine( + `Connector outbound endpoint: ${started.outboundUrl}`, + ); + if (started.proxyWebsocketUrl) { + writeStdoutLine( + `Connector proxy websocket: ${started.proxyWebsocketUrl}`, + ); + } + writeStdoutLine("Connector runtime is active."); + + const waitPromise = resolveWaitPromise(started.runtime); + if (waitPromise) { + await waitPromise; + } + }, + ), + ), + ) + .addCommand( + new Command("service") + .description("Install or remove connector autostart service") + .addCommand( + new Command("install") + .description("Install and start connector service at login/restart") + .argument("", "Local agent name") + .option( + "--platform ", + "Service platform: auto | launchd | systemd", + ) + .option( + "--proxy-ws-url ", + "Proxy websocket URL (or CLAWDENTITY_PROXY_WS_URL)", + ) + .option( + "--openclaw-base-url ", + "OpenClaw base URL override for connector runtime", + ) + .option( + "--openclaw-hook-path ", + "OpenClaw hooks path override for connector runtime", + ) + .option( + "--openclaw-hook-token ", + "OpenClaw hooks token override for connector runtime", + ) + .action( + withErrorHandling( + "connector service install", + async ( + agentNameInput: string, + commandOptions: ConnectorServiceInstallCommandOptions, + ) => { + const agentName = assertValidAgentName(agentNameInput); + const installed = await installConnectorServiceForAgent( + agentName, + { + platform: commandOptions.platform, + proxyWsUrl: commandOptions.proxyWsUrl, + openclawBaseUrl: 
commandOptions.openclawBaseUrl, + openclawHookPath: commandOptions.openclawHookPath, + openclawHookToken: commandOptions.openclawHookToken, + }, + dependencies, + ); + + writeStdoutLine( + `Connector service installed (${installed.platform}): ${installed.serviceName}`, + ); + writeStdoutLine(`Service file: ${installed.serviceFilePath}`); + }, + ), + ), + ) + .addCommand( + new Command("uninstall") + .description("Uninstall connector autostart service") + .argument("", "Local agent name") + .option( + "--platform ", + "Service platform: auto | launchd | systemd", + ) + .action( + withErrorHandling( + "connector service uninstall", + async ( + agentNameInput: string, + commandOptions: ConnectorServiceUninstallCommandOptions, + ) => { + const agentName = assertValidAgentName(agentNameInput); + const uninstalled = await uninstallConnectorServiceForAgent( + agentName, + { + platform: commandOptions.platform, + }, + dependencies, + ); + + writeStdoutLine( + `Connector service uninstalled (${uninstalled.platform}): ${uninstalled.serviceName}`, + ); + writeStdoutLine( + `Service file removed: ${uninstalled.serviceFilePath}`, + ); + }, + ), + ), + ), + ); + + logger.debug("cli.connector.command_registered", { + command: "connector", + }); + + return connector; +} diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 0535877..fce4260 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -50,6 +50,14 @@ describe("cli", () => { expect(hasApiKeyCommand).toBe(true); }); + it("registers the connector command", () => { + const hasConnectorCommand = createProgram() + .commands.map((command) => command.name()) + .includes("connector"); + + expect(hasConnectorCommand).toBe(true); + }); + it("registers the openclaw command", () => { const hasOpenclawCommand = createProgram() .commands.map((command) => command.name()) diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index c2724df..4b7f2f0 100644 --- a/apps/cli/src/index.ts +++ 
b/apps/cli/src/index.ts @@ -3,6 +3,7 @@ import { createAdminCommand } from "./commands/admin.js"; import { createAgentCommand } from "./commands/agent.js"; import { createApiKeyCommand } from "./commands/api-key.js"; import { createConfigCommand } from "./commands/config.js"; +import { createConnectorCommand } from "./commands/connector.js"; import { createInviteCommand } from "./commands/invite.js"; import { createOpenclawCommand } from "./commands/openclaw.js"; import { createVerifyCommand } from "./commands/verify.js"; @@ -16,6 +17,7 @@ export const createProgram = (): Command => { .addCommand(createAdminCommand()) .addCommand(createAgentCommand()) .addCommand(createApiKeyCommand()) + .addCommand(createConnectorCommand()) .addCommand(createConfigCommand()) .addCommand(createInviteCommand()) .addCommand(createOpenclawCommand()) diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 42e7ef5..193f1d3 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -2,16 +2,11 @@ ## Purpose - Define conventions for the OpenClaw skill package that relays selected payloads to remote Clawdentity peers. -- Keep peer routing config, credential loading, and PoP signing deterministic and testable. +- Keep peer routing config and local connector handoff deterministic and testable. ## Filesystem Contracts - Peer routing map lives at `~/.clawdentity/peers.json` by default. -- Local agent credentials are read from `~/.clawdentity/agents//secret.key` and `~/.clawdentity/agents//ait.jwt`. -- Agent selection order for relay runtime: - - explicit transform override (`agentName`) - - environment (`CLAWDENTITY_AGENT_NAME`) - - `~/.clawdentity/openclaw-agent-name` - - single local agent auto-detection +- Local relay handoff targets connector runtime endpoint `http://127.0.0.1:19400/v1/outbound` by default (override via connector env/options when needed). 
- Relay setup should preserve local OpenClaw upstream URL in `~/.clawdentity/openclaw-relay.json` for proxy runtime fallback. - Never commit local runtime files (`peers.json`, `secret.key`, `ait.jwt`) to the repository. @@ -21,17 +16,16 @@ - `src/transforms/relay-to-peer.ts` must: - expose default export accepting OpenClaw transform context (`ctx.payload`) - read `payload.peer` - - resolve peer proxy URL from peers config - - sign outbound POST with `signHttpRequest` - - send `Authorization: Claw ` and `X-Claw-*` PoP headers - - remove `peer` from forwarded JSON payload + - resolve peer metadata from peers config to preserve alias semantics + - send outbound payload to local connector endpoint as JSON + - remove `peer` from forwarded application payload and wrap it in connector relay envelope - return `null` after successful relay so local handling is skipped - If `payload.peer` is absent, return payload unchanged. - Keep setup flow CLI-driven via `clawdentity openclaw setup`; do not add `configure-hooks.sh`. ## Maintainability - Keep filesystem path logic centralized; avoid hardcoding `~/.clawdentity` paths across multiple files. -- Keep relay behavior pure except for explicit dependencies (`fetch`, clock, random bytes, filesystem) so tests stay deterministic. +- Keep relay behavior pure except for explicit dependencies (`fetch`, filesystem) so tests stay deterministic. - Prefer schema-first runtime validation over ad-hoc guards. ## Validation Commands diff --git a/apps/openclaw-skill/src/AGENTS.md b/apps/openclaw-skill/src/AGENTS.md index 0a23597..9a1eabd 100644 --- a/apps/openclaw-skill/src/AGENTS.md +++ b/apps/openclaw-skill/src/AGENTS.md @@ -7,17 +7,15 @@ - Keep network relay behavior in `transforms/relay-to-peer.ts`. ## Safety Rules -- Validate external input (`payload`, peer config JSON, selected agent name) before use. 
-- Resolve selected agent in deterministic order: explicit option, env var, `~/.clawdentity/openclaw-agent-name`, then single-agent fallback. -- Do not log or persist secret material from `secret.key` or `ait.jwt`. -- Keep outbound peer requests as JSON POSTs with explicit auth + PoP headers. -- Require outbound relay requests to include `x-claw-agent-access` from local `registry-auth.json`. -- Keep refresh/write operations for `registry-auth.json` lock-protected and atomic. -- On relay `401` auth failures, use shared SDK refresh+retry orchestration and retry exactly once. +- Validate external input (`payload`, peer config JSON) before use. +- Do not log relay payload contents or local connector credential material. +- Keep transform relay path as local connector handoff only (`http://127.0.0.1:19400/v1/outbound` by default), not direct peer HTTP calls. +- Keep peer alias semantics deterministic: validate `payload.peer` against peers config before connector handoff. +- Keep connector failure mapping deterministic (`404` endpoint unavailable, `409` peer alias conflict, network failure generic outage). - Keep peer schema strict (`did`, `proxyUrl`, optional `name`) and reject malformed values early. ## Testing Rules - Use temp directories for filesystem tests; no dependency on real user home state. -- Mock `fetch` in relay tests and assert emitted headers/body. -- Cover both happy path and failure paths (missing peer mapping, missing credentials, invalid config). -- Include refresh-retry tests: first relay `401` -> registry refresh -> one retry success. +- Mock `fetch` in relay tests and assert local connector endpoint + request body contract. +- Cover both happy path and failure paths (missing peer mapping, invalid peers config, connector rejection). +- Include deterministic connector failure tests (endpoint missing, network unavailable). 
diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts index a894267..28ae131 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts @@ -1,13 +1,6 @@ -import { - mkdirSync, - mkdtempSync, - readFileSync, - rmSync, - writeFileSync, -} from "node:fs"; +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; -import { encodeBase64url } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; import relayToPeer, { relayPayloadToPeer } from "./relay-to-peer.js"; @@ -16,14 +9,13 @@ type RelaySandbox = { homeDir: string; }; -function createRelaySandbox(agentName: string): RelaySandbox { +function createRelaySandbox(): RelaySandbox { const homeDir = mkdtempSync( join(tmpdir(), "clawdentity-openclaw-skill-relay-"), ); const clawdentityDir = join(homeDir, ".clawdentity"); - const agentDirectory = join(clawdentityDir, "agents", agentName); - mkdirSync(agentDirectory, { recursive: true }); + mkdirSync(clawdentityDir, { recursive: true }); writeFileSync( join(clawdentityDir, "peers.json"), @@ -43,42 +35,6 @@ function createRelaySandbox(agentName: string): RelaySandbox { "utf8", ); - writeFileSync( - join(agentDirectory, "secret.key"), - encodeBase64url(Uint8Array.from({ length: 32 }, (_, index) => index + 1)), - "utf8", - ); - writeFileSync(join(agentDirectory, "ait.jwt"), "mock.ait.jwt", "utf8"); - writeFileSync( - join(agentDirectory, "identity.json"), - `${JSON.stringify( - { - did: "did:claw:agent:01ALPHA", - name: agentName, - framework: "openclaw", - registryUrl: "https://registry.example.com", - }, - null, - 2, - )}\n`, - "utf8", - ); - writeFileSync( - join(agentDirectory, "registry-auth.json"), - `${JSON.stringify( - { - tokenType: "Bearer", - accessToken: "clw_agt_access_initial", - accessExpiresAt: 
"2030-01-01T00:00:00.000Z", - refreshToken: "clw_rft_refresh_initial", - refreshExpiresAt: "2030-02-01T00:00:00.000Z", - }, - null, - 2, - )}\n`, - "utf8", - ); - return { cleanup: () => { rmSync(homeDir, { recursive: true, force: true }); @@ -87,49 +43,9 @@ function createRelaySandbox(agentName: string): RelaySandbox { }; } -function writeAgentCredentials(homeDir: string, agentName: string): void { - const agentDirectory = join(homeDir, ".clawdentity", "agents", agentName); - mkdirSync(agentDirectory, { recursive: true }); - writeFileSync( - join(agentDirectory, "secret.key"), - encodeBase64url(Uint8Array.from({ length: 32 }, (_, index) => index + 1)), - "utf8", - ); - writeFileSync(join(agentDirectory, "ait.jwt"), "mock.ait.jwt", "utf8"); - writeFileSync( - join(agentDirectory, "identity.json"), - `${JSON.stringify( - { - did: "did:claw:agent:01ALPHA", - name: agentName, - framework: "openclaw", - registryUrl: "https://registry.example.com", - }, - null, - 2, - )}\n`, - "utf8", - ); - writeFileSync( - join(agentDirectory, "registry-auth.json"), - `${JSON.stringify( - { - tokenType: "Bearer", - accessToken: "clw_agt_access_initial", - accessExpiresAt: "2030-01-01T00:00:00.000Z", - refreshToken: "clw_rft_refresh_initial", - refreshExpiresAt: "2030-02-01T00:00:00.000Z", - }, - null, - 2, - )}\n`, - "utf8", - ); -} - describe("relay-to-peer transform", () => { - it("relays peer payloads with Claw authorization and PoP headers", async () => { - const sandbox = createRelaySandbox("alpha-agent"); + it("posts outbound relay payload to local connector endpoint", async () => { + const sandbox = createRelaySandbox(); const fetchMock = vi.fn(async () => new Response("", { status: 202 })); try { @@ -143,13 +59,7 @@ describe("relay-to-peer transform", () => { }, { homeDir: sandbox.homeDir, - agentName: "alpha-agent", fetchImpl: fetchMock as typeof fetch, - clock: () => 1_700_000_000_000, - randomBytesImpl: () => - Uint8Array.from([ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 
14, 15, 16, - ]), }, ); @@ -160,25 +70,54 @@ describe("relay-to-peer transform", () => { string, RequestInit, ]; - expect(url).toBe("https://peer.example.com/hooks/agent?source=skill"); + expect(url).toBe("http://127.0.0.1:19400/v1/outbound"); expect(requestInit.method).toBe("POST"); expect(requestInit.body).toBe( JSON.stringify({ - message: "hello", - metadata: { - turn: 1, + peer: "beta", + peerDid: "did:claw:agent:01BETA", + peerProxyUrl: "https://peer.example.com/hooks/agent?source=skill", + payload: { + message: "hello", + metadata: { + turn: 1, + }, }, }), ); const headers = new Headers(requestInit.headers); - expect(headers.get("authorization")).toBe("Claw mock.ait.jwt"); expect(headers.get("content-type")).toBe("application/json"); - expect(headers.get("x-claw-agent-access")).toBe("clw_agt_access_initial"); - expect(headers.get("x-claw-timestamp")).toBe("1700000000"); - expect(headers.get("x-claw-nonce")).toBe("AQIDBAUGBwgJCgsMDQ4PEA"); - expect(headers.get("x-claw-body-sha256")).toMatch(/^[A-Za-z0-9_-]+$/); - expect(headers.get("x-claw-proof")).toMatch(/^[A-Za-z0-9_-]+$/); + } finally { + sandbox.cleanup(); + } + }); + + it("supports connector endpoint override", async () => { + const sandbox = createRelaySandbox(); + const fetchMock = vi.fn(async () => new Response("", { status: 200 })); + + try { + const result = await relayPayloadToPeer( + { + peer: "beta", + message: "hello", + }, + { + connectorBaseUrl: "http://127.0.0.1:19555", + connectorPath: "/relay/outbound", + homeDir: sandbox.homeDir, + fetchImpl: fetchMock as typeof fetch, + }, + ); + + expect(result).toBeNull(); + expect(fetchMock).toHaveBeenCalledWith( + "http://127.0.0.1:19555/relay/outbound", + expect.objectContaining({ + method: "POST", + }), + ); } finally { sandbox.cleanup(); } @@ -200,7 +139,7 @@ describe("relay-to-peer transform", () => { }); it("throws when the peer alias is unknown", async () => { - const sandbox = createRelaySandbox("alpha-agent"); + const sandbox = 
createRelaySandbox(); try { await expect( @@ -211,7 +150,6 @@ describe("relay-to-peer transform", () => { }, { homeDir: sandbox.homeDir, - agentName: "alpha-agent", fetchImpl: vi.fn( async () => new Response("", { status: 200 }), ) as typeof fetch, @@ -223,42 +161,9 @@ describe("relay-to-peer transform", () => { } }); - it("uses ~/.clawdentity/openclaw-agent-name when env is missing", async () => { - const sandbox = createRelaySandbox("alpha-agent"); - const previousAgentName = process.env.CLAWDENTITY_AGENT_NAME; - delete process.env.CLAWDENTITY_AGENT_NAME; - writeFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-agent-name"), - "alpha-agent\n", - "utf8", - ); - - try { - const result = await relayPayloadToPeer( - { - peer: "beta", - message: "hello", - }, - { - homeDir: sandbox.homeDir, - fetchImpl: vi.fn( - async () => new Response("", { status: 200 }), - ) as typeof fetch, - }, - ); - - expect(result).toBeNull(); - } finally { - process.env.CLAWDENTITY_AGENT_NAME = previousAgentName; - sandbox.cleanup(); - } - }); - - it("throws when multiple local agents exist without selection", async () => { - const sandbox = createRelaySandbox("alpha-agent"); - const previousAgentName = process.env.CLAWDENTITY_AGENT_NAME; - delete process.env.CLAWDENTITY_AGENT_NAME; - writeAgentCredentials(sandbox.homeDir, "gamma-agent"); + it("maps connector 404 response to deterministic error", async () => { + const sandbox = createRelaySandbox(); + const fetchMock = vi.fn(async () => new Response("", { status: 404 })); try { await expect( @@ -269,87 +174,34 @@ describe("relay-to-peer transform", () => { }, { homeDir: sandbox.homeDir, - fetchImpl: vi.fn( - async () => new Response("", { status: 200 }), - ) as typeof fetch, + fetchImpl: fetchMock as typeof fetch, }, ), - ).rejects.toThrow("Multiple local agents found"); + ).rejects.toThrow("Local connector outbound endpoint is unavailable"); } finally { - process.env.CLAWDENTITY_AGENT_NAME = previousAgentName; sandbox.cleanup(); } 
}); - it("refreshes auth and retries once when peer returns 401", async () => { - const sandbox = createRelaySandbox("alpha-agent"); - const fetchMock = vi.fn(async (input: unknown, init?: RequestInit) => { - const url = - typeof input === "string" - ? input - : input instanceof URL - ? input.toString() - : ""; - if (url === "https://peer.example.com/hooks/agent?source=skill") { - const headers = new Headers(init?.headers); - const accessToken = headers.get("x-claw-agent-access"); - if (accessToken === "clw_agt_access_initial") { - return new Response("", { status: 401 }); - } - if (accessToken === "clw_agt_access_refreshed") { - return new Response("", { status: 202 }); - } - } - - if (url === "https://registry.example.com/v1/agents/auth/refresh") { - return new Response( - JSON.stringify({ - agentAuth: { - tokenType: "Bearer", - accessToken: "clw_agt_access_refreshed", - accessExpiresAt: "2030-03-01T00:00:00.000Z", - refreshToken: "clw_rft_refresh_refreshed", - refreshExpiresAt: "2030-04-01T00:00:00.000Z", - }, - }), - { status: 200 }, - ); - } - - return new Response("not found", { status: 404 }); + it("maps connector network failures to deterministic error", async () => { + const sandbox = createRelaySandbox(); + const fetchMock = vi.fn(async () => { + throw new Error("connection refused"); }); try { - const result = await relayPayloadToPeer( - { - peer: "beta", - message: "retry me", - }, - { - homeDir: sandbox.homeDir, - agentName: "alpha-agent", - fetchImpl: fetchMock as typeof fetch, - }, - ); - - expect(result).toBeNull(); - expect(fetchMock).toHaveBeenCalledTimes(3); - const registryAuth = JSON.parse( - String( - readFileSync( - join( - sandbox.homeDir, - ".clawdentity", - "agents", - "alpha-agent", - "registry-auth.json", - ), - "utf8", - ), + await expect( + relayPayloadToPeer( + { + peer: "beta", + message: "hello", + }, + { + homeDir: sandbox.homeDir, + fetchImpl: fetchMock as typeof fetch, + }, ), - ) as { accessToken: string; refreshToken: string }; 
- expect(registryAuth.accessToken).toBe("clw_agt_access_refreshed"); - expect(registryAuth.refreshToken).toBe("clw_rft_refresh_refreshed"); + ).rejects.toThrow("Local connector outbound relay request failed"); } finally { sandbox.cleanup(); } diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.ts index 8b69bdd..3b0f5e5 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.ts @@ -1,60 +1,32 @@ -import { randomBytes } from "node:crypto"; -import { readdir, readFile } from "node:fs/promises"; -import { homedir } from "node:os"; -import { join } from "node:path"; -import { decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; -import { - type AgentAuthBundle, - AppError, - executeWithAgentAuthRefreshRetry, - refreshAgentAuthWithClawProof, - signHttpRequest, -} from "@clawdentity/sdk"; import { loadPeersConfig, type PeersConfigPathOptions, } from "./peers-config.js"; -import { - readAgentRegistryAuth, - withAgentRegistryAuthLock, - writeAgentRegistryAuthAtomic, -} from "./registry-auth.js"; - -const CLAWDENTITY_DIR = ".clawdentity"; -const AGENTS_DIR = "agents"; -const SECRET_KEY_FILENAME = "secret.key"; -const AIT_FILENAME = "ait.jwt"; -const IDENTITY_FILENAME = "identity.json"; -const AGENT_NAME_ENV = "CLAWDENTITY_AGENT_NAME"; -const OPENCLAW_AGENT_NAME_FILENAME = "openclaw-agent-name"; -const NONCE_SIZE = 16; -const AGENT_ACCESS_HEADER = "x-claw-agent-access"; - -const textEncoder = new TextEncoder(); + +const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; +const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; export type RelayToPeerOptions = PeersConfigPathOptions & { - agentName?: string; + connectorBaseUrl?: string; + connectorPath?: string; fetchImpl?: typeof fetch; - clock?: () => number; - randomBytesImpl?: (size: number) => Uint8Array; }; export type RelayTransformContext = { payload?: unknown; }; +type 
ConnectorRelayRequest = { + payload: Record; + peer: string; + peerDid: string; + peerProxyUrl: string; +}; + function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } -function getErrorCode(error: unknown): string | undefined { - if (!isRecord(error)) { - return undefined; - } - - return typeof error.code === "string" ? error.code : undefined; -} - function parseRequiredString(value: unknown): string { if (typeof value !== "string") { throw new Error("Input value must be a string"); @@ -68,28 +40,18 @@ function parseRequiredString(value: unknown): string { return trimmed; } -function parseIdentityRegistryUrl( - payload: unknown, - options: { agentName: string }, -): string { - if (!isRecord(payload) || typeof payload.registryUrl !== "string") { - throw new Error( - `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)`, - ); - } +function removePeerField( + payload: Record, +): Record { + const outbound: Record = {}; - const registryUrl = payload.registryUrl.trim(); - if (registryUrl.length === 0) { - throw new Error( - `Agent "${options.agentName}" has invalid ${IDENTITY_FILENAME} (missing registryUrl)`, - ); + for (const [key, value] of Object.entries(payload)) { + if (key !== "peer") { + outbound[key] = value; + } } - return registryUrl; -} - -function resolvePathWithQuery(url: URL): string { - return `${url.pathname}${url.search}`; + return outbound; } function resolveRelayFetch(fetchImpl?: typeof fetch): typeof fetch { @@ -101,174 +63,91 @@ function resolveRelayFetch(fetchImpl?: typeof fetch): typeof fetch { return resolved; } -async function tryReadTrimmedFile( - filePath: string, - _label: string, -): Promise { - let raw: string; - +function parseConnectorBaseUrl(value: string): string { + let parsed: URL; try { - raw = await readFile(filePath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return undefined; - } + parsed = new URL(value); + } catch { + 
throw new Error("Connector base URL is invalid"); + } - throw error; + if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { + throw new Error("Connector base URL is invalid"); } - const trimmed = raw.trim(); - if (trimmed.length === 0) { - throw new Error("Required file content is empty"); + if ( + parsed.pathname === "/" && + parsed.search.length === 0 && + parsed.hash.length === 0 + ) { + return parsed.origin; } - return trimmed; + return parsed.toString(); } -async function readTrimmedFile( - filePath: string, - label: string, -): Promise { - const content = await tryReadTrimmedFile(filePath, label); - if (content === undefined) { - throw new Error("Required file is missing"); +function normalizeConnectorPath(value: string): string { + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error("Connector outbound path is invalid"); } - return content; + return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; } -async function resolveAgentName(input: { - overrideName?: string; - homeDir: string; -}): Promise { - const overridden = input.overrideName?.trim(); - if (overridden) { - return overridden; - } +function resolveConnectorEndpoint(options: RelayToPeerOptions): string { + const baseUrlInput = + options.connectorBaseUrl ?? + process.env.CLAWDENTITY_CONNECTOR_BASE_URL ?? + DEFAULT_CONNECTOR_BASE_URL; + const pathInput = + options.connectorPath ?? + process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH ?? 
+ DEFAULT_CONNECTOR_OUTBOUND_PATH; - const envAgentName = process.env[AGENT_NAME_ENV]?.trim(); - if (envAgentName) { - return envAgentName; - } + const baseUrl = parseConnectorBaseUrl(baseUrlInput.trim()); + const path = normalizeConnectorPath(pathInput.trim()); - const selectedAgentPath = join( - input.homeDir, - CLAWDENTITY_DIR, - OPENCLAW_AGENT_NAME_FILENAME, - ); - const selectedAgentName = await tryReadTrimmedFile( - selectedAgentPath, - OPENCLAW_AGENT_NAME_FILENAME, - ); - if (selectedAgentName) { - return selectedAgentName; - } - - const agentsDirectory = join(input.homeDir, CLAWDENTITY_DIR, AGENTS_DIR); - let entries: Array<{ isDirectory: () => boolean; name: string }>; - try { - entries = (await readdir(agentsDirectory, { - withFileTypes: true, - })) as Array<{ isDirectory: () => boolean; name: string }>; - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - throw new Error("No local agents found. Select one before relay setup."); - } + return new URL(path, baseUrl).toString(); +} - throw error; +function mapConnectorFailure(status: number): Error { + if (status === 404) { + return new Error("Local connector outbound endpoint is unavailable"); } - const agentNames = entries - .filter((entry) => entry.isDirectory()) - .map((entry) => entry.name) - .sort(); - - if (agentNames.length === 1) { - return agentNames[0]; + if (status === 409) { + return new Error("Peer alias is not configured"); } - if (agentNames.length === 0) { - throw new Error("No local agents found. Select one before relay setup."); + if (status === 400 || status === 422) { + return new Error("Local connector rejected outbound relay payload"); } - throw new Error( - "Multiple local agents found. 
Configure a selected relay agent first.", - ); + return new Error("Local connector outbound relay request failed"); } -async function readAgentCredentials(input: { - agentName: string; - homeDir: string; -}): Promise<{ ait: string; secretKey: Uint8Array; registryUrl: string }> { - const agentDir = join( - input.homeDir, - CLAWDENTITY_DIR, - AGENTS_DIR, - input.agentName, - ); - const secretPath = join(agentDir, SECRET_KEY_FILENAME); - const aitPath = join(agentDir, AIT_FILENAME); - const identityPath = join(agentDir, IDENTITY_FILENAME); - - const [encodedSecret, ait, rawIdentity] = await Promise.all([ - readTrimmedFile(secretPath, SECRET_KEY_FILENAME), - readTrimmedFile(aitPath, AIT_FILENAME), - readTrimmedFile(identityPath, IDENTITY_FILENAME), - ]); - - let secretKey: Uint8Array; +async function postToConnector( + endpoint: string, + payload: ConnectorRelayRequest, + fetchImpl: typeof fetch, +): Promise { + let response: Response; try { - secretKey = decodeBase64url(encodedSecret); + response = await fetchImpl(endpoint, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(payload), + }); } catch { - throw new Error("Agent secret key is invalid"); + throw new Error("Local connector outbound relay request failed"); } - let parsedIdentity: unknown; - try { - parsedIdentity = JSON.parse(rawIdentity); - } catch { - throw new Error( - `Agent "${input.agentName}" has invalid ${IDENTITY_FILENAME} (must be valid JSON)`, - ); + if (!response.ok) { + throw mapConnectorFailure(response.status); } - const registryUrl = parseIdentityRegistryUrl(parsedIdentity, { - agentName: input.agentName, - }); - - return { - ait, - secretKey, - registryUrl, - }; -} - -function removePeerField( - payload: Record, -): Record { - const outbound: Record = {}; - - for (const [key, value] of Object.entries(payload)) { - if (key !== "peer") { - outbound[key] = value; - } - } - - return outbound; -} - -function isRetryableRelayAuthError(error: unknown): 
boolean { - return ( - error instanceof AppError && - error.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && - error.status === 401 - ); -} - -function buildRefreshSingleFlightKey(input: { - homeDir: string; - agentName: string; -}): string { - return `${input.homeDir}:${input.agentName}`; } export async function relayPayloadToPeer( @@ -292,129 +171,21 @@ export async function relayPayloadToPeer( throw new Error("Peer alias is not configured"); } - const home = - typeof options.homeDir === "string" && options.homeDir.trim().length > 0 - ? options.homeDir.trim() - : homedir(); - const agentName = await resolveAgentName({ - overrideName: options.agentName, - homeDir: home, - }); - const { ait, secretKey, registryUrl } = await readAgentCredentials({ - agentName, - homeDir: home, - }); - - const outboundPayload = removePeerField(payload); - const body = JSON.stringify(outboundPayload); - const peerUrl = new URL(peerEntry.proxyUrl); + const connectorEndpoint = resolveConnectorEndpoint(options); const fetchImpl = resolveRelayFetch(options.fetchImpl); - const refreshSingleFlightKey = buildRefreshSingleFlightKey({ - homeDir: home, - agentName, - }); - - const sendRelayRequest = async (auth: AgentAuthBundle): Promise => { - const unixSeconds = Math.floor( - (options.clock ?? Date.now)() / 1000, - ).toString(); - const nonce = encodeBase64url( - (options.randomBytesImpl ?? 
randomBytes)(NONCE_SIZE), - ); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: resolvePathWithQuery(peerUrl), - timestamp: unixSeconds, - nonce, - body: textEncoder.encode(body), - secretKey, - }); - - return fetchImpl(peerUrl.toString(), { - method: "POST", - headers: { - Authorization: `Claw ${ait}`, - "Content-Type": "application/json", - [AGENT_ACCESS_HEADER]: auth.accessToken, - ...signed.headers, - }, - body, - }); - }; - - const performRelay = async (auth: AgentAuthBundle): Promise => { - const response = await sendRelayRequest(auth); - if (!response.ok) { - if (response.status === 401) { - throw new AppError({ - code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", - message: "Peer relay rejected agent auth credentials", - status: 401, - expose: true, - }); - } - - throw new Error("Peer relay request failed"); - } + const outboundPayload = removePeerField(payload); + await postToConnector( + connectorEndpoint, + { + peer: peerAlias, + peerDid: peerEntry.did, + peerProxyUrl: peerEntry.proxyUrl, + payload: outboundPayload, + }, + fetchImpl, + ); - return null; - }; - - const result = await executeWithAgentAuthRefreshRetry({ - key: refreshSingleFlightKey, - shouldRetry: isRetryableRelayAuthError, - getAuth: async () => - readAgentRegistryAuth({ - homeDir: home, - agentName, - }), - persistAuth: async () => {}, - refreshAuth: async (currentAuth) => - withAgentRegistryAuthLock({ - homeDir: home, - agentName, - operation: async () => { - const latestAuth = await readAgentRegistryAuth({ - homeDir: home, - agentName, - }); - if (latestAuth.refreshToken !== currentAuth.refreshToken) { - return latestAuth; - } - - let refreshedAuth: AgentAuthBundle; - try { - refreshedAuth = await refreshAgentAuthWithClawProof({ - registryUrl, - ait, - secretKey, - refreshToken: latestAuth.refreshToken, - fetchImpl, - }); - } catch (error) { - const afterFailureAuth = await readAgentRegistryAuth({ - homeDir: home, - agentName, - }); - if 
(afterFailureAuth.refreshToken !== latestAuth.refreshToken) { - return afterFailureAuth; - } - - throw error; - } - await writeAgentRegistryAuthAtomic({ - homeDir: home, - agentName, - auth: refreshedAuth, - }); - - return refreshedAuth; - }, - }), - perform: performRelay, - }); - - return result; + return null; } export default async function relayToPeer( diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 784368b..16183c0 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -1,18 +1,13 @@ # Proxy local/development template # For local Wrangler development, copy values into .dev.vars. -# For cloud deploys, set OPENCLAW_HOOK_TOKEN as a Wrangler secret: -# wrangler secret put OPENCLAW_HOOK_TOKEN --env -# For cloud deploy scripts, export a reachable upstream first: -# export OPENCLAW_BASE_URL=https://openclaw-.example.com - -# Required -OPENCLAW_HOOK_TOKEN=replace-with-openclaw-hook-token +# OpenClaw vars are optional for relay-mode proxy operation. +# Keep them only for backwards compatibility with older local setups. +# OPENCLAW_HOOK_TOKEN=optional-openclaw-hook-token +# OPENCLAW_BASE_URL=http://127.0.0.1:18789 # Runtime vars ENVIRONMENT=local REGISTRY_URL=https://dev.api.clawdentity.com -# Optional when ~/.clawdentity/openclaw-relay.json exists from `clawdentity openclaw setup` -OPENCLAW_BASE_URL=http://127.0.0.1:18789 INJECT_IDENTITY_INTO_MESSAGE=false # Optional policy/runtime overrides diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 846b663..5bad736 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -12,11 +12,9 @@ - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). 
- Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. -- For remote Worker deployments (`development`/`production`), require `OPENCLAW_BASE_URL` to be an externally reachable non-loopback URL; never rely on local loopback defaults. - Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-off (`false`); only enable when operators need webhook `message` augmentation with verified identity context. -- Require hook token input via env (`OPENCLAW_HOOK_TOKEN` or OpenClaw-compatible alias `OPENCLAW_HOOKS_TOKEN`) and never log the token value. -- For Worker deploys, set hook tokens via Wrangler secrets for remote environments (`wrangler secret put ... --env `); use CLI `--var` overrides only for local dev runs. -- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (required token, registry URL, base URL, and optional policy/rate-limit vars). +- Keep OpenClaw env inputs (`OPENCLAW_BASE_URL`, `OPENCLAW_HOOK_TOKEN` / `OPENCLAW_HOOKS_TOKEN`) backward-compatible but optional for relay-mode startup. +- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw compatibility vars, and policy/rate-limit vars). - Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) @@ -26,6 +24,10 @@ - empty/whitespace values (and null-like values) in inherited env must not block `.env` or config-file fallbacks - dotenv merge semantics must match parser semantics (non-empty value wins). - If hook token env vars are missing, resolve fallback token from `hooks.token` in `openclaw.json` (`OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH`, default `$OPENCLAW_STATE_DIR/openclaw.json`). 
+- Route relay sessions via Durable Objects: + - `GET /v1/relay/connect` keys connector sessions by authenticated caller agent DID. + - `POST /hooks/agent` keys recipient delivery by `x-claw-recipient-agent-did`. + - Do not route sessions via `OWNER_AGENT_DID`. - Keep env alias support stable for operator UX: - `LISTEN_PORT` or `PORT` - `OPENCLAW_HOOK_TOKEN` or `OPENCLAW_HOOKS_TOKEN` @@ -52,6 +54,8 @@ - Return `403` when requests are verified but agent DID is not allowlisted. - Return `429` with `PROXY_RATE_LIMIT_EXCEEDED` when an allowlisted verified agent DID exceeds its request budget within the configured window. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. +- Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. +- Keep `/v1/relay/connect` auth strict with verified Claw auth + PoP headers, but do not require `x-claw-agent-access`. ## CRL Policy - Keep CRL timing defaults explicit in `src/config.ts` (`5m` refresh, `15m` max age) unless explicitly overridden. @@ -66,6 +70,7 @@ ## Server Runtime - Keep `src/server.ts` as the HTTP app/runtime entry. - Keep `src/worker.ts` as the Cloudflare Worker fetch entry and `src/node-server.ts` as the Node compatibility entry. +- Keep `AgentRelaySession` exported from `src/worker.ts` and bound/migrated in `wrangler.jsonc`. - Keep middleware order stable: request context -> request logging -> auth verification -> agent DID rate limit -> error handler. - Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200. - Log startup and request completion with structured JSON logs; never log secrets or tokens. 
diff --git a/apps/proxy/package.json b/apps/proxy/package.json index d1ea7c9..11ee81e 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -25,11 +25,11 @@ }, "scripts": { "build": "tsup", - "deploy:dev": "wrangler deploy --env development --var OPENCLAW_BASE_URL:${OPENCLAW_BASE_URL:?set OPENCLAW_BASE_URL}", - "deploy:production": "wrangler deploy --env production --var OPENCLAW_BASE_URL:${OPENCLAW_BASE_URL:?set OPENCLAW_BASE_URL}", - "dev": "wrangler dev --env local --var OPENCLAW_HOOK_TOKEN:dev-proxy-hook-token", - "dev:development": "wrangler dev --env development --var OPENCLAW_HOOK_TOKEN:dev-proxy-hook-token", - "dev:fresh": "wrangler dev --env local --name clawdentity-proxy-local-fresh --port 8789 --persist-to .wrangler/state-fresh --var OPENCLAW_HOOK_TOKEN:fresh-proxy-hook-token", + "deploy:dev": "wrangler deploy --env development", + "deploy:production": "wrangler deploy --env production", + "dev": "wrangler dev --env local", + "dev:development": "wrangler dev --env development", + "dev:fresh": "wrangler dev --env local --name clawdentity-proxy-local-fresh --port 8789 --persist-to .wrangler/state-fresh", "format": "biome format .", "lint": "biome lint .", "start": "node ./dist/bin.js", @@ -38,6 +38,7 @@ }, "dependencies": { "@hono/node-server": "^1.19.6", + "@clawdentity/connector": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", "dotenv": "^17.2.3", diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 900a57a..dffe1b8 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -11,7 +11,7 @@ - Keep per-agent DID throttling in `agent-rate-limit-middleware.ts`; do not blend rate-limit state or counters into `auth-middleware.ts`. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. 
- Keep OpenClaw base URL fallback logic in `config.ts`: `OPENCLAW_BASE_URL` env -> `~/.clawdentity/openclaw-relay.json` -> default. -- Keep Worker runtime guardrails in `worker.ts`: block loopback/default OpenClaw upstream URLs for `development`/`production` so cloud deploys fail fast with config errors. +- Keep OpenClaw compatibility vars optional for relay-mode runtime; never require `OPENCLAW_BASE_URL` or hook token for cloud relay startup. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. @@ -26,16 +26,20 @@ - Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. - Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. +- Keep relay websocket connect handling isolated in `relay-connect-route.ts`; `server.ts` should only compose middleware/routes. +- Keep DO runtime behavior in `agent-relay-session.ts` (websocket accept, heartbeat alarm, connector delivery RPC). - Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. +- Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. 
+- Keep `/v1/relay/connect` keyed by authenticated connector DID from auth middleware, and reject non-websocket requests with clear client errors. - Keep rate-limit failure semantics stable: verified requests over budget map to `429` with code `PROXY_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.rate_limit.exceeded`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. - Keep `/hooks/agent` input contract strict: require `Content-Type: application/json` and reject malformed JSON with explicit client errors. - Keep agent-access validation centralized in `auth-middleware.ts` and call registry `POST /v1/agents/auth/validate`; treat non-`204` non-`401` responses as dependency failures (`503`). -- Keep `/hooks/agent` upstream failure mapping explicit: timeout errors -> `504`, network errors -> `502`, and never log `openclawHookToken` or request payload. +- Keep relay delivery failure mapping explicit for `/hooks/agent`: DO delivery/RPC failures -> `502`, unavailable DO namespace -> `503`. - Keep identity message injection optional and default-off (`INJECT_IDENTITY_INTO_MESSAGE=false`) so forwarding behavior is unchanged unless explicitly enabled. - Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. - When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. 
diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index 9d80148..412fec3 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -28,6 +28,13 @@ vi.mock("./auth-middleware.js", async () => { }; }); +import { RELAY_RECIPIENT_AGENT_DID_HEADER } from "./agent-hook-route.js"; +import type { + AgentRelaySessionNamespace, + AgentRelaySessionStub, + RelayDeliveryInput, + RelayDeliveryResult, +} from "./agent-relay-session.js"; import { parseProxyConfig } from "./config.js"; import { createProxyApp } from "./server.js"; @@ -45,142 +52,142 @@ function hasDisallowedControlCharacter(value: string): boolean { return false; } +function createRelayHarness(input?: { + deliverResult?: RelayDeliveryResult; + throwOnDeliver?: boolean; +}) { + const deliverResult = input?.deliverResult ?? { + delivered: true, + connectedSockets: 1, + }; + const receivedInputs: RelayDeliveryInput[] = []; + + const fetchRpc = vi.fn(async (request: Request) => { + if (request.method !== "POST") { + return new Response("not found", { status: 404 }); + } + + const relayInput = (await request.json()) as RelayDeliveryInput; + receivedInputs.push(relayInput); + + if (input?.throwOnDeliver) { + return new Response("delivery failed", { status: 502 }); + } + + return Response.json(deliverResult, { status: 202 }); + }); + + const relaySession: AgentRelaySessionStub = { + fetch: fetchRpc, + }; + + const durableObjectId = { + toString: () => "relay-session-id", + } as unknown as DurableObjectId; + + const idFromName = vi.fn((_name: string) => durableObjectId); + const get = vi.fn((_id: DurableObjectId) => relaySession); + + return { + idFromName, + get, + fetchRpc, + receivedInputs, + namespace: { + idFromName, + get, + } satisfies AgentRelaySessionNamespace, + }; +} + function createHookRouteApp(input: { - fetchImpl: typeof fetch; - timeoutMs?: number; - openclawBaseUrl?: string; + relayNamespace?: 
AgentRelaySessionNamespace; injectIdentityIntoMessage?: boolean; + now?: () => Date; }) { return createProxyApp({ config: parseProxyConfig({ - OPENCLAW_BASE_URL: input.openclawBaseUrl ?? "http://openclaw.local", - OPENCLAW_HOOK_TOKEN: "openclaw-secret", INJECT_IDENTITY_INTO_MESSAGE: input.injectIdentityIntoMessage, }), hooks: { - fetchImpl: input.fetchImpl, - timeoutMs: input.timeoutMs, + now: input.now, + resolveSessionNamespace: () => input.relayNamespace, }, }); } -function resolveRequestUrl(input: unknown): string { - if (typeof input === "string") { - return input; - } - - if (input instanceof URL) { - return input.toString(); - } - - if ( - typeof input === "object" && - input !== null && - "url" in input && - typeof (input as { url?: unknown }).url === "string" - ) { - return (input as { url: string }).url; - } - - return ""; -} - describe("POST /hooks/agent", () => { - it("forwards JSON payload and returns upstream status/body", async () => { - const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { - return new Response( - JSON.stringify({ - accepted: true, - echoedBody: init?.body, - }), - { - status: 202, - headers: { - "content-type": "application/json", - }, - }, - ); - }); + it("delivers hook payload to recipient relay session", async () => { + const relayHarness = createRelayHarness(); + const now = new Date("2026-02-16T20:00:00.000Z"); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, + now: () => now, }); const response = await app.request("/hooks/agent", { method: "POST", headers: { "content-type": "application/json; charset=utf-8", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ event: "agent.started", }), }); - expect(fetchMock).toHaveBeenCalledTimes(1); - const [calledInput, calledInit] = fetchMock.mock.calls[0] as [ - unknown, - RequestInit | undefined, - ]; - const calledHeaders = 
(calledInit?.headers ?? {}) as Record; - - expect(resolveRequestUrl(calledInput)).toBe( - "http://openclaw.local/hooks/agent", + expect(response.status).toBe(202); + expect(relayHarness.idFromName).toHaveBeenCalledWith( + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", ); - expect(calledInit?.method).toBe("POST"); - expect(calledInit?.body).toBe(JSON.stringify({ event: "agent.started" })); - expect(calledHeaders["content-type"]).toBe("application/json"); - expect(calledHeaders["x-openclaw-token"]).toBe("openclaw-secret"); - expect(typeof calledHeaders["x-request-id"]).toBe("string"); - expect(calledHeaders["x-request-id"].length).toBeGreaterThan(0); + expect(relayHarness.get).toHaveBeenCalledTimes(1); + expect(relayHarness.fetchRpc).toHaveBeenCalledTimes(1); + const [relayInput] = relayHarness.receivedInputs; + expect(relayInput.senderAgentDid).toBe("did:claw:agent:alpha"); + expect(relayInput.recipientAgentDid).toBe( + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + ); + expect(relayInput.payload).toEqual({ event: "agent.started" }); + expect(typeof relayInput.requestId).toBe("string"); + expect(relayInput.requestId.length).toBeGreaterThan(0); - expect(response.status).toBe(202); - expect(response.headers.get("content-type")).toContain("application/json"); const body = (await response.json()) as { accepted: boolean; - echoedBody: unknown; + delivered: boolean; + connectedSockets: number; }; - expect(body.accepted).toBe(true); - expect(body.echoedBody).toBe(JSON.stringify({ event: "agent.started" })); + expect(body).toEqual({ + accepted: true, + delivered: true, + connectedSockets: 1, + }); }); - it("preserves OpenClaw base path prefixes when building hook URL", async () => { - let forwardedUrl = ""; - const fetchMock = vi.fn(async (input: unknown) => { - forwardedUrl = resolveRequestUrl(input); - return new Response("{}", { status: 202 }); - }); + it("delivers through DO fetch RPC", async () => { + const relayHarness = createRelayHarness(); const app = 
createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, - openclawBaseUrl: "http://openclaw.local/api", + relayNamespace: relayHarness.namespace, }); - await app.request("/hooks/agent", { + const response = await app.request("/hooks/agent", { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ event: "agent.started" }), }); - expect(fetchMock).toHaveBeenCalledTimes(1); - expect(forwardedUrl).toBe("http://openclaw.local/api/hooks/agent"); + expect(response.status).toBe(202); + expect(relayHarness.fetchRpc).toHaveBeenCalledTimes(1); }); it("prepends sanitized identity block when message injection is enabled", async () => { - const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { - return new Response( - JSON.stringify({ - echoedBody: init?.body, - }), - { - status: 202, - headers: { - "content-type": "application/json", - }, - }, - ); - }); + const relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, injectIdentityIntoMessage: true, }); @@ -188,6 +195,8 @@ describe("POST /hooks/agent", () => { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ message: "Summarize this payload", @@ -195,15 +204,11 @@ describe("POST /hooks/agent", () => { }); expect(response.status).toBe(202); - expect(fetchMock).toHaveBeenCalledTimes(1); - - const [, calledInit] = fetchMock.mock.calls[0] as [ - unknown, - RequestInit | undefined, - ]; - const forwardedPayload = JSON.parse(String(calledInit?.body)) as { + const [relayInput] = relayHarness.receivedInputs; + const forwardedPayload = relayInput.payload as { message: string; }; + expect(forwardedPayload.message).toBe( [ "[Clawdentity Identity]", @@ -218,11 
+223,9 @@ describe("POST /hooks/agent", () => { }); it("keeps payload unchanged when message injection is enabled but auth is missing", async () => { - const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { - return new Response(String(init?.body), { status: 202 }); - }); + const relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, injectIdentityIntoMessage: true, }); const rawPayload = { @@ -234,25 +237,20 @@ describe("POST /hooks/agent", () => { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", "x-test-missing-auth": "1", }, body: JSON.stringify(rawPayload), }); - expect(response.status).toBe(202); - const [, calledInit] = fetchMock.mock.calls[0] as [ - unknown, - RequestInit | undefined, - ]; - expect(String(calledInit?.body)).toBe(JSON.stringify(rawPayload)); + expect(response.status).toBe(500); }); it("keeps payload unchanged when message is missing or non-string", async () => { - const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { - return new Response(String(init?.body), { status: 202 }); - }); + const relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, injectIdentityIntoMessage: true, }); @@ -260,6 +258,8 @@ describe("POST /hooks/agent", () => { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ event: "agent.started", @@ -270,38 +270,24 @@ describe("POST /hooks/agent", () => { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ message: { nested: true }, }), }); - 
const [, firstInit] = fetchMock.mock.calls[0] as [unknown, RequestInit]; - const [, secondInit] = fetchMock.mock.calls[1] as [unknown, RequestInit]; - expect(String(firstInit.body)).toBe( - JSON.stringify({ event: "agent.started" }), - ); - expect(String(secondInit.body)).toBe( - JSON.stringify({ message: { nested: true } }), - ); + const [firstRelayInput, secondRelayInput] = relayHarness.receivedInputs; + + expect(firstRelayInput.payload).toEqual({ event: "agent.started" }); + expect(secondRelayInput.payload).toEqual({ message: { nested: true } }); }); it("sanitizes identity fields and enforces length limits", async () => { - const fetchMock = vi.fn(async (_input: unknown, init?: RequestInit) => { - return new Response( - JSON.stringify({ - echoedBody: init?.body, - }), - { - status: 202, - headers: { - "content-type": "application/json", - }, - }, - ); - }); + const relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, injectIdentityIntoMessage: true, }); @@ -309,6 +295,8 @@ describe("POST /hooks/agent", () => { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", "x-test-dirty-auth": "1", }, body: JSON.stringify({ @@ -317,11 +305,9 @@ describe("POST /hooks/agent", () => { }); expect(response.status).toBe(202); - const [, calledInit] = fetchMock.mock.calls[0] as [ - unknown, - RequestInit | undefined, - ]; - const forwardedPayload = JSON.parse(String(calledInit?.body)) as { + const [relayInput] = relayHarness.receivedInputs; + + const forwardedPayload = relayInput.payload as { message: string; }; expect(forwardedPayload.message).toContain("[Clawdentity Identity]"); @@ -337,20 +323,22 @@ describe("POST /hooks/agent", () => { }); it("rejects non-json content types", async () => { - const fetchMock = vi.fn(async () => new Response("{}", { status: 200 })); + const 
relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, }); const response = await app.request("/hooks/agent", { method: "POST", headers: { "content-type": "text/plain", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: "hello", }); - expect(fetchMock).not.toHaveBeenCalled(); + expect(relayHarness.fetchRpc).not.toHaveBeenCalled(); expect(response.status).toBe(415); const body = (await response.json()) as { error: { code: string; message: string; requestId: string }; @@ -361,20 +349,22 @@ describe("POST /hooks/agent", () => { }); it("rejects invalid JSON payloads", async () => { - const fetchMock = vi.fn(async () => new Response("{}", { status: 200 })); + const relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, }); const response = await app.request("/hooks/agent", { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: "{not valid json", }); - expect(fetchMock).not.toHaveBeenCalled(); + expect(relayHarness.fetchRpc).not.toHaveBeenCalled(); expect(response.status).toBe(400); const body = (await response.json()) as { error: { code: string; message: string; requestId: string }; @@ -384,73 +374,109 @@ describe("POST /hooks/agent", () => { expect(typeof body.error.requestId).toBe("string"); }); - it("maps upstream network errors to 502", async () => { - const fetchMock = vi.fn(async () => { - throw new TypeError("fetch failed"); + it("rejects missing recipient DID header", async () => { + const relayHarness = createRelayHarness(); + const app = createHookRouteApp({ + relayNamespace: relayHarness.namespace, }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + 
"content-type": "application/json", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_HOOK_RECIPIENT_REQUIRED"); + }); + + it("rejects invalid recipient DID header", async () => { + const relayHarness = createRelayHarness(); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, + relayNamespace: relayHarness.namespace, }); const response = await app.request("/hooks/agent", { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: "did:claw:human:not-agent", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_HOOK_RECIPIENT_INVALID"); + }); + + it("returns 503 when relay session namespace is unavailable", async () => { + const app = createHookRouteApp({ + relayNamespace: undefined, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_UNAVAILABLE"); + }); + + it("maps relay delivery failures to 502", async () => { + const relayHarness = createRelayHarness({ throwOnDeliver: true }); + const app = createHookRouteApp({ + relayNamespace: relayHarness.namespace, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ event: 
"agent.started" }), }); - expect(fetchMock).toHaveBeenCalledTimes(1); expect(response.status).toBe(502); - const body = (await response.json()) as { - error: { code: string; message: string; requestId: string }; - }; - expect(body.error.code).toBe("PROXY_HOOK_UPSTREAM_UNAVAILABLE"); - expect(body.error.message).toBe("OpenClaw hook upstream request failed"); - expect(typeof body.error.requestId).toBe("string"); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_DELIVERY_FAILED"); }); - it("maps upstream timeout errors to 504", async () => { - const fetchMock = vi.fn( - (_input: unknown, init?: RequestInit): Promise => - new Promise((_resolve, reject) => { - const signal = init?.signal; - if (signal == null) { - reject(new Error("signal is required")); - return; - } - - signal.addEventListener( - "abort", - () => { - const timeoutError = new Error("request aborted"); - timeoutError.name = "AbortError"; - reject(timeoutError); - }, - { once: true }, - ); - }), - ); + it("returns 502 when target connector is offline", async () => { + const relayHarness = createRelayHarness({ + deliverResult: { + delivered: false, + connectedSockets: 0, + }, + }); const app = createHookRouteApp({ - fetchImpl: fetchMock as unknown as typeof fetch, - timeoutMs: 5, + relayNamespace: relayHarness.namespace, }); const response = await app.request("/hooks/agent", { method: "POST", headers: { "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", }, body: JSON.stringify({ event: "agent.started" }), }); - expect(fetchMock).toHaveBeenCalledTimes(1); - expect(response.status).toBe(504); - const body = (await response.json()) as { - error: { code: string; message: string; requestId: string }; - }; - expect(body.error.code).toBe("PROXY_HOOK_UPSTREAM_TIMEOUT"); - expect(body.error.message).toBe("OpenClaw hook upstream request timed out"); - expect(typeof 
body.error.requestId).toBe("string"); + expect(response.status).toBe(502); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_CONNECTOR_OFFLINE"); }); }); diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index 92a18f3..4bb7e7b 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -1,28 +1,40 @@ +import { + parseDid, + RELAY_RECIPIENT_AGENT_DID_HEADER, +} from "@clawdentity/protocol"; import { AppError, type Logger } from "@clawdentity/sdk"; import type { Context } from "hono"; +import { + type AgentRelaySessionNamespace, + deliverToRelaySession, + type RelayDeliveryInput, +} from "./agent-relay-session.js"; import type { ProxyRequestVariables } from "./auth-middleware.js"; -const AGENT_HOOK_PATH = "hooks/agent"; -export const DEFAULT_AGENT_HOOK_TIMEOUT_MS = 10_000; const MAX_AGENT_DID_LENGTH = 160; const MAX_OWNER_DID_LENGTH = 160; const MAX_ISSUER_LENGTH = 200; const MAX_AIT_JTI_LENGTH = 64; +export { RELAY_RECIPIENT_AGENT_DID_HEADER } from "@clawdentity/protocol"; + export type AgentHookRuntimeOptions = { - fetchImpl?: typeof fetch; - timeoutMs?: number; injectIdentityIntoMessage?: boolean; + now?: () => Date; + resolveSessionNamespace?: ( + c: ProxyContext, + ) => AgentRelaySessionNamespace | undefined; }; type CreateAgentHookHandlerOptions = AgentHookRuntimeOptions & { logger: Logger; - openclawBaseUrl: string; - openclawHookToken: string; }; type ProxyContext = Context<{ Variables: ProxyRequestVariables; + Bindings: { + AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; + }; }>; function isJsonContentType(contentTypeHeader: string | undefined): boolean { @@ -34,25 +46,6 @@ function isJsonContentType(contentTypeHeader: string | undefined): boolean { return mediaType.trim().toLowerCase() === "application/json"; } -function toOpenclawHookUrl(openclawBaseUrl: string): string { - const normalizedBase = 
openclawBaseUrl.endsWith("/") - ? openclawBaseUrl - : `${openclawBaseUrl}/`; - return new URL(AGENT_HOOK_PATH, normalizedBase).toString(); -} - -function toErrorName(error: unknown): string { - if (error instanceof Error && error.name.trim().length > 0) { - return error.name; - } - - return "unknown"; -} - -function isAbortError(error: unknown): boolean { - return toErrorName(error) === "AbortError"; -} - function stripControlChars(value: string): string { let result = ""; for (const char of value) { @@ -111,13 +104,58 @@ function injectIdentityBlockIntoPayload( }; } +function parseRecipientAgentDid(c: ProxyContext): string { + const recipientHeader = c.req.header(RELAY_RECIPIENT_AGENT_DID_HEADER); + if ( + typeof recipientHeader !== "string" || + recipientHeader.trim().length === 0 + ) { + throw new AppError({ + code: "PROXY_HOOK_RECIPIENT_REQUIRED", + message: "X-Claw-Recipient-Agent-Did header is required", + status: 400, + expose: true, + }); + } + + const recipientDid = recipientHeader.trim(); + let parsedDid: ReturnType; + try { + parsedDid = parseDid(recipientDid); + } catch { + throw new AppError({ + code: "PROXY_HOOK_RECIPIENT_INVALID", + message: "X-Claw-Recipient-Agent-Did must be a valid agent DID", + status: 400, + expose: true, + }); + } + + if (parsedDid.kind !== "agent") { + throw new AppError({ + code: "PROXY_HOOK_RECIPIENT_INVALID", + message: "X-Claw-Recipient-Agent-Did must be a valid agent DID", + status: 400, + expose: true, + }); + } + + return recipientDid; +} + +function resolveDefaultSessionNamespace( + c: ProxyContext, +): AgentRelaySessionNamespace | undefined { + return c.env.AGENT_RELAY_SESSION; +} + export function createAgentHookHandler( options: CreateAgentHookHandlerOptions, ): (c: ProxyContext) => Promise { - const fetchImpl = options.fetchImpl ?? fetch; - const timeoutMs = options.timeoutMs ?? DEFAULT_AGENT_HOOK_TIMEOUT_MS; const injectIdentityIntoMessage = options.injectIdentityIntoMessage ?? 
false; - const hookUrl = toOpenclawHookUrl(options.openclawBaseUrl); + const now = options.now ?? (() => new Date()); + const resolveSessionNamespace = + options.resolveSessionNamespace ?? resolveDefaultSessionNamespace; return async (c) => { if (!isJsonContentType(c.req.header("content-type"))) { @@ -145,70 +183,84 @@ export function createAgentHookHandler( payload = injectIdentityBlockIntoPayload(payload, c.get("auth")); } + const auth = c.get("auth"); + if (auth === undefined) { + throw new AppError({ + code: "PROXY_HOOK_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const recipientAgentDid = parseRecipientAgentDid(c); + const sessionNamespace = resolveSessionNamespace(c); + if (sessionNamespace === undefined) { + throw new AppError({ + code: "PROXY_RELAY_UNAVAILABLE", + message: "Relay session namespace is unavailable", + status: 503, + }); + } + const requestId = c.get("requestId"); - const startedAt = Date.now(); - const controller = new AbortController(); - let timedOut = false; - const timeoutHandle = setTimeout(() => { - timedOut = true; - controller.abort(); - }, timeoutMs); - - let upstreamResponse: Response; + const relayInput: RelayDeliveryInput = { + requestId, + senderAgentDid: auth.agentDid, + recipientAgentDid, + payload, + }; + + const relaySession = sessionNamespace.get( + sessionNamespace.idFromName(recipientAgentDid), + ); + + let deliveryResult: Awaited>; try { - upstreamResponse = await fetchImpl(hookUrl, { - method: "POST", - headers: { - "content-type": "application/json", - "x-openclaw-token": options.openclawHookToken, - "x-request-id": requestId, - }, - body: JSON.stringify(payload), - signal: controller.signal, - }); + deliveryResult = await deliverToRelaySession(relaySession, relayInput); } catch (error) { - if (timedOut || isAbortError(error)) { - options.logger.warn("proxy.hooks.agent.timeout", { - requestId, - timeoutMs, - }); - throw new AppError({ - code: 
"PROXY_HOOK_UPSTREAM_TIMEOUT", - message: "OpenClaw hook upstream request timed out", - status: 504, - }); - } - - options.logger.warn("proxy.hooks.agent.network_error", { + options.logger.warn("proxy.hooks.agent.relay_delivery_failed", { requestId, - errorName: toErrorName(error), + senderAgentDid: auth.agentDid, + recipientAgentDid, + errorName: error instanceof Error ? error.name : "unknown", }); + throw new AppError({ - code: "PROXY_HOOK_UPSTREAM_UNAVAILABLE", - message: "OpenClaw hook upstream request failed", + code: "PROXY_RELAY_DELIVERY_FAILED", + message: "Relay delivery failed", status: 502, }); - } finally { - clearTimeout(timeoutHandle); } - options.logger.info("proxy.hooks.agent.forwarded", { - requestId, - upstreamStatus: upstreamResponse.status, - durationMs: Date.now() - startedAt, - }); + if (!deliveryResult.delivered) { + options.logger.warn("proxy.hooks.agent.connector_offline", { + requestId, + recipientAgentDid, + }); - const responseBody = await upstreamResponse.text(); - const responseHeaders: Record = {}; - const upstreamContentType = upstreamResponse.headers.get("content-type"); - if (typeof upstreamContentType === "string") { - responseHeaders["content-type"] = upstreamContentType; + throw new AppError({ + code: "PROXY_RELAY_CONNECTOR_OFFLINE", + message: "Target connector is offline", + status: 502, + }); } - return c.body( - responseBody, - upstreamResponse.status as 200, - responseHeaders, + options.logger.info("proxy.hooks.agent.delivered_to_relay", { + requestId, + senderAgentDid: auth.agentDid, + recipientAgentDid, + delivered: deliveryResult.delivered, + connectedSockets: deliveryResult.connectedSockets, + sentAt: now().toISOString(), + }); + + return c.json( + { + accepted: true, + delivered: deliveryResult.delivered, + connectedSockets: deliveryResult.connectedSockets, + }, + 202, ); }; } diff --git a/apps/proxy/src/agent-relay-session.test.ts b/apps/proxy/src/agent-relay-session.test.ts new file mode 100644 index 0000000..6ae1835 
--- /dev/null +++ b/apps/proxy/src/agent-relay-session.test.ts @@ -0,0 +1,268 @@ +import { parseFrame } from "@clawdentity/connector"; +import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { describe, expect, it, vi } from "vitest"; +import { AgentRelaySession } from "./agent-relay-session.js"; + +type MockWebSocket = { + send: ReturnType; + close: ReturnType; +}; + +const SENDER_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7"; +const RECIPIENT_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8"; + +function createMockSocket(): MockWebSocket { + return { + send: vi.fn(), + close: vi.fn(), + }; +} + +function createStateHarness() { + const connectedSockets: WebSocket[] = []; + const storage = { + setAlarm: vi.fn(async (_scheduled: number | Date) => {}), + deleteAlarm: vi.fn(async () => {}), + }; + + const state = { + acceptWebSocket: vi.fn((socket: WebSocket) => { + connectedSockets.push(socket); + }), + getWebSockets: vi.fn(() => connectedSockets), + storage, + }; + + return { + state, + storage, + connectedSockets, + }; +} + +describe("AgentRelaySession", () => { + it("accepts websocket connects with hibernation state and schedules heartbeat alarm", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + + const originalWebSocketPair = (globalThis as { WebSocketPair?: unknown }) + .WebSocketPair; + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + + (globalThis as unknown as { WebSocketPair: unknown }).WebSocketPair = + class { + 0 = pairClient as unknown as WebSocket; + 1 = pairServer as unknown as WebSocket; + }; + + try { + const request = new Request( + `https://relay.example.test${RELAY_CONNECT_PATH}`, + { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }, + ); + + let connectResponse: Response | undefined; + let connectError: unknown; + try { + 
connectResponse = await relaySession.fetch(request); + } catch (error) { + connectError = error; + } + + expect(harness.state.acceptWebSocket).toHaveBeenCalledTimes(1); + expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ + "did:claw:agent:connector", + ]); + expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); + + // Node's WHATWG Response may reject status 101 in tests; Workers runtime accepts it. + if (connectResponse !== undefined) { + expect(connectResponse.status).toBe(101); + } else { + expect(connectError).toBeInstanceOf(RangeError); + } + } finally { + if (originalWebSocketPair === undefined) { + delete (globalThis as { WebSocketPair?: unknown }).WebSocketPair; + } else { + (globalThis as { WebSocketPair?: unknown }).WebSocketPair = + originalWebSocketPair; + } + } + }); + + it("returns 426 for non-websocket connect requests", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + + const response = await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + }), + ); + + expect(response.status).toBe(426); + expect(harness.state.acceptWebSocket).not.toHaveBeenCalled(); + }); + + it("delivers relay frames to active websocket connectors", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + connectorSocket.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 1), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + const result = await relaySession.deliverToConnector({ + requestId: 
"req-1", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + expect(result).toEqual({ + delivered: true, + connectedSockets: 1, + }); + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + const relayPayload = parseFrame(connectorSocket.send.mock.calls[0]?.[0]); + expect(relayPayload.type).toBe("deliver"); + if (relayPayload.type === "deliver") { + expect(relayPayload.fromAgentDid).toBe(SENDER_AGENT_DID); + expect(relayPayload.toAgentDid).toBe(RECIPIENT_AGENT_DID); + } + expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); + }); + + it("returns not-delivered when no connector socket is active", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + + const result = await relaySession.deliverToConnector({ + requestId: "req-2", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + expect(result).toEqual({ + delivered: false, + connectedSockets: 0, + }); + expect(harness.storage.setAlarm).not.toHaveBeenCalled(); + }); + + it("sends heartbeat frames on alarm when connectors are active", async () => { + const harness = createStateHarness(); + const connectorSocket = createMockSocket(); + harness.connectedSockets.push(connectorSocket as unknown as WebSocket); + + const relaySession = new AgentRelaySession(harness.state); + await relaySession.alarm(); + + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + expect(String(connectorSocket.send.mock.calls[0]?.[0])).toContain( + '"type":"heartbeat"', + ); + expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); + }); + + it("handles heartbeat websocket frames by replying with heartbeat_ack and refreshing heartbeat", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + const connectorSocket = createMockSocket() as unknown as WebSocket; + 
const heartbeatId = generateUlid(Date.now() + 2); + + await relaySession.webSocketMessage( + connectorSocket, + JSON.stringify({ + v: 1, + type: "heartbeat", + id: heartbeatId, + ts: new Date().toISOString(), + }), + ); + + expect( + (connectorSocket as unknown as MockWebSocket).send, + ).toHaveBeenCalledTimes(1); + const ackFrame = parseFrame( + (connectorSocket as unknown as MockWebSocket).send.mock.calls[0]?.[0], + ); + expect(ackFrame.type).toBe("heartbeat_ack"); + if (ackFrame.type === "heartbeat_ack") { + expect(ackFrame.ackId).toBe(heartbeatId); + } + expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); + }); + + it("supports fetch RPC delivery endpoint for compatibility", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + connectorSocket.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 3), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + const response = await relaySession.fetch( + new Request("https://relay.example.test/rpc/deliver-to-connector", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + requestId: "req-3", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }), + }), + ); + + expect(response.status).toBe(202); + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + }); +}); diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts new file mode 100644 index 0000000..6897c17 --- /dev/null +++ b/apps/proxy/src/agent-relay-session.ts @@ -0,0 
+1,325 @@ +import { + CONNECTOR_FRAME_VERSION, + DEFAULT_RELAY_DELIVER_TIMEOUT_MS, + type DeliverFrame, + type HeartbeatAckFrame, + parseFrame, + serializeFrame, +} from "@clawdentity/connector"; +import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; + +const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; +const RELAY_RPC_DELIVER_PATH = "/rpc/deliver-to-connector"; +const RELAY_HEARTBEAT_INTERVAL_MS = 30_000; + +type DurableObjectStorageLike = { + deleteAlarm?: () => Promise | void; + setAlarm: (scheduledTime: number | Date) => Promise | void; +}; + +type DurableObjectStateLike = { + acceptWebSocket: (socket: WebSocket, tags?: string[]) => void; + getWebSockets: () => WebSocket[]; + storage: DurableObjectStorageLike; +}; + +export type RelayDeliveryInput = { + payload: unknown; + recipientAgentDid: string; + requestId: string; + senderAgentDid: string; +}; + +export type RelayDeliveryResult = { + connectedSockets: number; + delivered: boolean; +}; + +export type AgentRelaySessionStub = { + deliverToConnector?: ( + input: RelayDeliveryInput, + ) => Promise; + fetch: (request: Request) => Promise; +}; + +export type AgentRelaySessionNamespace = { + get: (id: DurableObjectId) => AgentRelaySessionStub; + idFromName: (name: string) => DurableObjectId; +}; + +type PendingDelivery = { + reject: (error: unknown) => void; + resolve: (accepted: boolean) => void; + timeoutHandle: ReturnType; +}; + +function toHeartbeatFrame(): string { + return serializeFrame({ + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat", + id: generateUlid(Date.now()), + ts: new Date().toISOString(), + }); +} + +function toHeartbeatAckFrame(ackId: string): string { + const ackFrame: HeartbeatAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat_ack", + id: generateUlid(Date.now()), + ts: new Date().toISOString(), + ackId, + }; + + return serializeFrame(ackFrame); +} + +function toDeliverFrame(input: RelayDeliveryInput): DeliverFrame { + return { + v: 
CONNECTOR_FRAME_VERSION, + type: "deliver", + id: generateUlid(Date.now()), + ts: new Date().toISOString(), + fromAgentDid: input.senderAgentDid, + toAgentDid: input.recipientAgentDid, + payload: input.payload, + }; +} + +function parseDeliveryInput(value: unknown): RelayDeliveryInput { + if (typeof value !== "object" || value === null) { + throw new TypeError("Relay delivery input must be an object"); + } + + const input = value as Partial; + if ( + typeof input.requestId !== "string" || + typeof input.senderAgentDid !== "string" || + typeof input.recipientAgentDid !== "string" + ) { + throw new TypeError("Relay delivery input is invalid"); + } + + return { + requestId: input.requestId, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + payload: input.payload, + }; +} + +export async function deliverToRelaySession( + relaySession: AgentRelaySessionStub, + input: RelayDeliveryInput, +): Promise { + const response = await relaySession.fetch( + new Request(`https://agent-relay-session${RELAY_RPC_DELIVER_PATH}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(input), + }), + ); + + if (!response.ok) { + throw new Error("Relay session delivery RPC failed"); + } + + return (await response.json()) as RelayDeliveryResult; +} + +export class AgentRelaySession { + private readonly pendingDeliveries = new Map(); + private readonly state: DurableObjectStateLike; + + constructor(state: DurableObjectStateLike) { + this.state = state; + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + if (url.pathname === RELAY_CONNECT_PATH) { + return this.handleConnect(request); + } + + if (request.method === "POST" && url.pathname === RELAY_RPC_DELIVER_PATH) { + let input: RelayDeliveryInput; + try { + input = parseDeliveryInput(await request.json()); + } catch { + return new Response("Invalid relay delivery input", { status: 400 }); + } + + try { + const result = 
await this.deliverToConnector(input); + return Response.json(result, { status: 202 }); + } catch { + return new Response("Relay delivery failed", { status: 502 }); + } + } + + return new Response("Not found", { status: 404 }); + } + + async alarm(): Promise { + const sockets = this.state.getWebSockets(); + if (sockets.length === 0) { + return; + } + + const heartbeatFrame = toHeartbeatFrame(); + for (const socket of sockets) { + try { + socket.send(heartbeatFrame); + } catch { + try { + socket.close(1011, "heartbeat_send_failed"); + } catch { + // Ignore close errors for already-closed sockets. + } + } + } + + await this.scheduleHeartbeat(); + } + + async deliverToConnector( + input: RelayDeliveryInput, + ): Promise { + const sockets = this.state.getWebSockets(); + if (sockets.length === 0) { + return { + delivered: false, + connectedSockets: 0, + }; + } + + const socket = sockets[0]; + const frame = toDeliverFrame(input); + const framePayload = serializeFrame(frame); + + const accepted = await new Promise((resolve, reject) => { + const timeoutHandle = setTimeout(() => { + this.pendingDeliveries.delete(frame.id); + reject(new Error("Relay connector acknowledgement timed out")); + }, DEFAULT_RELAY_DELIVER_TIMEOUT_MS); + + this.pendingDeliveries.set(frame.id, { + resolve, + reject, + timeoutHandle, + }); + + try { + socket.send(framePayload); + } catch (error) { + clearTimeout(timeoutHandle); + this.pendingDeliveries.delete(frame.id); + reject(error); + } + }); + + return { + delivered: accepted, + connectedSockets: sockets.length, + }; + } + + async webSocketMessage( + ws: WebSocket, + message: string | ArrayBuffer, + ): Promise { + const frameResult = (() => { + try { + return parseFrame(message); + } catch { + return null; + } + })(); + + if (frameResult === null) { + await this.scheduleHeartbeat(); + return; + } + + const frame = frameResult; + + if (frame.type === "heartbeat") { + ws.send(toHeartbeatAckFrame(frame.id)); + await this.scheduleHeartbeat(); + 
return; + } + + if (frame.type === "deliver_ack") { + const pending = this.pendingDeliveries.get(frame.ackId); + if (pending) { + clearTimeout(pending.timeoutHandle); + this.pendingDeliveries.delete(frame.ackId); + pending.resolve(frame.accepted); + } + await this.scheduleHeartbeat(); + return; + } + + if (frame.type === "heartbeat_ack") { + await this.scheduleHeartbeat(); + return; + } + + await this.scheduleHeartbeat(); + } + + async webSocketClose(): Promise { + if (this.state.getWebSockets().length === 0) { + await this.state.storage.deleteAlarm?.(); + this.rejectPendingDeliveries(new Error("Connector socket closed")); + return; + } + + await this.scheduleHeartbeat(); + } + + async webSocketError(): Promise { + this.rejectPendingDeliveries(new Error("Connector socket error")); + await this.webSocketClose(); + } + + private async handleConnect(request: Request): Promise { + const upgradeHeader = request.headers.get("upgrade"); + if (upgradeHeader?.toLowerCase() !== "websocket") { + return new Response("Expected websocket upgrade", { status: 426 }); + } + + const connectorAgentDid = + request.headers.get(CONNECTOR_AGENT_DID_HEADER)?.trim() ?? 
""; + if (connectorAgentDid.length === 0) { + return new Response("Missing connector agent DID", { status: 400 }); + } + + const pair = new WebSocketPair(); + const client = pair[0]; + const server = pair[1]; + + this.state.acceptWebSocket(server, [connectorAgentDid]); + await this.scheduleHeartbeat(); + + return new Response(null, { + status: 101, + webSocket: client, + }); + } + + private rejectPendingDeliveries(error: Error): void { + for (const [deliveryId, pending] of this.pendingDeliveries) { + clearTimeout(pending.timeoutHandle); + pending.reject(error); + this.pendingDeliveries.delete(deliveryId); + } + } + + private async scheduleHeartbeat(): Promise { + await this.state.storage.setAlarm(Date.now() + RELAY_HEARTBEAT_INTERVAL_MS); + } +} diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index 7476a8a..b7eb271 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -12,7 +12,10 @@ import { signHttpRequest, } from "@clawdentity/sdk"; import { describe, expect, it, vi } from "vitest"; +import { RELAY_RECIPIENT_AGENT_DID_HEADER } from "./agent-hook-route.js"; +import type { AgentRelaySessionNamespace } from "./agent-relay-session.js"; import { parseProxyConfig } from "./config.js"; +import { RELAY_CONNECT_PATH } from "./relay-connect-route.js"; import { createProxyApp } from "./server.js"; const REGISTRY_KID = "registry-active-kid"; @@ -37,6 +40,7 @@ type AuthHarness = { claims: Awaited>; createSignedHeaders: (input?: { body?: string; + method?: "GET" | "POST"; nonce?: string; pathWithQuery?: string; timestamp?: string; @@ -209,10 +213,28 @@ async function createAuthHarness( const allowListAgents = options.allowCurrentAgent === false ? [] : [claims.sub]; const allowListOwners = options.allowCurrentOwner ? 
[claims.ownerDid] : []; + const relaySession = { + fetch: vi.fn(async (request: Request) => { + if (request.method === "POST") { + return Response.json( + { + delivered: true, + connectedSockets: 1, + }, + { status: 202 }, + ); + } + + return new Response(null, { status: 204 }); + }), + }; + const relayNamespace = { + idFromName: vi.fn((_name: string) => ({}) as DurableObjectId), + get: vi.fn((_id: DurableObjectId) => relaySession), + } satisfies AgentRelaySessionNamespace; const app = createProxyApp({ config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", ...(allowListAgents.length > 0 ? { ALLOWLIST_AGENTS: allowListAgents.join(",") } : {}), @@ -228,20 +250,11 @@ async function createAuthHarness( clock: () => NOW_MS, }, hooks: { - fetchImpl: vi.fn( - async () => - new Response( - JSON.stringify({ - ok: true, - }), - { - status: 202, - headers: { - "content-type": "application/json", - }, - }, - ), - ) as typeof fetch, + resolveSessionNamespace: () => relayNamespace, + now: () => new Date(NOW_MS), + }, + relay: { + resolveSessionNamespace: () => relayNamespace, }, registerRoutes: (nextApp) => { nextApp.post("/protected", (c) => { @@ -258,14 +271,15 @@ async function createAuthHarness( app, claims, createSignedHeaders: async (input = {}) => { - const body = input.body ?? BODY_JSON; + const method = input.method ?? "POST"; + const body = input.body ?? (method === "GET" ? "" : BODY_JSON); const nonce = input.nonce ?? "nonce-1"; const pathWithQuery = input.pathWithQuery ?? "/protected"; const timestamp = input.timestamp ?? String(input.timestampSeconds ?? NOW_SECONDS); const signed = await signHttpRequest({ - method: "POST", + method, pathWithQuery, timestamp, nonce, @@ -275,7 +289,7 @@ async function createAuthHarness( return { authorization: `Claw ${ait}`, - "content-type": "application/json", + ...(method === "POST" ? 
{ "content-type": "application/json" } : {}), ...signed.headers, }; }, @@ -639,6 +653,7 @@ describe("proxy auth middleware", () => { headers: { ...headers, "x-claw-agent-access": "clw_agt_validtoken", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: harness.claims.sub, }, body: BODY_JSON, }); @@ -646,6 +661,49 @@ describe("proxy auth middleware", () => { expect(response.status).toBe(202); }); + it("requires x-claw-agent-access for relay websocket connect", async () => { + const harness = await createAuthHarness({ + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + method: "GET", + pathWithQuery: RELAY_CONNECT_PATH, + nonce: "nonce-relay-connect", + }); + const response = await harness.app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + ...headers, + upgrade: "websocket", + }, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AGENT_ACCESS_REQUIRED"); + }); + + it("accepts relay websocket connect when x-claw-agent-access validates", async () => { + const harness = await createAuthHarness({ + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + method: "GET", + pathWithQuery: RELAY_CONNECT_PATH, + nonce: "nonce-relay-connect-agent-access-valid", + }); + const response = await harness.app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + ...headers, + upgrade: "websocket", + "x-claw-agent-access": "clw_agt_validtoken", + }, + }); + + expect(response.status).toBe(204); + }); + it("rejects non-health route when Authorization scheme is not Claw", async () => { const harness = await createAuthHarness(); const response = await harness.app.request("/protected", { diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index 8887416..c70f914 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -1,6 +1,7 @@ import { 
AGENT_AUTH_VALIDATE_PATH, decodeBase64url, + RELAY_CONNECT_PATH, } from "@clawdentity/protocol"; import { AitJwtError, @@ -605,7 +606,7 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { }); } - if (c.req.path === "/hooks/agent") { + if (c.req.path === "/hooks/agent" || c.req.path === RELAY_CONNECT_PATH) { const accessToken = parseAgentAccessHeader( c.req.header("x-claw-agent-access"), ); diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 8cb9aae..0cccd0a 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -20,15 +20,13 @@ import { const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; describe("proxy config", () => { - it("parses required settings and applies defaults", () => { - const config = parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "super-secret-hook-token", - }); + it("parses defaults without requiring OpenClaw token", () => { + const config = parseProxyConfig({}); expect(config).toEqual({ listenPort: DEFAULT_PROXY_LISTEN_PORT, openclawBaseUrl: DEFAULT_OPENCLAW_BASE_URL, - openclawHookToken: "super-secret-hook-token", + openclawHookToken: undefined, registryUrl: DEFAULT_REGISTRY_URL, environment: DEFAULT_PROXY_ENVIRONMENT, allowList: { @@ -83,14 +81,13 @@ describe("proxy config", () => { }); }); - it("throws on missing hook token", () => { - expect(() => parseProxyConfig({})).toThrow(ProxyConfigError); + it("accepts missing hook token for relay-only startup", () => { + expect(() => parseProxyConfig({})).not.toThrow(); }); it("throws on malformed allow list JSON", () => { expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", ALLOW_LIST: "{not-json", }), ).toThrow(ProxyConfigError); @@ -99,7 +96,6 @@ describe("proxy config", () => { it("throws when deprecated ALLOW_ALL_VERIFIED is set", () => { expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", ALLOW_ALL_VERIFIED: "true", }), ).toThrow(ProxyConfigError); @@ -108,7 +104,6 @@ describe("proxy config", () 
=> { it("throws when ALLOW_LIST includes unknown keys", () => { expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", ALLOW_LIST: JSON.stringify({ owners: [], agents: [], @@ -121,7 +116,6 @@ describe("proxy config", () => { it("throws on unsupported environment value", () => { expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", ENVIRONMENT: "staging", }), ).toThrow(ProxyConfigError); @@ -130,13 +124,11 @@ describe("proxy config", () => { it("throws on invalid agent DID rate-limit values", () => { expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: "0", }), ).toThrow(ProxyConfigError); expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", AGENT_RATE_LIMIT_WINDOW_MS: "-1", }), ).toThrow(ProxyConfigError); @@ -145,7 +137,6 @@ describe("proxy config", () => { it("throws on invalid injectIdentityIntoMessage value", () => { expect(() => parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", INJECT_IDENTITY_INTO_MESSAGE: "maybe", }), ).toThrow(ProxyConfigError); @@ -206,6 +197,24 @@ describe("proxy config loading", () => { } }); + it("allows loading config when no OpenClaw token fallback is present", () => { + const sandbox = createSandbox(); + try { + const config = loadProxyConfig( + {}, + { + cwd: sandbox.cwd, + homeDir: sandbox.root, + }, + ); + + expect(config.openclawHookToken).toBeUndefined(); + expect(config.openclawBaseUrl).toBe(DEFAULT_OPENCLAW_BASE_URL); + } finally { + sandbox.cleanup(); + } + }); + it("loads INJECT_IDENTITY_INTO_MESSAGE from .env", () => { const sandbox = createSandbox(); try { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 817a67b..4afae66 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -81,7 +81,7 @@ const proxyRuntimeEnvSchema = z.object({ .max(65535) .default(DEFAULT_PROXY_LISTEN_PORT), OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), - OPENCLAW_HOOK_TOKEN: z.string().trim().min(1), + 
OPENCLAW_HOOK_TOKEN: z.string().trim().min(1).optional(), REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), ENVIRONMENT: z .enum(proxyEnvironmentValues) @@ -127,7 +127,7 @@ const proxyAllowListSchema = z export const proxyConfigSchema = z.object({ listenPort: z.number().int().min(1).max(65535), openclawBaseUrl: z.string().url(), - openclawHookToken: z.string().min(1), + openclawHookToken: z.string().min(1).optional(), registryUrl: z.string().url(), environment: z.enum(proxyEnvironmentValues), allowList: proxyAllowListSchema, diff --git a/apps/proxy/src/index.test.ts b/apps/proxy/src/index.test.ts index 18b0095..92622ae 100644 --- a/apps/proxy/src/index.test.ts +++ b/apps/proxy/src/index.test.ts @@ -22,4 +22,11 @@ describe("proxy", () => { initializeProxyRuntime({ OPENCLAW_BASE_URL: "bad-url" }), ).toThrow(ProxyConfigError); }); + + it("supports relay runtime startup without OpenClaw vars", () => { + const runtime = initializeProxyRuntime({}); + + expect(runtime.version).toBe(PROXY_VERSION); + expect(runtime.config.openclawHookToken).toBeUndefined(); + }); }); diff --git a/apps/proxy/src/relay-connect-route.test.ts b/apps/proxy/src/relay-connect-route.test.ts new file mode 100644 index 0000000..60869ee --- /dev/null +++ b/apps/proxy/src/relay-connect-route.test.ts @@ -0,0 +1,126 @@ +import { describe, expect, it, vi } from "vitest"; + +vi.mock("./auth-middleware.js", async () => { + const { createMiddleware } = await import("hono/factory"); + + return { + createProxyAuthMiddleware: () => + createMiddleware(async (c, next) => { + c.set("auth", { + agentDid: "did:claw:agent:connector", + ownerDid: "did:claw:owner:connector", + issuer: "https://registry.example.com", + aitJti: "ait-jti-connector", + cnfPublicKey: "test-public-key", + }); + await next(); + }), + }; +}); + +import type { + AgentRelaySessionNamespace, + AgentRelaySessionStub, +} from "./agent-relay-session.js"; +import { parseProxyConfig } from "./config.js"; +import { 
RELAY_CONNECT_PATH } from "./relay-connect-route.js"; +import { createProxyApp } from "./server.js"; + +function createRelayNamespaceHarness() { + const fetchRelaySession = vi.fn( + async (_request: Request) => new Response(null, { status: 204 }), + ); + const relaySession: AgentRelaySessionStub = { + fetch: fetchRelaySession, + }; + + const durableObjectId = { + toString: () => "connector-session-id", + } as unknown as DurableObjectId; + + const idFromName = vi.fn((_name: string) => durableObjectId); + const get = vi.fn((_id: DurableObjectId) => relaySession); + + return { + idFromName, + get, + fetchRelaySession, + namespace: { + idFromName, + get, + } satisfies AgentRelaySessionNamespace, + }; +} + +function createRelayConnectApp(input: { + relayNamespace?: AgentRelaySessionNamespace; +}) { + return createProxyApp({ + config: parseProxyConfig({}), + relay: { + resolveSessionNamespace: () => input.relayNamespace, + }, + }); +} + +describe(`GET ${RELAY_CONNECT_PATH}`, () => { + it("forwards websocket connect requests to DO session keyed by authenticated connector DID", async () => { + const relayHarness = createRelayNamespaceHarness(); + const app = createRelayConnectApp({ + relayNamespace: relayHarness.namespace, + }); + + const response = await app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + upgrade: "websocket", + }, + }); + + expect(response.status).toBe(204); + expect(relayHarness.idFromName).toHaveBeenCalledWith( + "did:claw:agent:connector", + ); + expect(relayHarness.get).toHaveBeenCalledTimes(1); + expect(relayHarness.fetchRelaySession).toHaveBeenCalledTimes(1); + + const [forwardedRequest] = relayHarness.fetchRelaySession.mock.calls[0] as [ + Request, + ]; + expect(forwardedRequest.headers.get("x-claw-connector-agent-did")).toBe( + "did:claw:agent:connector", + ); + }); + + it("requires websocket upgrade header", async () => { + const relayHarness = createRelayNamespaceHarness(); + const app = createRelayConnectApp({ + relayNamespace: 
relayHarness.namespace, + }); + + const response = await app.request(RELAY_CONNECT_PATH, { + method: "GET", + }); + + expect(response.status).toBe(426); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_UPGRADE_REQUIRED"); + }); + + it("returns 503 when relay session namespace is unavailable", async () => { + const app = createRelayConnectApp({ + relayNamespace: undefined, + }); + + const response = await app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + upgrade: "websocket", + }, + }); + + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_UNAVAILABLE"); + }); +}); diff --git a/apps/proxy/src/relay-connect-route.ts b/apps/proxy/src/relay-connect-route.ts new file mode 100644 index 0000000..5311d4d --- /dev/null +++ b/apps/proxy/src/relay-connect-route.ts @@ -0,0 +1,91 @@ +import { RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { AppError, type Logger } from "@clawdentity/sdk"; +import type { Context } from "hono"; +import type { AgentRelaySessionNamespace } from "./agent-relay-session.js"; +import type { ProxyRequestVariables } from "./auth-middleware.js"; + +type ProxyContext = Context<{ + Variables: ProxyRequestVariables; + Bindings: { + AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; + }; +}>; + +export { RELAY_CONNECT_PATH } from "@clawdentity/protocol"; + +export type RelayConnectRuntimeOptions = { + resolveSessionNamespace?: ( + c: ProxyContext, + ) => AgentRelaySessionNamespace | undefined; +}; + +type CreateRelayConnectHandlerOptions = RelayConnectRuntimeOptions & { + logger: Logger; +}; + +const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; + +function resolveDefaultNamespace( + c: ProxyContext, +): AgentRelaySessionNamespace | undefined { + return c.env.AGENT_RELAY_SESSION; +} + +export function createRelayConnectHandler( + options: 
CreateRelayConnectHandlerOptions, +): (c: ProxyContext) => Promise { + const resolveSessionNamespace = + options.resolveSessionNamespace ?? resolveDefaultNamespace; + + return async (c) => { + if (c.req.header("upgrade")?.toLowerCase() !== "websocket") { + throw new AppError({ + code: "PROXY_RELAY_UPGRADE_REQUIRED", + message: "WebSocket upgrade is required", + status: 426, + expose: true, + }); + } + + const auth = c.get("auth"); + if (auth === undefined) { + throw new AppError({ + code: "PROXY_RELAY_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const sessionNamespace = resolveSessionNamespace(c); + if (sessionNamespace === undefined) { + throw new AppError({ + code: "PROXY_RELAY_UNAVAILABLE", + message: "Relay session namespace is unavailable", + status: 503, + }); + } + + const sessionId = sessionNamespace.idFromName(auth.agentDid); + const relaySession = sessionNamespace.get(sessionId); + + const relayHeaders = new Headers(c.req.raw.headers); + relayHeaders.set(CONNECTOR_AGENT_DID_HEADER, auth.agentDid); + + const forwardedRequest = new Request( + `https://agent-relay-session${RELAY_CONNECT_PATH}`, + { + method: "GET", + headers: relayHeaders, + }, + ); + + const response = await relaySession.fetch(forwardedRequest); + options.logger.info("proxy.relay.connect", { + requestId: c.get("requestId"), + agentDid: auth.agentDid, + status: response.status, + }); + + return response; + }; +} diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts index 3dfb0a1..f0a1c16 100644 --- a/apps/proxy/src/server.test.ts +++ b/apps/proxy/src/server.test.ts @@ -72,10 +72,12 @@ describe("proxy server", () => { } }); - it("fails startup when required config is missing", () => { + it("fails startup when config is invalid", () => { expect(() => startProxyServer({ - env: {}, + env: { + OPENCLAW_BASE_URL: "bad-url", + }, }), ).toThrow(ProxyConfigError); }); diff --git a/apps/proxy/src/server.ts 
b/apps/proxy/src/server.ts index 3d413ef..9682665 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -1,3 +1,4 @@ +import { RELAY_CONNECT_PATH } from "@clawdentity/protocol"; import { type CrlCache, createHonoErrorHandler, @@ -13,12 +14,17 @@ import { createAgentHookHandler, } from "./agent-hook-route.js"; import { createAgentRateLimitMiddleware } from "./agent-rate-limit-middleware.js"; +import type { AgentRelaySessionNamespace } from "./agent-relay-session.js"; import { createProxyAuthMiddleware, type ProxyRequestVariables, } from "./auth-middleware.js"; import type { ProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; +import { + createRelayConnectHandler, + type RelayConnectRuntimeOptions, +} from "./relay-connect-route.js"; type ProxyAuthRuntimeOptions = { fetchImpl?: typeof fetch; @@ -38,9 +44,13 @@ type CreateProxyAppOptions = { auth?: ProxyAuthRuntimeOptions; rateLimit?: ProxyRateLimitRuntimeOptions; hooks?: AgentHookRuntimeOptions; + relay?: RelayConnectRuntimeOptions; }; export type ProxyApp = Hono<{ + Bindings: { + AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; + }; Variables: ProxyRequestVariables; }>; @@ -51,6 +61,9 @@ function resolveLogger(logger?: Logger): Logger { export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { const logger = resolveLogger(options.logger); const app = new Hono<{ + Bindings: { + AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; + }; Variables: ProxyRequestVariables; }>(); @@ -85,12 +98,17 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { "/hooks/agent", createAgentHookHandler({ logger, - openclawBaseUrl: options.config.openclawBaseUrl, - openclawHookToken: options.config.openclawHookToken, injectIdentityIntoMessage: options.config.injectIdentityIntoMessage, ...options.hooks, }), ); + app.get( + RELAY_CONNECT_PATH, + createRelayConnectHandler({ + logger, + ...options.relay, + }), + ); options.registerRoutes?.(app); return app; 
diff --git a/apps/proxy/src/worker.test.ts b/apps/proxy/src/worker.test.ts index 52ca289..2b7a1cd 100644 --- a/apps/proxy/src/worker.test.ts +++ b/apps/proxy/src/worker.test.ts @@ -16,7 +16,6 @@ describe("proxy worker", () => { new Request("https://proxy.example.test/health"), { ENVIRONMENT: "local", - OPENCLAW_HOOK_TOKEN: "proxy-hook-token", } satisfies ProxyWorkerBindings, createExecutionContext(), ); @@ -34,64 +33,55 @@ describe("proxy worker", () => { }); }); - it("returns config validation error when required bindings are missing", async () => { + it("allows startup with empty bindings for relay mode", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), {} satisfies ProxyWorkerBindings, createExecutionContext(), ); - expect(response.status).toBe(500); + expect(response.status).toBe(200); const payload = (await response.json()) as { - error: { - code: string; - }; + status: string; + environment: string; }; - expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(payload.status).toBe("ok"); + expect(payload.environment).toBe("development"); }); - it("returns config validation error when deployed env uses loopback upstream", async () => { + it("accepts deployed env without OpenClaw vars in relay mode", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), { ENVIRONMENT: "development", - OPENCLAW_HOOK_TOKEN: "proxy-hook-token", } satisfies ProxyWorkerBindings, createExecutionContext(), ); - expect(response.status).toBe(500); + expect(response.status).toBe(200); const payload = (await response.json()) as { - error: { - code: string; - details: { - fieldErrors?: Record; - }; - }; + status: string; + environment: string; }; - expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); - expect(payload.error.details.fieldErrors?.OPENCLAW_BASE_URL?.[0]).toContain( - "externally reachable URL", - ); + expect(payload.status).toBe("ok"); + 
expect(payload.environment).toBe("development"); }); - it("accepts non-loopback upstream in deployed env", async () => { + it("returns config validation error for malformed OPENCLAW_BASE_URL", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), { - ENVIRONMENT: "development", - OPENCLAW_HOOK_TOKEN: "proxy-hook-token", - OPENCLAW_BASE_URL: "https://openclaw-dev.internal.example", + OPENCLAW_BASE_URL: "bad-url", } satisfies ProxyWorkerBindings, createExecutionContext(), ); - expect(response.status).toBe(200); + expect(response.status).toBe(500); const payload = (await response.json()) as { - status: string; - environment: string; + error: { + code: string; + }; }; - expect(payload.status).toBe("ok"); - expect(payload.environment).toBe("development"); + expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); }); }); diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 913b1e9..45d6e03 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -1,6 +1,9 @@ import { createLogger } from "@clawdentity/sdk"; import { - DEFAULT_OPENCLAW_BASE_URL, + AgentRelaySession, + type AgentRelaySessionNamespace, +} from "./agent-relay-session.js"; +import { type ProxyConfig, ProxyConfigError, parseProxyConfig, @@ -13,6 +16,7 @@ export type ProxyWorkerBindings = { OPENCLAW_BASE_URL?: string; OPENCLAW_HOOK_TOKEN?: string; OPENCLAW_HOOKS_TOKEN?: string; + AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; REGISTRY_URL?: string; CLAWDENTITY_REGISTRY_URL?: string; ENVIRONMENT?: string; @@ -67,7 +71,6 @@ function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { } const config = parseProxyConfig(env); - assertWorkerOpenclawBaseUrl(config); const app = createProxyApp({ config, logger }); cachedRuntime = { @@ -78,61 +81,6 @@ function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { return cachedRuntime; } -function isLoopbackHostname(hostname: string): boolean { - const normalized = 
hostname.toLowerCase(); - if ( - normalized === "localhost" || - normalized === "::1" || - normalized === "0.0.0.0" - ) { - return true; - } - - const ipv4Match = normalized.match(/^(\d{1,3})(?:\.(\d{1,3})){3}$/); - if (!ipv4Match) { - return false; - } - - const segments = normalized.split(".").map(Number); - if (segments.some((segment) => Number.isNaN(segment) || segment > 255)) { - return false; - } - - return segments[0] === 127; -} - -function assertWorkerOpenclawBaseUrl(config: ProxyConfig): void { - if (config.environment === "local" || config.environment === "test") { - return; - } - - let parsed: URL; - try { - parsed = new URL(config.openclawBaseUrl); - } catch { - throw new ProxyConfigError("Proxy configuration is invalid", { - fieldErrors: { - OPENCLAW_BASE_URL: ["OPENCLAW_BASE_URL must be a valid absolute URL"], - }, - formErrors: [], - }); - } - - if ( - config.openclawBaseUrl === DEFAULT_OPENCLAW_BASE_URL || - isLoopbackHostname(parsed.hostname) - ) { - throw new ProxyConfigError("Proxy configuration is invalid", { - fieldErrors: { - OPENCLAW_BASE_URL: [ - "OPENCLAW_BASE_URL must be an externally reachable URL for deployed Worker environments", - ], - }, - formErrors: [], - }); - } -} - function toConfigErrorResponse(error: ProxyConfigError): Response { logger.error(error.message, { code: error.code, @@ -181,4 +129,5 @@ const worker = { }, }; +export { AgentRelaySession }; export default worker; diff --git a/apps/proxy/wrangler.jsonc b/apps/proxy/wrangler.jsonc index 3e0b85d..50691d7 100644 --- a/apps/proxy/wrangler.jsonc +++ b/apps/proxy/wrangler.jsonc @@ -4,9 +4,37 @@ "main": "src/worker.ts", "compatibility_date": "2025-09-01", "compatibility_flags": ["nodejs_compat"], + "durable_objects": { + "bindings": [ + { + "name": "AGENT_RELAY_SESSION", + "class_name": "AgentRelaySession" + } + ] + }, + "migrations": [ + { + "tag": "v1-agent-relay-session", + "new_sqlite_classes": ["AgentRelaySession"] + } + ], "env": { "local": { "name": 
"clawdentity-proxy-local", + "durable_objects": { + "bindings": [ + { + "name": "AGENT_RELAY_SESSION", + "class_name": "AgentRelaySession" + } + ] + }, + "migrations": [ + { + "tag": "v1-agent-relay-session", + "new_sqlite_classes": ["AgentRelaySession"] + } + ], "vars": { "ENVIRONMENT": "local", "REGISTRY_URL": "https://dev.api.clawdentity.com", @@ -16,6 +44,20 @@ }, "development": { "name": "clawdentity-proxy-development", + "durable_objects": { + "bindings": [ + { + "name": "AGENT_RELAY_SESSION", + "class_name": "AgentRelaySession" + } + ] + }, + "migrations": [ + { + "tag": "v1-agent-relay-session", + "new_sqlite_classes": ["AgentRelaySession"] + } + ], "vars": { "ENVIRONMENT": "development", "REGISTRY_URL": "https://dev.api.clawdentity.com", @@ -24,6 +66,20 @@ }, "production": { "name": "clawdentity-proxy", + "durable_objects": { + "bindings": [ + { + "name": "AGENT_RELAY_SESSION", + "class_name": "AgentRelaySession" + } + ] + }, + "migrations": [ + { + "tag": "v1-agent-relay-session", + "new_sqlite_classes": ["AgentRelaySession"] + } + ], "vars": { "ENVIRONMENT": "production", "REGISTRY_URL": "https://api.clawdentity.com", diff --git a/packages/connector/AGENTS.md b/packages/connector/AGENTS.md new file mode 100644 index 0000000..692bb65 --- /dev/null +++ b/packages/connector/AGENTS.md @@ -0,0 +1,18 @@ +# AGENTS.md (packages/connector) + +## Purpose +- Provide a runtime-portable connector client for WebSocket relay integration and local OpenClaw delivery. + +## Design Rules +- Keep frame contracts in `src/frames.ts` as the single schema authority. +- Validate all inbound and outbound frames through zod schemas; do not bypass parser helpers. +- Reuse shared protocol validators (`parseDid`, `parseUlid`) instead of duplicating DID/ULID logic. +- Keep reconnect and heartbeat behavior deterministic and testable via dependency injection (`webSocketFactory`, `fetchImpl`, clock/random). 
+- Keep local OpenClaw delivery concerns in `src/client.ts`; do not spread HTTP delivery logic across modules. +- Keep local OpenClaw restart handling bounded: retry only transient delivery failures with capped backoff and an overall retry budget so connector ack behavior remains compatible with relay DO delivery timeouts. +- Refresh agent access credentials at runtime startup when cached access tokens are missing or near expiry before attempting relay WebSocket connection, while persisting refreshed auth atomically to `registry-auth.json`. + +## Testing Rules +- `src/frames.test.ts` must cover roundtrip serialization and explicit invalid-frame failures. +- Client tests must mock WebSocket/fetch and verify heartbeat ack, delivery forwarding, reconnect, and outbound queue flush behavior. +- Keep tests fully offline and deterministic (fake timers where timing matters). diff --git a/packages/connector/package.json b/packages/connector/package.json new file mode 100644 index 0000000..5870edd --- /dev/null +++ b/packages/connector/package.json @@ -0,0 +1,31 @@ +{ + "name": "@clawdentity/connector", + "version": "0.0.0", + "private": true, + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "import": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "scripts": { + "build": "tsup", + "format": "biome format .", + "lint": "biome lint .", + "test": "vitest run", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@clawdentity/protocol": "workspace:*", + "@clawdentity/sdk": "workspace:*", + "ws": "^8.18.3", + "zod": "^4.1.12" + }, + "devDependencies": { + "@types/node": "^22.17.2", + "@types/ws": "^8.18.1" + } +} diff --git a/packages/connector/src/client.test.ts b/packages/connector/src/client.test.ts new file mode 100644 index 0000000..0c59a8f --- /dev/null +++ b/packages/connector/src/client.test.ts @@ -0,0 +1,369 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { 
afterEach, describe, expect, it, vi } from "vitest"; +import { ConnectorClient } from "./client.js"; +import { parseFrame, serializeFrame } from "./frames.js"; + +class MockWebSocket { + readonly url: string; + readyState = 0; + readonly sent: string[] = []; + + private readonly listeners: Record void>> = { + open: new Set(), + message: new Set(), + close: new Set(), + error: new Set(), + }; + + constructor(url: string) { + this.url = url; + } + + addEventListener(type: string, listener: (event: unknown) => void): void { + this.listeners[type]?.add(listener); + } + + send(data: string): void { + if (this.readyState !== 1) { + throw new Error("socket is not open"); + } + + this.sent.push(data); + } + + close(code?: number, reason?: string): void { + if (this.readyState === 3) { + return; + } + + this.readyState = 3; + this.emit("close", { + code, + reason, + wasClean: true, + }); + } + + open(): void { + this.readyState = 1; + this.emit("open", {}); + } + + message(data: unknown): void { + this.emit("message", { data }); + } + + failClose(code = 1006, reason = ""): void { + this.readyState = 3; + this.emit("close", { + code, + reason, + wasClean: false, + }); + } + + private emit(type: string, event: unknown): void { + for (const listener of this.listeners[type] ?? 
[]) { + listener(event); + } + } +} + +function createAgentDid(seedMs: number): string { + return makeAgentDid(generateUlid(seedMs)); +} + +afterEach(() => { + vi.useRealTimers(); + vi.restoreAllMocks(); +}); + +describe("ConnectorClient", () => { + it("acks inbound heartbeat frames", async () => { + const sockets: MockWebSocket[] = []; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + sockets[0].open(); + + const heartbeatId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "heartbeat", + id: heartbeatId, + ts: "2026-01-01T00:00:00.000Z", + }), + ); + + await vi.waitFor(() => { + expect(sockets[0].sent).toHaveLength(1); + }); + + const outbound = parseFrame(sockets[0].sent[0]); + expect(outbound.type).toBe("heartbeat_ack"); + if (outbound.type !== "heartbeat_ack") { + throw new Error("expected heartbeat_ack frame"); + } + expect(outbound.ackId).toBe(heartbeatId); + + client.disconnect(); + }); + + it("forwards deliver frames to local openclaw and acks success", async () => { + const sockets: MockWebSocket[] = []; + const fetchMock = vi + .fn() + .mockResolvedValue(new Response("ok", { status: 200 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + openclawHookToken: "hook-secret", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: 
"2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const [url, requestInit] = fetchMock.mock.calls[0]; + expect(url).toBe("http://127.0.0.1:18789/hooks/agent"); + expect(requestInit?.method).toBe("POST"); + expect(requestInit?.headers).toMatchObject({ + "content-type": "application/json", + "x-openclaw-token": "hook-secret", + "x-request-id": deliverId, + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + + it("acks delivery failure when local openclaw rejects", async () => { + const sockets: MockWebSocket[] = []; + const fetchMock = vi + .fn() + .mockResolvedValue(new Response("bad", { status: 400 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + 
expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(false); + expect(ack.reason).toContain("status 400"); + + client.disconnect(); + }); + + it("retries transient local openclaw failures and eventually acks success", async () => { + const sockets: MockWebSocket[] = []; + const fetchMock = vi + .fn() + .mockRejectedValueOnce(new Error("connect ECONNREFUSED 127.0.0.1:18789")) + .mockRejectedValueOnce(new Error("connect ECONNREFUSED 127.0.0.1:18789")) + .mockResolvedValue(new Response("ok", { status: 200 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + openclawDeliverTimeoutMs: 100, + openclawDeliverRetryInitialDelayMs: 1, + openclawDeliverRetryMaxDelayMs: 2, + openclawDeliverRetryBudgetMs: 500, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(fetchMock).toHaveBeenCalledTimes(3); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + + it("reconnects after websocket closes", () => { + vi.useFakeTimers(); + + 
const sockets: MockWebSocket[] = []; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 100, + reconnectMaxDelayMs: 100, + reconnectJitterRatio: 0, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + sockets[0].open(); + sockets[0].failClose(1006, "network down"); + + vi.advanceTimersByTime(99); + expect(sockets).toHaveLength(1); + + vi.advanceTimersByTime(1); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("queues outbound enqueue frames until connected", async () => { + const sockets: MockWebSocket[] = []; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + expect(client.getQueuedOutboundCount()).toBe(0); + + const enqueueFrame = client.enqueueOutbound({ + toAgentDid: createAgentDid(1700000000000), + payload: { message: "queued message" }, + }); + + expect(client.getQueuedOutboundCount()).toBe(1); + expect(sockets[0].sent).toHaveLength(0); + + sockets[0].open(); + + await vi.waitFor(() => { + expect(client.getQueuedOutboundCount()).toBe(0); + expect(sockets[0].sent).toHaveLength(1); + }); + + const outbound = parseFrame(sockets[0].sent[0]); + expect(outbound.type).toBe("enqueue"); + expect(outbound.id).toBe(enqueueFrame.id); + + client.disconnect(); + }); +}); diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts new file mode 100644 index 0000000..70c6338 --- /dev/null +++ b/packages/connector/src/client.ts @@ -0,0 +1,675 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { 
createLogger, type Logger } from "@clawdentity/sdk"; +import { + CONNECTOR_FRAME_VERSION, + DEFAULT_HEARTBEAT_INTERVAL_MS, + DEFAULT_OPENCLAW_DELIVER_MAX_ATTEMPTS, + DEFAULT_OPENCLAW_DELIVER_RETRY_BACKOFF_FACTOR, + DEFAULT_OPENCLAW_DELIVER_RETRY_BUDGET_MS, + DEFAULT_OPENCLAW_DELIVER_RETRY_INITIAL_DELAY_MS, + DEFAULT_OPENCLAW_DELIVER_RETRY_MAX_DELAY_MS, + DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, + DEFAULT_OPENCLAW_HOOK_PATH, + DEFAULT_RECONNECT_BACKOFF_FACTOR, + DEFAULT_RECONNECT_JITTER_RATIO, + DEFAULT_RECONNECT_MAX_DELAY_MS, + DEFAULT_RECONNECT_MIN_DELAY_MS, + WS_READY_STATE_OPEN, +} from "./constants.js"; +import { + type ConnectorFrame, + type DeliverAckFrame, + type DeliverFrame, + type EnqueueFrame, + enqueueFrameSchema, + type HeartbeatAckFrame, + type HeartbeatFrame, + parseFrame, + serializeFrame, +} from "./frames.js"; + +type ConnectorWebSocketEventType = "open" | "message" | "close" | "error"; +type ConnectorWebSocketListener = (event: unknown) => void; + +export type ConnectorWebSocket = { + readonly readyState: number; + send: (data: string) => void; + close: (code?: number, reason?: string) => void; + addEventListener: ( + type: ConnectorWebSocketEventType, + listener: ConnectorWebSocketListener, + ) => void; +}; + +export type ConnectorClientHooks = { + onConnected?: () => void; + onDisconnected?: (event: { + code: number; + reason: string; + wasClean: boolean; + }) => void; + onFrame?: (frame: ConnectorFrame) => void; + onDeliverSucceeded?: (frame: DeliverFrame) => void; + onDeliverFailed?: (frame: DeliverFrame, error: unknown) => void; +}; + +export type ConnectorClientOptions = { + connectorUrl: string; + connectionHeaders?: Record; + openclawBaseUrl: string; + openclawHookToken?: string; + openclawHookPath?: string; + heartbeatIntervalMs?: number; + reconnectMinDelayMs?: number; + reconnectMaxDelayMs?: number; + reconnectBackoffFactor?: number; + reconnectJitterRatio?: number; + openclawDeliverTimeoutMs?: number; + openclawDeliverMaxAttempts?: 
number; + openclawDeliverRetryInitialDelayMs?: number; + openclawDeliverRetryMaxDelayMs?: number; + openclawDeliverRetryBackoffFactor?: number; + openclawDeliverRetryBudgetMs?: number; + webSocketFactory?: ( + url: string, + headers: Record, + ) => ConnectorWebSocket; + fetchImpl?: typeof fetch; + logger?: Logger; + hooks?: ConnectorClientHooks; + now?: () => number; + random?: () => number; + ulidFactory?: (time?: number) => string; +}; + +export type ConnectorOutboundEnqueueInput = { + toAgentDid: string; + payload: unknown; + conversationId?: string; + replyTo?: string; +}; + +function isAbortError(error: unknown): boolean { + return error instanceof Error && error.name === "AbortError"; +} + +function resolveWebSocketFactory( + webSocketFactory: ConnectorClientOptions["webSocketFactory"], +): (url: string, headers: Record) => ConnectorWebSocket { + if (webSocketFactory !== undefined) { + return webSocketFactory; + } + + if (typeof WebSocket !== "function") { + throw new Error("WebSocket implementation is required"); + } + + return (_url: string, headers: Record) => { + if (Object.keys(headers).length > 0) { + throw new Error( + "Connection headers require a custom webSocketFactory implementation", + ); + } + + return new WebSocket(_url) as ConnectorWebSocket; + }; +} + +function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { + const normalizedBase = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`; + const normalizedHookPath = hookPath.startsWith("/") + ? 
hookPath.slice(1) + : hookPath; + return new URL(normalizedHookPath, normalizedBase).toString(); +} + +function sanitizeErrorReason(error: unknown): string { + if (!(error instanceof Error)) { + return "Unknown delivery error"; + } + + return error.message.trim().slice(0, 200) || "Unknown delivery error"; +} + +class LocalOpenclawDeliveryError extends Error { + readonly retryable: boolean; + + constructor(input: { message: string; retryable: boolean }) { + super(input.message); + this.name = "LocalOpenclawDeliveryError"; + this.retryable = input.retryable; + } +} + +function isRetryableOpenclawDeliveryError(error: unknown): boolean { + return ( + error instanceof LocalOpenclawDeliveryError && error.retryable === true + ); +} + +function isObject(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function readMessageEventData(event: unknown): unknown { + if (!isObject(event)) { + return undefined; + } + + return event.data; +} + +function readCloseEvent(event: unknown): { + code: number; + reason: string; + wasClean: boolean; +} { + if (!isObject(event)) { + return { + code: 1006, + reason: "", + wasClean: false, + }; + } + + return { + code: typeof event.code === "number" ? event.code : 1006, + reason: typeof event.reason === "string" ? event.reason : "", + wasClean: typeof event.wasClean === "boolean" ? 
event.wasClean : false, + }; +} + +function normalizeConnectionHeaders( + headers: Record | undefined, +): Record { + if (headers === undefined) { + return {}; + } + + const normalized: Record = {}; + for (const [rawKey, rawValue] of Object.entries(headers)) { + const key = rawKey.trim(); + const value = rawValue.trim(); + if (key.length === 0 || value.length === 0) { + continue; + } + normalized[key] = value; + } + + return normalized; +} + +export class ConnectorClient { + private readonly connectorUrl: string; + private readonly connectionHeaders: Record; + private readonly openclawHookUrl: string; + private readonly openclawHookToken?: string; + private readonly heartbeatIntervalMs: number; + private readonly reconnectMinDelayMs: number; + private readonly reconnectMaxDelayMs: number; + private readonly reconnectBackoffFactor: number; + private readonly reconnectJitterRatio: number; + private readonly openclawDeliverTimeoutMs: number; + private readonly openclawDeliverMaxAttempts: number; + private readonly openclawDeliverRetryInitialDelayMs: number; + private readonly openclawDeliverRetryMaxDelayMs: number; + private readonly openclawDeliverRetryBackoffFactor: number; + private readonly openclawDeliverRetryBudgetMs: number; + private readonly webSocketFactory: ( + url: string, + headers: Record, + ) => ConnectorWebSocket; + private readonly fetchImpl: typeof fetch; + private readonly logger: Logger; + private readonly hooks: ConnectorClientHooks; + private readonly now: () => number; + private readonly random: () => number; + private readonly ulidFactory: (time?: number) => string; + + private socket: ConnectorWebSocket | undefined; + private reconnectTimeout: ReturnType | undefined; + private heartbeatInterval: ReturnType | undefined; + private reconnectAttempt = 0; + private started = false; + private readonly outboundQueue: EnqueueFrame[] = []; + + constructor(options: ConnectorClientOptions) { + this.connectorUrl = options.connectorUrl; + 
this.connectionHeaders = normalizeConnectionHeaders( + options.connectionHeaders, + ); + this.openclawHookToken = options.openclawHookToken; + this.heartbeatIntervalMs = + options.heartbeatIntervalMs ?? DEFAULT_HEARTBEAT_INTERVAL_MS; + this.reconnectMinDelayMs = + options.reconnectMinDelayMs ?? DEFAULT_RECONNECT_MIN_DELAY_MS; + this.reconnectMaxDelayMs = + options.reconnectMaxDelayMs ?? DEFAULT_RECONNECT_MAX_DELAY_MS; + this.reconnectBackoffFactor = + options.reconnectBackoffFactor ?? DEFAULT_RECONNECT_BACKOFF_FACTOR; + this.reconnectJitterRatio = + options.reconnectJitterRatio ?? DEFAULT_RECONNECT_JITTER_RATIO; + this.openclawDeliverTimeoutMs = + options.openclawDeliverTimeoutMs ?? DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS; + this.openclawDeliverMaxAttempts = Math.max( + 1, + Math.floor( + options.openclawDeliverMaxAttempts ?? + DEFAULT_OPENCLAW_DELIVER_MAX_ATTEMPTS, + ), + ); + this.openclawDeliverRetryInitialDelayMs = Math.max( + 0, + Math.floor( + options.openclawDeliverRetryInitialDelayMs ?? + DEFAULT_OPENCLAW_DELIVER_RETRY_INITIAL_DELAY_MS, + ), + ); + this.openclawDeliverRetryMaxDelayMs = Math.max( + this.openclawDeliverRetryInitialDelayMs, + Math.floor( + options.openclawDeliverRetryMaxDelayMs ?? + DEFAULT_OPENCLAW_DELIVER_RETRY_MAX_DELAY_MS, + ), + ); + this.openclawDeliverRetryBackoffFactor = Math.max( + 1, + options.openclawDeliverRetryBackoffFactor ?? + DEFAULT_OPENCLAW_DELIVER_RETRY_BACKOFF_FACTOR, + ); + this.openclawDeliverRetryBudgetMs = Math.max( + this.openclawDeliverTimeoutMs, + Math.floor( + options.openclawDeliverRetryBudgetMs ?? + DEFAULT_OPENCLAW_DELIVER_RETRY_BUDGET_MS, + ), + ); + this.webSocketFactory = resolveWebSocketFactory(options.webSocketFactory); + this.fetchImpl = options.fetchImpl ?? fetch; + this.logger = + options.logger ?? + createLogger({ service: "connector", module: "client" }); + this.hooks = options.hooks ?? {}; + this.now = options.now ?? Date.now; + this.random = options.random ?? 
Math.random; + this.ulidFactory = options.ulidFactory ?? generateUlid; + + this.openclawHookUrl = toOpenclawHookUrl( + options.openclawBaseUrl, + options.openclawHookPath ?? DEFAULT_OPENCLAW_HOOK_PATH, + ); + } + + connect(): void { + if (this.started) { + return; + } + + this.started = true; + this.connectSocket(); + } + + disconnect(): void { + this.started = false; + this.clearReconnectTimeout(); + this.clearHeartbeatInterval(); + + if (this.socket !== undefined) { + const socket = this.socket; + this.socket = undefined; + socket.close(1000, "client disconnect"); + } + } + + isConnected(): boolean { + return this.socket?.readyState === WS_READY_STATE_OPEN; + } + + getQueuedOutboundCount(): number { + return this.outboundQueue.length; + } + + enqueueOutbound(input: ConnectorOutboundEnqueueInput): EnqueueFrame { + const frame = enqueueFrameSchema.parse({ + v: CONNECTOR_FRAME_VERSION, + type: "enqueue", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + toAgentDid: input.toAgentDid, + payload: input.payload, + conversationId: input.conversationId, + replyTo: input.replyTo, + }); + + this.outboundQueue.push(frame); + this.flushOutboundQueue(); + return frame; + } + + private connectSocket(): void { + this.clearReconnectTimeout(); + + try { + this.socket = this.webSocketFactory( + this.connectorUrl, + this.connectionHeaders, + ); + } catch (error) { + this.logger.warn("connector.websocket.create_failed", { + reason: sanitizeErrorReason(error), + }); + this.scheduleReconnect(); + return; + } + + this.socket.addEventListener("open", () => { + this.reconnectAttempt = 0; + this.logger.info("connector.websocket.connected", { + url: this.connectorUrl, + }); + this.startHeartbeatInterval(); + this.flushOutboundQueue(); + this.hooks.onConnected?.(); + }); + + this.socket.addEventListener("message", (event) => { + void this.handleIncomingMessage(readMessageEventData(event)); + }); + + this.socket.addEventListener("close", (event) => { + this.clearHeartbeatInterval(); + 
this.socket = undefined; + + const closeEvent = readCloseEvent(event); + + this.logger.warn("connector.websocket.closed", { + closeCode: closeEvent.code, + reason: closeEvent.reason, + wasClean: closeEvent.wasClean, + }); + + this.hooks.onDisconnected?.({ + code: closeEvent.code, + reason: closeEvent.reason, + wasClean: closeEvent.wasClean, + }); + + if (this.started) { + this.scheduleReconnect(); + } + }); + + this.socket.addEventListener("error", () => { + this.logger.warn("connector.websocket.error", { + url: this.connectorUrl, + }); + }); + } + + private scheduleReconnect(): void { + if (!this.started) { + return; + } + + const exponentialDelay = + this.reconnectMinDelayMs * + this.reconnectBackoffFactor ** this.reconnectAttempt; + const boundedDelay = Math.min(exponentialDelay, this.reconnectMaxDelayMs); + + const jitterRange = boundedDelay * this.reconnectJitterRatio; + const jitterOffset = + jitterRange === 0 ? 0 : (this.random() * 2 - 1) * jitterRange; + + const delayMs = Math.max(0, Math.floor(boundedDelay + jitterOffset)); + this.reconnectAttempt += 1; + + this.reconnectTimeout = setTimeout(() => { + this.connectSocket(); + }, delayMs); + } + + private clearReconnectTimeout(): void { + if (this.reconnectTimeout !== undefined) { + clearTimeout(this.reconnectTimeout); + this.reconnectTimeout = undefined; + } + } + + private startHeartbeatInterval(): void { + this.clearHeartbeatInterval(); + + if (this.heartbeatIntervalMs <= 0) { + return; + } + + this.heartbeatInterval = setInterval(() => { + const frame: HeartbeatFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + }; + + this.sendFrame(frame); + }, this.heartbeatIntervalMs); + } + + private clearHeartbeatInterval(): void { + if (this.heartbeatInterval !== undefined) { + clearInterval(this.heartbeatInterval); + this.heartbeatInterval = undefined; + } + } + + private flushOutboundQueue(): void { + if (!this.isConnected()) { + return; + } + + 
while (this.outboundQueue.length > 0 && this.isConnected()) { + const nextFrame = this.outboundQueue[0]; + const sent = this.sendFrame(nextFrame); + if (!sent) { + return; + } + this.outboundQueue.shift(); + } + } + + private sendFrame(frame: ConnectorFrame): boolean { + const socket = this.socket; + if (socket === undefined || socket.readyState !== WS_READY_STATE_OPEN) { + return false; + } + + const payload = serializeFrame(frame); + + try { + socket.send(payload); + return true; + } catch (error) { + this.logger.warn("connector.websocket.send_failed", { + frameType: frame.type, + reason: sanitizeErrorReason(error), + }); + return false; + } + } + + private async handleIncomingMessage(rawFrame: unknown): Promise { + let frame: ConnectorFrame; + + try { + frame = parseFrame(rawFrame); + } catch (error) { + this.logger.warn("connector.frame.parse_failed", { + reason: sanitizeErrorReason(error), + }); + return; + } + + this.hooks.onFrame?.(frame); + + if (frame.type === "heartbeat") { + this.handleHeartbeatFrame(frame); + return; + } + + if (frame.type === "deliver") { + await this.handleDeliverFrame(frame); + return; + } + } + + private handleHeartbeatFrame(frame: HeartbeatFrame): void { + const ackFrame: HeartbeatAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat_ack", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + ackId: frame.id, + }; + + this.sendFrame(ackFrame); + } + + private async handleDeliverFrame(frame: DeliverFrame): Promise { + try { + await this.deliverToLocalOpenclawWithRetry(frame); + const ackFrame: DeliverAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "deliver_ack", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + ackId: frame.id, + accepted: true, + }; + + this.sendFrame(ackFrame); + this.hooks.onDeliverSucceeded?.(frame); + } catch (error) { + const ackFrame: DeliverAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "deliver_ack", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + ackId: frame.id, + accepted: 
false, + reason: sanitizeErrorReason(error), + }; + + this.sendFrame(ackFrame); + this.hooks.onDeliverFailed?.(frame, error); + } + } + + private async deliverToLocalOpenclaw(frame: DeliverFrame): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => { + controller.abort(); + }, this.openclawDeliverTimeoutMs); + + const headers: Record = { + "content-type": "application/json", + "x-request-id": frame.id, + }; + + if (this.openclawHookToken !== undefined) { + headers["x-openclaw-token"] = this.openclawHookToken; + } + + try { + const response = await this.fetchImpl(this.openclawHookUrl, { + method: "POST", + headers, + body: JSON.stringify(frame.payload), + signal: controller.signal, + }); + + if (!response.ok) { + throw new LocalOpenclawDeliveryError({ + message: `Local OpenClaw hook rejected payload with status ${response.status}`, + retryable: + response.status >= 500 || + response.status === 404 || + response.status === 429, + }); + } + } catch (error) { + if (isAbortError(error)) { + throw new LocalOpenclawDeliveryError({ + message: "Local OpenClaw hook request timed out", + retryable: true, + }); + } + + if (error instanceof LocalOpenclawDeliveryError) { + throw error; + } + + throw new LocalOpenclawDeliveryError({ + message: sanitizeErrorReason(error), + retryable: true, + }); + } finally { + clearTimeout(timeout); + } + } + + private async deliverToLocalOpenclawWithRetry( + frame: DeliverFrame, + ): Promise { + const startedAt = this.now(); + let attempt = 1; + let retryDelayMs = this.openclawDeliverRetryInitialDelayMs; + + while (true) { + try { + await this.deliverToLocalOpenclaw(frame); + return; + } catch (error) { + const retryable = isRetryableOpenclawDeliveryError(error); + const attemptsRemaining = attempt < this.openclawDeliverMaxAttempts; + const elapsedMs = this.now() - startedAt; + const hasBudgetForRetry = + elapsedMs + retryDelayMs + this.openclawDeliverTimeoutMs <= + this.openclawDeliverRetryBudgetMs; + const 
shouldRetry = + retryable && attemptsRemaining && hasBudgetForRetry && this.started; + + this.logger.warn("connector.openclaw.deliver_failed", { + ackId: frame.id, + attempt, + retryable, + shouldRetry, + reason: sanitizeErrorReason(error), + }); + + if (!shouldRetry) { + throw error; + } + + await this.wait(retryDelayMs); + retryDelayMs = Math.min( + this.openclawDeliverRetryMaxDelayMs, + Math.floor(retryDelayMs * this.openclawDeliverRetryBackoffFactor), + ); + attempt += 1; + } + } + } + + private async wait(delayMs: number): Promise { + await new Promise((resolve) => { + setTimeout(resolve, delayMs); + }); + } + + private makeFrameId(): string { + return this.ulidFactory(this.now()); + } + + private makeTimestamp(): string { + return new Date(this.now()).toISOString(); + } +} diff --git a/packages/connector/src/constants.ts b/packages/connector/src/constants.ts new file mode 100644 index 0000000..70aefeb --- /dev/null +++ b/packages/connector/src/constants.ts @@ -0,0 +1,27 @@ +export const CONNECTOR_VERSION = "0.0.0"; + +export const CONNECTOR_FRAME_VERSION = 1; + +export const DEFAULT_OPENCLAW_HOOK_PATH = "/hooks/agent"; +export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; +export const DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS = 10_000; +export const DEFAULT_OPENCLAW_DELIVER_MAX_ATTEMPTS = 4; +export const DEFAULT_OPENCLAW_DELIVER_RETRY_INITIAL_DELAY_MS = 300; +export const DEFAULT_OPENCLAW_DELIVER_RETRY_MAX_DELAY_MS = 2_000; +export const DEFAULT_OPENCLAW_DELIVER_RETRY_BACKOFF_FACTOR = 2; + +export const DEFAULT_HEARTBEAT_INTERVAL_MS = 30_000; +export const DEFAULT_RECONNECT_MIN_DELAY_MS = 1_000; +export const DEFAULT_RECONNECT_MAX_DELAY_MS = 30_000; +export const DEFAULT_RECONNECT_BACKOFF_FACTOR = 2; +export const DEFAULT_RECONNECT_JITTER_RATIO = 0.2; + +export const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; +export const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; +export const DEFAULT_RELAY_DELIVER_TIMEOUT_MS = 15_000; +export 
const DEFAULT_OPENCLAW_DELIVER_RETRY_BUDGET_MS = + DEFAULT_RELAY_DELIVER_TIMEOUT_MS - 1_000; + +export const AGENT_ACCESS_HEADER = "x-claw-agent-access"; + +export const WS_READY_STATE_OPEN = 1; diff --git a/packages/connector/src/frames.test.ts b/packages/connector/src/frames.test.ts new file mode 100644 index 0000000..f8af964 --- /dev/null +++ b/packages/connector/src/frames.test.ts @@ -0,0 +1,106 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { + ConnectorFrameParseError, + parseFrame, + serializeFrame, +} from "./frames.js"; + +function createAgentDid(seedMs: number): string { + return makeAgentDid(generateUlid(seedMs)); +} + +describe("connector frame parsing", () => { + it("roundtrips a valid enqueue frame", () => { + const frame = { + v: 1 as const, + type: "enqueue" as const, + id: generateUlid(1700000000000), + ts: "2026-01-01T00:00:00.000Z", + toAgentDid: createAgentDid(1700000000100), + payload: { + message: "hello", + }, + conversationId: "conv_123", + replyTo: "https://example.com/hooks/agent", + }; + + const serialized = serializeFrame(frame); + const parsed = parseFrame(serialized); + + expect(parsed).toEqual(frame); + }); + + it("parses binary frame payloads", () => { + const heartbeat = { + v: 1 as const, + type: "heartbeat" as const, + id: generateUlid(1700000000000), + ts: "2026-01-01T00:00:00.000Z", + }; + + const raw = new TextEncoder().encode(JSON.stringify(heartbeat)); + + expect(parseFrame(raw)).toEqual(heartbeat); + }); + + it("throws INVALID_JSON on malformed json", () => { + expect(() => parseFrame("{not json")).toThrowError( + ConnectorFrameParseError, + ); + + try { + parseFrame("{not json"); + throw new Error("expected parseFrame to throw"); + } catch (error) { + expect(error).toBeInstanceOf(ConnectorFrameParseError); + expect((error as ConnectorFrameParseError).code).toBe("INVALID_JSON"); + } + }); + + it("throws INVALID_FRAME on invalid shape", () => { + 
const invalid = { + v: 1, + type: "enqueue", + id: generateUlid(1700000000000), + ts: "2026-01-01T00:00:00.000Z", + toAgentDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + payload: { + message: "hello", + }, + }; + + try { + parseFrame(invalid); + throw new Error("expected parseFrame to throw"); + } catch (error) { + expect(error).toBeInstanceOf(ConnectorFrameParseError); + expect((error as ConnectorFrameParseError).code).toBe("INVALID_FRAME"); + } + }); + + it("rejects unknown frame type", () => { + expect(() => + parseFrame({ + v: 1, + type: "ping", + id: generateUlid(1700000000000), + ts: "2026-01-01T00:00:00.000Z", + }), + ).toThrow(ConnectorFrameParseError); + }); + + it("rejects blank reason values", () => { + expect(() => + parseFrame({ + v: 1, + type: "deliver_ack", + id: generateUlid(1700000000000), + ts: "2026-01-01T00:00:00.000Z", + ackId: generateUlid(1700000000100), + accepted: false, + reason: " ", + }), + ).toThrow(ConnectorFrameParseError); + }); +}); diff --git a/packages/connector/src/frames.ts b/packages/connector/src/frames.ts new file mode 100644 index 0000000..a572e06 --- /dev/null +++ b/packages/connector/src/frames.ts @@ -0,0 +1,220 @@ +import { parseDid, parseUlid } from "@clawdentity/protocol"; +import { z } from "zod"; +import { CONNECTOR_FRAME_VERSION } from "./constants.js"; + +const ISO_TIMESTAMP_PATTERN = + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,9})?(?:Z|[+-]\d{2}:\d{2})$/; + +const FRAME_TYPES = [ + "heartbeat", + "heartbeat_ack", + "deliver", + "deliver_ack", + "enqueue", + "enqueue_ack", +] as const; + +export const connectorFrameTypeSchema = z.enum(FRAME_TYPES); + +const ulidStringSchema = z.string().superRefine((value, ctx) => { + try { + parseUlid(value); + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "must be a valid ULID", + }); + } +}); + +const agentDidSchema = z.string().superRefine((value, ctx) => { + try { + const parsedDid = parseDid(value); + if (parsedDid.kind !== "agent") { + 
ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "must be an agent DID", + }); + } + } catch { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "must be a valid DID", + }); + } +}); + +const isoTimestampSchema = z.string().superRefine((value, ctx) => { + if (!ISO_TIMESTAMP_PATTERN.test(value) || Number.isNaN(Date.parse(value))) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "must be a valid ISO-8601 timestamp", + }); + } +}); + +const nonEmptyStringSchema = z + .string() + .transform((value) => value.trim()) + .pipe(z.string().min(1)); + +const frameBaseSchema = z + .object({ + v: z.literal(CONNECTOR_FRAME_VERSION), + id: ulidStringSchema, + ts: isoTimestampSchema, + }) + .strict(); + +export const heartbeatFrameSchema = frameBaseSchema + .extend({ + type: z.literal("heartbeat"), + }) + .strict(); + +export const heartbeatAckFrameSchema = frameBaseSchema + .extend({ + type: z.literal("heartbeat_ack"), + ackId: ulidStringSchema, + }) + .strict(); + +export const deliverFrameSchema = frameBaseSchema + .extend({ + type: z.literal("deliver"), + fromAgentDid: agentDidSchema, + toAgentDid: agentDidSchema, + payload: z.unknown(), + contentType: nonEmptyStringSchema.optional(), + conversationId: nonEmptyStringSchema.optional(), + replyTo: z.string().url().optional(), + }) + .strict(); + +export const deliverAckFrameSchema = frameBaseSchema + .extend({ + type: z.literal("deliver_ack"), + ackId: ulidStringSchema, + accepted: z.boolean(), + reason: nonEmptyStringSchema.optional(), + }) + .strict(); + +export const enqueueFrameSchema = frameBaseSchema + .extend({ + type: z.literal("enqueue"), + toAgentDid: agentDidSchema, + payload: z.unknown(), + conversationId: nonEmptyStringSchema.optional(), + replyTo: z.string().url().optional(), + }) + .strict(); + +export const enqueueAckFrameSchema = frameBaseSchema + .extend({ + type: z.literal("enqueue_ack"), + ackId: ulidStringSchema, + accepted: z.boolean(), + reason: nonEmptyStringSchema.optional(), 
+ }) + .strict(); + +export const connectorFrameSchema = z.discriminatedUnion("type", [ + heartbeatFrameSchema, + heartbeatAckFrameSchema, + deliverFrameSchema, + deliverAckFrameSchema, + enqueueFrameSchema, + enqueueAckFrameSchema, +]); + +export type HeartbeatFrame = z.infer; +export type HeartbeatAckFrame = z.infer; +export type DeliverFrame = z.infer; +export type DeliverAckFrame = z.infer; +export type EnqueueFrame = z.infer; +export type EnqueueAckFrame = z.infer; + +export type ConnectorFrame = z.infer; + +export type ConnectorFrameParseErrorCode = "INVALID_JSON" | "INVALID_FRAME"; + +export class ConnectorFrameParseError extends Error { + readonly code: ConnectorFrameParseErrorCode; + readonly issues?: z.ZodIssue[]; + + constructor(options: { + code: ConnectorFrameParseErrorCode; + message: string; + issues?: z.ZodIssue[]; + cause?: unknown; + }) { + super(options.message); + this.name = "ConnectorFrameParseError"; + this.code = options.code; + this.issues = options.issues; + + if ("cause" in Error.prototype || options.cause !== undefined) { + (this as Error & { cause?: unknown }).cause = options.cause; + } + } +} + +function decodeFrameInput(input: unknown): unknown { + if (typeof input === "string") { + try { + return JSON.parse(input); + } catch (error) { + throw new ConnectorFrameParseError({ + code: "INVALID_JSON", + message: "Connector frame must be valid JSON", + cause: error, + }); + } + } + + if (input instanceof ArrayBuffer) { + return decodeFrameInput(new TextDecoder().decode(new Uint8Array(input))); + } + + if (ArrayBuffer.isView(input)) { + return decodeFrameInput( + new TextDecoder().decode( + new Uint8Array(input.buffer, input.byteOffset, input.byteLength), + ), + ); + } + + return input; +} + +export function parseFrame(input: unknown): ConnectorFrame { + const decoded = decodeFrameInput(input); + const parsed = connectorFrameSchema.safeParse(decoded); + + if (!parsed.success) { + throw new ConnectorFrameParseError({ + code: "INVALID_FRAME", 
+ message: "Connector frame does not match schema", + issues: parsed.error.issues, + cause: parsed.error, + }); + } + + return parsed.data; +} + +export function serializeFrame(frame: ConnectorFrame): string { + const parsed = connectorFrameSchema.safeParse(frame); + + if (!parsed.success) { + throw new ConnectorFrameParseError({ + code: "INVALID_FRAME", + message: "Connector frame does not match schema", + issues: parsed.error.issues, + cause: parsed.error, + }); + } + + return JSON.stringify(parsed.data); +} diff --git a/packages/connector/src/index.ts b/packages/connector/src/index.ts new file mode 100644 index 0000000..961a229 --- /dev/null +++ b/packages/connector/src/index.ts @@ -0,0 +1,54 @@ +export type { + ConnectorClientHooks, + ConnectorClientOptions, + ConnectorOutboundEnqueueInput, + ConnectorWebSocket, +} from "./client.js"; +export { ConnectorClient } from "./client.js"; +export { + AGENT_ACCESS_HEADER, + CONNECTOR_FRAME_VERSION, + CONNECTOR_VERSION, + DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_HEARTBEAT_INTERVAL_MS, + DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, + DEFAULT_OPENCLAW_HOOK_PATH, + DEFAULT_RECONNECT_BACKOFF_FACTOR, + DEFAULT_RECONNECT_JITTER_RATIO, + DEFAULT_RECONNECT_MAX_DELAY_MS, + DEFAULT_RECONNECT_MIN_DELAY_MS, + DEFAULT_RELAY_DELIVER_TIMEOUT_MS, + WS_READY_STATE_OPEN, +} from "./constants.js"; + +export type { + ConnectorFrame, + ConnectorFrameParseErrorCode, + DeliverAckFrame, + DeliverFrame, + EnqueueAckFrame, + EnqueueFrame, + HeartbeatAckFrame, + HeartbeatFrame, +} from "./frames.js"; +export { + ConnectorFrameParseError, + connectorFrameSchema, + connectorFrameTypeSchema, + deliverAckFrameSchema, + deliverFrameSchema, + enqueueAckFrameSchema, + enqueueFrameSchema, + heartbeatAckFrameSchema, + heartbeatFrameSchema, + parseFrame, + serializeFrame, +} from "./frames.js"; + +export type { + ConnectorRuntimeHandle, + StartConnectorRuntimeInput, +} from "./runtime.js"; +export { 
startConnectorRuntime } from "./runtime.js"; diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts new file mode 100644 index 0000000..ab39ee0 --- /dev/null +++ b/packages/connector/src/runtime.ts @@ -0,0 +1,594 @@ +import { randomBytes } from "node:crypto"; +import { mkdir, rename, writeFile } from "node:fs/promises"; +import { + createServer, + type IncomingMessage, + type ServerResponse, +} from "node:http"; +import { dirname, join } from "node:path"; +import { + decodeBase64url, + encodeBase64url, + RELAY_RECIPIENT_AGENT_DID_HEADER, +} from "@clawdentity/protocol"; +import { + type AgentAuthBundle, + AppError, + createLogger, + executeWithAgentAuthRefreshRetry, + type Logger, + refreshAgentAuthWithClawProof, + signHttpRequest, +} from "@clawdentity/sdk"; +import { WebSocket as NodeWebSocket } from "ws"; +import { ConnectorClient, type ConnectorWebSocket } from "./client.js"; +import { + AGENT_ACCESS_HEADER, + DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_OPENCLAW_HOOK_PATH, +} from "./constants.js"; + +type ConnectorRuntimeCredentials = { + accessExpiresAt?: string; + accessToken?: string; + agentDid: string; + ait: string; + refreshExpiresAt?: string; + refreshToken: string; + secretKey: string; + tokenType?: "Bearer"; +}; + +export type StartConnectorRuntimeInput = { + agentName: string; + configDir: string; + credentials: ConnectorRuntimeCredentials; + fetchImpl?: typeof fetch; + logger?: Logger; + openclawBaseUrl?: string; + openclawHookPath?: string; + openclawHookToken?: string; + outboundBaseUrl?: string; + outboundPath?: string; + proxyWebsocketUrl?: string; + registryUrl: string; +}; + +export type ConnectorRuntimeHandle = { + outboundUrl: string; + stop: () => Promise; + waitUntilStopped: () => Promise; + websocketUrl: string; +}; + +type OutboundRelayRequest = { + payload: unknown; + peer: string; + peerDid: string; + peerProxyUrl: string; +}; + +const 
REGISTRY_AUTH_FILENAME = "registry-auth.json"; +const AGENTS_DIR_NAME = "agents"; +const REFRESH_SINGLE_FLIGHT_PREFIX = "connector-runtime"; +const NONCE_SIZE = 16; +const MAX_OUTBOUND_BODY_BYTES = 1024 * 1024; +const ACCESS_TOKEN_REFRESH_SKEW_MS = 30_000; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function toPathWithQuery(url: URL): string { + return `${url.pathname}${url.search}`; +} + +function parseRequiredString(value: unknown, field: string): string { + if (typeof value !== "string" || value.trim().length === 0) { + throw new Error(`Invalid ${field}`); + } + + return value.trim(); +} + +function normalizeOutboundBaseUrl(baseUrlInput: string | undefined): URL { + const raw = baseUrlInput?.trim() || DEFAULT_CONNECTOR_BASE_URL; + let parsed: URL; + + try { + parsed = new URL(raw); + } catch { + throw new Error("Connector outbound base URL is invalid"); + } + + if (parsed.protocol !== "http:") { + throw new Error("Connector outbound base URL must use http://"); + } + + return parsed; +} + +function normalizeOutboundPath(pathInput: string | undefined): string { + const raw = pathInput?.trim() || DEFAULT_CONNECTOR_OUTBOUND_PATH; + if (raw.length === 0) { + throw new Error("Connector outbound path is invalid"); + } + + return raw.startsWith("/") ? raw : `/${raw}`; +} + +function normalizeWebSocketUrl(urlInput: string | undefined): string { + const raw = urlInput?.trim() ?? 
process.env.CLAWDENTITY_PROXY_WS_URL?.trim(); + if (!raw) { + throw new Error( + "Proxy websocket URL is required (set --proxy-ws-url or CLAWDENTITY_PROXY_WS_URL)", + ); + } + + const parsed = new URL(raw); + if (parsed.protocol === "https:") { + parsed.protocol = "wss:"; + } else if (parsed.protocol === "http:") { + parsed.protocol = "ws:"; + } + + if (parsed.protocol !== "wss:" && parsed.protocol !== "ws:") { + throw new Error("Proxy websocket URL must use ws:// or wss://"); + } + + return parsed.toString(); +} + +function resolveOpenclawBaseUrl(input?: string): string { + const value = + input?.trim() || + process.env.OPENCLAW_BASE_URL?.trim() || + DEFAULT_OPENCLAW_BASE_URL; + return value; +} + +function resolveOpenclawHookPath(input?: string): string { + const value = + input?.trim() || + process.env.OPENCLAW_HOOK_PATH?.trim() || + DEFAULT_OPENCLAW_HOOK_PATH; + return value.startsWith("/") ? value : `/${value}`; +} + +function resolveOpenclawHookToken(input?: string): string | undefined { + const value = input?.trim() || process.env.OPENCLAW_HOOK_TOKEN?.trim(); + if (!value) { + return undefined; + } + return value; +} + +function toInitialAuthBundle( + credentials: ConnectorRuntimeCredentials, +): AgentAuthBundle { + return { + tokenType: "Bearer", + accessToken: credentials.accessToken?.trim() || "", + accessExpiresAt: + credentials.accessExpiresAt?.trim() || "1970-01-01T00:00:00.000Z", + refreshToken: parseRequiredString(credentials.refreshToken, "refreshToken"), + refreshExpiresAt: + credentials.refreshExpiresAt?.trim() || "2100-01-01T00:00:00.000Z", + }; +} + +function parseIsoTimestampMs(value: string): number | undefined { + const parsed = Date.parse(value); + if (!Number.isFinite(parsed)) { + return undefined; + } + + return parsed; +} + +function shouldRefreshAccessToken( + auth: AgentAuthBundle, + nowMs: number, +): boolean { + if (auth.accessToken.trim().length === 0) { + return true; + } + + const expiresAtMs = 
parseIsoTimestampMs(auth.accessExpiresAt); + if (expiresAtMs === undefined) { + return false; + } + + return expiresAtMs <= nowMs + ACCESS_TOKEN_REFRESH_SKEW_MS; +} + +function parseOutboundRelayRequest(payload: unknown): OutboundRelayRequest { + if (!isRecord(payload)) { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_INVALID_REQUEST", + message: "Outbound relay request must be an object", + status: 400, + expose: true, + }); + } + + return { + peer: parseRequiredString(payload.peer, "peer"), + peerDid: parseRequiredString(payload.peerDid, "peerDid"), + peerProxyUrl: parseRequiredString(payload.peerProxyUrl, "peerProxyUrl"), + payload: payload.payload, + }; +} + +function createWebSocketFactory(): ( + url: string, + headers: Record, +) => ConnectorWebSocket { + return (url: string, headers: Record) => { + const socket = new NodeWebSocket(url, { + headers, + }); + + return { + get readyState() { + return socket.readyState; + }, + send: (data: string) => { + socket.send(data); + }, + close: (code?: number, reason?: string) => { + socket.close(code, reason); + }, + addEventListener: (type, listener) => { + if (type === "open") { + socket.on("open", () => listener({})); + return; + } + + if (type === "message") { + socket.on("message", (data) => { + const text = + typeof data === "string" + ? data + : Array.isArray(data) + ? Buffer.concat(data).toString("utf8") + : Buffer.isBuffer(data) + ? 
data.toString("utf8") + : Buffer.from(data).toString("utf8"); + listener({ data: text }); + }); + return; + } + + if (type === "close") { + socket.on("close", (code, reason) => { + listener({ + code: Number(code), + reason: reason.toString("utf8"), + wasClean: Number(code) === 1000, + }); + }); + return; + } + + socket.on("error", (error) => listener({ error })); + }, + }; + }; +} + +async function writeRegistryAuthAtomic(input: { + auth: AgentAuthBundle; + configDir: string; + agentName: string; +}): Promise { + const targetPath = join( + input.configDir, + AGENTS_DIR_NAME, + input.agentName, + REGISTRY_AUTH_FILENAME, + ); + const tmpPath = `${targetPath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + + await mkdir(dirname(targetPath), { recursive: true }); + await writeFile(tmpPath, `${JSON.stringify(input.auth, null, 2)}\n`, "utf8"); + await rename(tmpPath, targetPath); +} + +async function readRequestJson(req: IncomingMessage): Promise { + const chunks: Buffer[] = []; + let totalBytes = 0; + + for await (const chunk of req) { + const next = Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk as string); + totalBytes += next.length; + if (totalBytes > MAX_OUTBOUND_BODY_BYTES) { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_TOO_LARGE", + message: "Outbound relay payload too large", + status: 413, + expose: true, + }); + } + chunks.push(next); + } + + const bodyText = Buffer.concat(chunks).toString("utf8").trim(); + if (bodyText.length === 0) { + return {}; + } + + try { + return JSON.parse(bodyText); + } catch { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_INVALID_JSON", + message: "Outbound relay payload must be valid JSON", + status: 400, + expose: true, + }); + } +} + +function writeJson( + res: ServerResponse, + status: number, + payload: Record, +): void { + res.statusCode = status; + res.setHeader("content-type", "application/json; charset=utf-8"); + res.end(`${JSON.stringify(payload)}\n`); +} + +function isRetryableRelayAuthError(error: unknown): boolean { + return ( + error instanceof AppError && + error.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && + error.status === 401 + ); +} + +async function buildUpgradeHeaders(input: { + ait: string; + accessToken: string; + wsUrl: URL; + secretKey: Uint8Array; +}): Promise> { + const timestamp = Math.floor(Date.now() / 1000).toString(); + const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); + const signed = await signHttpRequest({ + method: "GET", + pathWithQuery: toPathWithQuery(input.wsUrl), + timestamp, + nonce, + secretKey: input.secretKey, + }); + + return { + authorization: `Claw ${input.ait}`, + [AGENT_ACCESS_HEADER]: input.accessToken, + ...signed.headers, + }; +} + +export async function startConnectorRuntime( + input: StartConnectorRuntimeInput, +): Promise { + const logger = + input.logger ?? createLogger({ service: "connector", module: "runtime" }); + const fetchImpl = input.fetchImpl ?? 
fetch; + + const secretKey = decodeBase64url( + parseRequiredString(input.credentials.secretKey, "secretKey"), + ); + + let currentAuth = toInitialAuthBundle(input.credentials); + + if (shouldRefreshAccessToken(currentAuth, Date.now())) { + currentAuth = await refreshAgentAuthWithClawProof({ + registryUrl: input.registryUrl, + ait: input.credentials.ait, + secretKey, + refreshToken: currentAuth.refreshToken, + fetchImpl, + }); + await writeRegistryAuthAtomic({ + configDir: input.configDir, + agentName: input.agentName, + auth: currentAuth, + }); + } + + const wsUrl = normalizeWebSocketUrl(input.proxyWebsocketUrl); + const wsParsed = new URL(wsUrl); + const upgradeHeaders = await buildUpgradeHeaders({ + wsUrl: wsParsed, + ait: input.credentials.ait, + accessToken: currentAuth.accessToken, + secretKey, + }); + + const connectorClient = new ConnectorClient({ + connectorUrl: wsParsed.toString(), + connectionHeaders: upgradeHeaders, + openclawBaseUrl: resolveOpenclawBaseUrl(input.openclawBaseUrl), + openclawHookPath: resolveOpenclawHookPath(input.openclawHookPath), + openclawHookToken: resolveOpenclawHookToken(input.openclawHookToken), + fetchImpl, + logger, + webSocketFactory: createWebSocketFactory(), + }); + + const outboundBaseUrl = normalizeOutboundBaseUrl(input.outboundBaseUrl); + const outboundPath = normalizeOutboundPath(input.outboundPath); + const outboundUrl = new URL(outboundPath, outboundBaseUrl).toString(); + + const relayToPeer = async (request: OutboundRelayRequest): Promise => { + const peerUrl = new URL(request.peerProxyUrl); + const body = JSON.stringify(request.payload ?? 
{}); + const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; + + const performRelay = async (auth: AgentAuthBundle): Promise => { + const unixSeconds = Math.floor(Date.now() / 1000).toString(); + const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(peerUrl), + timestamp: unixSeconds, + nonce, + body: new TextEncoder().encode(body), + secretKey, + }); + + const response = await fetchImpl(peerUrl.toString(), { + method: "POST", + headers: { + Authorization: `Claw ${input.credentials.ait}`, + "Content-Type": "application/json", + [AGENT_ACCESS_HEADER]: auth.accessToken, + [RELAY_RECIPIENT_AGENT_DID_HEADER]: request.peerDid, + ...signed.headers, + }, + body, + }); + + if (!response.ok) { + if (response.status === 401) { + throw new AppError({ + code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", + message: "Peer relay rejected agent auth credentials", + status: 401, + expose: true, + }); + } + + throw new AppError({ + code: "CONNECTOR_OUTBOUND_DELIVERY_FAILED", + message: "Peer relay request failed", + status: 502, + }); + } + }; + + await executeWithAgentAuthRefreshRetry({ + key: refreshKey, + shouldRetry: isRetryableRelayAuthError, + getAuth: async () => currentAuth, + persistAuth: async (nextAuth) => { + currentAuth = nextAuth; + await writeRegistryAuthAtomic({ + configDir: input.configDir, + agentName: input.agentName, + auth: nextAuth, + }); + }, + refreshAuth: async (auth) => + refreshAgentAuthWithClawProof({ + registryUrl: input.registryUrl, + ait: input.credentials.ait, + secretKey, + refreshToken: auth.refreshToken, + fetchImpl, + }), + perform: performRelay, + }); + }; + + const server = createServer(async (req, res) => { + const requestPath = req.url + ? 
new URL(req.url, outboundBaseUrl).pathname + : "/"; + + if (requestPath !== outboundPath) { + writeJson(res, 404, { error: "Not Found" }); + return; + } + + if (req.method !== "POST") { + res.statusCode = 405; + res.setHeader("allow", "POST"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + try { + const requestBody = await readRequestJson(req); + const relayRequest = parseOutboundRelayRequest(requestBody); + await relayToPeer(relayRequest); + writeJson(res, 202, { accepted: true, peer: relayRequest.peer }); + } catch (error) { + if (error instanceof AppError) { + logger.warn("connector.outbound.rejected", { + code: error.code, + status: error.status, + message: error.message, + }); + writeJson(res, error.status, { + error: { + code: error.code, + message: error.message, + }, + }); + return; + } + + logger.error("connector.outbound.failed", { + errorName: error instanceof Error ? error.name : "unknown", + }); + writeJson(res, 500, { + error: { + code: "CONNECTOR_OUTBOUND_INTERNAL", + message: "Connector outbound relay failed", + }, + }); + } + }); + + let stoppedResolve: (() => void) | undefined; + const stoppedPromise = new Promise((resolve) => { + stoppedResolve = resolve; + }); + + const stop = async (): Promise => { + connectorClient.disconnect(); + await new Promise((resolve, reject) => { + server.close((error) => { + if (error) { + reject(error); + return; + } + resolve(); + }); + }); + stoppedResolve?.(); + }; + + await new Promise((resolve, reject) => { + server.once("error", reject); + server.listen( + Number(outboundBaseUrl.port || "80"), + outboundBaseUrl.hostname, + () => { + server.off("error", reject); + resolve(); + }, + ); + }); + + connectorClient.connect(); + + logger.info("connector.runtime.started", { + outboundUrl, + websocketUrl: wsUrl, + agentDid: input.credentials.agentDid, + }); + + return { + outboundUrl, + websocketUrl: wsUrl, + stop, + waitUntilStopped: async () => stoppedPromise, + }; +} diff --git 
a/packages/connector/tsconfig.json b/packages/connector/tsconfig.json new file mode 100644 index 0000000..c8f8d9b --- /dev/null +++ b/packages/connector/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "lib": ["ES2022", "DOM"], + "types": ["node"], + "outDir": "./dist" + }, + "include": ["src"] +} diff --git a/packages/connector/tsup.config.ts b/packages/connector/tsup.config.ts new file mode 100644 index 0000000..7a3d66a --- /dev/null +++ b/packages/connector/tsup.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm"], + dts: true, + clean: true, +}); diff --git a/packages/connector/vitest.config.ts b/packages/connector/vitest.config.ts new file mode 100644 index 0000000..e2ec332 --- /dev/null +++ b/packages/connector/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + globals: true, + }, +}); diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index dd0b1a1..9e2c021 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -19,6 +19,7 @@ - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). - Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. - Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `AGENT_AUTH_REFRESH_PATH`, `AGENT_AUTH_VALIDATE_PATH`, `ME_API_KEYS_PATH`) so registry, proxy, and CLI stay contract-synchronized. +- Keep relay contract constants in protocol exports (`RELAY_CONNECT_PATH`, `RELAY_RECIPIENT_AGENT_DID_HEADER`) so connector and hook routing stay synchronized across apps. 
- Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. - Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 62246dd..9cd5eda 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -5,3 +5,5 @@ export const AGENT_AUTH_VALIDATE_PATH = "/v1/agents/auth/validate"; export const INVITES_PATH = "/v1/invites"; export const INVITES_REDEEM_PATH = "/v1/invites/redeem"; export const ME_API_KEYS_PATH = "/v1/me/api-keys"; +export const RELAY_CONNECT_PATH = "/v1/relay/connect"; +export const RELAY_RECIPIENT_AGENT_DID_HEADER = "x-claw-recipient-agent-did"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index eb1a63b..3d99db8 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -28,6 +28,8 @@ import { parseCrlClaims, parseDid, parseUlid, + RELAY_CONNECT_PATH, + RELAY_RECIPIENT_AGENT_DID_HEADER, validateAgentName, } from "./index.js"; @@ -44,6 +46,8 @@ describe("protocol", () => { expect(INVITES_PATH).toBe("/v1/invites"); expect(INVITES_REDEEM_PATH).toBe("/v1/invites/redeem"); expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); + expect(RELAY_CONNECT_PATH).toBe("/v1/relay/connect"); + expect(RELAY_RECIPIENT_AGENT_DID_HEADER).toBe("x-claw-recipient-agent-did"); }); it("exports helpers from package root", () => { diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 2e6c03e..0aa6e18 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -28,6 +28,8 @@ export { INVITES_PATH, INVITES_REDEEM_PATH, ME_API_KEYS_PATH, + RELAY_CONNECT_PATH, + RELAY_RECIPIENT_AGENT_DID_HEADER, } from "./endpoints.js"; export type { 
ProtocolParseErrorCode } from "./errors.js"; export { ProtocolParseError } from "./errors.js"; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f1d0137..b484608 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -35,6 +35,9 @@ importers: apps/cli: dependencies: + '@clawdentity/connector': + specifier: workspace:* + version: link:../../packages/connector commander: specifier: ^13.1.0 version: 13.1.0 @@ -64,6 +67,9 @@ importers: apps/proxy: dependencies: + '@clawdentity/connector': + specifier: workspace:* + version: link:../../packages/connector '@clawdentity/protocol': specifier: workspace:* version: link:../../packages/protocol @@ -115,6 +121,28 @@ importers: specifier: ^0.31.9 version: 0.31.9 + packages/connector: + dependencies: + '@clawdentity/protocol': + specifier: workspace:* + version: link:../protocol + '@clawdentity/sdk': + specifier: workspace:* + version: link:../sdk + ws: + specifier: ^8.18.3 + version: 8.19.0 + zod: + specifier: ^4.1.12 + version: 4.3.6 + devDependencies: + '@types/node': + specifier: ^22.17.2 + version: 22.19.11 + '@types/ws': + specifier: ^8.18.1 + version: 8.18.1 + packages/protocol: dependencies: '@scure/base': @@ -1114,6 +1142,9 @@ packages: '@types/node@22.19.11': resolution: {integrity: sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==} + '@types/ws@8.18.1': + resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} + '@vitest/expect@4.0.18': resolution: {integrity: sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==} @@ -2327,6 +2358,18 @@ packages: utf-8-validate: optional: true + ws@8.19.0: + resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + 
utf-8-validate: + optional: true + y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} @@ -2951,6 +2994,10 @@ snapshots: dependencies: undici-types: 6.21.0 + '@types/ws@8.18.1': + dependencies: + '@types/node': 22.19.11 + '@vitest/expect@4.0.18': dependencies: '@standard-schema/spec': 1.1.0 @@ -4120,6 +4167,8 @@ snapshots: ws@8.18.0: {} + ws@8.19.0: {} + y18n@5.0.8: {} yaml@2.8.2: {} diff --git a/tsconfig.base.json b/tsconfig.base.json index 0fdf122..dead24d 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -15,6 +15,7 @@ "sourceMap": true, "baseUrl": ".", "paths": { + "@clawdentity/connector": ["packages/connector/src/index.ts"], "@clawdentity/protocol": ["packages/protocol/src/index.ts"], "@clawdentity/sdk": ["packages/sdk/src/index.ts"] } From 22c876869714fa8ab05033bc93ad13c5b2c0ff5b Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 11:35:40 +0530 Subject: [PATCH 069/190] feat(issue-76): harden hook session default and enable identity injection --- README.md | 4 ++-- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/openclaw.test.ts | 2 +- apps/cli/src/commands/openclaw.ts | 2 +- apps/proxy/.env.example | 2 +- apps/proxy/AGENTS.md | 2 +- apps/proxy/src/AGENTS.md | 2 +- apps/proxy/src/config.test.ts | 8 ++++++++ apps/proxy/src/config.ts | 2 +- apps/proxy/wrangler.jsonc | 6 +++--- 10 files changed, 20 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index f2b2fed..7585508 100644 --- a/README.md +++ b/README.md @@ -459,9 +459,9 @@ clawdentity/ - Enforces caller allowlist policy by DID. - Applies per-agent rate limiting. - Keeps `hooks.token` private and only injects it internally during forward. 
-- Optional: set `INJECT_IDENTITY_INTO_MESSAGE=true` to prepend a sanitized identity block +- By default, `INJECT_IDENTITY_INTO_MESSAGE=true` to prepend a sanitized identity block (`agentDid`, `ownerDid`, `issuer`, `aitJti`) into `/hooks/agent` payload `message`. - Default is `false`, which keeps payloads unchanged. + Set `INJECT_IDENTITY_INTO_MESSAGE=false` to keep payloads unchanged. ### Proxy Worker local runs diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index fe13f8c..ec590e4 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -21,6 +21,7 @@ - `openclaw setup` must be idempotent for relay mapping updates and peer map writes. - `openclaw setup` must persist/update `~/.clawdentity/openclaw-relay.json` with the resolved `openclawBaseUrl` so downstream proxy runtime can boot without manual env edits. - `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. +- `openclaw setup` must set `hooks.allowRequestSessionKey=false` by default and retain `hooks.allowedSessionKeyPrefixes` enforcement for safer `/hooks/agent` session routing. - Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. 
## Connector Command Rules diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index e586687..22efde7 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -125,7 +125,7 @@ describe("openclaw command helpers", () => { }; expect(openclawConfig.hooks.enabled).toBe(true); - expect(openclawConfig.hooks.allowRequestSessionKey).toBe(true); + expect(openclawConfig.hooks.allowRequestSessionKey).toBe(false); expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); expect( openclawConfig.hooks.mappings?.some( diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index d91c985..90d3982 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -689,7 +689,7 @@ async function patchOpenclawConfig(openclawConfigPath: string): Promise { const hooks = isRecord(config.hooks) ? { ...config.hooks } : {}; hooks.enabled = true; - hooks.allowRequestSessionKey = true; + hooks.allowRequestSessionKey = false; hooks.allowedSessionKeyPrefixes = normalizeStringArrayWithValue( hooks.allowedSessionKeyPrefixes, "hook:", diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 16183c0..61b0c53 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -8,7 +8,7 @@ # Runtime vars ENVIRONMENT=local REGISTRY_URL=https://dev.api.clawdentity.com -INJECT_IDENTITY_INTO_MESSAGE=false +INJECT_IDENTITY_INTO_MESSAGE=true # Optional policy/runtime overrides # ALLOW_LIST={"owners":[],"agents":[]} diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 5bad736..ff2acff 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -12,7 +12,7 @@ - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. 
- Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). - Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. -- Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-off (`false`); only enable when operators need webhook `message` augmentation with verified identity context. +- Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-on (`true`); disable only when operators need unchanged webhook `message` forwarding. - Keep OpenClaw env inputs (`OPENCLAW_BASE_URL`, `OPENCLAW_HOOK_TOKEN` / `OPENCLAW_HOOKS_TOKEN`) backward-compatible but optional for relay-mode startup. - Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw compatibility vars, and policy/rate-limit vars). - Load env files with OpenClaw precedence and no overrides: diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index dffe1b8..fccee82 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -40,6 +40,6 @@ - Keep `/hooks/agent` input contract strict: require `Content-Type: application/json` and reject malformed JSON with explicit client errors. - Keep agent-access validation centralized in `auth-middleware.ts` and call registry `POST /v1/agents/auth/validate`; treat non-`204` non-`401` responses as dependency failures (`503`). - Keep relay delivery failure mapping explicit for `/hooks/agent`: DO delivery/RPC failures -> `502`, unavailable DO namespace -> `503`. -- Keep identity message injection optional and default-off (`INJECT_IDENTITY_INTO_MESSAGE=false`) so forwarding behavior is unchanged unless explicitly enabled. +- Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. 
- Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. - When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 0cccd0a..24181a2 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -65,6 +65,14 @@ describe("proxy config", () => { expect(config.injectIdentityIntoMessage).toBe(true); }); + it("allows disabling identity injection via env override", () => { + const config = parseProxyConfig({ + INJECT_IDENTITY_INTO_MESSAGE: "false", + }); + + expect(config.injectIdentityIntoMessage).toBe(false); + }); + it("parses allow list object and override env lists", () => { const config = parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "token", diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 4afae66..388b1a5 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -28,7 +28,7 @@ export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; export const DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE = 60; export const DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS = 60 * 1000; -export const DEFAULT_INJECT_IDENTITY_INTO_MESSAGE = false; +export const DEFAULT_INJECT_IDENTITY_INTO_MESSAGE = true; export class ProxyConfigError extends Error { readonly code = "CONFIG_VALIDATION_FAILED"; diff --git a/apps/proxy/wrangler.jsonc b/apps/proxy/wrangler.jsonc index 50691d7..2124e86 100644 --- a/apps/proxy/wrangler.jsonc +++ b/apps/proxy/wrangler.jsonc @@ -39,7 +39,7 @@ "ENVIRONMENT": "local", "REGISTRY_URL": "https://dev.api.clawdentity.com", "OPENCLAW_BASE_URL": "http://127.0.0.1:18789", - "INJECT_IDENTITY_INTO_MESSAGE": "false" 
+ "INJECT_IDENTITY_INTO_MESSAGE": "true" } }, "development": { @@ -61,7 +61,7 @@ "vars": { "ENVIRONMENT": "development", "REGISTRY_URL": "https://dev.api.clawdentity.com", - "INJECT_IDENTITY_INTO_MESSAGE": "false" + "INJECT_IDENTITY_INTO_MESSAGE": "true" } }, "production": { @@ -83,7 +83,7 @@ "vars": { "ENVIRONMENT": "production", "REGISTRY_URL": "https://api.clawdentity.com", - "INJECT_IDENTITY_INTO_MESSAGE": "false" + "INJECT_IDENTITY_INTO_MESSAGE": "true" } } } From e0a02ff1c7410e3314a2d1a1e29cb21c7872638b Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 12:35:28 +0530 Subject: [PATCH 070/190] feat(cli): add openclaw doctor and relay test diagnostics --- apps/cli/src/commands/AGENTS.md | 13 +- apps/cli/src/commands/openclaw.test.ts | 157 ++++++ apps/cli/src/commands/openclaw.ts | 732 +++++++++++++++++++++++++ 3 files changed, 901 insertions(+), 1 deletion(-) diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index ec590e4..5ce17f5 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -22,7 +22,7 @@ - `openclaw setup` must persist/update `~/.clawdentity/openclaw-relay.json` with the resolved `openclawBaseUrl` so downstream proxy runtime can boot without manual env edits. - `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. - `openclaw setup` must set `hooks.allowRequestSessionKey=false` by default and retain `hooks.allowedSessionKeyPrefixes` enforcement for safer `/hooks/agent` session routing. -- Keep error messages static (no interpolated runtime values); include variable context only in error details/log fields. +- Keep thrown command errors static (no interpolated runtime values); include variable context in error details/log fields. Diagnostic check output (`openclaw doctor`, `openclaw relay test`) may include concrete paths/aliases so operators can remediate quickly. 
## Connector Command Rules - `connector start ` is the runtime entrypoint for local relay handoff and must remain long-running when connector runtime provides a wait/closed primitive. @@ -69,3 +69,14 @@ - Mock network and filesystem dependencies in command tests. - Include success and failure scenarios for external calls, parsing, and cache behavior. - Assert exit code behavior in addition to stdout/stderr text. + +## OpenClaw Diagnostic Command Rules +- `openclaw doctor` must stay read-only and validate required local state: resolved CLI config (`registryUrl` + `apiKey`), selected agent marker, local agent credentials, peers map integrity (and requested `--peer` alias), transform presence, hook mapping, and OpenClaw base URL resolution. +- `openclaw doctor` must print deterministic check IDs and actionable fix hints for each failed check. +- `openclaw doctor --json` must emit a stable machine-readable envelope with overall status + per-check results for CI scripting. + +## OpenClaw Relay Test Command Rules +- `openclaw relay test --peer ` must run doctor-style preflight checks before sending the probe payload. +- Relay probe must target local OpenClaw `POST /hooks/send-to-peer` with deterministic payload fields (`peer`, `sessionId`, `message`). +- Relay test output must summarize endpoint, HTTP status, and remediation guidance when delivery fails. +- `openclaw relay test --json` must emit a stable result envelope and include preflight details when preflight failed. 
diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 22efde7..d248fe2 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -11,6 +11,8 @@ import { describe, expect, it } from "vitest"; import { createOpenclawInviteCode, decodeOpenclawInviteCode, + runOpenclawDoctor, + runOpenclawRelayTest, setupOpenclawRelayFromInvite, } from "./openclaw.js"; @@ -316,4 +318,159 @@ describe("openclaw command helpers", () => { sandbox.cleanup(); } }); + + it("reports healthy doctor status when relay setup is complete", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("healthy"); + expect(result.checks.every((check) => check.status === "pass")).toBe( + true, + ); + } finally { + sandbox.cleanup(); + } + }); + + it("reports missing peer alias in doctor output", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: 
sandbox.transformSourcePath, + }); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + peerAlias: "gamma", + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.peers" && + check.status === "fail" && + check.message.includes("peer alias is missing: gamma"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("returns relay test success for accepted probe", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await runOpenclawRelayTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async () => new Response(null, { status: 204 }), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("success"); + expect(result.httpStatus).toBe(204); + expect(result.endpoint).toBe("http://127.0.0.1:18789/hooks/send-to-peer"); + } finally { + sandbox.cleanup(); + } + }); + + it("returns relay test failure when probe is rejected", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await 
setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await runOpenclawRelayTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async () => + new Response("connector offline", { status: 500 }), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("failure"); + expect(result.httpStatus).toBe(500); + expect(result.message).toBe( + "Relay probe failed inside local relay pipeline", + ); + } finally { + sandbox.cleanup(); + } + }); }); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 90d3982..2e48ae8 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -8,6 +8,7 @@ import { } from "@clawdentity/protocol"; import { AppError, createLogger, nowIso } from "@clawdentity/sdk"; import { Command } from "commander"; +import { resolveConfig } from "../config/manager.js"; import { writeStdoutLine } from "../io.js"; import { assertValidAgentName } from "./agent-name.js"; import { withErrorHandling } from "./helpers.js"; @@ -27,6 +28,7 @@ const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; const RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; const HOOK_MAPPING_ID = "clawdentity-send-to-peer"; const HOOK_PATH_SEND_TO_PEER = "send-to-peer"; +const OPENCLAW_SEND_TO_PEER_HOOK_PATH = "hooks/send-to-peer"; const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; const INVITE_CODE_PREFIX = "clawd1_"; const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; @@ -59,6 +61,27 @@ type OpenclawSetupOptions = { homeDir?: string; }; +type OpenclawDoctorOptions = { + homeDir?: string; + openclawDir?: string; + peerAlias?: string; + resolveConfigImpl?: typeof resolveConfig; + json?: boolean; +}; + +type OpenclawRelayTestOptions = { + peer: 
string; + homeDir?: string; + openclawDir?: string; + openclawBaseUrl?: string; + hookToken?: string; + sessionId?: string; + message?: string; + fetchImpl?: typeof fetch; + resolveConfigImpl?: typeof resolveConfig; + json?: boolean; +}; + type PeerEntry = { did: string; proxyUrl: string; @@ -92,6 +115,44 @@ type OpenclawRelayRuntimeConfig = { updatedAt?: string; }; +type OpenclawDoctorCheckId = + | "config.registry" + | "state.selectedAgent" + | "state.credentials" + | "state.peers" + | "state.transform" + | "state.hookMapping" + | "state.openclawBaseUrl"; + +type OpenclawDoctorCheckStatus = "pass" | "fail"; + +export type OpenclawDoctorCheckResult = { + id: OpenclawDoctorCheckId; + label: string; + status: OpenclawDoctorCheckStatus; + message: string; + remediationHint?: string; + details?: Record; +}; + +export type OpenclawDoctorResult = { + status: "healthy" | "unhealthy"; + checkedAt: string; + checks: OpenclawDoctorCheckResult[]; +}; + +export type OpenclawRelayTestResult = { + status: "success" | "failure"; + checkedAt: string; + peerAlias: string; + endpoint: string; + message: string; + httpStatus?: number; + remediationHint?: string; + details?: Record; + preflight?: OpenclawDoctorResult; +}; + function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } @@ -708,6 +769,594 @@ async function patchOpenclawConfig(openclawConfigPath: string): Promise { ); } +function toDoctorCheck( + input: OpenclawDoctorCheckResult, +): OpenclawDoctorCheckResult { + return input; +} + +function toDoctorResult( + checks: OpenclawDoctorCheckResult[], +): OpenclawDoctorResult { + return { + status: checks.every((check) => check.status === "pass") + ? 
"healthy" + : "unhealthy", + checkedAt: nowIso(), + checks, + }; +} + +function isRelayHookMapping(value: unknown): boolean { + if (!isRecord(value)) { + return false; + } + + if (value.id === HOOK_MAPPING_ID) { + return true; + } + + if (!isRecord(value.match)) { + return false; + } + + return value.match.path === HOOK_PATH_SEND_TO_PEER; +} + +function hasRelayTransformModule(value: unknown): boolean { + if (!isRecord(value) || !isRecord(value.transform)) { + return false; + } + + return value.transform.module === RELAY_MODULE_FILE_NAME; +} + +function parseDoctorPeerAlias(peerAlias?: string): string | undefined { + if (peerAlias === undefined) { + return undefined; + } + + return parsePeerAlias(peerAlias); +} + +function resolveHookToken(optionValue?: string): string | undefined { + const trimmedOption = optionValue?.trim(); + if (trimmedOption !== undefined && trimmedOption.length > 0) { + return trimmedOption; + } + + const envValue = process.env.OPENCLAW_HOOK_TOKEN?.trim(); + if (envValue !== undefined && envValue.length > 0) { + return envValue; + } + + return undefined; +} + +function resolveProbeMessage(optionValue?: string): string { + const trimmed = optionValue?.trim(); + if (trimmed !== undefined && trimmed.length > 0) { + return trimmed; + } + + return "clawdentity relay probe"; +} + +function resolveProbeSessionId(optionValue?: string): string { + const trimmed = optionValue?.trim(); + if (trimmed !== undefined && trimmed.length > 0) { + return trimmed; + } + + return "clawdentity-relay-test"; +} + +function formatDoctorCheckLine(check: OpenclawDoctorCheckResult): string { + const icon = check.status === "pass" ? 
"✅" : "❌"; + return `${icon} ${check.label}: ${check.message}`; +} + +function printDoctorResult(result: OpenclawDoctorResult): void { + writeStdoutLine(`OpenClaw doctor status: ${result.status}`); + for (const check of result.checks) { + writeStdoutLine(formatDoctorCheckLine(check)); + if (check.status === "fail" && check.remediationHint) { + writeStdoutLine(`Fix: ${check.remediationHint}`); + } + } +} + +function printRelayTestResult(result: OpenclawRelayTestResult): void { + writeStdoutLine(`Relay test status: ${result.status}`); + writeStdoutLine(`Peer alias: ${result.peerAlias}`); + writeStdoutLine(`Endpoint: ${result.endpoint}`); + if (typeof result.httpStatus === "number") { + writeStdoutLine(`HTTP status: ${result.httpStatus}`); + } + writeStdoutLine(`Message: ${result.message}`); + if (result.remediationHint) { + writeStdoutLine(`Fix: ${result.remediationHint}`); + } +} + +function toSendToPeerEndpoint(openclawBaseUrl: string): string { + const normalizedBase = openclawBaseUrl.endsWith("/") + ? openclawBaseUrl + : `${openclawBaseUrl}/`; + return new URL(OPENCLAW_SEND_TO_PEER_HOOK_PATH, normalizedBase).toString(); +} + +export async function runOpenclawDoctor( + options: OpenclawDoctorOptions = {}, +): Promise { + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); + const peerAlias = parseDoctorPeerAlias(options.peerAlias); + const checks: OpenclawDoctorCheckResult[] = []; + + const resolveConfigImpl = options.resolveConfigImpl ?? 
resolveConfig; + const resolvedConfig = await resolveConfigImpl(); + if ( + typeof resolvedConfig.registryUrl !== "string" || + resolvedConfig.registryUrl.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "registryUrl is missing", + remediationHint: + "Run: clawdentity config set registryUrl ", + }), + ); + } else if ( + typeof resolvedConfig.apiKey !== "string" || + resolvedConfig.apiKey.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "apiKey is missing", + remediationHint: "Run: clawdentity config set apiKey ", + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: "registryUrl and apiKey are configured", + }), + ); + } + + const selectedAgentPath = resolveOpenclawAgentNamePath(homeDir); + let selectedAgentName: string | undefined; + try { + const selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); + selectedAgentName = assertValidAgentName(selectedAgentRaw.trim()); + checks.push( + toDoctorCheck({ + id: "state.selectedAgent", + label: "Selected agent marker", + status: "pass", + message: `selected agent is ${selectedAgentName}`, + }), + ); + } catch (error) { + const missing = getErrorCode(error) === "ENOENT"; + checks.push( + toDoctorCheck({ + id: "state.selectedAgent", + label: "Selected agent marker", + status: "fail", + message: missing + ? 
`missing ${selectedAgentPath}` + : "selected agent marker is invalid", + remediationHint: + "Run: clawdentity openclaw setup --invite-code ", + }), + ); + } + + if (selectedAgentName === undefined) { + checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "fail", + message: "cannot validate credentials without selected agent marker", + remediationHint: + "Run: clawdentity openclaw setup --invite-code ", + }), + ); + } else { + try { + await ensureLocalAgentCredentials(homeDir, selectedAgentName); + checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "pass", + message: "ait.jwt and secret.key are present", + }), + ); + } catch (error) { + const details = error instanceof AppError ? error.details : undefined; + const filePath = + details && typeof details.filePath === "string" + ? details.filePath + : undefined; + checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "fail", + message: + filePath === undefined + ? "agent credentials are missing or invalid" + : `credential file missing or empty: ${filePath}`, + remediationHint: + "Run: clawdentity agent create --framework openclaw", + details: + filePath === undefined + ? 
undefined + : { filePath, selectedAgentName }, + }), + ); + } + } + + const peersPath = resolvePeersPath(homeDir); + let peersConfig: PeersConfig | undefined; + try { + peersConfig = await loadPeersConfig(peersPath); + const peerAliases = Object.keys(peersConfig.peers); + if (peerAlias !== undefined) { + if (peersConfig.peers[peerAlias] === undefined) { + checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: `peer alias is missing: ${peerAlias}`, + remediationHint: + "Run: clawdentity openclaw setup --invite-code --peer-alias ", + details: { peersPath, peerAlias }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: `peer alias exists: ${peerAlias}`, + details: { peersPath, peerAlias }, + }), + ); + } + } else if (peerAliases.length === 0) { + checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: "no peers are configured", + remediationHint: + "Run: clawdentity openclaw setup --invite-code ", + details: { peersPath }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: `configured peers: ${peerAliases.length}`, + details: { peersPath }, + }), + ); + } + } catch { + checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: `invalid peers config at ${peersPath}`, + remediationHint: + "Fix JSON in ~/.clawdentity/peers.json or rerun openclaw setup", + details: { peersPath }, + }), + ); + } + + const transformTargetPath = resolveTransformTargetPath(openclawDir); + try { + const transformContents = await readFile(transformTargetPath, "utf8"); + if (transformContents.trim().length === 0) { + checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: `transform file is empty: ${transformTargetPath}`, + remediationHint: "Run: npm 
install clawdentity --skill", + details: { transformTargetPath }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "pass", + message: "relay transform file exists", + details: { transformTargetPath }, + }), + ); + } + } catch { + checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: `missing transform file: ${transformTargetPath}`, + remediationHint: "Run: npm install clawdentity --skill", + details: { transformTargetPath }, + }), + ); + } + + const openclawConfigPath = resolveOpenclawConfigPath(openclawDir); + try { + const openclawConfig = await readJsonFile(openclawConfigPath); + if (!isRecord(openclawConfig)) { + throw new Error("root"); + } + const hooks = isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; + const mappings = Array.isArray(hooks.mappings) + ? hooks.mappings.filter(isRecord) + : []; + const relayMapping = mappings.find((mapping) => + isRelayHookMapping(mapping), + ); + if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { + checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `missing send-to-peer mapping in ${openclawConfigPath}`, + remediationHint: + "Run: clawdentity openclaw setup --invite-code ", + details: { openclawConfigPath }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "pass", + message: "send-to-peer mapping is configured", + details: { openclawConfigPath }, + }), + ); + } + } catch { + checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure ~/.openclaw/openclaw.json exists and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + } + + const relayRuntimeConfigPath = 
resolveRelayRuntimeConfigPath(homeDir); + try { + const openclawBaseUrl = await resolveOpenclawBaseUrl({ + relayRuntimeConfigPath, + }); + checks.push( + toDoctorCheck({ + id: "state.openclawBaseUrl", + label: "OpenClaw base URL", + status: "pass", + message: `resolved to ${openclawBaseUrl}`, + }), + ); + } catch { + checks.push( + toDoctorCheck({ + id: "state.openclawBaseUrl", + label: "OpenClaw base URL", + status: "fail", + message: `unable to resolve OpenClaw base URL from ${relayRuntimeConfigPath}`, + remediationHint: + "Run: clawdentity openclaw setup --invite-code --openclaw-base-url ", + }), + ); + } + + return toDoctorResult(checks); +} + +function parseRelayProbeFailure(input: { + status: number; + responseBody: string; +}): Pick { + if (input.status === 401 || input.status === 403) { + return { + message: "OpenClaw hook token was rejected", + remediationHint: + "Pass a valid token with --hook-token or set OPENCLAW_HOOK_TOKEN", + }; + } + + if (input.status === 404) { + return { + message: "OpenClaw send-to-peer hook is unavailable", + remediationHint: + "Run: clawdentity openclaw setup --invite-code ", + }; + } + + if (input.status === 500) { + return { + message: "Relay probe failed inside local relay pipeline", + remediationHint: + "Check connector runtime and peer alias; rerun clawdentity openclaw doctor --peer ", + }; + } + + return { + message: `Relay probe failed with HTTP ${input.status}`, + remediationHint: + input.responseBody.trim().length > 0 + ? 
`Inspect response body: ${input.responseBody.trim()}` + : "Check local OpenClaw and connector logs", + }; +} + +export async function runOpenclawRelayTest( + options: OpenclawRelayTestOptions, +): Promise { + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); + const peerAlias = parsePeerAlias(options.peer); + const preflight = await runOpenclawDoctor({ + homeDir, + openclawDir, + peerAlias, + resolveConfigImpl: options.resolveConfigImpl, + }); + const checkedAt = nowIso(); + + const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); + let openclawBaseUrl = DEFAULT_OPENCLAW_BASE_URL; + try { + openclawBaseUrl = await resolveOpenclawBaseUrl({ + optionValue: options.openclawBaseUrl, + relayRuntimeConfigPath, + }); + } catch { + return { + status: "failure", + checkedAt, + peerAlias, + endpoint: toSendToPeerEndpoint(DEFAULT_OPENCLAW_BASE_URL), + message: "Unable to resolve OpenClaw base URL", + remediationHint: + "Set OPENCLAW_BASE_URL or run openclaw setup with --openclaw-base-url", + preflight, + }; + } + + const endpoint = toSendToPeerEndpoint(openclawBaseUrl); + if (preflight.status === "unhealthy") { + const firstFailure = preflight.checks.find( + (check) => check.status === "fail", + ); + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + message: + firstFailure === undefined + ? "Preflight checks failed" + : `Preflight failed: ${firstFailure.label}`, + remediationHint: firstFailure?.remediationHint, + preflight, + }; + } + + const hookToken = resolveHookToken(options.hookToken); + const fetchImpl = options.fetchImpl ?? 
globalThis.fetch; + if (typeof fetchImpl !== "function") { + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + message: "fetch implementation is unavailable", + remediationHint: "Run relay test in a Node runtime with fetch support", + preflight, + }; + } + + let response: Response; + try { + response = await fetchImpl(endpoint, { + method: "POST", + headers: { + "content-type": "application/json", + ...(hookToken === undefined ? {} : { "x-openclaw-token": hookToken }), + }, + body: JSON.stringify({ + peer: peerAlias, + sessionId: resolveProbeSessionId(options.sessionId), + message: resolveProbeMessage(options.message), + }), + }); + } catch { + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + message: "Relay probe request failed", + remediationHint: "Ensure local OpenClaw is running and reachable", + preflight, + }; + } + + if (response.ok) { + return { + status: "success", + checkedAt, + peerAlias, + endpoint, + httpStatus: response.status, + message: "Relay probe accepted", + preflight, + }; + } + + const responseBody = await response.text(); + const failure = parseRelayProbeFailure({ + status: response.status, + responseBody, + }); + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + httpStatus: response.status, + message: failure.message, + remediationHint: failure.remediationHint, + details: + responseBody.trim().length > 0 + ? 
{ responseBody: responseBody.trim() } + : undefined, + preflight, + }; +} + export function createOpenclawInviteCode( options: OpenclawInviteOptions, ): OpenclawInviteResult { @@ -896,5 +1545,88 @@ export const createOpenclawCommand = (): Command => { ), ); + openclawCommand + .command("doctor") + .description("Validate local OpenClaw relay setup and print remediation") + .option("--peer ", "Validate that a specific peer alias exists") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "openclaw doctor", + async (options: OpenclawDoctorOptions) => { + const result = await runOpenclawDoctor(options); + if (options.json) { + writeStdoutLine(JSON.stringify(result, null, 2)); + } else { + printDoctorResult(result); + } + + if (result.status === "unhealthy") { + process.exitCode = 1; + } + }, + ), + ); + + const relayCommand = openclawCommand + .command("relay") + .description("Run OpenClaw relay diagnostics"); + + relayCommand + .command("test") + .description("Send a relay probe to a configured peer alias") + .requiredOption("--peer ", "Peer alias in ~/.clawdentity/peers.json") + .option( + "--openclaw-base-url ", + "Base URL for local OpenClaw hook API (default OPENCLAW_BASE_URL or relay runtime config)", + ) + .option( + "--hook-token ", + "OpenClaw hook token (default OPENCLAW_HOOK_TOKEN)", + ) + .option("--session-id ", "Session id for the probe payload") + .option("--message ", "Probe message body") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "openclaw relay test", + async (options: OpenclawRelayTestOptions) => { + const result = await runOpenclawRelayTest(options); + + if (options.json) { + writeStdoutLine(JSON.stringify(result, null, 2)); + } else { + printRelayTestResult(result); + if ( + 
result.preflight !== undefined && + result.preflight.status === "unhealthy" + ) { + writeStdoutLine("Preflight details:"); + for (const check of result.preflight.checks) { + if (check.status === "fail") { + writeStdoutLine(formatDoctorCheckLine(check)); + if (check.remediationHint) { + writeStdoutLine(`Fix: ${check.remediationHint}`); + } + } + } + } + } + + if (result.status === "failure") { + process.exitCode = 1; + } + }, + ), + ); + return openclawCommand; }; From cd3d8ab0f0ccfb9564ec97ad7135c515ddfc9a58 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 12:48:20 +0530 Subject: [PATCH 071/190] fix(cli): harden openclaw doctor diagnostics checks --- apps/cli/src/commands/openclaw.test.ts | 81 ++++++++++++++++++++++++++ apps/cli/src/commands/openclaw.ts | 79 ++++++++++++++----------- 2 files changed, 127 insertions(+), 33 deletions(-) diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index d248fe2..93e630b 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -397,6 +397,87 @@ describe("openclaw command helpers", () => { } }); + it("does not throw when CLI config resolution fails", async () => { + const sandbox = createSandbox(); + + try { + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + resolveConfigImpl: async () => { + throw new Error("invalid config"); + }, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "config.registry" && + check.status === "fail" && + check.message === "unable to resolve CLI config", + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook mapping check when mapping path is wrong", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: 
"did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { mappings?: Array> }; + }; + const mappings = openclawConfig.hooks.mappings ?? []; + const targetMapping = mappings.find( + (mapping) => mapping.id === "clawdentity-send-to-peer", + ); + if (targetMapping === undefined) { + throw new Error("expected clawdentity-send-to-peer mapping"); + } + targetMapping.match = { path: "not-send-to-peer" }; + writeFileSync(openclawConfigPath, JSON.stringify(openclawConfig), "utf8"); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.hookMapping" && check.status === "fail", + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + it("returns relay test success for accepted probe", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 2e48ae8..b1d9b7d 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -792,15 +792,15 @@ function isRelayHookMapping(value: unknown): boolean { return false; } - if (value.id === HOOK_MAPPING_ID) { - return true; + if (!isRecord(value.match) || value.match.path !== HOOK_PATH_SEND_TO_PEER) { + return false; } - if (!isRecord(value.match)) { + if (typeof 
value.id === "string" && value.id !== HOOK_MAPPING_ID) { return false; } - return value.match.path === HOOK_PATH_SEND_TO_PEER; + return true; } function hasRelayTransformModule(value: unknown): boolean { @@ -895,41 +895,54 @@ export async function runOpenclawDoctor( const checks: OpenclawDoctorCheckResult[] = []; const resolveConfigImpl = options.resolveConfigImpl ?? resolveConfig; - const resolvedConfig = await resolveConfigImpl(); - if ( - typeof resolvedConfig.registryUrl !== "string" || - resolvedConfig.registryUrl.trim().length === 0 - ) { + try { + const resolvedConfig = await resolveConfigImpl(); + if ( + typeof resolvedConfig.registryUrl !== "string" || + resolvedConfig.registryUrl.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "registryUrl is missing", + remediationHint: + "Run: clawdentity config set registryUrl ", + }), + ); + } else if ( + typeof resolvedConfig.apiKey !== "string" || + resolvedConfig.apiKey.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "apiKey is missing", + remediationHint: "Run: clawdentity config set apiKey ", + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: "registryUrl and apiKey are configured", + }), + ); + } + } catch { checks.push( toDoctorCheck({ id: "config.registry", label: "CLI config", status: "fail", - message: "registryUrl is missing", + message: "unable to resolve CLI config", remediationHint: - "Run: clawdentity config set registryUrl ", - }), - ); - } else if ( - typeof resolvedConfig.apiKey !== "string" || - resolvedConfig.apiKey.trim().length === 0 - ) { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "apiKey is missing", - remediationHint: "Run: clawdentity config set apiKey ", - }), - ); - } else 
{ - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "pass", - message: "registryUrl and apiKey are configured", + "Fix ~/.clawdentity/config.json or rerun: clawdentity config init", }), ); } From 4973622a6c65f8f5a647fc5f195f55ffb6366e7e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 12:49:36 +0530 Subject: [PATCH 072/190] docs(cli): tighten openclaw diagnostic guardrails --- apps/cli/src/commands/AGENTS.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 5ce17f5..2c335cc 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -72,6 +72,8 @@ ## OpenClaw Diagnostic Command Rules - `openclaw doctor` must stay read-only and validate required local state: resolved CLI config (`registryUrl` + `apiKey`), selected agent marker, local agent credentials, peers map integrity (and requested `--peer` alias), transform presence, hook mapping, and OpenClaw base URL resolution. +- `openclaw doctor` must treat malformed/unreadable CLI config as a failed diagnostic check, not a thrown exception, so full per-check output remains available. +- Relay hook mapping validation must require the expected mapping path (`send-to-peer`) and only accept optional `id` when it matches `clawdentity-send-to-peer`. - `openclaw doctor` must print deterministic check IDs and actionable fix hints for each failed check. - `openclaw doctor --json` must emit a stable machine-readable envelope with overall status + per-check results for CI scripting. 
From 86e9de2e29f459b3906e46699972a14ceeff4aab Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 13:07:02 +0530 Subject: [PATCH 073/190] fix(cli): honor --peer option in openclaw doctor --- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/openclaw.test.ts | 65 +++++++++++++++++++++++++- apps/cli/src/commands/openclaw.ts | 14 +++++- 3 files changed, 77 insertions(+), 3 deletions(-) diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 2c335cc..0688eff 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -9,6 +9,7 @@ - Use `withErrorHandling` for command actions unless a command has a documented reason not to. - Route all user-facing messages through `writeStdoutLine`/`writeStderrLine`. - For new command-domain errors, use SDK `AppError` with stable `code` values. +- Normalize Commander option keys at the command boundary when helper/runtime option names differ (for example `--peer` -> `peerAlias`) so flags are never silently ignored. ## Verification Command Rules - `verify` must preserve the `✅`/`❌` output contract with explicit reasons. 
diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 93e630b..77781ca 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -6,9 +6,10 @@ import { writeFileSync, } from "node:fs"; import { tmpdir } from "node:os"; -import { join } from "node:path"; +import { dirname, join } from "node:path"; import { describe, expect, it } from "vitest"; import { + createOpenclawCommand, createOpenclawInviteCode, decodeOpenclawInviteCode, runOpenclawDoctor, @@ -478,6 +479,68 @@ describe("openclaw command helpers", () => { } }); + it("applies --peer filter for doctor command", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const originalHome = process.env.HOME; + const originalExitCode = process.exitCode; + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const configPath = join(sandbox.homeDir, ".clawdentity", "config.json"); + mkdirSync(dirname(configPath), { recursive: true }); + writeFileSync( + configPath, + JSON.stringify( + { + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }, + null, + 2, + ), + "utf8", + ); + + const baseline = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + apiKey: "test-api-key", + }), + }); + expect(baseline.status).toBe("healthy"); + + process.env.HOME = sandbox.homeDir; + process.exitCode = undefined; + + const command = createOpenclawCommand(); + await command.parseAsync( + ["doctor", "--peer", "gamma", "--openclaw-dir", 
sandbox.openclawDir], + { from: "user" }, + ); + + expect(process.exitCode).toBe(1); + } finally { + process.env.HOME = originalHome; + process.exitCode = originalExitCode; + sandbox.cleanup(); + } + }); + it("returns relay test success for accepted probe", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index b1d9b7d..e09f53d 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -69,6 +69,12 @@ type OpenclawDoctorOptions = { json?: boolean; }; +type OpenclawDoctorCommandOptions = { + peer?: string; + openclawDir?: string; + json?: boolean; +}; + type OpenclawRelayTestOptions = { peer: string; homeDir?: string; @@ -1570,8 +1576,12 @@ export const createOpenclawCommand = (): Command => { .action( withErrorHandling( "openclaw doctor", - async (options: OpenclawDoctorOptions) => { - const result = await runOpenclawDoctor(options); + async (options: OpenclawDoctorCommandOptions) => { + const result = await runOpenclawDoctor({ + openclawDir: options.openclawDir, + peerAlias: options.peer, + json: options.json, + }); if (options.json) { writeStdoutLine(JSON.stringify(result, null, 2)); } else { From bae4e19aa834864012ff499e3c23729933b796e7 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 14:06:55 +0530 Subject: [PATCH 074/190] harden public API abuse limits for free plan --- apps/proxy/AGENTS.md | 3 +- apps/proxy/src/AGENTS.md | 3 + .../proxy/src/public-rate-limit-middleware.ts | 81 ++++++++++++ apps/proxy/src/server.test.ts | 69 ++++++++++ apps/proxy/src/server.ts | 21 +++ apps/registry/src/AGENTS.md | 4 + apps/registry/src/rate-limit.ts | 6 + apps/registry/src/server.test.ts | 124 ++++++++++++++++++ apps/registry/src/server.ts | 64 ++++++++- 9 files changed, 368 insertions(+), 7 deletions(-) create mode 100644 apps/proxy/src/public-rate-limit-middleware.ts diff --git 
a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index ff2acff..7bf73b2 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -52,6 +52,7 @@ - When CRL verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning dependency failure. - Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. - Return `403` when requests are verified but agent DID is not allowlisted. +- Return `429` with `PROXY_PUBLIC_RATE_LIMIT_EXCEEDED` when repeated unauthenticated probes exceed public-route IP budget. - Return `429` with `PROXY_RATE_LIMIT_EXCEEDED` when an allowlisted verified agent DID exceeds its request budget within the configured window. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. @@ -71,7 +72,7 @@ - Keep `src/server.ts` as the HTTP app/runtime entry. - Keep `src/worker.ts` as the Cloudflare Worker fetch entry and `src/node-server.ts` as the Node compatibility entry. - Keep `AgentRelaySession` exported from `src/worker.ts` and bound/migrated in `wrangler.jsonc`. -- Keep middleware order stable: request context -> request logging -> auth verification -> agent DID rate limit -> error handler. +- Keep middleware order stable: request context -> request logging -> public-route IP rate limit -> auth verification -> agent DID rate limit -> error handler. - Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200. - Log startup and request completion with structured JSON logs; never log secrets or tokens. - If identity injection is enabled, mutate only `payload.message` when it is a string; preserve all other payload fields unchanged. 
diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index fccee82..c540a1e 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -9,6 +9,7 @@ - Keep Node runtime startup in `node-server.ts`; use `bin.ts` as Node process entrypoint. - Keep inbound auth verification in `auth-middleware.ts` with focused helpers for token parsing, registry material loading, CRL checks, and replay protection. - Keep per-agent DID throttling in `agent-rate-limit-middleware.ts`; do not blend rate-limit state or counters into `auth-middleware.ts`. +- Keep pre-auth public-route IP throttling in `public-rate-limit-middleware.ts`; do not blend unauthenticated probe controls into `auth-middleware.ts`. - Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. - Keep OpenClaw base URL fallback logic in `config.ts`: `OPENCLAW_BASE_URL` env -> `~/.clawdentity/openclaw-relay.json` -> default. - Keep OpenClaw compatibility vars optional for relay-mode runtime; never require `OPENCLAW_BASE_URL` or hook token for cloud relay startup. @@ -33,7 +34,9 @@ - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. - Keep `/v1/relay/connect` keyed by authenticated connector DID from auth middleware, and reject non-websocket requests with clear client errors. +- Keep pre-auth IP throttling enabled for `/hooks/agent` and `/v1/relay/connect` so repeated unauthenticated probes fail with `429` before auth/registry work. - Keep rate-limit failure semantics stable: verified requests over budget map to `429` with code `PROXY_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.rate_limit.exceeded`. 
+- Keep pre-auth rate-limit failure semantics stable: repeated public-route probes map to `429` with code `PROXY_PUBLIC_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.public_rate_limit.exceeded`. - Keep `X-Claw-Timestamp` parsing strict: accept digit-only unix-seconds strings and reject mixed/decimal formats. - Keep AIT verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_AIT_KID` before rejecting. - Keep CRL verification resilient to routine key rotation: retry once with a forced keyset refresh on `UNKNOWN_CRL_KID` before dependency-failure mapping. diff --git a/apps/proxy/src/public-rate-limit-middleware.ts b/apps/proxy/src/public-rate-limit-middleware.ts new file mode 100644 index 0000000..6122ff1 --- /dev/null +++ b/apps/proxy/src/public-rate-limit-middleware.ts @@ -0,0 +1,81 @@ +import { AppError, type Logger } from "@clawdentity/sdk"; +import { createMiddleware } from "hono/factory"; + +type InMemoryBucket = { + windowStartedAtMs: number; + count: number; +}; + +export const DEFAULT_PRE_AUTH_IP_RATE_LIMIT_REQUESTS_PER_MINUTE = 120; +export const DEFAULT_PRE_AUTH_IP_RATE_LIMIT_WINDOW_MS = 60 * 1000; + +export type PublicRateLimitMiddlewareOptions = { + logger: Logger; + paths: string[]; + maxRequests: number; + windowMs: number; + nowMs?: () => number; +}; + +function resolveClientIp(request: Request): string { + const cfIp = request.headers.get("cf-connecting-ip"); + if (typeof cfIp === "string" && cfIp.trim().length > 0) { + return cfIp.trim(); + } + + return "unknown"; +} + +export function createPublicRateLimitMiddleware( + options: PublicRateLimitMiddlewareOptions, +) { + const nowMs = options.nowMs ?? 
Date.now; + const buckets = new Map(); + + return createMiddleware(async (c, next) => { + const matchedPath = options.paths.find((path) => path === c.req.path); + if (!matchedPath) { + await next(); + return; + } + + const now = nowMs(); + for (const [key, bucket] of buckets.entries()) { + if (now - bucket.windowStartedAtMs >= options.windowMs) { + buckets.delete(key); + } + } + + const clientIp = resolveClientIp(c.req.raw); + const key = `${matchedPath}:${clientIp}`; + const existing = buckets.get(key); + + if (!existing || now - existing.windowStartedAtMs >= options.windowMs) { + buckets.set(key, { + windowStartedAtMs: now, + count: 1, + }); + await next(); + return; + } + + if (existing.count >= options.maxRequests) { + options.logger.warn("proxy.public_rate_limit.exceeded", { + path: matchedPath, + clientIp, + windowMs: options.windowMs, + maxRequests: options.maxRequests, + }); + throw new AppError({ + code: "PROXY_PUBLIC_RATE_LIMIT_EXCEEDED", + message: "Too many requests", + status: 429, + expose: true, + }); + } + + existing.count += 1; + buckets.set(key, existing); + await next(); + }); +} diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts index f0a1c16..4a0e802 100644 --- a/apps/proxy/src/server.test.ts +++ b/apps/proxy/src/server.test.ts @@ -1,3 +1,4 @@ +import { RELAY_CONNECT_PATH } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; import { DEFAULT_PROXY_ENVIRONMENT, @@ -81,4 +82,72 @@ describe("proxy server", () => { }), ).toThrow(ProxyConfigError); }); + + it("returns 429 for repeated unauthenticated probes on /hooks/agent from same IP", async () => { + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + }), + rateLimit: { + publicIpMaxRequests: 2, + publicIpWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + 
"CF-Connecting-IP": "198.51.100.41", + }, + body: JSON.stringify({}), + }); + expect(response.status).toBe(401); + } + + const rateLimited = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "198.51.100.41", + }, + body: JSON.stringify({}), + }); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PUBLIC_RATE_LIMIT_EXCEEDED"); + }); + + it("returns 429 for repeated unauthenticated probes on relay connect from same IP", async () => { + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + }), + rateLimit: { + publicIpMaxRequests: 2, + publicIpWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await app.request(RELAY_CONNECT_PATH, { + headers: { + "CF-Connecting-IP": "198.51.100.42", + }, + }); + expect(response.status).toBe(401); + } + + const rateLimited = await app.request(RELAY_CONNECT_PATH, { + headers: { + "CF-Connecting-IP": "198.51.100.42", + }, + }); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PUBLIC_RATE_LIMIT_EXCEEDED"); + }); }); diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index 9682665..6e55961 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -21,6 +21,11 @@ import { } from "./auth-middleware.js"; import type { ProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; +import { + createPublicRateLimitMiddleware, + DEFAULT_PRE_AUTH_IP_RATE_LIMIT_REQUESTS_PER_MINUTE, + DEFAULT_PRE_AUTH_IP_RATE_LIMIT_WINDOW_MS, +} from "./public-rate-limit-middleware.js"; import { createRelayConnectHandler, type RelayConnectRuntimeOptions, @@ -35,6 +40,8 @@ type ProxyAuthRuntimeOptions = { type ProxyRateLimitRuntimeOptions = { nowMs?: () => 
number; + publicIpMaxRequests?: number; + publicIpWindowMs?: number; }; type CreateProxyAppOptions = { @@ -69,6 +76,20 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { app.use("*", createRequestContextMiddleware()); app.use("*", createRequestLoggingMiddleware(logger)); + app.use( + "*", + createPublicRateLimitMiddleware({ + logger, + paths: ["/hooks/agent", RELAY_CONNECT_PATH], + maxRequests: + options.rateLimit?.publicIpMaxRequests ?? + DEFAULT_PRE_AUTH_IP_RATE_LIMIT_REQUESTS_PER_MINUTE, + windowMs: + options.rateLimit?.publicIpWindowMs ?? + DEFAULT_PRE_AUTH_IP_RATE_LIMIT_WINDOW_MS, + nowMs: options.rateLimit?.nowMs, + }), + ); app.use( "*", createProxyAuthMiddleware({ diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 4101479..dc26e25 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -28,6 +28,7 @@ ## CRL Snapshot Contract - `GET /v1/crl` is a public endpoint and must remain unauthenticated so SDK/proxy clients can refresh revocation state without PAT bootstrap dependencies. +- Apply per-client-IP throttling on `GET /v1/crl` and return `429 RATE_LIMIT_EXCEEDED` when over budget. - Success response shape must remain `{ crl: }` where `crl` is an EdDSA-signed token with `typ=CRL`. - Build CRL claims from the full `revocations` table (MVP full snapshot), joining each row to `agents.did` for `revocations[].agentDid`. - Keep CRL cache headers explicit and short-lived (`max-age=300` + `stale-while-revalidate`) for predictable revocation propagation. @@ -47,6 +48,7 @@ ## Validation - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. +- For route-limit tests, prefer `createRegistryApp({ rateLimit: ... })` overrides to keep tests deterministic without weakening production defaults. 
- When using fake D1 adapters in route tests, make select responses honor bound parameters, selected-column projection, and join semantics so query-shape regressions are caught. - Fake D1 join emulation should drop rows when `innerJoin` targets are missing so tests catch missing/incorrect joins instead of masking them with stubbed values. @@ -127,6 +129,7 @@ ## POST /v1/agents/auth/refresh Contract - Public endpoint (no PAT): auth is agent-scoped via `Authorization: Claw ` + PoP headers + refresh token payload. +- Apply per-client-IP throttling and return `429 RATE_LIMIT_EXCEEDED` before auth parsing when over budget. - Verify AIT against active registry signing keys and enforce deterministic issuer mapping for environment. - Verify PoP using canonical request inputs and public key from AIT `cnf`. - Enforce timestamp skew checks for replay-window reduction. @@ -139,6 +142,7 @@ ## POST /v1/agents/auth/validate Contract - Public endpoint used by proxy runtime auth enforcement; request must include `x-claw-agent-access` and JSON payload `{ agentDid, aitJti }`. +- Apply per-client-IP throttling and return `429 RATE_LIMIT_EXCEEDED` before payload/auth validation when over budget. - Validate `agentDid` + `aitJti` against active agent state (`agents.status=active`, `agents.current_jti` match). - Validate access token against active session hash/prefix material with constant-time comparison. - Expired access credentials must return `401 AGENT_AUTH_VALIDATE_EXPIRED` without rotating refresh credentials. 
diff --git a/apps/registry/src/rate-limit.ts b/apps/registry/src/rate-limit.ts index 99bffc4..c7c7b61 100644 --- a/apps/registry/src/rate-limit.ts +++ b/apps/registry/src/rate-limit.ts @@ -4,6 +4,12 @@ import type { MiddlewareHandler } from "hono"; export const RESOLVE_RATE_LIMIT_WINDOW_MS = 60_000; export const RESOLVE_RATE_LIMIT_MAX_REQUESTS = 10; export const RESOLVE_RATE_LIMIT_MAX_BUCKETS = 10_000; +export const CRL_RATE_LIMIT_WINDOW_MS = 60_000; +export const CRL_RATE_LIMIT_MAX_REQUESTS = 30; +export const AGENT_AUTH_REFRESH_RATE_LIMIT_WINDOW_MS = 60_000; +export const AGENT_AUTH_REFRESH_RATE_LIMIT_MAX_REQUESTS = 20; +export const AGENT_AUTH_VALIDATE_RATE_LIMIT_WINDOW_MS = 60_000; +export const AGENT_AUTH_VALIDATE_RATE_LIMIT_MAX_REQUESTS = 120; type InMemoryBucket = { windowStartedAtMs: number; diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 563a26f..e3eda92 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -3091,6 +3091,44 @@ describe("GET /v1/crl", () => { expect(body.error.message).toBe("CRL snapshot is not available"); }); + it("returns 429 when rate limit is exceeded for the same client", async () => { + const { database } = createFakeDb([]); + const appInstance = createRegistryApp({ + rateLimit: { + crlMaxRequests: 2, + crlWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await appInstance.request( + "/v1/crl", + { + headers: { + "CF-Connecting-IP": "203.0.113.77", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(404); + } + + const rateLimited = await appInstance.request( + "/v1/crl", + { + headers: { + "CF-Connecting-IP": "203.0.113.77", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); + it("returns 500 when 
CRL signing configuration is missing", async () => { const agentId = generateUlid(1700400000600); const { database } = createFakeDb( @@ -6289,6 +6327,49 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { ]), ); }); + + it("returns 429 when refresh rate limit is exceeded for the same client", async () => { + const appInstance = createRegistryApp({ + rateLimit: { + agentAuthRefreshMaxRequests: 2, + agentAuthRefreshWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await appInstance.request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.88", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + } + + const rateLimited = await appInstance.request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.88", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); }); describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { @@ -6536,6 +6617,49 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { expect.arrayContaining([expect.objectContaining({ matched_rows: 0 })]), ); }); + + it("returns 429 when validate rate limit is exceeded for the same client", async () => { + const appInstance = createRegistryApp({ + rateLimit: { + agentAuthValidateMaxRequests: 2, + agentAuthValidateWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await appInstance.request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.99", + }, + body: JSON.stringify({}), + }, + { DB: 
{} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + } + + const rateLimited = await appInstance.request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.99", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); }); describe("DELETE /v1/agents/:id/auth/revoke", () => { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 452bad7..cab1a1d 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -96,6 +96,12 @@ import { parseInviteRedeemPayload, } from "./invite-lifecycle.js"; import { + AGENT_AUTH_REFRESH_RATE_LIMIT_MAX_REQUESTS, + AGENT_AUTH_REFRESH_RATE_LIMIT_WINDOW_MS, + AGENT_AUTH_VALIDATE_RATE_LIMIT_MAX_REQUESTS, + AGENT_AUTH_VALIDATE_RATE_LIMIT_WINDOW_MS, + CRL_RATE_LIMIT_MAX_REQUESTS, + CRL_RATE_LIMIT_WINDOW_MS, createInMemoryRateLimit, RESOLVE_RATE_LIMIT_MAX_REQUESTS, RESOLVE_RATE_LIMIT_WINDOW_MS, @@ -180,6 +186,22 @@ type CrlSnapshotRow = { agent_did: string; }; +type RegistryRateLimitRuntimeOptions = { + nowMs?: () => number; + resolveMaxRequests?: number; + resolveWindowMs?: number; + crlMaxRequests?: number; + crlWindowMs?: number; + agentAuthRefreshMaxRequests?: number; + agentAuthRefreshWindowMs?: number; + agentAuthValidateMaxRequests?: number; + agentAuthValidateWindowMs?: number; +}; + +type CreateRegistryAppOptions = { + rateLimit?: RegistryRateLimitRuntimeOptions; +}; + function crlBuildError(options: { environment: RegistryConfig["ENVIRONMENT"]; message: string; @@ -614,7 +636,7 @@ function adminBootstrapAlreadyCompletedError(): AppError { }); } -function createRegistryApp() { +function createRegistryApp(options: CreateRegistryAppOptions = {}) { let cachedConfig: 
RegistryConfig | undefined; function getConfig(bindings: Bindings): RegistryConfig { @@ -630,10 +652,40 @@ function createRegistryApp() { Bindings: Bindings; Variables: { requestId: string; human: AuthenticatedHuman }; }>(); + const rateLimitOptions = options.rateLimit; const resolveRateLimit = createInMemoryRateLimit({ bucketKey: "resolve", - maxRequests: RESOLVE_RATE_LIMIT_MAX_REQUESTS, - windowMs: RESOLVE_RATE_LIMIT_WINDOW_MS, + maxRequests: + rateLimitOptions?.resolveMaxRequests ?? RESOLVE_RATE_LIMIT_MAX_REQUESTS, + windowMs: rateLimitOptions?.resolveWindowMs ?? RESOLVE_RATE_LIMIT_WINDOW_MS, + nowMs: rateLimitOptions?.nowMs, + }); + const crlRateLimit = createInMemoryRateLimit({ + bucketKey: "crl", + maxRequests: + rateLimitOptions?.crlMaxRequests ?? CRL_RATE_LIMIT_MAX_REQUESTS, + windowMs: rateLimitOptions?.crlWindowMs ?? CRL_RATE_LIMIT_WINDOW_MS, + nowMs: rateLimitOptions?.nowMs, + }); + const agentAuthRefreshRateLimit = createInMemoryRateLimit({ + bucketKey: "agent_auth_refresh", + maxRequests: + rateLimitOptions?.agentAuthRefreshMaxRequests ?? + AGENT_AUTH_REFRESH_RATE_LIMIT_MAX_REQUESTS, + windowMs: + rateLimitOptions?.agentAuthRefreshWindowMs ?? + AGENT_AUTH_REFRESH_RATE_LIMIT_WINDOW_MS, + nowMs: rateLimitOptions?.nowMs, + }); + const agentAuthValidateRateLimit = createInMemoryRateLimit({ + bucketKey: "agent_auth_validate", + maxRequests: + rateLimitOptions?.agentAuthValidateMaxRequests ?? + AGENT_AUTH_VALIDATE_RATE_LIMIT_MAX_REQUESTS, + windowMs: + rateLimitOptions?.agentAuthValidateWindowMs ?? 
+ AGENT_AUTH_VALIDATE_RATE_LIMIT_WINDOW_MS, + nowMs: rateLimitOptions?.nowMs, }); app.use("*", createRequestContextMiddleware()); @@ -796,7 +848,7 @@ function createRegistryApp() { ); }); - app.get("/v1/crl", async (c) => { + app.get("/v1/crl", crlRateLimit, async (c) => { const config = getConfig(c.env); const db = createDb(c.env.DB); @@ -1531,7 +1583,7 @@ function createRegistryApp() { ); }); - app.post(AGENT_AUTH_REFRESH_PATH, async (c) => { + app.post(AGENT_AUTH_REFRESH_PATH, agentAuthRefreshRateLimit, async (c) => { const config = getConfig(c.env); const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); const bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer()); @@ -1712,7 +1764,7 @@ function createRegistryApp() { }); }); - app.post(AGENT_AUTH_VALIDATE_PATH, async (c) => { + app.post(AGENT_AUTH_VALIDATE_PATH, agentAuthValidateRateLimit, async (c) => { let payload: unknown; try { payload = await c.req.json(); From 8006d4220a68d20741e4287c2678b7ead6238bb4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 15:24:48 +0530 Subject: [PATCH 075/190] feat: add cli publish pipeline and relay proxy updates --- .github/AGENTS.md | 29 ++++- .github/workflows/deploy-develop.yml | 69 ++++++++++- .github/workflows/publish-cli.yml | 113 ++++++++++++++++++ PRD.md | 21 +++- README.md | 25 +++- apps/cli/AGENTS.md | 15 ++- apps/cli/package.json | 8 +- apps/cli/scripts/AGENTS.md | 1 + apps/cli/scripts/openclaw-relay-docker-e2e.sh | 16 ++- apps/cli/skill-bundle/AGENTS.md | 2 +- apps/cli/src/AGENTS.md | 1 + apps/cli/src/commands/connector.ts | 14 +-- apps/cli/tsup.config.ts | 6 +- apps/proxy/AGENTS.md | 3 +- apps/proxy/src/AGENTS.md | 2 + apps/proxy/src/index.test.ts | 21 +++- apps/proxy/src/index.ts | 24 +++- apps/proxy/src/server.test.ts | 15 +++ apps/proxy/src/server.ts | 3 +- apps/proxy/src/worker.test.ts | 5 +- apps/proxy/src/worker.ts | 11 +- sparkling-sauteeing-brook.md | 2 +- 22 files changed, 358 insertions(+), 48 deletions(-) create 
mode 100644 .github/workflows/publish-cli.yml diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 53d7525..70ca063 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -18,21 +18,38 @@ ## Deployment Rules (Develop) - `deploy-develop.yml` runs on pushes to `develop`. - Run full quality gates before deployment: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build`. -- Use `pnpm --filter @clawdentity/registry run deploy:dev` so migration+deploy logic stays centralized. -- Verify `https://dev.api.clawdentity.com/health` returns `status: "ok"` and `environment: "development"`. +- Deploy both workers in the same workflow: + - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy + - proxy (`apps/proxy`, env `development`) after registry health passes +- Verify registry health at `https://dev.api.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. - Use workflow concurrency groups to prevent overlapping deploys for the same environment. +## Release Rules (CLI) +- `publish-cli.yml` is manual (`workflow_dispatch`) and must accept explicit `version` + `dist_tag` inputs. +- Run CLI quality gates before publish: `pnpm -F clawdentity lint`, `typecheck`, `test`, `build`. +- Publish only package `apps/cli` as npm package `clawdentity`. +- Keep published runtime manifest free of `workspace:*` runtime dependencies. +- Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. + ## Secrets and Permissions -- Required secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`. +- Required deploy secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`. - Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. -- Keep token scope minimal: `Workers Scripts:Write`, `Workers Routes:Write`, `Zone:Read`, `D1:Database:Admin + Migrate`, `D1:Database:Read/Write`, and `Account:Read`. 
+- Optional deploy secret: `PROXY_HEALTH_URL` (only needed if proxy workers.dev URL cannot be resolved in CI output). +- Required publish secret: `NPM_TOKEN`. +- Keep Cloudflare token scope minimal for current workflows: + - `Workers Scripts:Edit` + - `Workers Routes:Edit` (zone-level, custom domains) + - `D1:Edit` + - add `Cloudflare Pages:Edit` only when Pages deploy workflow is introduced. ## Migration Rollback Strategy (Develop) - Capture pre-deploy artifacts: - - `wrangler deployments list --env dev --json` + - `wrangler --cwd apps/registry deployments list --env dev --json` + - `wrangler --cwd apps/proxy deployments list --env development --json` - `wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` - `wrangler d1 export clawdentity-db-dev --remote --output ` - Upload artifacts on every run for operator recovery. - On failed deploy: - - Worker rollback: `wrangler rollback --env dev` + - Registry rollback: `wrangler --cwd apps/registry rollback --env dev` + - Proxy rollback: `wrangler --cwd apps/proxy rollback --env development` - DB rollback: `wrangler d1 time-travel restore clawdentity-db-dev --env dev --timestamp ` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 4c41fd3..1b84a1c 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -13,7 +13,7 @@ permissions: contents: read jobs: deploy: - name: Lint, test, migrate, and deploy to dev + name: Lint, test, migrate, and deploy registry + proxy to dev runs-on: ubuntu-latest timeout-minutes: 30 env: @@ -22,6 +22,7 @@ jobs: CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} APP_VERSION: ${{ github.sha }} + PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} steps: - name: Checkout uses: actions/checkout@v4 @@ -64,16 +65,17 @@ jobs: PREDEPLOY_TS=$(date -u +%Y-%m-%dT%H:%M:%SZ) echo "PREDEPLOY_TS=${PREDEPLOY_TS}" >> "${GITHUB_ENV}" printf "%s\n" 
"${PREDEPLOY_TS}" > artifacts/predeploy.timestamp - wrangler --cwd apps/registry deployments list --env dev --json > artifacts/worker-deployments-pre.json + wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-pre.json + wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > artifacts/d1-time-travel-pre.json wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql - - name: Apply dev migrations and deploy + - name: Apply registry dev migrations and deploy run: | wrangler --cwd apps/registry d1 migrations apply clawdentity-db-dev --remote --env dev wrangler --cwd apps/registry deploy --env dev --var APP_VERSION:${APP_VERSION} - - name: Verify health endpoint + - name: Verify registry health endpoint run: | python3 - <<'PY' import json, os, sys, urllib.request, urllib.error @@ -98,20 +100,75 @@ jobs: print("healthcheck passed", data) PY + - name: Deploy proxy to development environment + run: | + mkdir -p artifacts + PROXY_DEPLOY_OUTPUT_FILE="artifacts/proxy-deploy-output.txt" + wrangler --cwd apps/proxy deploy --env development --var APP_VERSION:${APP_VERSION} 2>&1 | tee "${PROXY_DEPLOY_OUTPUT_FILE}" + + PROXY_WORKERS_DEV_URL="$(grep -Eo 'https://[[:alnum:]._-]+\.workers\.dev' "${PROXY_DEPLOY_OUTPUT_FILE}" | head -n 1 || true)" + PROXY_HEALTH_URL="" + + if [ -n "${PROXY_WORKERS_DEV_URL}" ]; then + PROXY_HEALTH_URL="${PROXY_WORKERS_DEV_URL}/health" + elif [ -n "${PROXY_HEALTH_URL_OVERRIDE}" ]; then + PROXY_HEALTH_URL="${PROXY_HEALTH_URL_OVERRIDE%/}/health" + fi + + if [ -z "${PROXY_HEALTH_URL}" ]; then + echo "Unable to resolve proxy health URL. Set optional PROXY_HEALTH_URL secret if workers.dev URL is unavailable." 
>&2 + exit 1 + fi + + echo "PROXY_HEALTH_URL=${PROXY_HEALTH_URL}" >> "${GITHUB_ENV}" + echo "Resolved proxy health URL: ${PROXY_HEALTH_URL}" + + - name: Verify proxy health endpoint + run: | + python3 - <<'PY' + import json, os, sys, urllib.request, urllib.error + url = os.environ.get("PROXY_HEALTH_URL", "") + expected_version = os.environ.get("APP_VERSION", "") + if not url: + raise SystemExit("PROXY_HEALTH_URL was not set") + try: + resp = urllib.request.urlopen(url, timeout=10) + except urllib.error.HTTPError as exc: + sys.stderr.write(f"proxy health check failed ({exc.code} {exc.reason})\n") + sys.exit(1) + data = json.load(resp) + if data.get("status") != "ok" or data.get("environment") != "development": + raise SystemExit(f"unexpected proxy health payload: {data}") + if not expected_version: + raise SystemExit("APP_VERSION was not set in workflow environment") + if data.get("version") == "0.0.0": + raise SystemExit(f"unexpected placeholder proxy version after deploy: {data}") + if data.get("version") != expected_version: + raise SystemExit( + f"unexpected proxy version: expected {expected_version}, got {data.get('version')}" + ) + print("proxy healthcheck passed", data) + PY + - name: Capture post-deploy state if: always() run: | mkdir -p artifacts - wrangler --cwd apps/registry deployments list --env dev --json > artifacts/worker-deployments-post.json || true + wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-post.json || true + wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-post.json || true wrangler --cwd apps/registry d1 migrations list clawdentity-db-dev --remote --env dev > artifacts/d1-migrations-post.txt || true - name: Rollback instructions on failure if: failure() run: | - echo "Worker rollback:" + echo "Registry Worker rollback:" echo " wrangler --cwd apps/registry deployments list --env dev --json" echo " wrangler --cwd apps/registry rollback --env 
dev -y -m \"ci rollback\"" echo "" + echo "Proxy Worker rollback:" + echo " wrangler --cwd apps/proxy deployments list --env development --json" + echo " wrangler --cwd apps/proxy rollback --env development -y -m \"ci rollback\"" + echo "" echo "D1 rollback:" echo " wrangler --cwd apps/registry d1 time-travel restore clawdentity-db-dev --env dev --timestamp \"${PREDEPLOY_TS}\"" echo " # or restore via bookmark from artifacts/d1-time-travel-pre.json" diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml new file mode 100644 index 0000000..d2c151b --- /dev/null +++ b/.github/workflows/publish-cli.yml @@ -0,0 +1,113 @@ +name: Publish CLI +on: + workflow_dispatch: + inputs: + version: + description: "Release version for clawdentity (semver)" + required: true + type: string + dist_tag: + description: "npm dist-tag" + required: true + default: "latest" + type: string + +concurrency: + group: publish-cli-${{ github.ref }} + cancel-in-progress: false + +permissions: + contents: read + id-token: write + +jobs: + publish: + name: Validate and publish clawdentity to npm + runs-on: ubuntu-latest + timeout-minutes: 30 + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: pnpm + registry-url: https://registry.npmjs.org + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.23.0 + + - name: Validate required secrets + run: | + test -n "${NODE_AUTH_TOKEN}" + + - name: Validate release version format + run: | + python3 - <<'PY' + import os, re, sys + version = "${{ inputs.version }}" + if not re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-[0-9A-Za-z.-]+)?$", version): + raise SystemExit(f"invalid semver version: {version}") + print("version accepted:", version) + PY + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Lint CLI package + run: 
pnpm -F clawdentity lint + + - name: Typecheck CLI package + run: pnpm -F clawdentity typecheck + + - name: Test CLI package + run: pnpm -F clawdentity test + + - name: Build CLI package + run: pnpm -F clawdentity build + + - name: Set package version for release + run: npm --prefix apps/cli pkg set version=${{ inputs.version }} + + - name: Validate publish manifest + run: | + node <<'NODE' + const fs = require("node:fs"); + const pkg = JSON.parse(fs.readFileSync("apps/cli/package.json", "utf8")); + if (pkg.name !== "clawdentity") { + throw new Error(`Unexpected package name: ${pkg.name}`); + } + if (pkg.private === true) { + throw new Error("Package is private; publish would fail"); + } + const runtimeDeps = { + ...(pkg.dependencies || {}), + ...(pkg.peerDependencies || {}), + ...(pkg.optionalDependencies || {}), + }; + const workspaceRuntimeDeps = Object.entries(runtimeDeps).filter(([, value]) => + String(value).includes("workspace:"), + ); + if (workspaceRuntimeDeps.length > 0) { + throw new Error( + `Found workspace runtime dependencies: ${workspaceRuntimeDeps + .map(([name, value]) => `${name}@${value}`) + .join(", ")}`, + ); + } + console.log("publish manifest validated"); + NODE + + - name: Dry-run package contents + run: npm --prefix apps/cli pack --dry-run + + - name: Publish package + run: npm --prefix apps/cli publish --access public --provenance --tag ${{ inputs.dist_tag }} diff --git a/PRD.md b/PRD.md index 38eed19..4c3aef5 100644 --- a/PRD.md +++ b/PRD.md @@ -1,6 +1,6 @@ # PRD — Clawdentity MVP (v0.1) -**Last updated:** 2026-02-11 +**Last updated:** 2026-02-17 **Owner:** Ravi Kiran Vemula **Status:** Ready for execution (deployment-first gate enabled) **Primary target:** OpenClaw Gateway webhooks (`/hooks/*`) @@ -167,6 +167,16 @@ Verifier must enforce: - Re-running install must be idempotent and safe. - Missing source artifacts must fail with actionable errors. 
+### 6.9 Deployment and release automation +- `develop` deploy workflow must deploy both registry and proxy after full quality gates pass. +- Registry deploy must run D1 migrations before Worker deploy. +- Registry and proxy `/health` checks must validate: + - `status = "ok"` + - `environment = "development"` + - `version = git commit SHA` passed via `APP_VERSION`. +- CLI release must use manual GitHub workflow dispatch with explicit semver version input. +- Published npm package must be `clawdentity` and must not include `workspace:*` runtime dependencies. + --- ## 7) Non-functional requirements @@ -191,9 +201,10 @@ Verifier must enforce: ## 9) Rollout plan 1) Establish workspace and deployment baseline -2) Deploy and verify baseline environments and health checks -3) Execute MVP feature backlog after the deployment gate passes -4) Execute Phase 2/3 enhancements from HLD after MVP stability +2) Deploy and verify `develop` baseline environments and health checks (registry + proxy) +3) Establish manual npm release gate for `clawdentity` CLI package +4) Execute MVP feature backlog after deployment and release gates pass +5) Execute Phase 2/3 enhancements from HLD after MVP stability --- @@ -228,3 +239,5 @@ Governance rules: 4) Revocation propagation test within CRL refresh window 5) Replay attack rejection via nonce reuse checks 6) CI gate: lint -> typecheck -> test -> build +7) Deploy gate: registry and proxy health checks validate `APP_VERSION == github.sha` in `develop` +8) Release gate: `publish-cli.yml` validates CLI package and publishes `clawdentity` with npm provenance diff --git a/README.md b/README.md index 7585508..c92a066 100644 --- a/README.md +++ b/README.md @@ -468,9 +468,22 @@ clawdentity/ - Local env (`ENVIRONMENT=local`): `pnpm dev:proxy` - Development env (`ENVIRONMENT=development`): `pnpm dev:proxy:development` - Fresh deploy-like env: `pnpm dev:proxy:fresh` +- Development deploy command: `pnpm -F @clawdentity/proxy run deploy:dev` - Production 
deploy command: `pnpm -F @clawdentity/proxy run deploy:production` - Environment intent: `local` is local Wrangler development only; `development` and `production` are cloud deployment environments. +### Develop deployment automation + +- GitHub workflow: `.github/workflows/deploy-develop.yml` +- Trigger: push to `develop` +- Runs full quality gates, then deploys: + - registry (`apps/registry`, env `dev`) with D1 migrations + - proxy (`apps/proxy`, env `development`) +- Health checks must pass with `version == $GITHUB_SHA` for: + - `https://dev.api.clawdentity.com/health` + - deployed proxy `/health` URL (workers.dev URL extracted from wrangler output, or optional `PROXY_HEALTH_URL` secret override) +- Required GitHub secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID` + ### 4) Operator lifecycle tooling (CLI) - Handled by: `apps/cli` @@ -523,7 +536,7 @@ The CLI package ships bundled skill assets so clean installs do not depend on a For user-like OpenClaw relay validation with existing Docker agents, run: ```bash -pnpm -F @clawdentity/cli run test:e2e:openclaw-docker +pnpm -F clawdentity run test:e2e:openclaw-docker ``` Defaults target: @@ -542,6 +555,16 @@ Common environment overrides: - `ALPHA_CONTAINER`, `BETA_CONTAINER` - `REGISTRY_URL`, `PROXY_HOOK_URL`, `PROXY_WS_URL` +### CLI npm release (manual) + +- GitHub workflow: `.github/workflows/publish-cli.yml` +- Trigger: `workflow_dispatch` with inputs: + - `version` (semver, required) + - `dist_tag` (default `latest`) +- Required GitHub secret: `NPM_TOKEN` +- Publish target: npm package `clawdentity` +- Workflow runs CLI lint/typecheck/test/build before publishing. + --- ## MVP goals diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 79fbb60..8ada913 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -8,6 +8,7 @@ - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. 
- Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Keep `src/postinstall.ts` as a thin install entrypoint only; it should detect npm `--skill` mode and call shared installer helpers without mutating runtime CLI command wiring. +- Keep package identity clear: workspace package name is `clawdentity` and published install entrypoint remains `npm install clawdentity --skill`. - Implement command groups under `src/commands/*` and register them from `createProgram()`. - Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`, `connector`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. @@ -18,9 +19,11 @@ - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. - Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. - npm `--skill` installer behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. -- Keep `skill-bundle/openclaw-skill/` in sync with `apps/openclaw-skill` via `pnpm -F @clawdentity/cli run sync:skill-bundle` before build/pack so `postinstall --skill` works in clean installs. +- Keep `skill-bundle/openclaw-skill/` in sync with `apps/openclaw-skill` via `pnpm -F clawdentity run sync:skill-bundle` before build/pack so `postinstall --skill` works in clean installs. - Keep `skill-bundle/openclaw-skill/dist/relay-to-peer.mjs` tracked in git so clean-checkout tests and packaged installs have the required relay artifact before workspace builds run. 
-- When running the `@clawdentity/cli` test suite (`pnpm -F @clawdentity/cli test`), build `@clawdentity/openclaw-skill` and resync the skill bundle first so `relay-to-peer.mjs` exists on clean checkout and tests pass with deterministic artifacts. +- When running the CLI test suite (`pnpm -F clawdentity test`), build `@clawdentity/openclaw-skill` and resync the skill bundle first so `relay-to-peer.mjs` exists on clean checkout and tests pass with deterministic artifacts. +- Keep runtime dependencies publish-safe: avoid `workspace:*` entries in published runtime deps (`dependencies`, `peerDependencies`, `optionalDependencies`), and bundle internal packages into CLI dist. +- Keep release automation in `.github/workflows/publish-cli.yml` manual-only with explicit semver input and npm provenance. ## Config and Secrets - Local CLI config lives at `~/.clawdentity/config.json`. @@ -67,10 +70,10 @@ - Favor exporting pure helper functions so invite/setup logic can be unit-tested without needing to run the full CLI parse flow if you need tighter coverage. ## Validation Commands -- `pnpm -F @clawdentity/cli lint` -- `pnpm -F @clawdentity/cli typecheck` -- `pnpm -F @clawdentity/cli test` -- `pnpm -F @clawdentity/cli build` +- `pnpm -F clawdentity lint` +- `pnpm -F clawdentity typecheck` +- `pnpm -F clawdentity test` +- `pnpm -F clawdentity build` - For cross-package changes, run root checks: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build`. 
## Refactor Trigger diff --git a/apps/cli/package.json b/apps/cli/package.json index a934cf8..af5ef2e 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,8 +1,10 @@ { - "name": "@clawdentity/cli", + "name": "clawdentity", "version": "0.0.0", - "private": true, "type": "module", + "publishConfig": { + "access": "public" + }, "main": "./dist/index.js", "types": "./dist/index.d.ts", "bin": { @@ -31,10 +33,10 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "@clawdentity/connector": "workspace:*", "commander": "^13.1.0" }, "devDependencies": { + "@clawdentity/connector": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", "@types/node": "^22.18.11" diff --git a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md index dd099fb..cb7e131 100644 --- a/apps/cli/scripts/AGENTS.md +++ b/apps/cli/scripts/AGENTS.md @@ -7,6 +7,7 @@ - `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. - `openclaw-relay-docker-e2e.sh` is the source of truth for local Docker-based OpenClaw relay E2E validation (invite onboarding, skill artifacts, bidirectional relay, and connector failure/recovery checks). - Scripts must fail with actionable errors when required source artifacts are missing. +- Docker E2E skill install should default to global package root `clawdentity`, with legacy fallback to `@clawdentity/cli` for backward compatibility. - Docker E2E scripts must keep reset behavior explicit (`RESET_MODE=skill|full|none`) and must only remove known skill-generated files in skill-reset mode. - Docker E2E relay scripts should accept `CLAWDENTITY_E2E_PAT`, but when absent they must first attempt to reuse existing container CLI config PAT before fallback bootstrap so pre-bootstrapped environments remain runnable. - Connector startup failures in Docker E2E should include the agent DID in diagnostics so operator allowlist mismatches can be fixed quickly. 
diff --git a/apps/cli/scripts/openclaw-relay-docker-e2e.sh b/apps/cli/scripts/openclaw-relay-docker-e2e.sh index 8a6359e..5b0edc6 100755 --- a/apps/cli/scripts/openclaw-relay-docker-e2e.sh +++ b/apps/cli/scripts/openclaw-relay-docker-e2e.sh @@ -19,7 +19,7 @@ CONTAINER_OPENCLAW_BASE_URL="${CONTAINER_OPENCLAW_BASE_URL:-http://127.0.0.1:187 ALPHA_HOOK_TOKEN="${ALPHA_HOOK_TOKEN:-alpha-hook-secret}" BETA_HOOK_TOKEN="${BETA_HOOK_TOKEN:-beta-hook-secret}" BOOTSTRAP_SECRET="${BOOTSTRAP_SECRET:-clawdentity-local-bootstrap}" -CLI_GLOBAL_PACKAGE_ROOT="${CLI_GLOBAL_PACKAGE_ROOT:-/home/node/.local/lib/node_modules/@clawdentity/cli}" +CLI_GLOBAL_PACKAGE_ROOT="${CLI_GLOBAL_PACKAGE_ROOT:-/home/node/.local/lib/node_modules/clawdentity}" RESET_MODE="${RESET_MODE:-skill}" CLAWDENTITY_E2E_PAT="${CLAWDENTITY_E2E_PAT:-}" @@ -89,8 +89,18 @@ reset_skill_state() { install_skill_assets() { local container="$1" - container_exec "$container" "test -f \"$CLI_GLOBAL_PACKAGE_ROOT/postinstall.mjs\"" - container_exec "$container" "npm_config_skill=true node \"$CLI_GLOBAL_PACKAGE_ROOT/postinstall.mjs\" >/dev/null" + local package_root="$CLI_GLOBAL_PACKAGE_ROOT" + local legacy_package_root="/home/node/.local/lib/node_modules/@clawdentity/cli" + + if ! container_exec "$container" "test -f \"$package_root/postinstall.mjs\""; then + if container_exec "$container" "test -f \"$legacy_package_root/postinstall.mjs\""; then + package_root="$legacy_package_root" + else + fail "postinstall.mjs not found in CLI package root: $package_root" + fi + fi + + container_exec "$container" "npm_config_skill=true node \"$package_root/postinstall.mjs\" >/dev/null" } ensure_agent_identity() { diff --git a/apps/cli/skill-bundle/AGENTS.md b/apps/cli/skill-bundle/AGENTS.md index 34eafac..65a0819 100644 --- a/apps/cli/skill-bundle/AGENTS.md +++ b/apps/cli/skill-bundle/AGENTS.md @@ -5,7 +5,7 @@ ## Rules - Treat this folder as generated release input; do not hand-edit bundled files. 
-- Regenerate by running `pnpm -F @clawdentity/cli run sync:skill-bundle` after changes in `apps/openclaw-skill`. +- Regenerate by running `pnpm -F clawdentity run sync:skill-bundle` after changes in `apps/openclaw-skill`. - Required bundled files: - `openclaw-skill/skill/SKILL.md` - `openclaw-skill/skill/references/*` diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 3c87e8c..6472686 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -12,6 +12,7 @@ - Admin bootstrap must print the one-time PAT before attempting to persist it and depend on `persistBootstrapConfig` so config write failures are surfaced via CLI errors while the operator still sees the PAT. - API-key lifecycle command logic should stay in `commands/api-key.ts`; keep create/list/revoke request mapping explicit and keep token exposure limited to create output only. - Connector runtime command logic should stay in `commands/connector.ts`; keep startup orchestration deterministic and avoid embedding connector runtime implementation details in the CLI. +- Keep connector runtime import bundled at build time (from `@clawdentity/connector`) so published `clawdentity` installs do not depend on unpublished workspace runtime packages. - Registry invite lifecycle command logic should stay in `commands/invite.ts`; keep it strictly scoped to registry onboarding invites and separate from `commands/openclaw.ts` peer-relay invite codes. - `invite redeem` must print the returned PAT once, then persist config in deterministic order (`registryUrl`, then `apiKey`) so bootstrap/onboarding state is predictable. - `invite` command routes must use endpoint constants from `@clawdentity/protocol` (`INVITES_PATH`, `INVITES_REDEEM_PATH`) instead of inline path literals. 
diff --git a/apps/cli/src/commands/connector.ts b/apps/cli/src/commands/connector.ts index e36b1f6..eeb4c22 100644 --- a/apps/cli/src/commands/connector.ts +++ b/apps/cli/src/commands/connector.ts @@ -4,6 +4,7 @@ import { homedir } from "node:os"; import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; import { promisify } from "node:util"; +import { startConnectorRuntime as bundledStartConnectorRuntime } from "@clawdentity/connector"; import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; @@ -371,16 +372,9 @@ function parseAgentIdentity(rawIdentity: string): { did: string } { } async function loadDefaultConnectorModule(): Promise { - const connectorModuleName: string = "@clawdentity/connector"; - - try { - return (await import(connectorModuleName)) as ConnectorModule; - } catch { - throw createCliError( - "CLI_CONNECTOR_PACKAGE_UNAVAILABLE", - "Connector package is unavailable. Install @clawdentity/connector and retry.", - ); - } + return { + startConnectorRuntime: bundledStartConnectorRuntime, + }; } function resolveWaitPromise( diff --git a/apps/cli/tsup.config.ts b/apps/cli/tsup.config.ts index 9d03333..df576e4 100644 --- a/apps/cli/tsup.config.ts +++ b/apps/cli/tsup.config.ts @@ -5,7 +5,11 @@ export default defineConfig({ format: ["esm"], bundle: true, splitting: false, - noExternal: ["@clawdentity/protocol", "@clawdentity/sdk"], + noExternal: [ + "@clawdentity/connector", + "@clawdentity/protocol", + "@clawdentity/sdk", + ], platform: "node", target: "node22", dts: true, diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 7bf73b2..fcc03fb 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -7,6 +7,7 @@ ## Runtime Configuration - Keep runtime config centralized in `src/config.ts`. 
- Keep Cloudflare Worker deployment config in `wrangler.jsonc` with explicit `local`, `development`, and `production` environments. +- Keep deploy traceability explicit by passing `APP_VERSION` (or fallback `PROXY_VERSION`) via Worker bindings; `/health` must surface the resolved version. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. @@ -73,6 +74,6 @@ - Keep `src/worker.ts` as the Cloudflare Worker fetch entry and `src/node-server.ts` as the Node compatibility entry. - Keep `AgentRelaySession` exported from `src/worker.ts` and bound/migrated in `wrangler.jsonc`. - Keep middleware order stable: request context -> request logging -> public-route IP rate limit -> auth verification -> agent DID rate limit -> error handler. -- Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200. +- Keep `/health` response contract stable: `{ status, version, environment }` with HTTP 200; version should reflect deploy-time `APP_VERSION` when provided. - Log startup and request completion with structured JSON logs; never log secrets or tokens. - If identity injection is enabled, mutate only `payload.message` when it is a string; preserve all other payload fields unchanged. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index c540a1e..7ce9e8d 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -2,6 +2,7 @@ ## Source Layout - Keep `index.ts` as runtime bootstrap surface and version export. +- Keep version resolution in `index.ts` deterministic: prefer `APP_VERSION`, then `PROXY_VERSION`, then fallback constant for local/dev defaults. 
- Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. - Keep agent DID rate-limit env parsing in `config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE`, `AGENT_RATE_LIMIT_WINDOW_MS`) and validate as positive integers. - Keep HTTP app composition in `server.ts`. @@ -30,6 +31,7 @@ - Keep relay websocket connect handling isolated in `relay-connect-route.ts`; `server.ts` should only compose middleware/routes. - Keep DO runtime behavior in `agent-relay-session.ts` (websocket accept, heartbeat alarm, connector delivery RPC). - Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns. +- Keep worker runtime cache keys sensitive to deploy-time version bindings so `/health` reflects fresh `APP_VERSION` after deploy. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. 
diff --git a/apps/proxy/src/index.test.ts b/apps/proxy/src/index.test.ts index 92622ae..d2f08cf 100644 --- a/apps/proxy/src/index.test.ts +++ b/apps/proxy/src/index.test.ts @@ -1,6 +1,10 @@ import { describe, expect, it } from "vitest"; import { ProxyConfigError } from "./config.js"; -import { initializeProxyRuntime, PROXY_VERSION } from "./index.js"; +import { + initializeProxyRuntime, + PROXY_VERSION, + resolveProxyVersion, +} from "./index.js"; describe("proxy", () => { it("exports PROXY_VERSION", () => { @@ -29,4 +33,19 @@ describe("proxy", () => { expect(runtime.version).toBe(PROXY_VERSION); expect(runtime.config.openclawHookToken).toBeUndefined(); }); + + it("prefers APP_VERSION for runtime version", () => { + expect( + resolveProxyVersion({ + APP_VERSION: "sha-1234", + PROXY_VERSION: "ignored", + }), + ).toBe("sha-1234"); + }); + + it("falls back to PROXY_VERSION binding when APP_VERSION is absent", () => { + expect(resolveProxyVersion({ PROXY_VERSION: "proxy-1.2.3" })).toBe( + "proxy-1.2.3", + ); + }); }); diff --git a/apps/proxy/src/index.ts b/apps/proxy/src/index.ts index da12fb8..847cf80 100644 --- a/apps/proxy/src/index.ts +++ b/apps/proxy/src/index.ts @@ -2,6 +2,7 @@ import type { ProxyConfig } from "./config.js"; import { loadProxyConfig } from "./config.js"; export const PROXY_VERSION = "0.0.0"; +const APP_VERSION_ENV_KEYS = ["APP_VERSION", "PROXY_VERSION"] as const; export type ProxyRuntime = { version: string; @@ -13,11 +14,32 @@ function resolveDefaultEnv(): unknown { return nodeProcess?.env ?? 
{}; } +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function resolveProxyVersion( + env: unknown = resolveDefaultEnv(), +): string { + if (!isRecord(env)) { + return PROXY_VERSION; + } + + for (const key of APP_VERSION_ENV_KEYS) { + const candidate = env[key]; + if (typeof candidate === "string" && candidate.trim().length > 0) { + return candidate.trim(); + } + } + + return PROXY_VERSION; +} + export function initializeProxyRuntime( env: unknown = resolveDefaultEnv(), ): ProxyRuntime { return { - version: PROXY_VERSION, + version: resolveProxyVersion(env), config: loadProxyConfig(env), }; } diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts index 4a0e802..3611cfc 100644 --- a/apps/proxy/src/server.test.ts +++ b/apps/proxy/src/server.test.ts @@ -48,6 +48,21 @@ describe("proxy server", () => { expect(body.environment).toBe("local"); }); + it("uses provided app version when supplied by runtime", async () => { + const app = createProxyApp({ + config: parseProxyConfig({ + OPENCLAW_HOOK_TOKEN: "token", + }), + version: "sha-123456", + }); + + const res = await app.request("/health"); + const body = (await res.json()) as { version: string }; + + expect(res.status).toBe(200); + expect(body.version).toBe("sha-123456"); + }); + it("emits structured request completion log for /health", async () => { const logSpy = vi.spyOn(console, "info").mockImplementation(() => {}); try { diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index 6e55961..ddc3d67 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -46,6 +46,7 @@ type ProxyRateLimitRuntimeOptions = { type CreateProxyAppOptions = { config: ProxyConfig; + version?: string; logger?: Logger; registerRoutes?: (app: ProxyApp) => void; auth?: ProxyAuthRuntimeOptions; @@ -111,7 +112,7 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { app.get("/health", (c) => c.json({ status: 
"ok", - version: PROXY_VERSION, + version: options.version ?? PROXY_VERSION, environment: options.config.environment, }), ); diff --git a/apps/proxy/src/worker.test.ts b/apps/proxy/src/worker.test.ts index 2b7a1cd..405b983 100644 --- a/apps/proxy/src/worker.test.ts +++ b/apps/proxy/src/worker.test.ts @@ -15,6 +15,7 @@ describe("proxy worker", () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), { + APP_VERSION: "sha-worker-123", ENVIRONMENT: "local", } satisfies ProxyWorkerBindings, createExecutionContext(), @@ -28,7 +29,7 @@ describe("proxy worker", () => { }; expect(payload).toEqual({ status: "ok", - version: PROXY_VERSION, + version: "sha-worker-123", environment: "local", }); }); @@ -43,9 +44,11 @@ describe("proxy worker", () => { expect(response.status).toBe(200); const payload = (await response.json()) as { status: string; + version: string; environment: string; }; expect(payload.status).toBe("ok"); + expect(payload.version).toBe(PROXY_VERSION); expect(payload.environment).toBe("development"); }); diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 45d6e03..87c9a5d 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -8,6 +8,7 @@ import { ProxyConfigError, parseProxyConfig, } from "./config.js"; +import { resolveProxyVersion } from "./index.js"; import { createProxyApp, type ProxyApp } from "./server.js"; export type ProxyWorkerBindings = { @@ -30,6 +31,8 @@ export type ProxyWorkerBindings = { AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: string; AGENT_RATE_LIMIT_WINDOW_MS?: string; INJECT_IDENTITY_INTO_MESSAGE?: string; + APP_VERSION?: string; + PROXY_VERSION?: string; [key: string]: unknown; }; @@ -59,6 +62,8 @@ function toCacheKey(env: ProxyWorkerBindings): string { env.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, env.AGENT_RATE_LIMIT_WINDOW_MS, env.INJECT_IDENTITY_INTO_MESSAGE, + env.APP_VERSION, + env.PROXY_VERSION, ]; return keyParts.map((value) => String(value ?? 
"")).join("|"); @@ -71,7 +76,11 @@ function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { } const config = parseProxyConfig(env); - const app = createProxyApp({ config, logger }); + const app = createProxyApp({ + config, + logger, + version: resolveProxyVersion(env), + }); cachedRuntime = { key, diff --git a/sparkling-sauteeing-brook.md b/sparkling-sauteeing-brook.md index 8c7b169..0d16dce 100644 --- a/sparkling-sauteeing-brook.md +++ b/sparkling-sauteeing-brook.md @@ -65,7 +65,7 @@ clawdentity/ apps/ registry/ -- @clawdentity/registry (Hono on Workers + D1) proxy/ -- @clawdentity/proxy (Hono on Node/Bun, sidecar) - cli/ -- @clawdentity/cli (Commander.js, bin: "clawdentity") + cli/ -- clawdentity (Commander.js, bin: "clawdentity") ``` **Build order:** protocol -> sdk -> (registry | proxy | cli) in parallel From 1322d9dc4b04a0d3c96654a1c2d99dbf4c1ff810 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 15:35:48 +0530 Subject: [PATCH 076/190] fix: harden deploy snapshot and align cli runtime version --- .github/AGENTS.md | 3 ++- .github/workflows/deploy-develop.yml | 3 ++- apps/cli/AGENTS.md | 1 + apps/cli/src/index.test.ts | 8 ++++++-- apps/cli/src/index.ts | 18 +++++++++++++++++- 5 files changed, 28 insertions(+), 5 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 70ca063..ea8d206 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -45,9 +45,10 @@ ## Migration Rollback Strategy (Develop) - Capture pre-deploy artifacts: - `wrangler --cwd apps/registry deployments list --env dev --json` - - `wrangler --cwd apps/proxy deployments list --env development --json` + - `wrangler --cwd apps/proxy deployments list --env development --json || true` (non-blocking for first deploy before proxy Worker exists) - `wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` - `wrangler d1 export clawdentity-db-dev --remote --output ` +- Keep deploy snapshot collection non-blocking for Worker deployment listings (pre and 
post) so rollback artifact capture does not fail the workflow when a Worker has no prior deployment history. - Upload artifacts on every run for operator recovery. - On failed deploy: - Registry rollback: `wrangler --cwd apps/registry rollback --env dev` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 1b84a1c..fdc899b 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -66,7 +66,8 @@ jobs: echo "PREDEPLOY_TS=${PREDEPLOY_TS}" >> "${GITHUB_ENV}" printf "%s\n" "${PREDEPLOY_TS}" > artifacts/predeploy.timestamp wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-pre.json - wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json + # First proxy deploy may not have an existing Worker/deployments yet. + wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json || true wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > artifacts/d1-time-travel-pre.json wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 8ada913..2ddd47c 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -9,6 +9,7 @@ - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Keep `src/postinstall.ts` as a thin install entrypoint only; it should detect npm `--skill` mode and call shared installer helpers without mutating runtime CLI command wiring. - Keep package identity clear: workspace package name is `clawdentity` and published install entrypoint remains `npm install clawdentity --skill`. +- Keep runtime version parity: source `CLI_VERSION` from the package metadata (`package.json`) at runtime, never from a hardcoded literal in `src/index.ts`. 
- Implement command groups under `src/commands/*` and register them from `createProgram()`. - Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`, `connector`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index fce4260..43a8b16 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -1,9 +1,13 @@ +import { createRequire } from "node:module"; import { describe, expect, it } from "vitest"; import { CLI_VERSION, createProgram } from "./index.js"; +const require = createRequire(import.meta.url); +const packageJson = require("../package.json") as { version: string }; + describe("cli", () => { it("exports CLI_VERSION", () => { - expect(CLI_VERSION).toBe("0.0.0"); + expect(CLI_VERSION).toBe(packageJson.version); }); it("creates a program named clawdentity", () => { @@ -91,6 +95,6 @@ describe("cli", () => { exitCode: 0, }); - expect(output.join("")).toContain("0.0.0"); + expect(output.join("")).toContain(packageJson.version); }); }); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 4b7f2f0..5c4e656 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,3 +1,4 @@ +import { createRequire } from "node:module"; import { Command } from "commander"; import { createAdminCommand } from "./commands/admin.js"; import { createAgentCommand } from "./commands/agent.js"; @@ -8,7 +9,22 @@ import { createInviteCommand } from "./commands/invite.js"; import { createOpenclawCommand } from "./commands/openclaw.js"; import { createVerifyCommand } from "./commands/verify.js"; -export const CLI_VERSION = "0.0.0"; +const require = createRequire(import.meta.url); + +const resolveCliVersion = (): string => { + const packageJson = require("../package.json") as { version?: unknown }; + 
+ if ( + typeof packageJson.version === "string" && + packageJson.version.length > 0 + ) { + return packageJson.version; + } + + throw new Error("Unable to resolve CLI version from package metadata."); +}; + +export const CLI_VERSION = resolveCliVersion(); export const createProgram = (): Command => { return new Command("clawdentity") From 87b73b93e41d70a96e320a832bce3634509ec50f Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 15:55:29 +0530 Subject: [PATCH 077/190] fix: sync lockfile with cli manifest --- pnpm-lock.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b484608..0f6b94e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -35,13 +35,13 @@ importers: apps/cli: dependencies: - '@clawdentity/connector': - specifier: workspace:* - version: link:../../packages/connector commander: specifier: ^13.1.0 version: 13.1.0 devDependencies: + '@clawdentity/connector': + specifier: workspace:* + version: link:../../packages/connector '@clawdentity/protocol': specifier: workspace:* version: link:../../packages/protocol From b04707db5c808aa367bfa4ef2bedfbd0dc04a853 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 16:03:02 +0530 Subject: [PATCH 078/190] fix: enforce strict local docker e2e skill root --- apps/cli/scripts/AGENTS.md | 4 ++-- apps/cli/scripts/openclaw-relay-docker-e2e.sh | 10 ++-------- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md index cb7e131..298ed4b 100644 --- a/apps/cli/scripts/AGENTS.md +++ b/apps/cli/scripts/AGENTS.md @@ -5,9 +5,9 @@ ## Rules - `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. -- `openclaw-relay-docker-e2e.sh` is the source of truth for local Docker-based OpenClaw relay E2E validation (invite onboarding, skill artifacts, bidirectional relay, and connector failure/recovery checks). 
+- `openclaw-relay-docker-e2e.sh` is the source of truth for local-only Docker-based OpenClaw relay E2E validation (invite onboarding, skill artifacts, bidirectional relay, and connector failure/recovery checks). - Scripts must fail with actionable errors when required source artifacts are missing. -- Docker E2E skill install should default to global package root `clawdentity`, with legacy fallback to `@clawdentity/cli` for backward compatibility. +- Docker E2E skill install must use strict global package root `clawdentity` only and fail fast when `postinstall.mjs` is missing (no backward-compatibility fallback). - Docker E2E scripts must keep reset behavior explicit (`RESET_MODE=skill|full|none`) and must only remove known skill-generated files in skill-reset mode. - Docker E2E relay scripts should accept `CLAWDENTITY_E2E_PAT`, but when absent they must first attempt to reuse existing container CLI config PAT before fallback bootstrap so pre-bootstrapped environments remain runnable. - Connector startup failures in Docker E2E should include the agent DID in diagnostics so operator allowlist mismatches can be fixed quickly. diff --git a/apps/cli/scripts/openclaw-relay-docker-e2e.sh b/apps/cli/scripts/openclaw-relay-docker-e2e.sh index 5b0edc6..b03499a 100755 --- a/apps/cli/scripts/openclaw-relay-docker-e2e.sh +++ b/apps/cli/scripts/openclaw-relay-docker-e2e.sh @@ -90,15 +90,9 @@ reset_skill_state() { install_skill_assets() { local container="$1" local package_root="$CLI_GLOBAL_PACKAGE_ROOT" - local legacy_package_root="/home/node/.local/lib/node_modules/@clawdentity/cli" - if ! 
container_exec "$container" "test -f \"$package_root/postinstall.mjs\""; then - if container_exec "$container" "test -f \"$legacy_package_root/postinstall.mjs\""; then - package_root="$legacy_package_root" - else - fail "postinstall.mjs not found in CLI package root: $package_root" - fi - fi + container_exec "$container" "test -f \"$package_root/postinstall.mjs\"" \ + || fail "postinstall.mjs not found in strict CLI package root: $package_root" container_exec "$container" "npm_config_skill=true node \"$package_root/postinstall.mjs\" >/dev/null" } From 27ad3f6d072d6c14c6d5ae2856f6e2407d4c5719 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 16:12:09 +0530 Subject: [PATCH 079/190] chore: remove docker e2e traces from repo and docs --- AGENTS.md | 17 - README.md | 24 -- apps/cli/package.json | 1 - apps/cli/scripts/AGENTS.md | 5 - apps/cli/scripts/openclaw-relay-docker-e2e.sh | 298 ------------------ apps/cli/src/AGENTS.md | 2 +- apps/openclaw-skill/AGENTS.md | 8 - 7 files changed, 1 insertion(+), 354 deletions(-) delete mode 100755 apps/cli/scripts/openclaw-relay-docker-e2e.sh diff --git a/AGENTS.md b/AGENTS.md index 5f802cc..a068018 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -85,23 +85,6 @@ - Hono apps are tested via `app.request()` (Hono's built-in test client) — no wrangler or miniflare needed for unit tests. - Pass mock bindings as the third argument: `app.request("/path", {}, { DB: {}, ENVIRONMENT: "test" })`. -## User-Like E2E Skill Testing -- Validate onboarding and relay flows as a real user path, not as manual local shortcuts. -- Start backend services locally with Wrangler (registry/proxy) using the expected environment before E2E checks. -- Run OpenClaw agents in Docker and test through agent skills only; do not pre-configure relay files by hand. -- Keep two dedicated containers for relay verification: Alpha (sender) and Beta (receiver). Use stable names and document them in the test run notes. 
-- Keep Alpha and Beta isolated with separate HOME volumes so `~/.clawdentity` and `~/.openclaw` state never leaks between agents. -- Ensure both containers run with required model API credentials; onboarding tests must not depend on host-level credentials. -- Install via npm + skill entrypoint (`npm install clawdentity --skill`) and let the skill perform remaining setup. -- Use invite-code onboarding exactly as production intent: admin creates invite code, agent asks its human for the code, then agent completes setup. -- Verify resulting agent filesystem/config artifacts are created by the skill in the expected locations. -- Confirm end-to-end communication between at least two agents after setup (for example alpha <-> beta relay path). -- If a skill-run test fails because of partial/dirty skill-created state, clean/revert only skill-generated setup and rerun from a fresh user-like starting point. -- Reset for rerun must remove only skill-created artifacts first: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`, and `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/`. -- Use a full reset only when required for identity reprovisioning, and then also clear `~/.clawdentity/agents//` before re-onboarding. -- Skill-only policy: no direct `clawdentity openclaw setup` execution by humans during E2E validation; the agent must run the skill flow and prompt the human only for missing invite code or confirmations. -- npm-first contract: `npm install clawdentity --skill` must be the default install trigger for skill artifact preparation, and install logs should report deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). - ## Scaffold Best Practices - Start by reviewing README, PRD, and the active execution tracker issue so documentation mirrors the execution model. 
- Define the workspace layout now: `apps/registry`, `apps/proxy`, `apps/cli`, `packages/sdk`, and `packages/protocol` (with shared tooling such as `pnpm-workspace.yaml`, `tsconfig.base.json`, and `biome.json`) so downstream tickets have a known structure. diff --git a/README.md b/README.md index c92a066..79d08b0 100644 --- a/README.md +++ b/README.md @@ -531,30 +531,6 @@ When `--skill` mode is detected, installer logic prepares OpenClaw runtime artif Install is idempotent and logs deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). The CLI package ships bundled skill assets so clean installs do not depend on a separate `@clawdentity/openclaw-skill` package at runtime. -### Docker E2E relay check (skill + invite flow) - -For user-like OpenClaw relay validation with existing Docker agents, run: - -```bash -pnpm -F clawdentity run test:e2e:openclaw-docker -``` - -Defaults target: -- `clawdbot-agent-alpha-1` (`http://127.0.0.1:18789`) -- `clawdbot-agent-beta-1` (`http://127.0.0.1:19001`) - -This script validates: -- invite-code onboarding setup in both containers -- skill-created artifact presence -- bidirectional multi-message relay -- edge cases: unknown peer alias, connector offline, connector recovery - -Common environment overrides: -- `CLAWDENTITY_E2E_PAT` (required if registry is already bootstrapped) -- `RESET_MODE=skill|full|none` (default `skill`) -- `ALPHA_CONTAINER`, `BETA_CONTAINER` -- `REGISTRY_URL`, `PROXY_HOOK_URL`, `PROXY_WS_URL` - ### CLI npm release (manual) - GitHub workflow: `.github/workflows/publish-cli.yml` diff --git a/apps/cli/package.json b/apps/cli/package.json index af5ef2e..30d0c7e 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -29,7 +29,6 @@ "postinstall": "node ./postinstall.mjs", "sync:skill-bundle": "node ./scripts/sync-skill-bundle.mjs", "test": "vitest run", - "test:e2e:openclaw-docker": "bash ./scripts/openclaw-relay-docker-e2e.sh", "typecheck": "tsc --noEmit" }, "dependencies": { diff --git 
a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md index 298ed4b..5ed7530 100644 --- a/apps/cli/scripts/AGENTS.md +++ b/apps/cli/scripts/AGENTS.md @@ -5,11 +5,6 @@ ## Rules - `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. -- `openclaw-relay-docker-e2e.sh` is the source of truth for local-only Docker-based OpenClaw relay E2E validation (invite onboarding, skill artifacts, bidirectional relay, and connector failure/recovery checks). - Scripts must fail with actionable errors when required source artifacts are missing. -- Docker E2E skill install must use strict global package root `clawdentity` only and fail fast when `postinstall.mjs` is missing (no backward-compatibility fallback). -- Docker E2E scripts must keep reset behavior explicit (`RESET_MODE=skill|full|none`) and must only remove known skill-generated files in skill-reset mode. -- Docker E2E relay scripts should accept `CLAWDENTITY_E2E_PAT`, but when absent they must first attempt to reuse existing container CLI config PAT before fallback bootstrap so pre-bootstrapped environments remain runnable. -- Connector startup failures in Docker E2E should include the agent DID in diagnostics so operator allowlist mismatches can be fixed quickly. - Keep script output concise and stable for CI/release logs. - Do not add install-time network fetches to packaging scripts. 
diff --git a/apps/cli/scripts/openclaw-relay-docker-e2e.sh b/apps/cli/scripts/openclaw-relay-docker-e2e.sh deleted file mode 100755 index b03499a..0000000 --- a/apps/cli/scripts/openclaw-relay-docker-e2e.sh +++ /dev/null @@ -1,298 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -ALPHA_CONTAINER="${ALPHA_CONTAINER:-clawdbot-agent-alpha-1}" -BETA_CONTAINER="${BETA_CONTAINER:-clawdbot-agent-beta-1}" - -ALPHA_AGENT_NAME="${ALPHA_AGENT_NAME:-alpha}" -BETA_AGENT_NAME="${BETA_AGENT_NAME:-beta}" - -REGISTRY_URL="${REGISTRY_URL:-http://host.docker.internal:8787}" -PROXY_HOOK_URL="${PROXY_HOOK_URL:-http://host.docker.internal:8788/hooks/agent}" -PROXY_WS_URL="${PROXY_WS_URL:-ws://host.docker.internal:8788/v1/relay/connect}" - -ALPHA_HOST_OPENCLAW_URL="${ALPHA_HOST_OPENCLAW_URL:-http://127.0.0.1:18789}" -BETA_HOST_OPENCLAW_URL="${BETA_HOST_OPENCLAW_URL:-http://127.0.0.1:19001}" -CONTAINER_OPENCLAW_BASE_URL="${CONTAINER_OPENCLAW_BASE_URL:-http://127.0.0.1:18789}" - -ALPHA_HOOK_TOKEN="${ALPHA_HOOK_TOKEN:-alpha-hook-secret}" -BETA_HOOK_TOKEN="${BETA_HOOK_TOKEN:-beta-hook-secret}" -BOOTSTRAP_SECRET="${BOOTSTRAP_SECRET:-clawdentity-local-bootstrap}" -CLI_GLOBAL_PACKAGE_ROOT="${CLI_GLOBAL_PACKAGE_ROOT:-/home/node/.local/lib/node_modules/clawdentity}" - -RESET_MODE="${RESET_MODE:-skill}" -CLAWDENTITY_E2E_PAT="${CLAWDENTITY_E2E_PAT:-}" - -log() { - printf '[openclaw-relay-e2e] %s\n' "$*" -} - -fail() { - printf '[openclaw-relay-e2e] ERROR: %s\n' "$*" >&2 - exit 1 -} - -require_command() { - command -v "$1" >/dev/null 2>&1 || fail "Missing required command: $1" -} - -require_running_container() { - local container="$1" - local running - running="$(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null || true)" - [[ "$running" == "true" ]] || fail "Container is not running: $container" -} - -container_exec() { - local container="$1" - shift - docker exec "$container" sh -lc "$*" -} - -container_has_file() { - local container="$1" - local file_path="$2" - container_exec 
"$container" "test -f $file_path" -} - -extract_invite_code() { - printf '%s\n' "$1" | sed -n 's/^Invite code: //p' | head -n 1 -} - -extract_pat() { - printf '%s\n' "$1" | grep -Eo 'clw_pat_[A-Za-z0-9_-]+' | head -n 1 -} - -read_config_pat() { - local container="$1" - container_exec "$container" "node -e 'const fs=require(\"fs\");const p=process.env.HOME+\"/.clawdentity/config.json\";if(!fs.existsSync(p)){process.exit(0);}const cfg=JSON.parse(fs.readFileSync(p,\"utf8\"));if(typeof cfg.apiKey===\"string\"&&cfg.apiKey.trim().length>0){process.stdout.write(cfg.apiKey.trim());}'" -} - -read_agent_did() { - local container="$1" - local agent_name="$2" - container_exec "$container" "node -e 'const fs=require(\"fs\");const p=process.env.HOME+\"/.clawdentity/agents/$agent_name/identity.json\";const id=JSON.parse(fs.readFileSync(p,\"utf8\"));process.stdout.write(id.did);'" -} - -reset_skill_state() { - local container="$1" - local agent_name="$2" - - container_exec "$container" "rm -f ~/.clawdentity/peers.json ~/.clawdentity/openclaw-agent-name ~/.clawdentity/openclaw-relay.json ~/.openclaw/hooks/transforms/relay-to-peer.mjs" - container_exec "$container" "rm -rf ~/.openclaw/workspace/skills/clawdentity-openclaw-relay" - - if [[ "$RESET_MODE" == "full" ]]; then - container_exec "$container" "rm -rf ~/.clawdentity/agents/$agent_name" - fi -} - -install_skill_assets() { - local container="$1" - local package_root="$CLI_GLOBAL_PACKAGE_ROOT" - - container_exec "$container" "test -f \"$package_root/postinstall.mjs\"" \ - || fail "postinstall.mjs not found in strict CLI package root: $package_root" - - container_exec "$container" "npm_config_skill=true node \"$package_root/postinstall.mjs\" >/dev/null" -} - -ensure_agent_identity() { - local container="$1" - local agent_name="$2" - if container_exec "$container" "clawdentity agent inspect \"$agent_name\" >/dev/null 2>&1"; then - log "$container: agent '$agent_name' already exists" - return - fi - - log "$container: creating 
agent '$agent_name'" - container_exec "$container" "clawdentity agent create \"$agent_name\" --framework openclaw >/dev/null" -} - -configure_registry() { - local container="$1" - local pat="$2" - container_exec "$container" "clawdentity config init >/dev/null" - container_exec "$container" "clawdentity config set registryUrl \"$REGISTRY_URL\" >/dev/null" - container_exec "$container" "clawdentity config set apiKey \"$pat\" >/dev/null" -} - -stop_connector() { - local container="$1" - local agent_name="$2" - - container_exec "$container" "if [ -f /tmp/clawdentity-connector-$agent_name.pid ]; then kill \$(cat /tmp/clawdentity-connector-$agent_name.pid) 2>/dev/null || true; fi" - container_exec "$container" "for pid in \$(ps -ef | awk '/clawdentity connector start $agent_name/ && !/awk/ {print \$2}'); do kill \"\$pid\" 2>/dev/null || true; done" -} - -start_connector() { - local container="$1" - local agent_name="$2" - local hook_token="$3" - local agent_did="$4" - - stop_connector "$container" "$agent_name" - container_exec "$container" "nohup clawdentity connector start \"$agent_name\" --proxy-ws-url \"$PROXY_WS_URL\" --openclaw-hook-token \"$hook_token\" >/tmp/clawdentity-connector-$agent_name.log 2>&1 & echo \$! > /tmp/clawdentity-connector-$agent_name.pid" - - local waited=0 - while true; do - if container_exec "$container" "grep -q 'connector.websocket.connected' /tmp/clawdentity-connector-$agent_name.log"; then - log "$container: connector '$agent_name' connected" - break - fi - - waited=$((waited + 1)) - if [[ $waited -ge 30 ]]; then - container_exec "$container" "tail -n 120 /tmp/clawdentity-connector-$agent_name.log" || true - fail "$container: connector '$agent_name' did not connect within timeout. 
Ensure proxy allowlist includes DID $agent_did" - fi - sleep 1 - done -} - -send_peer_message() { - local sender_url="$1" - local hook_token="$2" - local peer_alias="$3" - local session_id="$4" - local message="$5" - local expected_status="$6" - - local response_body - response_body="$(mktemp)" - local status - status="$( - curl -sS \ - -o "$response_body" \ - -w '%{http_code}' \ - -X POST "$sender_url/hooks/send-to-peer" \ - -H 'content-type: application/json' \ - -H "x-openclaw-token: $hook_token" \ - --data "{\"peer\":\"$peer_alias\",\"sessionId\":\"$session_id\",\"message\":\"$message\"}" - )" - - if [[ "$status" != "$expected_status" ]]; then - log "send-to-peer failed: expected $expected_status, got $status" - cat "$response_body" >&2 - rm -f "$response_body" - fail "Unexpected send-to-peer status" - fi - - log "send-to-peer ok: $sender_url -> $peer_alias ($status) | $message" - rm -f "$response_body" -} - -run() { - require_command docker - require_command curl - require_command node - - require_running_container "$ALPHA_CONTAINER" - require_running_container "$BETA_CONTAINER" - - log "Validating clawdentity CLI availability in containers" - container_exec "$ALPHA_CONTAINER" "clawdentity --help >/dev/null" || fail "$ALPHA_CONTAINER missing clawdentity CLI" - container_exec "$BETA_CONTAINER" "clawdentity --help >/dev/null" || fail "$BETA_CONTAINER missing clawdentity CLI" - - if [[ "$RESET_MODE" != "none" ]]; then - log "Reset mode: $RESET_MODE" - reset_skill_state "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME" - reset_skill_state "$BETA_CONTAINER" "$BETA_AGENT_NAME" - fi - - log "Installing skill artifacts via npm --skill postinstall flow" - install_skill_assets "$ALPHA_CONTAINER" - install_skill_assets "$BETA_CONTAINER" - - local pat="$CLAWDENTITY_E2E_PAT" - if [[ -z "$pat" ]]; then - pat="$(read_config_pat "$ALPHA_CONTAINER")" - fi - - if [[ -z "$pat" ]]; then - log "No CLAWDENTITY_E2E_PAT provided; attempting bootstrap on $ALPHA_CONTAINER" - local 
bootstrap_output - if ! bootstrap_output="$(container_exec "$ALPHA_CONTAINER" "clawdentity admin bootstrap --bootstrap-secret \"$BOOTSTRAP_SECRET\"" 2>&1)"; then - printf '%s\n' "$bootstrap_output" >&2 - fail "Bootstrap failed. Set CLAWDENTITY_E2E_PAT for pre-bootstrapped environments." - fi - - pat="$(extract_pat "$bootstrap_output")" - [[ -n "$pat" ]] || fail "Failed to extract PAT from bootstrap output" - fi - log "Using PAT for CLI config in both containers" - - configure_registry "$ALPHA_CONTAINER" "$pat" - configure_registry "$BETA_CONTAINER" "$pat" - - ensure_agent_identity "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME" - ensure_agent_identity "$BETA_CONTAINER" "$BETA_AGENT_NAME" - - local alpha_did beta_did - alpha_did="$(read_agent_did "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME")" - beta_did="$(read_agent_did "$BETA_CONTAINER" "$BETA_AGENT_NAME")" - log "Alpha DID: $alpha_did" - log "Beta DID: $beta_did" - - log "Creating invite code in $BETA_CONTAINER for peer alias '$BETA_AGENT_NAME'" - local beta_invite_output beta_invite_code - beta_invite_output="$( - container_exec "$BETA_CONTAINER" \ - "clawdentity openclaw invite --did \"$beta_did\" --proxy-url \"$PROXY_HOOK_URL\" --peer-alias \"$BETA_AGENT_NAME\"" - )" - beta_invite_code="$(extract_invite_code "$beta_invite_output")" - [[ -n "$beta_invite_code" ]] || fail "Failed to extract beta invite code" - - log "Creating invite code in $ALPHA_CONTAINER for peer alias '$ALPHA_AGENT_NAME'" - local alpha_invite_output alpha_invite_code - alpha_invite_output="$( - container_exec "$ALPHA_CONTAINER" \ - "clawdentity openclaw invite --did \"$alpha_did\" --proxy-url \"$PROXY_HOOK_URL\" --peer-alias \"$ALPHA_AGENT_NAME\"" - )" - alpha_invite_code="$(extract_invite_code "$alpha_invite_output")" - [[ -n "$alpha_invite_code" ]] || fail "Failed to extract alpha invite code" - - log "Running invite onboarding setup inside $ALPHA_CONTAINER" - container_exec "$ALPHA_CONTAINER" \ - "clawdentity openclaw setup \"$ALPHA_AGENT_NAME\" 
--invite-code \"$beta_invite_code\" --openclaw-base-url \"$CONTAINER_OPENCLAW_BASE_URL\" >/dev/null" - - log "Running invite onboarding setup inside $BETA_CONTAINER" - container_exec "$BETA_CONTAINER" \ - "clawdentity openclaw setup \"$BETA_AGENT_NAME\" --invite-code \"$alpha_invite_code\" --openclaw-base-url \"$CONTAINER_OPENCLAW_BASE_URL\" >/dev/null" - - log "Verifying skill-created artifacts" - container_has_file "$ALPHA_CONTAINER" '$HOME/.clawdentity/peers.json' || fail "Alpha peers.json missing" - container_has_file "$ALPHA_CONTAINER" '$HOME/.clawdentity/openclaw-agent-name' || fail "Alpha openclaw-agent-name missing" - container_has_file "$ALPHA_CONTAINER" '$HOME/.clawdentity/openclaw-relay.json' || fail "Alpha openclaw-relay.json missing" - container_has_file "$ALPHA_CONTAINER" '$HOME/.openclaw/hooks/transforms/relay-to-peer.mjs' || fail "Alpha relay transform missing" - container_has_file "$ALPHA_CONTAINER" '$HOME/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md' || fail "Alpha skill bundle missing" - container_has_file "$BETA_CONTAINER" '$HOME/.clawdentity/peers.json' || fail "Beta peers.json missing" - container_has_file "$BETA_CONTAINER" '$HOME/.clawdentity/openclaw-agent-name' || fail "Beta openclaw-agent-name missing" - container_has_file "$BETA_CONTAINER" '$HOME/.clawdentity/openclaw-relay.json' || fail "Beta openclaw-relay.json missing" - container_has_file "$BETA_CONTAINER" '$HOME/.openclaw/hooks/transforms/relay-to-peer.mjs' || fail "Beta relay transform missing" - container_has_file "$BETA_CONTAINER" '$HOME/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md' || fail "Beta skill bundle missing" - - log "Starting connector runtimes" - start_connector "$ALPHA_CONTAINER" "$ALPHA_AGENT_NAME" "$ALPHA_HOOK_TOKEN" "$alpha_did" - start_connector "$BETA_CONTAINER" "$BETA_AGENT_NAME" "$BETA_HOOK_TOKEN" "$beta_did" - - log "Running bidirectional multi-message relay" - send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" 
"$BETA_AGENT_NAME" "relay-alpha-beta" "alpha to beta m1" "204" - send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta" "alpha to beta m2" "204" - send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta" "alpha to beta m3" "204" - send_peer_message "$BETA_HOST_OPENCLAW_URL" "$BETA_HOOK_TOKEN" "$ALPHA_AGENT_NAME" "relay-beta-alpha" "beta to alpha m1" "204" - send_peer_message "$BETA_HOST_OPENCLAW_URL" "$BETA_HOOK_TOKEN" "$ALPHA_AGENT_NAME" "relay-beta-alpha" "beta to alpha m2" "204" - - log "Running edge case: unknown peer alias" - send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "unknown-peer" "relay-alpha-invalid-peer" "should fail with unknown peer alias" "500" - - log "Running edge case: connector offline and recovery" - stop_connector "$BETA_CONTAINER" "$BETA_AGENT_NAME" - send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta-offline" "should fail while beta connector is offline" "500" - start_connector "$BETA_CONTAINER" "$BETA_AGENT_NAME" "$BETA_HOOK_TOKEN" "$beta_did" - send_peer_message "$ALPHA_HOST_OPENCLAW_URL" "$ALPHA_HOOK_TOKEN" "$BETA_AGENT_NAME" "relay-alpha-beta-recovered" "should succeed after beta connector reconnect" "204" - - log "E2E complete: invite onboarding + skill artifacts + bidirectional relay + edge cases" -} - -run diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 6472686..2b75f32 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -28,7 +28,7 @@ - Detect install mode via npm environment (`npm_config_skill` and npm argv fallback) so non-skill installs remain unaffected. - Resolve skill artifacts in this order: explicit override, bundled `skill-bundle/openclaw-skill`, installed `@clawdentity/openclaw-skill`, then workspace fallback. 
- Skill install must copy `SKILL.md`, `references/*`, and `relay-to-peer.mjs` into OpenClaw runtime paths under `~/.openclaw` and must fail with actionable errors when source artifacts are missing. -- Installer logs must be deterministic and explicit (`installed`, `updated`, `unchanged`) so E2E skill tests can assert outcomes reliably. +- Installer logs must be deterministic and explicit (`installed`, `updated`, `unchanged`) so automated skill tests can assert outcomes reliably. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 193f1d3..08ad66d 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -33,14 +33,6 @@ - `pnpm -F @clawdentity/openclaw-skill test` - `pnpm -F @clawdentity/openclaw-skill build` -## Docker E2E Workflow -- Run E2E with two OpenClaw containers: Alpha (sender) and Beta (receiver), each with isolated HOME storage. -- Install and execute onboarding through skill flow only (`npm install clawdentity --skill` plus agent-executed skill steps). -- Human role in E2E is limited to supplying invite code and confirmations requested by the agent. -- Do not edit relay hooks, peer config, or selected-agent files manually during validation. -- After skill setup, verify these artifacts exist and are agent-generated: `~/.clawdentity/peers.json`, `~/.clawdentity/openclaw-agent-name`, `~/.clawdentity/openclaw-relay.json`, `~/.openclaw/hooks/transforms/relay-to-peer.mjs`. -- For reruns after failures, clear skill-generated artifacts first; only perform full identity reset (`~/.clawdentity/agents//`) when identity reprovisioning is needed. - ## Skill Runtime Behavior - Keep onboarding prompts input-focused (invite/API key/URLs) and let the skill decide command execution. - The agent should run required npm/CLI/filesystem operations via tools and only ask the human for missing inputs. 
From 93e26ad5f4fb2c9bd8bb18c70316d44a3a4dd43a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 18:07:51 +0530 Subject: [PATCH 080/190] feat(proxy): implement durable pairing trust store and sync openclaw skill docs --- PRD.md | 6 +- README.md | 32 +- .../openclaw-skill/skill/SKILL.md | 80 +++- .../skill/references/clawdentity-protocol.md | 83 +++- apps/openclaw-skill/AGENTS.md | 7 + apps/openclaw-skill/skill/SKILL.md | 80 +++- .../skill/references/clawdentity-protocol.md | 83 +++- apps/proxy/.env.example | 10 +- apps/proxy/AGENTS.md | 33 +- apps/proxy/src/AGENTS.md | 11 +- apps/proxy/src/agent-hook-route.test.ts | 36 ++ apps/proxy/src/agent-hook-route.ts | 9 + apps/proxy/src/auth-middleware.test.ts | 136 +++--- apps/proxy/src/auth-middleware.ts | 33 +- apps/proxy/src/config.test.ts | 44 +- apps/proxy/src/config.ts | 102 +--- apps/proxy/src/pairing-constants.ts | 8 + apps/proxy/src/pairing-route.test.ts | 236 ++++++++++ apps/proxy/src/pairing-route.ts | 368 +++++++++++++++ apps/proxy/src/proxy-trust-state.test.ts | 128 +++++ apps/proxy/src/proxy-trust-state.ts | 438 ++++++++++++++++++ apps/proxy/src/proxy-trust-store.test.ts | 128 +++++ apps/proxy/src/proxy-trust-store.ts | 336 ++++++++++++++ apps/proxy/src/server.ts | 36 ++ apps/proxy/src/trust-policy.ts | 71 +++ apps/proxy/src/worker.ts | 23 +- apps/proxy/vitest.config.ts | 14 + apps/proxy/wrangler.jsonc | 32 ++ apps/registry/src/AGENTS.md | 6 + apps/registry/src/agent-ownership.ts | 53 +++ apps/registry/src/server.test.ts | 148 +++++- apps/registry/src/server.ts | 23 + apps/registry/vitest.config.ts | 3 + packages/sdk/AGENTS.md | 2 + packages/sdk/package.json | 4 + packages/sdk/src/testing/ait-fixtures.test.ts | 44 ++ packages/sdk/src/testing/ait-fixtures.ts | 53 +++ packages/sdk/src/testing/index.ts | 2 + packages/sdk/tsup.config.ts | 5 +- tsconfig.base.json | 3 +- 40 files changed, 2570 insertions(+), 379 deletions(-) create mode 100644 apps/proxy/src/pairing-constants.ts create mode 100644 
apps/proxy/src/pairing-route.test.ts create mode 100644 apps/proxy/src/pairing-route.ts create mode 100644 apps/proxy/src/proxy-trust-state.test.ts create mode 100644 apps/proxy/src/proxy-trust-state.ts create mode 100644 apps/proxy/src/proxy-trust-store.test.ts create mode 100644 apps/proxy/src/proxy-trust-store.ts create mode 100644 apps/proxy/src/trust-policy.ts create mode 100644 apps/registry/src/agent-ownership.ts create mode 100644 packages/sdk/src/testing/ait-fixtures.test.ts create mode 100644 packages/sdk/src/testing/ait-fixtures.ts create mode 100644 packages/sdk/src/testing/index.ts diff --git a/PRD.md b/PRD.md index 4c3aef5..5ba3e6b 100644 --- a/PRD.md +++ b/PRD.md @@ -77,14 +77,14 @@ Because OpenClaw requires `hooks.token` and expects Bearer/token auth for `/hook - **Proxy** - Verify inbound Clawdentity headers - - Enforce allowlist rules (agent DID only in current phase; owner DID support deferred) + - Enforce durable trust-pair rules for sender/recipient agent DIDs - Rate-limit per verified agent DID - Forward to OpenClaw `/hooks/agent` with `x-openclaw-token` - **Discovery** - Share-by-contact-card (verify link + endpoint) - Resolve `gateway_hint` from registry (optional) - - Pairing code (optional, “approve first contact”) + - Pairing code (`/pair/start` + `/pair/confirm`) for trust bootstrap - **Onboarding / access control** - Invite-gated user registration (`register --invite`) @@ -192,7 +192,7 @@ Verifier must enforce: - Valid caller → proxy forwards → OpenClaw returns 202 - Invalid/expired/revoked token → proxy returns 401 -- Valid but not allowlisted → proxy returns 403 +- Valid but not trusted for recipient pair → proxy returns 403 - Replay within time window is rejected (nonce reuse) - Revocation causes rejection within next CRL refresh diff --git a/README.md b/README.md index 79d08b0..0ba8d69 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,7 @@ What Clawdentity adds: - Verifiable per-agent identity (AIT + PoP) - Fast revocation 
propagation (signed CRL + cache refresh) -- Proxy-side policy enforcement (allowlist + rate limits + replay protection) +- Proxy-side policy enforcement (trust pairs + rate limits + replay protection) --- @@ -50,7 +50,7 @@ Caller Agent | | Authorization: Claw + X-Claw-Proof/Nonce/Timestamp v -Clawdentity Proxy (verifies identity + allowlist + rate limits) +Clawdentity Proxy (verifies identity + trust policy + rate limits) | | x-openclaw-token: (internal only) v @@ -89,7 +89,7 @@ OpenClaw Gateway (normal /hooks/agent handling) - Proxy checks AIT expiry and CRL revocation status. - Proxy verifies PoP signature against the key in the token. - Proxy rejects replay via timestamp skew + nonce cache. -- Proxy enforces allowlist and rate limits. +- Proxy enforces trust-pair policy and rate limits. ### 4) Forward to OpenClaw @@ -139,7 +139,7 @@ This section walks through **every step** from zero to two OpenClaw agents excha │ │ │ │ │ Verifies identity │ └──────────────────────┘ │ Checks revocation │ - │ Enforces allowlist │ + │ Enforces trust pairs │ │ Rejects replays │ │ Rate limits per agent│ └───────────┬───────────┘ @@ -266,7 +266,7 @@ Alice's Operator Bob's Operator │ │ Configures OpenClaw hooks ``` -**Security:** The invite contains only public information (DID + proxy URL). No keys, tokens, or secrets are exchanged. Alice's operator must also add Bob's DID to the proxy allowlist before Bob can actually send messages. +**Security:** The invite contains only public information (DID + proxy URL). No keys, tokens, or secrets are exchanged. Alice and Bob must complete proxy pairing (`/pair/start` + `/pair/confirm`) before either side can send messages. ### Step 4: First Message (Bob → Alice) @@ -317,8 +317,8 @@ Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's Ope │ │ (per-agent nonce cache) │ │ │ ⑤ Check CRL revocation │ │ │ (signed list from registry) │ - │ │ ⑥ Enforce allowlist │ - │ │ (is Bob's DID permitted?) 
│ + │ │ ⑥ Enforce trust pair │ + │ │ (is Bob trusted for Alice?) │ │ │ ⑦ Validate agent access token │ │ │ (POST to registry) │ │ │ │ │ @@ -344,7 +344,7 @@ Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's Ope | **Revocation** | Rotate shared token = break all integrations | Revoke one agent instantly via CRL, others unaffected | | **Replay protection** | None | Timestamp + nonce + signature on every request | | **Tamper detection** | None | Body hash + PoP signature = any modification is detectable | -| **Per-caller policy** | Not possible | Allowlist by agent DID, rate limit per agent | +| **Per-caller policy** | Not possible | Trust pairs by sender/recipient DID, rate limit per agent | | **Key exposure** | Token must be shared with every caller | Private key never leaves the agent's machine | ### What Gets Verified (and When It Fails) @@ -356,7 +356,7 @@ Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's Ope | PoP signature | `PROXY_AUTH_INVALID_PROOF` | 401 | Sender doesn't hold the private key | | Nonce replay | `PROXY_AUTH_REPLAY` | 401 | Same request was sent twice | | CRL revocation | `PROXY_AUTH_REVOKED` | 401 | Agent identity has been revoked | -| Allowlist | `PROXY_AUTH_FORBIDDEN` | 403 | Agent is valid but not authorized here | +| Trust policy | `PROXY_AUTH_FORBIDDEN` | 403 | Agent is valid but not trusted for this recipient | | Agent access token | `PROXY_AGENT_ACCESS_INVALID` | 401 | Session token expired or revoked | | Rate limit | `PROXY_RATE_LIMIT_EXCEEDED` | 429 | Too many requests from this agent | @@ -373,7 +373,7 @@ Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's Ope ### Receiver side operator (callee gateway owner) -- Action: remove/deny caller in local allowlist (or keep `approvalRequired` for first contact) +- Action: remove/deny trusted caller pair in local proxy trust state (or keep approval-required first contact) - Scope: **local only** (that specific gateway/proxy) - Effect: caller is blocked on this gateway immediately, 
but remains valid elsewhere unless globally revoked. - Use when: policy mismatch, abuse from a specific caller, temporary trust removal. @@ -390,7 +390,7 @@ Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's Ope 2. Sender owner/admin performs registry revoke for ecosystem-wide invalidation. 3. Proxies return: - `401` for invalid/expired/revoked identity - - `403` for valid identity that is not allowlisted locally + - `403` for valid identity that is not trusted locally for the target recipient --- @@ -456,7 +456,7 @@ clawdentity/ - Handled by: `apps/proxy` - Proxy Worker verifies AIT + CRL + PoP before forwarding to OpenClaw. -- Enforces caller allowlist policy by DID. +- Enforces durable trust pairs for sender/recipient DID. - Applies per-agent rate limiting. - Keeps `hooks.token` private and only injects it internally during forward. - By default, `INJECT_IDENTITY_INTO_MESSAGE=true` to prepend a sanitized identity block @@ -510,7 +510,7 @@ clawdentity/ - Handled by: `apps/registry`, `apps/proxy`, `apps/cli` - Out-of-band contact card sharing. - Registry `gateway_hint` resolution. -- Optional pairing-code flow for first-contact allowlist approval. +- Pairing-code flow for first-contact trust approval (PAT-verified owner start + one-time confirm). --- @@ -559,7 +559,7 @@ MVP supports three ways to “find” another agent: 1. **Out-of-band share**: human shares a contact card (verify link + endpoint URL) 2. **Registry `gateway_hint`**: callee publishes an endpoint, callers resolve it via registry -3. **Pairing code** (proxy): “Approve first contact” to auto-add caller to allowlist +3. **Pairing code** (proxy): “Approve first contact” to establish a mutual trusted agent pair No one shares keys/files between agents. Identity is presented per request. @@ -582,7 +582,7 @@ No one shares keys/files between agents. Identity is presented per request. 
- method, path, timestamp, nonce, body hash - and reject nonce replays - Reject tampering: any change to method/path/body/timestamp/nonce invalidates proof. -- Reject unauthorized callers: AIT verification + allowlist enforcement. +- Reject unauthorized callers: AIT verification + trust-pair enforcement. - Reject compromised identities quickly: CRL-based revocation checks. - Contain abuse: per-agent rate limits at proxy boundary. @@ -600,7 +600,7 @@ No one shares keys/files between agents. Identity is presented per request. - Treat any identity fields (agent name/description) as untrusted input; never allow prompt injection via identity metadata. - Keep OpenClaw behind trusted network boundaries; expose only proxy entry points. -- Rotate PATs and audit allowlist entries regularly. +- Rotate PATs and audit trusted pair entries regularly. - Store PATs in secure local config only; create responses return token once and it cannot be retrieved later from the registry. - Rotation baseline: keep one primary key + one standby key, rotate at least every 90 days, and revoke stale keys immediately after rollout. diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md index 8e09b22..9466c2c 100644 --- a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md +++ b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md @@ -1,12 +1,12 @@ --- name: clawdentity_openclaw_relay -description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication with Clawdentity PoP verification. +description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication through the local Clawdentity connector runtime. 
version: 0.1.0 --- # Clawdentity OpenClaw Relay Skill -This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through Clawdentity identity (`Authorization: Claw ` + PoP headers) using a single invite code. +This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through the local Clawdentity connector runtime using a single invite code. ## Trigger Conditions @@ -36,14 +36,10 @@ Use this skill when any of the following are requested: - Local selected agent marker: `~/.clawdentity/openclaw-agent-name` - Relay runtime config: `~/.clawdentity/openclaw-relay.json` -## Operator Split +## Invite Input Assumption -### Admin/operator side (only action required) -Create invite code: - -`clawdentity openclaw invite --did --proxy-url --peer-alias ` - -Share the invite code string with the human who owns the target agent. +Provide a valid invite code string before running this skill. +Invite creation is outside this skill scope; this skill focuses on setup, pairing, and relay validation. ## Human + Agent Flow (strict user-style) @@ -57,6 +53,41 @@ This skill is operational. The agent must execute side effects via tools. - Ask the human only for missing secrets/inputs (for example API key or invite code). - Report final status with concrete outputs (local DID, peer alias, written paths). 
+## CLI Command Utilization (required)
+
+Use these commands as the default execution path for skill utilization:
+
+- Initialize local CLI config:
+ - `clawdentity config init`
+- Configure registry URL and API key when missing:
+ - `clawdentity config set registryUrl <registry-url>`
+ - `clawdentity config set apiKey <api-key>`
+- Create and inspect local OpenClaw agent identity:
+ - `clawdentity agent create --framework openclaw`
+ - `clawdentity agent inspect <agent-name>`
+- Apply OpenClaw invite setup:
+ - `clawdentity openclaw setup <agent-name> --invite-code <invite-code>`
+- Start connector runtime for relay handoff:
+ - `clawdentity connector start <agent-name>`
+- Optional persistent connector autostart:
+ - `clawdentity connector service install <agent-name>`
+- Validate health and delivery:
+ - `clawdentity openclaw doctor`
+ - `clawdentity openclaw relay test --peer <peer-alias>`
+
+Pairing bootstrap for trust policy is API-based in the current release (no dedicated pairing CLI command yet):
+
+- Owner/initiator starts pairing on initiator proxy:
+ - `POST /pair/start`
+ - Requires `Authorization: Claw <ait-jwt>` and `x-claw-owner-pat`
+ - Body: `{"agentDid":"<responder-agent-did>"}`
+- Responder confirms on responder proxy:
+ - `POST /pair/confirm`
+ - Requires `Authorization: Claw <ait-jwt>`
+ - Body: `{"pairingCode":"<pairing-code>"}`
+
+Successful confirm establishes mutual trust for the two agent DIDs. After confirm, both directions are allowed for trusted delivery.
+
1. Confirm prerequisites with the human.
- Confirm `clawdentity` CLI is installed and runnable.
- Confirm API key exists for this agent (if missing, ask the human for it).
@@ -94,11 +125,21 @@ This skill is operational. The agent must execute side effects via tools.
- relay runtime config path
- Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name.
-7. Validate with user-style relay test.
-- Human asks Alpha to send a request with `peer: "beta"`.
-- Agent relays with Claw + PoP headers.
-- Peer proxy verifies and forwards to peer OpenClaw.
-- Verify success logs on both sides.
+7. 
Start connector runtime for local relay handoff.
+- Run `clawdentity connector start <agent-name>`.
+- Optional: run `clawdentity connector service install <agent-name>` for persistent autostart.
+
+8. Complete trust pairing bootstrap.
+- Run pairing start (`POST /pair/start`) from the owner/initiator side.
+- Share returned one-time `pairingCode` with responder side.
+- Run pairing confirm (`POST /pair/confirm`) from responder side.
+- Confirm pairing success before relay test.
+
+9. Validate with user-style relay test.
+- Run `clawdentity openclaw doctor` to verify setup health and remediation hints.
+- Run `clawdentity openclaw relay test --peer <peer-alias>` to execute a probe.
+- Confirm probe success and connector-mediated delivery logs.
+- Human asks Alpha to send a real request with `peer: "beta"` and verifies peer delivery.
## Required question policy
@@ -107,20 +148,23 @@ Ask the human only when required inputs are missing:
- Unclear OpenClaw state directory.
- Non-default OpenClaw base URL.
- Missing invite code.
-- Local registry/proxy network location is unknown or unreachable from agent runtime.
+- Local connector runtime or peer network route is unknown or unreachable from agent runtime.
## Failure Handling
If setup or relay fails:
- Report precise missing file/path/value.
- Fix only the failing config/input.
-- Re-run the same user-style flow from step 5 onward.
+- Ensure connector runtime is active (`clawdentity connector start <agent-name>`).
+- Re-run `clawdentity openclaw doctor`. 
## Bundled Resources ### References | File | Purpose | |------|---------| -| `references/clawdentity-protocol.md` | Header format, peer map schema, and relay verification details | +| `references/clawdentity-protocol.md` | Invite format, peer map schema, connector handoff envelope, and runtime failure mapping | -Directive: read the reference file before troubleshooting protocol or signature failures. +Directive: read the reference file before troubleshooting relay contract or connector handoff failures. diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md index a2df2e1..2c4c476 100644 --- a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md @@ -63,6 +63,42 @@ Rules: - `proxyUrl` required and must be a valid absolute URL - `name` optional +## Proxy Pairing Prerequisite + +Relay delivery policy is trust-pair based on proxy side. Pairing must be completed before first cross-agent delivery. + +Current pairing contract is API-based (no dedicated CLI pairing command): + +1. Initiator owner starts pairing: + - `POST /pair/start` + - headers: + - `Authorization: Claw ` + - `x-claw-owner-pat: ` + - body: + +```json +{ + "agentDid": "did:claw:agent:01RESPONDER..." +} +``` + +2. Responder confirms pairing: + - `POST /pair/confirm` + - headers: + - `Authorization: Claw ` + - body: + +```json +{ + "pairingCode": "01PAIRCODE..." +} +``` + +Rules: +- `pairingCode` is one-time and expires. +- Confirm establishes mutual trust for the initiator/responder pair. +- Same-agent sender/recipient is allowed by policy without explicit pair entry. + ## Relay Input Contract The OpenClaw transform reads `ctx.payload`. @@ -73,7 +109,7 @@ The OpenClaw transform reads `ctx.payload`. 
- If `payload.peer` exists: - resolve peer from `peers.json` - remove `peer` from forwarded body - - send JSON POST to `peer.proxyUrl` + - send JSON POST to local connector outbound endpoint - return `null` to skip local handling ## Relay Agent Selection Contract @@ -100,31 +136,40 @@ Rules: - `updatedAt` is ISO-8601 UTC timestamp. - Proxy runtime precedence is: `OPENCLAW_BASE_URL` env first, then `openclaw-relay.json`, then built-in default. -## Outbound Auth Contract +## Connector Handoff Contract + +The transform does not send directly to the peer proxy. It posts to the local connector runtime: +- Default endpoint: `http://127.0.0.1:19400/v1/outbound` +- Optional overrides: + - `CLAWDENTITY_CONNECTOR_BASE_URL` + - `CLAWDENTITY_CONNECTOR_OUTBOUND_PATH` + +Outbound JSON body sent by transform: -Headers sent to peer proxy: -- `Authorization: Claw ` -- `Content-Type: application/json` -- `X-Claw-Timestamp` -- `X-Claw-Nonce` -- `X-Claw-Body-SHA256` -- `X-Claw-Proof` +```json +{ + "peer": "beta", + "peerDid": "did:claw:agent:01H...", + "peerProxyUrl": "https://beta-proxy.example.com/hooks/agent", + "payload": { + "event": "agent.message" + } +} +``` -Signing inputs: -- HTTP method: `POST` -- path+query from peer URL -- unix seconds timestamp -- random nonce -- outbound JSON body bytes -- agent secret key from `secret.key` +Rules: +- `payload.peer` is removed before creating the `payload` object above. +- Transform sends `Content-Type: application/json` only. +- Connector runtime is responsible for Clawdentity auth headers and request signing when calling peer proxy. 
## Error Conditions Relay fails when: - no selected local agent can be resolved - peer alias missing from config -- `secret.key` or `ait.jwt` missing/empty/invalid -- peer returns non-2xx -- peer network request fails +- local connector outbound endpoint is unavailable (`404`) +- local connector reports unknown peer alias (`409`) +- local connector rejects payload (`400` or `422`) +- local connector outbound request fails (network/other non-2xx) Error messages should include file/path context but never print secret content. diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 08ad66d..3c671ba 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -27,6 +27,13 @@ - Keep filesystem path logic centralized; avoid hardcoding `~/.clawdentity` paths across multiple files. - Keep relay behavior pure except for explicit dependencies (`fetch`, filesystem) so tests stay deterministic. - Prefer schema-first runtime validation over ad-hoc guards. +- Keep skill docs aligned with connector architecture: do not document direct transform-to-peer-proxy signing. +- Keep `skill/SKILL.md` command utilization section explicit and executable with current CLI commands used by this skill (`config`, `agent`, `openclaw setup/doctor/relay test`, `connector start`, optional `connector service install`). +- Keep pairing prerequisite documented as API-based (`/pair/start`, `/pair/confirm`) until a dedicated CLI pairing command exists. 
+- When `src/transforms/relay-to-peer.ts` relay envelope, endpoint defaults, or failure mapping changes, update: + - `skill/SKILL.md` + - `skill/references/clawdentity-protocol.md` + - bundled copies in `apps/cli/skill-bundle/openclaw-skill/skill/*` ## Validation Commands - `pnpm -F @clawdentity/openclaw-skill typecheck` diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index 8e09b22..9466c2c 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -1,12 +1,12 @@ --- name: clawdentity_openclaw_relay -description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication with Clawdentity PoP verification. +description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication through the local Clawdentity connector runtime. version: 0.1.0 --- # Clawdentity OpenClaw Relay Skill -This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through Clawdentity identity (`Authorization: Claw ` + PoP headers) using a single invite code. +This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through the local Clawdentity connector runtime using a single invite code. ## Trigger Conditions @@ -36,14 +36,10 @@ Use this skill when any of the following are requested: - Local selected agent marker: `~/.clawdentity/openclaw-agent-name` - Relay runtime config: `~/.clawdentity/openclaw-relay.json` -## Operator Split +## Invite Input Assumption -### Admin/operator side (only action required) -Create invite code: - -`clawdentity openclaw invite --did --proxy-url --peer-alias ` - -Share the invite code string with the human who owns the target agent. 
+Provide a valid invite code string before running this skill.
+Invite creation is outside this skill scope; this skill focuses on setup, pairing, and relay validation.
## Human + Agent Flow (strict user-style)
@@ -57,6 +53,41 @@ This skill is operational. The agent must execute side effects via tools.
- Ask the human only for missing secrets/inputs (for example API key or invite code).
- Report final status with concrete outputs (local DID, peer alias, written paths).
+## CLI Command Utilization (required)
+
+Use these commands as the default execution path for skill utilization:
+
+- Initialize local CLI config:
+ - `clawdentity config init`
+- Configure registry URL and API key when missing:
+ - `clawdentity config set registryUrl <registry-url>`
+ - `clawdentity config set apiKey <api-key>`
+- Create and inspect local OpenClaw agent identity:
+ - `clawdentity agent create --framework openclaw`
+ - `clawdentity agent inspect <agent-name>`
+- Apply OpenClaw invite setup:
+ - `clawdentity openclaw setup <agent-name> --invite-code <invite-code>`
+- Start connector runtime for relay handoff:
+ - `clawdentity connector start <agent-name>`
+- Optional persistent connector autostart:
+ - `clawdentity connector service install <agent-name>`
+- Validate health and delivery:
+ - `clawdentity openclaw doctor`
+ - `clawdentity openclaw relay test --peer <peer-alias>`
+
+Pairing bootstrap for trust policy is API-based in the current release (no dedicated pairing CLI command yet):
+
+- Owner/initiator starts pairing on initiator proxy:
+ - `POST /pair/start`
+ - Requires `Authorization: Claw <ait-jwt>` and `x-claw-owner-pat`
+ - Body: `{"agentDid":"<responder-agent-did>"}`
+- Responder confirms on responder proxy:
+ - `POST /pair/confirm`
+ - Requires `Authorization: Claw <ait-jwt>`
+ - Body: `{"pairingCode":"<pairing-code>"}`
+
+Successful confirm establishes mutual trust for the two agent DIDs. After confirm, both directions are allowed for trusted delivery.
+
1. Confirm prerequisites with the human.
- Confirm `clawdentity` CLI is installed and runnable. 
- Confirm API key exists for this agent (if missing, ask the human for it).
@@ -94,11 +125,21 @@ This skill is operational. The agent must execute side effects via tools.
- relay runtime config path
- Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name.
-7. Validate with user-style relay test.
-- Human asks Alpha to send a request with `peer: "beta"`.
-- Agent relays with Claw + PoP headers.
-- Peer proxy verifies and forwards to peer OpenClaw.
-- Verify success logs on both sides.
+7. Start connector runtime for local relay handoff.
+- Run `clawdentity connector start <agent-name>`.
+- Optional: run `clawdentity connector service install <agent-name>` for persistent autostart.
+
+8. Complete trust pairing bootstrap.
+- Run pairing start (`POST /pair/start`) from the owner/initiator side.
+- Share returned one-time `pairingCode` with responder side.
+- Run pairing confirm (`POST /pair/confirm`) from responder side.
+- Confirm pairing success before relay test.
+
+9. Validate with user-style relay test.
+- Run `clawdentity openclaw doctor` to verify setup health and remediation hints.
+- Run `clawdentity openclaw relay test --peer <peer-alias>` to execute a probe.
+- Confirm probe success and connector-mediated delivery logs.
+- Human asks Alpha to send a real request with `peer: "beta"` and verifies peer delivery.
## Required question policy
@@ -107,20 +148,23 @@ Ask the human only when required inputs are missing:
- Unclear OpenClaw state directory.
- Non-default OpenClaw base URL.
- Missing invite code.
-- Local registry/proxy network location is unknown or unreachable from agent runtime.
+- Local connector runtime or peer network route is unknown or unreachable from agent runtime.
## Failure Handling
If setup or relay fails:
- Report precise missing file/path/value.
- Fix only the failing config/input.
-- Re-run the same user-style flow from step 5 onward.
+- Ensure connector runtime is active (`clawdentity connector start <agent-name>`).
+- Re-run `clawdentity openclaw doctor`. 
+- Re-run `clawdentity openclaw relay test --peer `. +- Re-run the same user-style flow from step 5 onward only after health checks pass. ## Bundled Resources ### References | File | Purpose | |------|---------| -| `references/clawdentity-protocol.md` | Header format, peer map schema, and relay verification details | +| `references/clawdentity-protocol.md` | Invite format, peer map schema, connector handoff envelope, and runtime failure mapping | -Directive: read the reference file before troubleshooting protocol or signature failures. +Directive: read the reference file before troubleshooting relay contract or connector handoff failures. diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index a2df2e1..2c4c476 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -63,6 +63,42 @@ Rules: - `proxyUrl` required and must be a valid absolute URL - `name` optional +## Proxy Pairing Prerequisite + +Relay delivery policy is trust-pair based on proxy side. Pairing must be completed before first cross-agent delivery. + +Current pairing contract is API-based (no dedicated CLI pairing command): + +1. Initiator owner starts pairing: + - `POST /pair/start` + - headers: + - `Authorization: Claw ` + - `x-claw-owner-pat: ` + - body: + +```json +{ + "agentDid": "did:claw:agent:01RESPONDER..." +} +``` + +2. Responder confirms pairing: + - `POST /pair/confirm` + - headers: + - `Authorization: Claw ` + - body: + +```json +{ + "pairingCode": "01PAIRCODE..." +} +``` + +Rules: +- `pairingCode` is one-time and expires. +- Confirm establishes mutual trust for the initiator/responder pair. +- Same-agent sender/recipient is allowed by policy without explicit pair entry. + ## Relay Input Contract The OpenClaw transform reads `ctx.payload`. @@ -73,7 +109,7 @@ The OpenClaw transform reads `ctx.payload`. 
- If `payload.peer` exists: - resolve peer from `peers.json` - remove `peer` from forwarded body - - send JSON POST to `peer.proxyUrl` + - send JSON POST to local connector outbound endpoint - return `null` to skip local handling ## Relay Agent Selection Contract @@ -100,31 +136,40 @@ Rules: - `updatedAt` is ISO-8601 UTC timestamp. - Proxy runtime precedence is: `OPENCLAW_BASE_URL` env first, then `openclaw-relay.json`, then built-in default. -## Outbound Auth Contract +## Connector Handoff Contract + +The transform does not send directly to the peer proxy. It posts to the local connector runtime: +- Default endpoint: `http://127.0.0.1:19400/v1/outbound` +- Optional overrides: + - `CLAWDENTITY_CONNECTOR_BASE_URL` + - `CLAWDENTITY_CONNECTOR_OUTBOUND_PATH` + +Outbound JSON body sent by transform: -Headers sent to peer proxy: -- `Authorization: Claw ` -- `Content-Type: application/json` -- `X-Claw-Timestamp` -- `X-Claw-Nonce` -- `X-Claw-Body-SHA256` -- `X-Claw-Proof` +```json +{ + "peer": "beta", + "peerDid": "did:claw:agent:01H...", + "peerProxyUrl": "https://beta-proxy.example.com/hooks/agent", + "payload": { + "event": "agent.message" + } +} +``` -Signing inputs: -- HTTP method: `POST` -- path+query from peer URL -- unix seconds timestamp -- random nonce -- outbound JSON body bytes -- agent secret key from `secret.key` +Rules: +- `payload.peer` is removed before creating the `payload` object above. +- Transform sends `Content-Type: application/json` only. +- Connector runtime is responsible for Clawdentity auth headers and request signing when calling peer proxy. 
## Error Conditions Relay fails when: - no selected local agent can be resolved - peer alias missing from config -- `secret.key` or `ait.jwt` missing/empty/invalid -- peer returns non-2xx -- peer network request fails +- local connector outbound endpoint is unavailable (`404`) +- local connector reports unknown peer alias (`409`) +- local connector rejects payload (`400` or `422`) +- local connector outbound request fails (network/other non-2xx) Error messages should include file/path context but never print secret content. diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 61b0c53..4a1aa6a 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -1,7 +1,6 @@ # Proxy local/development template # For local Wrangler development, copy values into .dev.vars. # OpenClaw vars are optional for relay-mode proxy operation. -# Keep them only for backwards compatibility with older local setups. # OPENCLAW_HOOK_TOKEN=optional-openclaw-hook-token # OPENCLAW_BASE_URL=http://127.0.0.1:18789 @@ -10,10 +9,11 @@ ENVIRONMENT=local REGISTRY_URL=https://dev.api.clawdentity.com INJECT_IDENTITY_INTO_MESSAGE=true -# Optional policy/runtime overrides -# ALLOW_LIST={"owners":[],"agents":[]} -# ALLOWLIST_OWNERS=did:claw:human:example -# ALLOWLIST_AGENTS=did:claw:agent:example +# Pairing/trust state is managed dynamically via /pair/start + /pair/confirm. +# No static allowlist environment variables are supported. +# /pair/start requires request header: x-claw-owner-pat: clw_pat_... + +# Optional runtime overrides # CRL_REFRESH_INTERVAL_MS=300000 # CRL_MAX_AGE_MS=900000 # CRL_STALE_BEHAVIOR=fail-open diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index fcc03fb..67c2fda 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -7,6 +7,7 @@ ## Runtime Configuration - Keep runtime config centralized in `src/config.ts`. - Keep Cloudflare Worker deployment config in `wrangler.jsonc` with explicit `local`, `development`, and `production` environments. 
+- Duplicate Durable Object `bindings` and `migrations` inside each Wrangler env block; env sections do not inherit top-level DO config. - Keep deploy traceability explicit by passing `APP_VERSION` (or fallback `PROXY_VERSION`) via Worker bindings; `/health` must surface the resolved version. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). @@ -14,8 +15,8 @@ - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). - Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. - Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-on (`true`); disable only when operators need unchanged webhook `message` forwarding. -- Keep OpenClaw env inputs (`OPENCLAW_BASE_URL`, `OPENCLAW_HOOK_TOKEN` / `OPENCLAW_HOOKS_TOKEN`) backward-compatible but optional for relay-mode startup. -- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw compatibility vars, and policy/rate-limit vars). +- Keep OpenClaw env inputs (`OPENCLAW_BASE_URL`, `OPENCLAW_HOOK_TOKEN`) optional for relay-mode startup. +- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw vars, and policy/rate-limit vars). 
- Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) @@ -24,21 +25,26 @@ - Treat blank env values as unset for fallback resolution: - empty/whitespace values (and null-like values) in inherited env must not block `.env` or config-file fallbacks - dotenv merge semantics must match parser semantics (non-empty value wins). -- If hook token env vars are missing, resolve fallback token from `hooks.token` in `openclaw.json` (`OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH`, default `$OPENCLAW_STATE_DIR/openclaw.json`). +- If hook token env vars are missing, resolve fallback token from `hooks.token` in `openclaw.json` (`OPENCLAW_CONFIG_PATH`, default `$OPENCLAW_STATE_DIR/openclaw.json`). - Route relay sessions via Durable Objects: - `GET /v1/relay/connect` keys connector sessions by authenticated caller agent DID. - `POST /hooks/agent` keys recipient delivery by `x-claw-recipient-agent-did`. - Do not route sessions via `OWNER_AGENT_DID`. -- Keep env alias support stable for operator UX: +- Keep env input contract explicit for operator UX: - `LISTEN_PORT` or `PORT` - - `OPENCLAW_HOOK_TOKEN` or `OPENCLAW_HOOKS_TOKEN` + - `OPENCLAW_HOOK_TOKEN` - `REGISTRY_URL` or `CLAWDENTITY_REGISTRY_URL` - - state/config path aliases: `OPENCLAW_STATE_DIR`/`CLAWDBOT_STATE_DIR`, `OPENCLAW_CONFIG_PATH`/`CLAWDBOT_CONFIG_PATH` + - `OPENCLAW_STATE_DIR`, `OPENCLAW_CONFIG_PATH` -## Allowlist and Access -- Keep allowlist shape as `{ owners: string[], agents: string[] }`. -- Allow bootstrap from `ALLOW_LIST` JSON with optional explicit overrides (`ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`). -- Keep allowlist parsing deterministic and reject malformed input with structured config errors. +## Trust and Pairing +- Keep trust state in Durable Objects (`ProxyTrustState`), not in static environment variables. 
+- Do not add support for `ALLOW_LIST`, `ALLOWLIST_OWNERS`, or `ALLOWLIST_AGENTS`; trust is API-managed only. +- Pairing is managed by API: + - `POST /pair/start` (verified Claw auth + `x-claw-owner-pat` ownership check against registry `GET /v1/agents/:id/ownership`) + - `POST /pair/confirm` (verified Claw auth + one-time pairing code consume) +- Keep `/pair/confirm` as a single trust-store operation that establishes trust and consumes the code in one step (`confirmPairingCode`), never two separate calls. +- Confirming a valid pairing code must establish mutual trust for the initiator/responder agent pair. +- Keep pairing codes one-time and expiring; reject missing/expired/mismatched codes with explicit client errors. - Reject deprecated `ALLOW_ALL_VERIFIED` at startup; never provide a global allow-all bypass for verified callers. ## Auth Verification @@ -48,15 +54,16 @@ - Reject malformed authorization values that contain extra segments beyond `Claw `. - Reject malformed `X-Claw-Timestamp` values; accept only plain unix-seconds integer strings. - Verify request pipeline order as: AIT -> timestamp skew -> PoP signature -> nonce replay -> CRL revocation. -- Enforce proxy access by explicit agent DID allowlist after auth verification; owner DID-only entries do not grant access. +- Enforce known-agent access from durable trust state after auth verification (except pairing bootstrap paths). - When AIT verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning `401`. - When CRL verification fails with unknown `kid`, refresh registry keyset once and retry verification before returning dependency failure. - Return `401` for invalid/expired/replayed/revoked/invalid-proof requests. -- Return `403` when requests are verified but agent DID is not allowlisted. +- Return `403` when requests are verified but caller is not trusted. 
- Return `429` with `PROXY_PUBLIC_RATE_LIMIT_EXCEEDED` when repeated unauthenticated probes exceed public-route IP budget. -- Return `429` with `PROXY_RATE_LIMIT_EXCEEDED` when an allowlisted verified agent DID exceeds its request budget within the configured window. +- Return `429` with `PROXY_RATE_LIMIT_EXCEEDED` when a trusted verified agent DID exceeds its request budget within the configured window. - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. +- Keep `/hooks/agent` authorization strict: after auth succeeds, require trusted initiator/responder pair before relay delivery. - Keep `/v1/relay/connect` auth strict with verified Claw auth + PoP headers, but do not require `x-claw-agent-access`. ## CRL Policy diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 7ce9e8d..c41725e 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -16,6 +16,8 @@ - Keep OpenClaw compatibility vars optional for relay-mode runtime; never require `OPENCLAW_BASE_URL` or hook token for cloud relay startup. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. +- Keep static allowlist env vars removed (`ALLOW_LIST`, `ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`); trust must come from pairing state, not env. +- Keep `/pair/confirm` write path atomic at the trust-store API level: trust persistence and pairing-code consumption must happen in one operation (`confirmPairingCode`). ## Config Error Handling - Convert parse failures to `ProxyConfigError` with code `CONFIG_VALIDATION_FAILED`. 
@@ -24,7 +26,8 @@ ## Maintainability - Prefer schema-driven parsing with small pure helpers for coercion/overrides. - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. -- Keep allowlist schema strict and agent-first: reject unknown allowlist keys and require explicit `allowList.agents` membership after verification. +- Keep trust/pairing state centralized in `proxy-trust-store.ts` and `proxy-trust-state.ts` (Durable Object backed). +- Keep pairing route logic isolated in `pairing-route.ts`; `server.ts` should compose it, not implement policy details. - Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. - Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. @@ -32,9 +35,12 @@ - Keep DO runtime behavior in `agent-relay-session.ts` (websocket accept, heartbeat alarm, connector delivery RPC). - Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns. - Keep worker runtime cache keys sensitive to deploy-time version bindings so `/health` reflects fresh `APP_VERSION` after deploy. -- Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-allowlisted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. +- Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-trusted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. +- Keep pairing bootstrap explicit: `/pair/start` and `/pair/confirm` must bypass known-agent gate in auth middleware. 
+- Keep `/pair/start` ownership validation against registry `GET /v1/agents/:id/ownership` using `x-claw-owner-pat`, and map dependency failures to `503`. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. +- Keep `/hooks/agent` trust check explicit: sender/recipient pair must be authorized by trust state before relay delivery. - Keep `/v1/relay/connect` keyed by authenticated connector DID from auth middleware, and reject non-websocket requests with clear client errors. - Keep pre-auth IP throttling enabled for `/hooks/agent` and `/v1/relay/connect` so repeated unauthenticated probes fail with `429` before auth/registry work. - Keep rate-limit failure semantics stable: verified requests over budget map to `429` with code `PROXY_RATE_LIMIT_EXCEEDED` and structured warn log event `proxy.rate_limit.exceeded`. @@ -46,5 +52,6 @@ - Keep agent-access validation centralized in `auth-middleware.ts` and call registry `POST /v1/agents/auth/validate`; treat non-`204` non-`401` responses as dependency failures (`503`). - Keep relay delivery failure mapping explicit for `/hooks/agent`: DO delivery/RPC failures -> `502`, unavailable DO namespace -> `503`. - Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. +- Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. - Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. 
- When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index 412fec3..cb269af 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -36,6 +36,7 @@ import type { RelayDeliveryResult, } from "./agent-relay-session.js"; import { parseProxyConfig } from "./config.js"; +import type { ProxyTrustStore } from "./proxy-trust-store.js"; import { createProxyApp } from "./server.js"; function hasDisallowedControlCharacter(value: string): boolean { @@ -105,10 +106,23 @@ function createHookRouteApp(input: { injectIdentityIntoMessage?: boolean; now?: () => Date; }) { + const trustStore: ProxyTrustStore = { + createPairingCode: vi.fn(), + consumePairingCode: vi.fn(), + confirmPairingCode: vi.fn(), + isAgentKnown: vi.fn(async () => true), + isPairAllowed: vi.fn( + async (pair) => + pair.responderAgentDid === "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + ), + upsertPair: vi.fn(async () => {}), + }; + return createProxyApp({ config: parseProxyConfig({ INJECT_IDENTITY_INTO_MESSAGE: input.injectIdentityIntoMessage, }), + trustStore, hooks: { now: input.now, resolveSessionNamespace: () => input.relayNamespace, @@ -184,6 +198,28 @@ describe("POST /hooks/agent", () => { expect(relayHarness.fetchRpc).toHaveBeenCalledTimes(1); }); + it("returns 403 when sender/recipient pair is not trusted", async () => { + const relayHarness = createRelayHarness(); + const app = createHookRouteApp({ + relayNamespace: relayHarness.namespace, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(response.status).toBe(403); 
+ const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); + expect(relayHarness.fetchRpc).not.toHaveBeenCalled(); + }); + it("prepends sanitized identity block when message injection is enabled", async () => { const relayHarness = createRelayHarness(); const app = createHookRouteApp({ diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index 4bb7e7b..02d41b5 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -10,6 +10,8 @@ import { type RelayDeliveryInput, } from "./agent-relay-session.js"; import type { ProxyRequestVariables } from "./auth-middleware.js"; +import type { ProxyTrustStore } from "./proxy-trust-store.js"; +import { assertTrustedPair } from "./trust-policy.js"; const MAX_AGENT_DID_LENGTH = 160; const MAX_OWNER_DID_LENGTH = 160; @@ -28,6 +30,7 @@ export type AgentHookRuntimeOptions = { type CreateAgentHookHandlerOptions = AgentHookRuntimeOptions & { logger: Logger; + trustStore: ProxyTrustStore; }; type ProxyContext = Context<{ @@ -193,6 +196,12 @@ export function createAgentHookHandler( } const recipientAgentDid = parseRecipientAgentDid(c); + await assertTrustedPair({ + trustStore: options.trustStore, + initiatorAgentDid: auth.agentDid, + responderAgentDid: recipientAgentDid, + }); + const sessionNamespace = resolveSessionNamespace(c); if (sessionNamespace === undefined) { throw new AppError({ diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index b7eb271..0f68c1e 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -1,9 +1,4 @@ -import { - AGENT_AUTH_VALIDATE_PATH, - generateUlid, - makeAgentDid, - makeHumanDid, -} from "@clawdentity/protocol"; +import { AGENT_AUTH_VALIDATE_PATH, generateUlid } from "@clawdentity/protocol"; import { encodeEd25519KeypairBase64url, generateEd25519Keypair, @@ -11,10 +6,13 @@ import { 
signCRL, signHttpRequest, } from "@clawdentity/sdk"; +import { buildTestAitClaims } from "@clawdentity/sdk/testing"; import { describe, expect, it, vi } from "vitest"; import { RELAY_RECIPIENT_AGENT_DID_HEADER } from "./agent-hook-route.js"; import type { AgentRelaySessionNamespace } from "./agent-relay-session.js"; import { parseProxyConfig } from "./config.js"; +import { PAIR_CONFIRM_PATH } from "./pairing-constants.js"; +import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; import { RELAY_CONNECT_PATH } from "./relay-connect-route.js"; import { createProxyApp } from "./server.js"; @@ -23,6 +21,7 @@ const NOW_MS = Date.now(); const NOW_SECONDS = Math.floor(NOW_MS / 1000); const ISSUER = "https://api.clawdentity.com"; const BODY_JSON = JSON.stringify({ message: "hello" }); +const KNOWN_PEER_DID = "did:claw:agent:known-peer"; type AuthHarnessOptions = { expired?: boolean; @@ -30,14 +29,13 @@ type AuthHarnessOptions = { fetchCrlFails?: boolean; fetchKeysFails?: boolean; allowCurrentAgent?: boolean; - allowCurrentOwner?: boolean; revoked?: boolean; validateStatus?: number; }; type AuthHarness = { app: ReturnType; - claims: Awaited>; + claims: ReturnType; createSignedHeaders: (input?: { body?: string; method?: "GET" | "POST"; @@ -48,46 +46,6 @@ type AuthHarness = { }) => Promise>; }; -async function buildAitClaims(input: { agentPublicKeyX: string }): Promise<{ - iss: string; - sub: string; - ownerDid: string; - name: string; - framework: string; - description: string; - cnf: { - jwk: { - kty: "OKP"; - crv: "Ed25519"; - x: string; - }; - }; - iat: number; - nbf: number; - exp: number; - jti: string; -}> { - return { - iss: ISSUER, - sub: makeAgentDid(generateUlid(NOW_MS + 10)), - ownerDid: makeHumanDid(generateUlid(NOW_MS + 20)), - name: "Proxy Agent", - framework: "openclaw", - description: "test agent", - cnf: { - jwk: { - kty: "OKP", - crv: "Ed25519", - x: input.agentPublicKeyX, - }, - }, - iat: NOW_SECONDS - 10, - nbf: NOW_SECONDS - 10, - exp: 
NOW_SECONDS + 600, - jti: generateUlid(NOW_MS + 30), - }; -} - function resolveRequestUrl(requestInput: unknown): string { if (typeof requestInput === "string") { return requestInput; @@ -167,8 +125,13 @@ async function createAuthHarness( const agentKeypair = await generateEd25519Keypair(); const encodedRegistry = encodeEd25519KeypairBase64url(registryKeypair); const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); - const claims = await buildAitClaims({ - agentPublicKeyX: encodedAgent.publicKey, + const claims = buildTestAitClaims({ + publicKeyX: encodedAgent.publicKey, + issuer: ISSUER, + nowSeconds: NOW_SECONDS - 10, + ttlSeconds: 610, + nbfSkewSeconds: 0, + seedMs: NOW_MS, }); if (options.expired) { claims.exp = NOW_SECONDS - 1; @@ -210,9 +173,14 @@ async function createAuthHarness( validateStatus: options.validateStatus, }); - const allowListAgents = - options.allowCurrentAgent === false ? [] : [claims.sub]; - const allowListOwners = options.allowCurrentOwner ? [claims.ownerDid] : []; + const trustStore = createInMemoryProxyTrustStore(); + if (options.allowCurrentAgent !== false) { + await trustStore.upsertPair({ + initiatorAgentDid: claims.sub, + responderAgentDid: KNOWN_PEER_DID, + }); + } + const relaySession = { fetch: vi.fn(async (request: Request) => { if (request.method === "POST") { @@ -235,16 +203,11 @@ async function createAuthHarness( const app = createProxyApp({ config: parseProxyConfig({ - ...(allowListAgents.length > 0 - ? { ALLOWLIST_AGENTS: allowListAgents.join(",") } - : {}), - ...(allowListOwners.length > 0 - ? { ALLOWLIST_OWNERS: allowListOwners.join(",") } - : {}), ...(options.crlStaleBehavior ? 
{ CRL_STALE_BEHAVIOR: options.crlStaleBehavior } : {}), }), + trustStore, auth: { fetchImpl: fetchMock as typeof fetch, clock: () => NOW_MS, @@ -328,12 +291,12 @@ describe("proxy auth middleware", () => { expect(body.auth.aitJti).toBe(harness.claims.jti); }); - it("returns 403 when a verified caller is not allowlisted by agent DID", async () => { + it("returns 403 when a verified caller is not trusted by agent DID", async () => { const harness = await createAuthHarness({ allowCurrentAgent: false, }); const headers = await harness.createSignedHeaders({ - nonce: "nonce-not-allowlisted", + nonce: "nonce-not-trusted", }); const response = await harness.app.request("/protected", { method: "POST", @@ -346,23 +309,26 @@ describe("proxy auth middleware", () => { expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); }); - it("returns 403 when only owner DID is allowlisted", async () => { + it("allows unknown agents to reach /pair/confirm for pairing bootstrap", async () => { const harness = await createAuthHarness({ allowCurrentAgent: false, - allowCurrentOwner: true, }); + const requestBody = JSON.stringify({ pairingCode: "missing-code" }); const headers = await harness.createSignedHeaders({ - nonce: "nonce-owner-only-allowlisted", + body: requestBody, + nonce: "nonce-pair-confirm-bootstrap", + pathWithQuery: PAIR_CONFIRM_PATH, }); - const response = await harness.app.request("/protected", { + + const response = await harness.app.request(PAIR_CONFIRM_PATH, { method: "POST", headers, - body: BODY_JSON, + body: requestBody, }); - expect(response.status).toBe(403); + expect(response.status).toBe(404); const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); + expect(body.error.code).toBe("PROXY_PAIR_CODE_NOT_FOUND"); }); it("refreshes keyset and accepts valid AIT after registry key rotation", async () => { @@ -377,8 +343,13 @@ describe("proxy auth middleware", () => { 
encodeEd25519KeypairBase64url(newRegistryKeypair); const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); - const claims = await buildAitClaims({ - agentPublicKeyX: encodedAgent.publicKey, + const claims = buildTestAitClaims({ + publicKeyX: encodedAgent.publicKey, + issuer: ISSUER, + nowSeconds: NOW_SECONDS - 10, + ttlSeconds: 610, + nbfSkewSeconds: 0, + seedMs: NOW_MS, }); const ait = await signAIT({ claims, @@ -447,11 +418,17 @@ describe("proxy auth middleware", () => { }, ); + const trustStore = createInMemoryProxyTrustStore(); + await trustStore.upsertPair({ + initiatorAgentDid: claims.sub, + responderAgentDid: KNOWN_PEER_DID, + }); + const app = createProxyApp({ config: parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", - ALLOWLIST_AGENTS: claims.sub, }), + trustStore, auth: { fetchImpl: fetchMock as typeof fetch, clock: () => NOW_MS, @@ -495,8 +472,13 @@ describe("proxy auth middleware", () => { encodeEd25519KeypairBase64url(newRegistryKeypair); const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); - const claims = await buildAitClaims({ - agentPublicKeyX: encodedAgent.publicKey, + const claims = buildTestAitClaims({ + publicKeyX: encodedAgent.publicKey, + issuer: ISSUER, + nowSeconds: NOW_SECONDS - 10, + ttlSeconds: 610, + nbfSkewSeconds: 0, + seedMs: NOW_MS, }); const ait = await signAIT({ claims, @@ -565,11 +547,17 @@ describe("proxy auth middleware", () => { }, ); + const trustStore = createInMemoryProxyTrustStore(); + await trustStore.upsertPair({ + initiatorAgentDid: claims.sub, + responderAgentDid: KNOWN_PEER_DID, + }); + const app = createProxyApp({ config: parseProxyConfig({ OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", - ALLOWLIST_AGENTS: claims.sub, }), + trustStore, auth: { fetchImpl: fetchMock as typeof fetch, clock: () => NOW_MS, diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index c70f914..b7fd6cd 100644 --- a/apps/proxy/src/auth-middleware.ts +++ 
b/apps/proxy/src/auth-middleware.ts @@ -21,6 +21,9 @@ import { } from "@clawdentity/sdk"; import { createMiddleware } from "hono/factory"; import type { ProxyConfig } from "./config.js"; +import { PAIR_CONFIRM_PATH, PAIR_START_PATH } from "./pairing-constants.js"; +import type { ProxyTrustStore } from "./proxy-trust-store.js"; +import { assertKnownTrustedAgent } from "./trust-policy.js"; export const DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS = 60 * 60 * 1000; export const DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS = 300; @@ -53,6 +56,7 @@ export type ProxyRequestVariables = RequestContextVariables & { export type ProxyAuthMiddlewareOptions = { config: ProxyConfig; logger: Logger; + trustStore: ProxyTrustStore; fetchImpl?: typeof fetch; clock?: () => number; nonceCache?: NonceCache; @@ -137,22 +141,8 @@ function dependencyUnavailableError(options: { }); } -function forbiddenError(options: { - code: string; - message: string; - details?: Record; -}): AppError { - return new AppError({ - code: options.code, - message: options.message, - status: 403, - details: options.details, - expose: true, - }); -} - -function isAgentDidAllowed(config: ProxyConfig, agentDid: string): boolean { - return config.allowList.agents.includes(agentDid); +function shouldSkipKnownAgentCheck(path: string): boolean { + return path === PAIR_START_PATH || path === PAIR_CONFIRM_PATH; } export function parseClawAuthorizationHeader(authorization?: string): string { @@ -596,13 +586,10 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { }); } - if (!isAgentDidAllowed(options.config, claims.sub)) { - throw forbiddenError({ - code: "PROXY_AUTH_FORBIDDEN", - message: "Verified caller is not allowlisted", - details: { - agentDid: claims.sub, - }, + if (!shouldSkipKnownAgentCheck(c.req.path)) { + await assertKnownTrustedAgent({ + trustStore: options.trustStore, + agentDid: claims.sub, }); } diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 24181a2..8bc3f8a 
100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -29,10 +29,6 @@ describe("proxy config", () => { openclawHookToken: undefined, registryUrl: DEFAULT_REGISTRY_URL, environment: DEFAULT_PROXY_ENVIRONMENT, - allowList: { - owners: [], - agents: [], - }, crlRefreshIntervalMs: DEFAULT_CRL_REFRESH_INTERVAL_MS, crlMaxAgeMs: DEFAULT_CRL_MAX_AGE_MS, crlStaleBehavior: "fail-open", @@ -43,10 +39,10 @@ describe("proxy config", () => { }); }); - it("supports OpenClaw-compatible env aliases", () => { + it("supports canonical proxy env inputs", () => { const config = parseProxyConfig({ PORT: "4100", - OPENCLAW_HOOKS_TOKEN: "hooks-token", + OPENCLAW_HOOK_TOKEN: "hooks-token", CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", @@ -73,34 +69,10 @@ describe("proxy config", () => { expect(config.injectIdentityIntoMessage).toBe(false); }); - it("parses allow list object and override env lists", () => { - const config = parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", - ALLOW_LIST: JSON.stringify({ - owners: ["did:claw:owner:1"], - agents: ["did:claw:agent:1"], - }), - ALLOWLIST_OWNERS: "did:claw:owner:2,did:claw:owner:3", - }); - - expect(config.allowList).toEqual({ - owners: ["did:claw:owner:2", "did:claw:owner:3"], - agents: ["did:claw:agent:1"], - }); - }); - it("accepts missing hook token for relay-only startup", () => { expect(() => parseProxyConfig({})).not.toThrow(); }); - it("throws on malformed allow list JSON", () => { - expect(() => - parseProxyConfig({ - ALLOW_LIST: "{not-json", - }), - ).toThrow(ProxyConfigError); - }); - it("throws when deprecated ALLOW_ALL_VERIFIED is set", () => { expect(() => parseProxyConfig({ @@ -109,18 +81,6 @@ describe("proxy config", () => { ).toThrow(ProxyConfigError); }); - it("throws when ALLOW_LIST includes unknown keys", () => { - expect(() => - parseProxyConfig({ - ALLOW_LIST: JSON.stringify({ - owners: [], - agents: [], - allowAllVerified: 
true, - }), - }), - ).toThrow(ProxyConfigError); - }); - it("throws on unsupported environment value", () => { expect(() => parseProxyConfig({ diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 388b1a5..12a2a77 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -86,9 +86,6 @@ const proxyRuntimeEnvSchema = z.object({ ENVIRONMENT: z .enum(proxyEnvironmentValues) .default(DEFAULT_PROXY_ENVIRONMENT), - ALLOW_LIST: z.string().optional(), - ALLOWLIST_OWNERS: z.string().optional(), - ALLOWLIST_AGENTS: z.string().optional(), CRL_REFRESH_INTERVAL_MS: z.coerce .number() .int() @@ -117,20 +114,12 @@ const proxyRuntimeEnvSchema = z.object({ ), }); -const proxyAllowListSchema = z - .object({ - owners: z.array(z.string().trim().min(1)).default([]), - agents: z.array(z.string().trim().min(1)).default([]), - }) - .strict(); - export const proxyConfigSchema = z.object({ listenPort: z.number().int().min(1).max(65535), openclawBaseUrl: z.string().url(), openclawHookToken: z.string().min(1).optional(), registryUrl: z.string().url(), environment: z.enum(proxyEnvironmentValues), - allowList: proxyAllowListSchema, crlRefreshIntervalMs: z.number().int().positive(), crlMaxAgeMs: z.number().int().positive(), crlStaleBehavior: z.enum(["fail-open", "fail-closed"]), @@ -140,20 +129,15 @@ export const proxyConfigSchema = z.object({ }); export type ProxyConfig = z.infer; -export type ProxyAllowList = z.infer; type RuntimeEnvInput = { LISTEN_PORT?: unknown; PORT?: unknown; OPENCLAW_BASE_URL?: unknown; OPENCLAW_HOOK_TOKEN?: unknown; - OPENCLAW_HOOKS_TOKEN?: unknown; REGISTRY_URL?: unknown; CLAWDENTITY_REGISTRY_URL?: unknown; ENVIRONMENT?: unknown; - ALLOW_LIST?: unknown; - ALLOWLIST_OWNERS?: unknown; - ALLOWLIST_AGENTS?: unknown; ALLOW_ALL_VERIFIED?: unknown; CRL_REFRESH_INTERVAL_MS?: unknown; CRL_MAX_AGE_MS?: unknown; @@ -162,9 +146,7 @@ type RuntimeEnvInput = { AGENT_RATE_LIMIT_WINDOW_MS?: unknown; INJECT_IDENTITY_INTO_MESSAGE?: unknown; 
OPENCLAW_STATE_DIR?: unknown; - CLAWDBOT_STATE_DIR?: unknown; OPENCLAW_CONFIG_PATH?: unknown; - CLAWDBOT_CONFIG_PATH?: unknown; HOME?: unknown; USERPROFILE?: unknown; }; @@ -272,10 +254,7 @@ function resolveStateDir( ): string { const cwd = options.cwd ?? resolveDefaultCwd(); const home = resolveHomeDir(env, options.homeDir); - const stateDirOverride = firstNonEmptyString(env, [ - "OPENCLAW_STATE_DIR", - "CLAWDBOT_STATE_DIR", - ]); + const stateDirOverride = firstNonEmptyString(env, ["OPENCLAW_STATE_DIR"]); if (stateDirOverride !== undefined) { return resolvePathWithHome(stateDirOverride, cwd, home); @@ -303,10 +282,7 @@ function resolveOpenClawConfigPath( const cwd = options.cwd ?? resolveDefaultCwd(); const home = resolveHomeDir(env, options.homeDir); const stateDir = resolveStateDir(env, options); - const configPathOverride = firstNonEmptyString(env, [ - "OPENCLAW_CONFIG_PATH", - "CLAWDBOT_CONFIG_PATH", - ]); + const configPathOverride = firstNonEmptyString(env, ["OPENCLAW_CONFIG_PATH"]); if (configPathOverride !== undefined) { return resolvePathWithHome(configPathOverride, cwd, home); @@ -520,18 +496,12 @@ function normalizeRuntimeEnv(input: unknown): Record { return { LISTEN_PORT: firstNonEmpty(env, ["LISTEN_PORT", "PORT"]), OPENCLAW_BASE_URL: firstNonEmpty(env, ["OPENCLAW_BASE_URL"]), - OPENCLAW_HOOK_TOKEN: firstNonEmpty(env, [ - "OPENCLAW_HOOK_TOKEN", - "OPENCLAW_HOOKS_TOKEN", - ]), + OPENCLAW_HOOK_TOKEN: firstNonEmpty(env, ["OPENCLAW_HOOK_TOKEN"]), REGISTRY_URL: firstNonEmpty(env, [ "REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL", ]), ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), - ALLOW_LIST: firstNonEmpty(env, ["ALLOW_LIST"]), - ALLOWLIST_OWNERS: firstNonEmpty(env, ["ALLOWLIST_OWNERS"]), - ALLOWLIST_AGENTS: firstNonEmpty(env, ["ALLOWLIST_AGENTS"]), CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), CRL_STALE_BEHAVIOR: firstNonEmpty(env, ["CRL_STALE_BEHAVIOR"]), @@ -547,62 +517,6 
@@ function normalizeRuntimeEnv(input: unknown): Record { }; } -function dedupe(values: readonly string[]): string[] { - return [...new Set(values)]; -} - -function parseDidList(input: string): string[] { - return dedupe( - input - .split(",") - .map((value) => value.trim()) - .filter((value) => value.length > 0), - ); -} - -function parseAllowList( - env: z.infer, -): ProxyAllowList { - let allowList: ProxyAllowList = { - owners: [], - agents: [], - }; - - if (env.ALLOW_LIST !== undefined) { - let parsedAllowList: unknown; - try { - parsedAllowList = JSON.parse(env.ALLOW_LIST); - } catch { - throw toConfigValidationError({ - fieldErrors: { - ALLOW_LIST: ["Expected valid JSON object"], - }, - formErrors: [], - }); - } - - const parsed = proxyAllowListSchema.safeParse(parsedAllowList); - if (!parsed.success) { - throw toConfigValidationError({ - fieldErrors: parsed.error.flatten().fieldErrors, - formErrors: parsed.error.flatten().formErrors, - }); - } - - allowList = parsed.data; - } - - if (env.ALLOWLIST_OWNERS !== undefined) { - allowList = { ...allowList, owners: parseDidList(env.ALLOWLIST_OWNERS) }; - } - - if (env.ALLOWLIST_AGENTS !== undefined) { - allowList = { ...allowList, agents: parseDidList(env.ALLOWLIST_AGENTS) }; - } - - return allowList; -} - function assertNoDeprecatedAllowAllVerified(env: RuntimeEnvInput): void { const value = env.ALLOW_ALL_VERIFIED; if ( @@ -615,9 +529,7 @@ function assertNoDeprecatedAllowAllVerified(env: RuntimeEnvInput): void { throw toConfigValidationError({ fieldErrors: { - ALLOW_ALL_VERIFIED: [ - "ALLOW_ALL_VERIFIED is no longer supported. 
Use ALLOWLIST_AGENTS.", - ], + ALLOW_ALL_VERIFIED: ["ALLOW_ALL_VERIFIED is no longer supported."], }, formErrors: [], }); @@ -628,10 +540,7 @@ function loadHookTokenFromFallback( options: ProxyConfigLoadOptions, ): void { if ( - firstNonEmpty(env as RuntimeEnvInput, [ - "OPENCLAW_HOOK_TOKEN", - "OPENCLAW_HOOKS_TOKEN", - ]) !== undefined + firstNonEmpty(env as RuntimeEnvInput, ["OPENCLAW_HOOK_TOKEN"]) !== undefined ) { return; } @@ -684,7 +593,6 @@ export function parseProxyConfig(env: unknown): ProxyConfig { openclawHookToken: parsedRuntimeEnv.data.OPENCLAW_HOOK_TOKEN, registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, environment: parsedRuntimeEnv.data.ENVIRONMENT, - allowList: parseAllowList(parsedRuntimeEnv.data), crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, crlMaxAgeMs: parsedRuntimeEnv.data.CRL_MAX_AGE_MS, crlStaleBehavior: parsedRuntimeEnv.data.CRL_STALE_BEHAVIOR, diff --git a/apps/proxy/src/pairing-constants.ts b/apps/proxy/src/pairing-constants.ts new file mode 100644 index 0000000..c73c74d --- /dev/null +++ b/apps/proxy/src/pairing-constants.ts @@ -0,0 +1,8 @@ +export const PAIR_START_PATH = "/pair/start"; +export const PAIR_CONFIRM_PATH = "/pair/confirm"; +export const OWNER_PAT_HEADER = "x-claw-owner-pat"; + +export const DEFAULT_PAIRING_CODE_TTL_SECONDS = 300; +export const MAX_PAIRING_CODE_TTL_SECONDS = 900; + +export const PROXY_TRUST_DO_NAME = "global-trust"; diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts new file mode 100644 index 0000000..3a27501 --- /dev/null +++ b/apps/proxy/src/pairing-route.test.ts @@ -0,0 +1,236 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { describe, expect, it, vi } from "vitest"; + +const INITIATOR_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_000)); +const RESPONDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_100)); +const INTRUDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_300)); + 
+vi.mock("./auth-middleware.js", async () => { + const { createMiddleware } = await import("hono/factory"); + + return { + createProxyAuthMiddleware: () => + createMiddleware(async (c, next) => { + c.set("auth", { + agentDid: c.req.header("x-test-agent-did") ?? INITIATOR_AGENT_DID, + ownerDid: c.req.header("x-test-owner-did") ?? "did:claw:human:owner", + issuer: "https://api.clawdentity.com", + aitJti: "test-ait-jti", + cnfPublicKey: "test-key", + }); + await next(); + }), + }; +}); + +import { parseProxyConfig } from "./config.js"; +import { + OWNER_PAT_HEADER, + PAIR_CONFIRM_PATH, + PAIR_START_PATH, +} from "./pairing-constants.js"; +import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; +import { createProxyApp } from "./server.js"; + +function createPairingApp(input?: { + fetchImpl?: typeof fetch; + nowMs?: () => number; +}) { + const trustStore = createInMemoryProxyTrustStore(); + const app = createProxyApp({ + config: parseProxyConfig({ + REGISTRY_URL: "https://registry.example.com", + }), + pairing: { + start: { + fetchImpl: input?.fetchImpl, + nowMs: input?.nowMs, + }, + confirm: { + nowMs: input?.nowMs, + }, + }, + trustStore, + }); + + return { + app, + trustStore, + }; +} + +describe(`POST ${PAIR_START_PATH}`, () => { + it("creates a pairing code when owner PAT controls caller agent DID", async () => { + const fetchMock = vi.fn(async (_requestInput: unknown) => + Response.json( + { + ownsAgent: true, + }, + { status: 200 }, + ), + ); + const fetchImpl = fetchMock as unknown as typeof fetch; + + const { app } = createPairingApp({ + fetchImpl, + nowMs: () => 1_700_000_000_000, + }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + [OWNER_PAT_HEADER]: "clw_pat_owner_token", + }, + body: JSON.stringify({ + agentDid: RESPONDER_AGENT_DID, + }), + }); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + expiresAt: string; + 
initiatorAgentDid: string; + pairingCode: string; + responderAgentDid: string; + }; + + expect(body.pairingCode.length).toBeGreaterThan(0); + expect(body.initiatorAgentDid).toBe(INITIATOR_AGENT_DID); + expect(body.responderAgentDid).toBe(RESPONDER_AGENT_DID); + expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); + expect(fetchImpl).toHaveBeenCalledTimes(1); + const fetchCallUrl = String(fetchMock.mock.calls[0]?.[0] ?? ""); + expect(fetchCallUrl).toContain("/v1/agents/"); + expect(fetchCallUrl).toContain("/ownership"); + }); + + it("returns 401 when owner PAT is invalid", async () => { + const fetchImpl = vi.fn( + async (_requestInput: unknown) => new Response(null, { status: 401 }), + ) as unknown as typeof fetch; + const { app } = createPairingApp({ fetchImpl }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + [OWNER_PAT_HEADER]: "clw_pat_invalid", + }, + body: JSON.stringify({ + agentDid: RESPONDER_AGENT_DID, + }), + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_OWNER_PAT_INVALID"); + }); + + it("returns 403 when owner PAT does not control caller DID", async () => { + const fetchImpl = vi.fn(async (_requestInput: unknown) => + Response.json( + { + ownsAgent: false, + }, + { status: 200 }, + ), + ) as unknown as typeof fetch; + const { app } = createPairingApp({ fetchImpl }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + [OWNER_PAT_HEADER]: "clw_pat_owner", + }, + body: JSON.stringify({ + agentDid: RESPONDER_AGENT_DID, + }), + }); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_OWNER_PAT_FORBIDDEN"); + }); +}); + +describe(`POST ${PAIR_CONFIRM_PATH}`, () => { + it("consumes pairing code 
and enables mutual trust", async () => { + const { app, trustStore } = createPairingApp({ + nowMs: () => 1_700_000_000_000, + }); + + const pairingCode = await trustStore.createPairingCode({ + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + ttlSeconds: 300, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + pairingCode: pairingCode.pairingCode, + }), + }); + + expect(response.status).toBe(201); + const body = (await response.json()) as { + initiatorAgentDid: string; + paired: boolean; + responderAgentDid: string; + }; + + expect(body).toEqual({ + paired: true, + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }); + + expect( + await trustStore.isPairAllowed({ + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }), + ).toBe(true); + expect( + await trustStore.isPairAllowed({ + initiatorAgentDid: RESPONDER_AGENT_DID, + responderAgentDid: INITIATOR_AGENT_DID, + }), + ).toBe(true); + }); + + it("rejects pair confirm when caller does not match target agent", async () => { + const { app, trustStore } = createPairingApp({ + nowMs: () => 1_700_000_000_000, + }); + + const pairingCode = await trustStore.createPairingCode({ + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + ttlSeconds: 300, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": INTRUDER_AGENT_DID, + }, + body: JSON.stringify({ + pairingCode: pairingCode.pairingCode, + }), + }); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_CODE_AGENT_MISMATCH"); + }); 
+}); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts new file mode 100644 index 0000000..0320641 --- /dev/null +++ b/apps/proxy/src/pairing-route.ts @@ -0,0 +1,368 @@ +import { parseDid } from "@clawdentity/protocol"; +import { AppError, type Logger } from "@clawdentity/sdk"; +import type { Context } from "hono"; +import type { ProxyRequestVariables } from "./auth-middleware.js"; +import { + DEFAULT_PAIRING_CODE_TTL_SECONDS, + MAX_PAIRING_CODE_TTL_SECONDS, + OWNER_PAT_HEADER, + PAIR_CONFIRM_PATH, + PAIR_START_PATH, +} from "./pairing-constants.js"; +import { + type ProxyTrustStore, + ProxyTrustStoreError, +} from "./proxy-trust-store.js"; + +const REGISTRY_AGENT_OWNERSHIP_PATH_PREFIX = "/v1/agents"; + +export { OWNER_PAT_HEADER, PAIR_CONFIRM_PATH, PAIR_START_PATH }; + +type PairingRouteContext = Context<{ + Variables: ProxyRequestVariables; +}>; + +export type PairStartRuntimeOptions = { + fetchImpl?: typeof fetch; + nowMs?: () => number; +}; + +type CreatePairStartHandlerOptions = PairStartRuntimeOptions & { + logger: Logger; + registryUrl: string; + trustStore: ProxyTrustStore; +}; + +export type PairConfirmRuntimeOptions = { + nowMs?: () => number; +}; + +type CreatePairConfirmHandlerOptions = PairConfirmRuntimeOptions & { + logger: Logger; + trustStore: ProxyTrustStore; +}; + +function parseOwnerPatHeader(headerValue: string | undefined): string { + if (typeof headerValue !== "string" || headerValue.trim().length === 0) { + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_REQUIRED", + message: "X-Claw-Owner-Pat header is required", + status: 401, + expose: true, + }); + } + + return headerValue.trim(); +} + +function normalizeRegistryUrl(registryUrl: string): string { + const baseUrl = registryUrl.endsWith("/") ? 
registryUrl : `${registryUrl}/`; + return new URL(baseUrl).toString(); +} + +function parseAgentDid(value: unknown, inputName: string): string { + if (typeof value !== "string" || value.trim().length === 0) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: `${inputName} is required`, + status: 400, + expose: true, + }); + } + + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + throw new Error("Invalid kind"); + } + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: `${inputName} must be a valid agent DID`, + status: 400, + expose: true, + }); + } + + return candidate; +} + +function parseTtlSeconds(value: unknown): number { + if (value === undefined) { + return DEFAULT_PAIRING_CODE_TTL_SECONDS; + } + + if (typeof value !== "number" || !Number.isInteger(value)) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "ttlSeconds must be an integer", + status: 400, + expose: true, + }); + } + + if (value < 1 || value > MAX_PAIRING_CODE_TTL_SECONDS) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: `ttlSeconds must be between 1 and ${MAX_PAIRING_CODE_TTL_SECONDS}`, + status: 400, + expose: true, + }); + } + + return value; +} + +async function parseJsonBody(c: PairingRouteContext): Promise { + try { + return await c.req.json(); + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "Request body must be valid JSON", + status: 400, + expose: true, + }); + } +} + +async function parseRegistryOwnershipResponse(response: Response): Promise<{ + ownsAgent: boolean; +}> { + const payload = (await response.json()) as { + ownsAgent?: unknown; + }; + if (typeof payload.ownsAgent !== "boolean") { + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", + message: "Registry owner lookup payload is invalid", + status: 503, + expose: true, + }); + } + + return { + ownsAgent: payload.ownsAgent, + }; +} 
+ +async function assertPatOwnsInitiatorAgent(input: { + fetchImpl: typeof fetch; + initiatorAgentDid: string; + ownerPat: string; + registryUrl: string; +}): Promise { + const parsedDid = parseDid(input.initiatorAgentDid); + const ownershipUrl = new URL( + `${REGISTRY_AGENT_OWNERSHIP_PATH_PREFIX}/${parsedDid.ulid}/ownership`, + input.registryUrl, + ); + + let response: Response; + try { + response = await input.fetchImpl(ownershipUrl, { + method: "GET", + headers: { + authorization: `Bearer ${input.ownerPat}`, + }, + }); + } catch { + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", + message: "Registry owner lookup is unavailable", + status: 503, + expose: true, + }); + } + + if (response.status === 401) { + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_INVALID", + message: "Owner PAT is invalid or expired", + status: 401, + expose: true, + }); + } + + if (!response.ok) { + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", + message: "Registry owner lookup is unavailable", + status: 503, + expose: true, + }); + } + + let parsed: Awaited>; + try { + parsed = await parseRegistryOwnershipResponse(response); + } catch (error) { + if (error instanceof AppError) { + throw error; + } + + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", + message: "Registry owner lookup payload is invalid", + status: 503, + expose: true, + }); + } + + if (parsed.ownsAgent) { + return; + } + + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_FORBIDDEN", + message: "Owner PAT does not control caller agent DID", + status: 403, + expose: true, + }); +} + +function toPairingCodeAppError(error: unknown): AppError { + if (error instanceof ProxyTrustStoreError) { + return new AppError({ + code: error.code, + message: error.message, + status: error.status, + expose: true, + }); + } + + return new AppError({ + code: "PROXY_PAIR_STATE_UNAVAILABLE", + message: "Pairing state is unavailable", + status: 503, + expose: true, + }); +} + +export 
function createPairStartHandler( + options: CreatePairStartHandlerOptions, +): (c: PairingRouteContext) => Promise { + const fetchImpl = options.fetchImpl ?? fetch; + const nowMs = options.nowMs ?? Date.now; + const registryUrl = normalizeRegistryUrl(options.registryUrl); + + return async (c) => { + const auth = c.get("auth"); + if (auth === undefined) { + throw new AppError({ + code: "PROXY_PAIR_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const body = (await parseJsonBody(c)) as { + agentDid?: unknown; + ttlSeconds?: unknown; + }; + + const responderAgentDid = parseAgentDid(body.agentDid, "agentDid"); + if (responderAgentDid === auth.agentDid) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "agentDid must be different from caller agent DID", + status: 400, + expose: true, + }); + } + + const ttlSeconds = parseTtlSeconds(body.ttlSeconds); + const ownerPat = parseOwnerPatHeader(c.req.header(OWNER_PAT_HEADER)); + + await assertPatOwnsInitiatorAgent({ + fetchImpl, + initiatorAgentDid: auth.agentDid, + ownerPat, + registryUrl, + }); + + const pairingCodeResult = await options.trustStore + .createPairingCode({ + initiatorAgentDid: auth.agentDid, + responderAgentDid, + ttlSeconds, + nowMs: nowMs(), + }) + .catch((error: unknown) => { + throw toPairingCodeAppError(error); + }); + + options.logger.info("proxy.pair.start", { + requestId: c.get("requestId"), + initiatorAgentDid: auth.agentDid, + responderAgentDid, + expiresAt: new Date(pairingCodeResult.expiresAtMs).toISOString(), + }); + + return c.json({ + initiatorAgentDid: pairingCodeResult.initiatorAgentDid, + responderAgentDid: pairingCodeResult.responderAgentDid, + pairingCode: pairingCodeResult.pairingCode, + expiresAt: new Date(pairingCodeResult.expiresAtMs).toISOString(), + }); + }; +} + +export function createPairConfirmHandler( + options: CreatePairConfirmHandlerOptions, +): (c: PairingRouteContext) => Promise { + const nowMs = 
options.nowMs ?? Date.now; + + return async (c) => { + const auth = c.get("auth"); + if (auth === undefined) { + throw new AppError({ + code: "PROXY_PAIR_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const body = (await parseJsonBody(c)) as { + pairingCode?: unknown; + }; + + if ( + typeof body.pairingCode !== "string" || + body.pairingCode.trim() === "" + ) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "pairingCode is required", + status: 400, + expose: true, + }); + } + + const consumedPairingCode = await options.trustStore + .confirmPairingCode({ + pairingCode: body.pairingCode.trim(), + responderAgentDid: auth.agentDid, + nowMs: nowMs(), + }) + .catch((error: unknown) => { + throw toPairingCodeAppError(error); + }); + + options.logger.info("proxy.pair.confirm", { + requestId: c.get("requestId"), + initiatorAgentDid: consumedPairingCode.initiatorAgentDid, + responderAgentDid: consumedPairingCode.responderAgentDid, + }); + + return c.json( + { + paired: true, + initiatorAgentDid: consumedPairingCode.initiatorAgentDid, + responderAgentDid: consumedPairingCode.responderAgentDid, + }, + 201, + ); + }; +} diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts new file mode 100644 index 0000000..7d04f53 --- /dev/null +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -0,0 +1,128 @@ +import { describe, expect, it, vi } from "vitest"; +import { ProxyTrustState } from "./proxy-trust-state.js"; +import { TRUST_STORE_ROUTES } from "./proxy-trust-store.js"; + +function createStorageHarness(initial: Record = {}) { + const values = new Map(Object.entries(initial)); + + return { + values, + storage: { + get: vi.fn(async (key: string) => values.get(key)), + put: vi.fn(async (key: string, value: unknown) => { + values.set(key, value); + }), + setAlarm: vi.fn(async (_scheduled: number | Date) => {}), + deleteAlarm: vi.fn(async () => {}), + }, + }; +} + +function 
createProxyTrustState(initialStorage?: Record) { + const harness = createStorageHarness(initialStorage); + const state = { + storage: harness.storage, + }; + + return { + proxyTrustState: new ProxyTrustState( + state as unknown as DurableObjectState, + ), + harness, + }; +} + +function makeRequest(path: string, body: unknown): Request { + return new Request(`https://proxy-trust-state${path}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(body), + }); +} + +describe("ProxyTrustState", () => { + it("persists and answers known-agent checks via agent peer index", async () => { + const { proxyTrustState, harness } = createProxyTrustState(); + + const upsertResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.upsertPair, { + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }), + ); + + expect(upsertResponse.status).toBe(200); + + const knownAliceResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.isAgentKnown, { + agentDid: "did:claw:agent:alice", + }), + ); + expect(knownAliceResponse.status).toBe(200); + expect((await knownAliceResponse.json()) as { known: boolean }).toEqual({ + known: true, + }); + + expect(harness.values.has("trust:agent-peers")).toBe(true); + }); + + it("does not treat pairs as known agents without agent-peer index", async () => { + const { proxyTrustState, harness } = createProxyTrustState({ + "trust:pairs": ["did:claw:agent:alice|did:claw:agent:bob"], + }); + + const knownResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.isAgentKnown, { + agentDid: "did:claw:agent:alice", + }), + ); + + expect(knownResponse.status).toBe(200); + expect((await knownResponse.json()) as { known: boolean }).toEqual({ + known: false, + }); + + expect(harness.values.get("trust:agent-peers")).toBeUndefined(); + }); + + it("confirms pairing code in one operation and persists trust", async () => { + const { 
proxyTrustState } = createProxyTrustState(); + const codeResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingCode, { + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + ttlSeconds: 60, + nowMs: 1_700_000_000_000, + }), + ); + const codeBody = (await codeResponse.json()) as { pairingCode: string }; + + const confirmResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.confirmPairingCode, { + pairingCode: codeBody.pairingCode, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_100, + }), + ); + + expect(confirmResponse.status).toBe(200); + expect( + (await confirmResponse.json()) as { initiatorAgentDid: string }, + ).toEqual({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }); + + const pairCheckResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.isPairAllowed, { + initiatorAgentDid: "did:claw:agent:bob", + responderAgentDid: "did:claw:agent:alice", + }), + ); + expect((await pairCheckResponse.json()) as { allowed: boolean }).toEqual({ + allowed: true, + }); + }); +}); diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts new file mode 100644 index 0000000..28b817d --- /dev/null +++ b/apps/proxy/src/proxy-trust-state.ts @@ -0,0 +1,438 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { + type PairingCodeConsumeInput, + type PairingCodeInput, + TRUST_STORE_ROUTES, +} from "./proxy-trust-store.js"; + +type StoredPairingCode = { + expiresAtMs: number; + initiatorAgentDid: string; + responderAgentDid: string; +}; + +type PairingCodeMap = Record; +type AgentPeersIndex = Record; + +const PAIRS_STORAGE_KEY = "trust:pairs"; +const AGENT_PEERS_STORAGE_KEY = "trust:agent-peers"; +const PAIRING_CODES_STORAGE_KEY = "trust:pairing-codes"; + +function toPairKey( + initiatorAgentDid: string, + responderAgentDid: string, +): string { + return [initiatorAgentDid, 
responderAgentDid].sort().join("|"); +} + +function isNonEmptyString(value: unknown): value is string { + return typeof value === "string" && value.trim().length > 0; +} + +function addPeer( + index: AgentPeersIndex, + leftAgentDid: string, + rightAgentDid: string, +): void { + const peers = new Set(index[leftAgentDid] ?? []); + peers.add(rightAgentDid); + index[leftAgentDid] = [...peers].sort(); +} + +function toErrorResponse(input: { + code: string; + message: string; + status: number; +}): Response { + return Response.json( + { + error: { + code: input.code, + message: input.message, + }, + }, + { status: input.status }, + ); +} + +async function parseBody(request: Request): Promise { + try { + return await request.json(); + } catch { + return undefined; + } +} + +export class ProxyTrustState { + private readonly state: DurableObjectState; + + constructor(state: DurableObjectState) { + this.state = state; + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + if (request.method !== "POST") { + return new Response("Not found", { status: 404 }); + } + + if (url.pathname === TRUST_STORE_ROUTES.createPairingCode) { + return this.handleCreatePairingCode(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.consumePairingCode) { + return this.handleConsumePairingCode(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.confirmPairingCode) { + return this.handleConfirmPairingCode(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.upsertPair) { + return this.handleUpsertPair(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.isPairAllowed) { + return this.handleIsPairAllowed(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.isAgentKnown) { + return this.handleIsAgentKnown(request); + } + + return new Response("Not found", { status: 404 }); + } + + async alarm(): Promise { + const nowMs = Date.now(); + const pairingCodes = await this.loadPairingCodes(); + + let mutated = false; + for (const [pairingCode, 
details] of Object.entries(pairingCodes)) { + if (details.expiresAtMs <= nowMs) { + delete pairingCodes[pairingCode]; + mutated = true; + } + } + + if (mutated) { + await this.savePairingCodes(pairingCodes); + } + + await this.scheduleNextCodeCleanup(pairingCodes); + } + + private async handleCreatePairingCode(request: Request): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + if ( + !body || + !isNonEmptyString(body.initiatorAgentDid) || + !isNonEmptyString(body.responderAgentDid) || + typeof body.ttlSeconds !== "number" || + !Number.isInteger(body.ttlSeconds) || + body.ttlSeconds <= 0 + ) { + return toErrorResponse({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing code create input is invalid", + status: 400, + }); + } + + const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); + const pairingCode = generateUlid(nowMs); + const expiresAtMs = nowMs + body.ttlSeconds * 1000; + + const pairingCodes = await this.loadPairingCodes(); + pairingCodes[pairingCode] = { + initiatorAgentDid: body.initiatorAgentDid, + responderAgentDid: body.responderAgentDid, + expiresAtMs, + }; + + await this.savePairingCodes(pairingCodes); + await this.scheduleNextCodeCleanup(pairingCodes); + + return Response.json({ + pairingCode, + expiresAtMs, + initiatorAgentDid: body.initiatorAgentDid, + responderAgentDid: body.responderAgentDid, + }); + } + + private async handleConsumePairingCode(request: Request): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + if ( + !body || + !isNonEmptyString(body.pairingCode) || + !isNonEmptyString(body.responderAgentDid) + ) { + return toErrorResponse({ + code: "PROXY_PAIR_CONFIRM_INVALID_BODY", + message: "Pairing code consume input is invalid", + status: 400, + }); + } + + const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : Date.now(); + const pairingCodes = await this.loadPairingCodes(); + const stored = pairingCodes[body.pairingCode]; + + if (!stored) { + return toErrorResponse({ + code: "PROXY_PAIR_CODE_NOT_FOUND", + message: "Pairing code not found", + status: 404, + }); + } + + if (stored.expiresAtMs <= nowMs) { + delete pairingCodes[body.pairingCode]; + await this.savePairingCodes(pairingCodes); + await this.scheduleNextCodeCleanup(pairingCodes); + return toErrorResponse({ + code: "PROXY_PAIR_CODE_EXPIRED", + message: "Pairing code has expired", + status: 410, + }); + } + + if (stored.responderAgentDid !== body.responderAgentDid) { + return toErrorResponse({ + code: "PROXY_PAIR_CODE_AGENT_MISMATCH", + message: "Pairing code does not match caller agent DID", + status: 403, + }); + } + + delete pairingCodes[body.pairingCode]; + await this.savePairingCodes(pairingCodes); + await this.scheduleNextCodeCleanup(pairingCodes); + + return Response.json({ + initiatorAgentDid: stored.initiatorAgentDid, + responderAgentDid: stored.responderAgentDid, + }); + } + + private async handleConfirmPairingCode(request: Request): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + if ( + !body || + !isNonEmptyString(body.pairingCode) || + !isNonEmptyString(body.responderAgentDid) + ) { + return toErrorResponse({ + code: "PROXY_PAIR_CONFIRM_INVALID_BODY", + message: "Pairing code consume input is invalid", + status: 400, + }); + } + + const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : Date.now(); + const pairingCodes = await this.loadPairingCodes(); + const stored = pairingCodes[body.pairingCode]; + + if (!stored) { + return toErrorResponse({ + code: "PROXY_PAIR_CODE_NOT_FOUND", + message: "Pairing code not found", + status: 404, + }); + } + + if (stored.expiresAtMs <= nowMs) { + delete pairingCodes[body.pairingCode]; + await this.savePairingCodes(pairingCodes); + await this.scheduleNextCodeCleanup(pairingCodes); + return toErrorResponse({ + code: "PROXY_PAIR_CODE_EXPIRED", + message: "Pairing code has expired", + status: 410, + }); + } + + if (stored.responderAgentDid !== body.responderAgentDid) { + return toErrorResponse({ + code: "PROXY_PAIR_CODE_AGENT_MISMATCH", + message: "Pairing code does not match caller agent DID", + status: 403, + }); + } + + const pairs = await this.loadPairs(); + pairs.add(toPairKey(stored.initiatorAgentDid, stored.responderAgentDid)); + + const agentPeers = await this.loadAgentPeers(); + addPeer(agentPeers, stored.initiatorAgentDid, stored.responderAgentDid); + addPeer(agentPeers, stored.responderAgentDid, stored.initiatorAgentDid); + + await this.savePairs(pairs); + await this.saveAgentPeers(agentPeers); + + delete pairingCodes[body.pairingCode]; + await this.savePairingCodes(pairingCodes); + await this.scheduleNextCodeCleanup(pairingCodes); + + return Response.json({ + initiatorAgentDid: stored.initiatorAgentDid, + responderAgentDid: stored.responderAgentDid, + }); + } + + private async handleUpsertPair(request: Request): Promise { + const body = (await parseBody(request)) as + | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } + | undefined; + if ( + !body || + !isNonEmptyString(body.initiatorAgentDid) || + !isNonEmptyString(body.responderAgentDid) + ) { + return toErrorResponse({ + code: "PROXY_PAIR_UPSERT_INVALID_BODY", + message: "Pair upsert input is invalid", + status: 400, + }); + } + + const pairs = await this.loadPairs(); + pairs.add(toPairKey(body.initiatorAgentDid, 
body.responderAgentDid)); + await this.savePairs(pairs); + + const agentPeers = await this.loadAgentPeers(); + addPeer(agentPeers, body.initiatorAgentDid, body.responderAgentDid); + addPeer(agentPeers, body.responderAgentDid, body.initiatorAgentDid); + await this.saveAgentPeers(agentPeers); + + return Response.json({ ok: true }); + } + + private async handleIsPairAllowed(request: Request): Promise { + const body = (await parseBody(request)) as + | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } + | undefined; + if ( + !body || + !isNonEmptyString(body.initiatorAgentDid) || + !isNonEmptyString(body.responderAgentDid) + ) { + return toErrorResponse({ + code: "PROXY_PAIR_CHECK_INVALID_BODY", + message: "Pair check input is invalid", + status: 400, + }); + } + + if (body.initiatorAgentDid === body.responderAgentDid) { + return Response.json({ allowed: true }); + } + + const pairs = await this.loadPairs(); + return Response.json({ + allowed: pairs.has( + toPairKey(body.initiatorAgentDid, body.responderAgentDid), + ), + }); + } + + private async handleIsAgentKnown(request: Request): Promise { + const body = (await parseBody(request)) as + | { agentDid?: unknown } + | undefined; + if (!body || !isNonEmptyString(body.agentDid)) { + return toErrorResponse({ + code: "PROXY_AGENT_KNOWN_INVALID_BODY", + message: "Agent known check input is invalid", + status: 400, + }); + } + + const agentPeers = await this.loadAgentPeers(); + if ((agentPeers[body.agentDid]?.length ?? 
0) > 0) { + return Response.json({ known: true }); + } + + return Response.json({ known: false }); + } + + private async loadPairs(): Promise> { + const raw = await this.state.storage.get(PAIRS_STORAGE_KEY); + if (!Array.isArray(raw)) { + return new Set(); + } + + const normalized = raw.filter((value) => typeof value === "string"); + return new Set(normalized); + } + + private async savePairs(pairs: Set): Promise { + await this.state.storage.put(PAIRS_STORAGE_KEY, [...pairs].sort()); + } + + private async loadAgentPeers(): Promise { + const raw = await this.state.storage.get( + AGENT_PEERS_STORAGE_KEY, + ); + if (typeof raw !== "object" || raw === null) { + return {}; + } + + const normalized: AgentPeersIndex = {}; + for (const [agentDid, peers] of Object.entries(raw)) { + if (!Array.isArray(peers)) { + continue; + } + + normalized[agentDid] = peers.filter((peer): peer is string => + isNonEmptyString(peer), + ); + } + + return normalized; + } + + private async saveAgentPeers(agentPeers: AgentPeersIndex): Promise { + await this.state.storage.put(AGENT_PEERS_STORAGE_KEY, agentPeers); + } + + private async loadPairingCodes(): Promise { + const raw = await this.state.storage.get( + PAIRING_CODES_STORAGE_KEY, + ); + + if (typeof raw !== "object" || raw === null) { + return {}; + } + + return raw; + } + + private async savePairingCodes(pairingCodes: PairingCodeMap): Promise { + await this.state.storage.put(PAIRING_CODES_STORAGE_KEY, pairingCodes); + } + + private async scheduleNextCodeCleanup( + pairingCodes: PairingCodeMap, + ): Promise { + const expiryValues = Object.values(pairingCodes).map( + (details) => details.expiresAtMs, + ); + + if (expiryValues.length === 0) { + await this.state.storage.deleteAlarm(); + return; + } + + const earliestExpiry = Math.min(...expiryValues); + await this.state.storage.setAlarm(earliestExpiry); + } +} diff --git a/apps/proxy/src/proxy-trust-store.test.ts b/apps/proxy/src/proxy-trust-store.test.ts new file mode 100644 index 
0000000..9f044c1 --- /dev/null +++ b/apps/proxy/src/proxy-trust-store.test.ts @@ -0,0 +1,128 @@ +import { describe, expect, it } from "vitest"; +import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; + +describe("in-memory proxy trust store", () => { + it("allows same-agent sender and recipient without explicit pair entry", async () => { + const store = createInMemoryProxyTrustStore(); + expect( + await store.isPairAllowed({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:alice", + }), + ).toBe(true); + }); + + it("supports symmetric pair checks", async () => { + const store = createInMemoryProxyTrustStore(); + await store.upsertPair({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }); + + expect( + await store.isPairAllowed({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }), + ).toBe(true); + expect( + await store.isPairAllowed({ + initiatorAgentDid: "did:claw:agent:bob", + responderAgentDid: "did:claw:agent:alice", + }), + ).toBe(true); + }); + + it("tracks known agents through pair index updates", async () => { + const store = createInMemoryProxyTrustStore(); + expect(await store.isAgentKnown("did:claw:agent:alice")).toBe(false); + expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(false); + + await store.upsertPair({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }); + + expect(await store.isAgentKnown("did:claw:agent:alice")).toBe(true); + expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(true); + expect(await store.isAgentKnown("did:claw:agent:charlie")).toBe(false); + }); + + it("consumes one-time pairing codes", async () => { + const store = createInMemoryProxyTrustStore(); + const code = await store.createPairingCode({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + ttlSeconds: 60, + nowMs: 1_700_000_000_000, + }); + + 
const consumed = await store.consumePairingCode({ + pairingCode: code.pairingCode, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_100, + }); + + expect(consumed).toEqual({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }); + + await expect( + store.consumePairingCode({ + pairingCode: code.pairingCode, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_200, + }), + ).rejects.toMatchObject({ + code: "PROXY_PAIR_CODE_NOT_FOUND", + status: 404, + }); + }); + + it("confirms pairing code atomically and establishes trust", async () => { + const store = createInMemoryProxyTrustStore(); + const code = await store.createPairingCode({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + ttlSeconds: 60, + nowMs: 1_700_000_000_000, + }); + + const confirmed = await store.confirmPairingCode({ + pairingCode: code.pairingCode, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_100, + }); + + expect(confirmed).toEqual({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }); + expect(await store.isAgentKnown("did:claw:agent:alice")).toBe(true); + expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(true); + expect( + await store.isPairAllowed({ + initiatorAgentDid: "did:claw:agent:alice", + responderAgentDid: "did:claw:agent:bob", + }), + ).toBe(true); + expect( + await store.isPairAllowed({ + initiatorAgentDid: "did:claw:agent:bob", + responderAgentDid: "did:claw:agent:alice", + }), + ).toBe(true); + + await expect( + store.consumePairingCode({ + pairingCode: code.pairingCode, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_200, + }), + ).rejects.toMatchObject({ + code: "PROXY_PAIR_CODE_NOT_FOUND", + status: 404, + }); + }); +}); diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts new file mode 100644 index 0000000..e53bb4d --- /dev/null +++ 
b/apps/proxy/src/proxy-trust-store.ts @@ -0,0 +1,336 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { PROXY_TRUST_DO_NAME } from "./pairing-constants.js"; + +export type PairingCodeInput = { + initiatorAgentDid: string; + responderAgentDid: string; + nowMs?: number; + ttlSeconds: number; +}; + +export type PairingCodeResult = { + pairingCode: string; + expiresAtMs: number; + initiatorAgentDid: string; + responderAgentDid: string; +}; + +export type PairingCodeConsumeInput = { + pairingCode: string; + responderAgentDid: string; + nowMs?: number; +}; + +export type PairingCodeConsumeResult = { + initiatorAgentDid: string; + responderAgentDid: string; +}; + +export type PairingInput = { + initiatorAgentDid: string; + responderAgentDid: string; +}; + +export interface ProxyTrustStore { + createPairingCode(input: PairingCodeInput): Promise; + consumePairingCode( + input: PairingCodeConsumeInput, + ): Promise; + confirmPairingCode( + input: PairingCodeConsumeInput, + ): Promise; + isAgentKnown(agentDid: string): Promise; + isPairAllowed(input: PairingInput): Promise; + upsertPair(input: PairingInput): Promise; +} + +export type ProxyTrustStateStub = { + fetch(request: Request): Promise; +}; + +export type ProxyTrustStateNamespace = { + get: (id: DurableObjectId) => ProxyTrustStateStub; + idFromName: (name: string) => DurableObjectId; +}; + +export class ProxyTrustStoreError extends Error { + readonly code: string; + readonly status: number; + + constructor(input: { code: string; message: string; status: number }) { + super(input.message); + this.name = "ProxyTrustStoreError"; + this.code = input.code; + this.status = input.status; + } +} + +export const TRUST_STORE_ROUTES = { + createPairingCode: "/pairing-codes/create", + consumePairingCode: "/pairing-codes/consume", + confirmPairingCode: "/pairing-codes/confirm", + isAgentKnown: "/agents/known", + isPairAllowed: "/pairs/check", + upsertPair: "/pairs/upsert", +} as const; + +function toPairKey( + 
initiatorAgentDid: string, + responderAgentDid: string, +): string { + return [initiatorAgentDid, responderAgentDid].sort().join("|"); +} + +function parseErrorPayload(payload: unknown): { + code: string; + message: string; +} { + if (typeof payload !== "object" || payload === null) { + return { + code: "PROXY_TRUST_STATE_ERROR", + message: "Trust state operation failed", + }; + } + + const error = (payload as { error?: unknown }).error; + if (typeof error !== "object" || error === null) { + return { + code: "PROXY_TRUST_STATE_ERROR", + message: "Trust state operation failed", + }; + } + + const code = + typeof (error as { code?: unknown }).code === "string" + ? (error as { code: string }).code + : "PROXY_TRUST_STATE_ERROR"; + const message = + typeof (error as { message?: unknown }).message === "string" + ? (error as { message: string }).message + : "Trust state operation failed"; + + return { code, message }; +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function createDurableObjectRequest(path: string, payload: unknown): Request { + return new Request(`https://proxy-trust-state${path}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(payload), + }); +} + +function resolveDurableStateStub( + namespace: ProxyTrustStateNamespace, +): ProxyTrustStateStub { + return namespace.get(namespace.idFromName(PROXY_TRUST_DO_NAME)); +} + +async function callDurableState( + namespace: ProxyTrustStateNamespace, + path: string, + payload: unknown, +): Promise { + const stub = resolveDurableStateStub(namespace); + const response = await stub.fetch(createDurableObjectRequest(path, payload)); + if (!response.ok) { + const parsed = parseErrorPayload(await parseJsonResponse(response)); + throw new ProxyTrustStoreError({ + code: parsed.code, + message: parsed.message, + status: response.status, + }); + } + + return (await 
response.json()) as T; +} + +export function createDurableProxyTrustStore( + namespace: ProxyTrustStateNamespace, +): ProxyTrustStore { + return { + async createPairingCode(input) { + return callDurableState( + namespace, + TRUST_STORE_ROUTES.createPairingCode, + input, + ); + }, + async consumePairingCode(input) { + return callDurableState( + namespace, + TRUST_STORE_ROUTES.consumePairingCode, + input, + ); + }, + async confirmPairingCode(input) { + return callDurableState( + namespace, + TRUST_STORE_ROUTES.confirmPairingCode, + input, + ); + }, + async isAgentKnown(agentDid) { + const result = await callDurableState<{ known: boolean }>( + namespace, + TRUST_STORE_ROUTES.isAgentKnown, + { agentDid }, + ); + return result.known; + }, + async isPairAllowed(input) { + const result = await callDurableState<{ allowed: boolean }>( + namespace, + TRUST_STORE_ROUTES.isPairAllowed, + input, + ); + return result.allowed; + }, + async upsertPair(input) { + await callDurableState<{ ok: true }>( + namespace, + TRUST_STORE_ROUTES.upsertPair, + input, + ); + }, + }; +} + +export function createInMemoryProxyTrustStore(): ProxyTrustStore { + const pairKeys = new Set(); + const agentPeers = new Map>(); + const pairingCodes = new Map< + string, + { + expiresAtMs: number; + initiatorAgentDid: string; + responderAgentDid: string; + } + >(); + + function cleanup(nowMs: number): void { + for (const [pairingCode, details] of pairingCodes.entries()) { + if (details.expiresAtMs <= nowMs) { + pairingCodes.delete(pairingCode); + } + } + } + + function upsertPeer(leftAgentDid: string, rightAgentDid: string): void { + const peers = agentPeers.get(leftAgentDid) ?? new Set(); + peers.add(rightAgentDid); + agentPeers.set(leftAgentDid, peers); + } + + function resolveConsumablePairingCode( + input: PairingCodeConsumeInput, + ): PairingCodeConsumeResult { + const nowMs = input.nowMs ?? 
Date.now(); + cleanup(nowMs); + + const pairing = pairingCodes.get(input.pairingCode); + if (!pairing) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_CODE_NOT_FOUND", + message: "Pairing code not found", + status: 404, + }); + } + + if (pairing.expiresAtMs <= nowMs) { + pairingCodes.delete(input.pairingCode); + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_CODE_EXPIRED", + message: "Pairing code has expired", + status: 410, + }); + } + + if (pairing.responderAgentDid !== input.responderAgentDid) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_CODE_AGENT_MISMATCH", + message: "Pairing code does not match caller agent DID", + status: 403, + }); + } + + return { + initiatorAgentDid: pairing.initiatorAgentDid, + responderAgentDid: pairing.responderAgentDid, + }; + } + + return { + async createPairingCode(input) { + const nowMs = input.nowMs ?? Date.now(); + cleanup(nowMs); + + const pairingCode = generateUlid(nowMs); + const expiresAtMs = nowMs + input.ttlSeconds * 1000; + + pairingCodes.set(pairingCode, { + initiatorAgentDid: input.initiatorAgentDid, + responderAgentDid: input.responderAgentDid, + expiresAtMs, + }); + + return { + pairingCode, + expiresAtMs, + initiatorAgentDid: input.initiatorAgentDid, + responderAgentDid: input.responderAgentDid, + }; + }, + async consumePairingCode(input) { + const consumedPair = resolveConsumablePairingCode(input); + pairingCodes.delete(input.pairingCode); + return consumedPair; + }, + async confirmPairingCode(input) { + const consumedPair = resolveConsumablePairingCode(input); + pairKeys.add( + toPairKey( + consumedPair.initiatorAgentDid, + consumedPair.responderAgentDid, + ), + ); + upsertPeer( + consumedPair.initiatorAgentDid, + consumedPair.responderAgentDid, + ); + upsertPeer( + consumedPair.responderAgentDid, + consumedPair.initiatorAgentDid, + ); + pairingCodes.delete(input.pairingCode); + return consumedPair; + }, + async isAgentKnown(agentDid) { + return (agentPeers.get(agentDid)?.size ?? 
0) > 0; + }, + async isPairAllowed(input) { + if (input.initiatorAgentDid === input.responderAgentDid) { + return true; + } + + return pairKeys.has( + toPairKey(input.initiatorAgentDid, input.responderAgentDid), + ); + }, + async upsertPair(input) { + pairKeys.add(toPairKey(input.initiatorAgentDid, input.responderAgentDid)); + upsertPeer(input.initiatorAgentDid, input.responderAgentDid); + upsertPeer(input.responderAgentDid, input.initiatorAgentDid); + }, + }; +} diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index ddc3d67..d8cec1b 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -21,6 +21,17 @@ import { } from "./auth-middleware.js"; import type { ProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; +import { PAIR_CONFIRM_PATH, PAIR_START_PATH } from "./pairing-constants.js"; +import { + createPairConfirmHandler, + createPairStartHandler, + type PairConfirmRuntimeOptions, + type PairStartRuntimeOptions, +} from "./pairing-route.js"; +import { + createInMemoryProxyTrustStore, + type ProxyTrustStore, +} from "./proxy-trust-store.js"; import { createPublicRateLimitMiddleware, DEFAULT_PRE_AUTH_IP_RATE_LIMIT_REQUESTS_PER_MINUTE, @@ -53,6 +64,11 @@ type CreateProxyAppOptions = { rateLimit?: ProxyRateLimitRuntimeOptions; hooks?: AgentHookRuntimeOptions; relay?: RelayConnectRuntimeOptions; + pairing?: { + confirm?: PairConfirmRuntimeOptions; + start?: PairStartRuntimeOptions; + }; + trustStore?: ProxyTrustStore; }; export type ProxyApp = Hono<{ @@ -68,6 +84,7 @@ function resolveLogger(logger?: Logger): Logger { export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { const logger = resolveLogger(options.logger); + const trustStore = options.trustStore ?? 
createInMemoryProxyTrustStore(); const app = new Hono<{ Bindings: { AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; @@ -96,6 +113,7 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { createProxyAuthMiddleware({ config: options.config, logger, + trustStore, ...options.auth, }), ); @@ -121,9 +139,27 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { createAgentHookHandler({ logger, injectIdentityIntoMessage: options.config.injectIdentityIntoMessage, + trustStore, ...options.hooks, }), ); + app.post( + PAIR_START_PATH, + createPairStartHandler({ + logger, + registryUrl: options.config.registryUrl, + trustStore, + ...options.pairing?.start, + }), + ); + app.post( + PAIR_CONFIRM_PATH, + createPairConfirmHandler({ + logger, + trustStore, + ...options.pairing?.confirm, + }), + ); app.get( RELAY_CONNECT_PATH, createRelayConnectHandler({ diff --git a/apps/proxy/src/trust-policy.ts b/apps/proxy/src/trust-policy.ts new file mode 100644 index 0000000..21cbe6c --- /dev/null +++ b/apps/proxy/src/trust-policy.ts @@ -0,0 +1,71 @@ +import { AppError } from "@clawdentity/sdk"; +import type { ProxyTrustStore } from "./proxy-trust-store.js"; + +function toErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : "unknown"; +} + +export async function assertKnownTrustedAgent(input: { + trustStore: ProxyTrustStore; + agentDid: string; +}): Promise { + let isKnownAgent = false; + try { + isKnownAgent = await input.trustStore.isAgentKnown(input.agentDid); + } catch (error) { + throw new AppError({ + code: "PROXY_AUTH_DEPENDENCY_UNAVAILABLE", + message: "Proxy trust state is unavailable", + status: 503, + details: { + reason: toErrorMessage(error), + }, + expose: true, + }); + } + + if (!isKnownAgent) { + throw new AppError({ + code: "PROXY_AUTH_FORBIDDEN", + message: "Verified caller is not trusted", + status: 403, + details: { + agentDid: input.agentDid, + }, + expose: true, + }); + } +} + +export async function assertTrustedPair(input: { + trustStore: ProxyTrustStore; + initiatorAgentDid: string; + responderAgentDid: string; +}): Promise { + let isPairAllowed = false; + try { + isPairAllowed = await input.trustStore.isPairAllowed({ + initiatorAgentDid: input.initiatorAgentDid, + responderAgentDid: input.responderAgentDid, + }); + } catch (error) { + throw new AppError({ + code: "PROXY_PAIR_STATE_UNAVAILABLE", + message: "Pairing state is unavailable", + status: 503, + details: { + reason: toErrorMessage(error), + }, + expose: true, + }); + } + + if (!isPairAllowed) { + throw new AppError({ + code: "PROXY_AUTH_FORBIDDEN", + message: "Verified caller is not trusted for recipient", + status: 403, + expose: true, + }); + } +} diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 87c9a5d..77b195b 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -9,6 +9,12 @@ import { parseProxyConfig, } from "./config.js"; import { resolveProxyVersion } from "./index.js"; +import { ProxyTrustState } from "./proxy-trust-state.js"; +import { + createDurableProxyTrustStore, + createInMemoryProxyTrustStore, + type ProxyTrustStateNamespace, +} from "./proxy-trust-store.js"; import { createProxyApp, type ProxyApp } from "./server.js"; export type 
ProxyWorkerBindings = { @@ -16,14 +22,11 @@ export type ProxyWorkerBindings = { PORT?: string; OPENCLAW_BASE_URL?: string; OPENCLAW_HOOK_TOKEN?: string; - OPENCLAW_HOOKS_TOKEN?: string; AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; + PROXY_TRUST_STATE?: ProxyTrustStateNamespace; REGISTRY_URL?: string; CLAWDENTITY_REGISTRY_URL?: string; ENVIRONMENT?: string; - ALLOW_LIST?: string; - ALLOWLIST_OWNERS?: string; - ALLOWLIST_AGENTS?: string; ALLOW_ALL_VERIFIED?: string; CRL_REFRESH_INTERVAL_MS?: string; CRL_MAX_AGE_MS?: string; @@ -49,13 +52,11 @@ function toCacheKey(env: ProxyWorkerBindings): string { const keyParts = [ env.OPENCLAW_BASE_URL, env.OPENCLAW_HOOK_TOKEN, - env.OPENCLAW_HOOKS_TOKEN, + env.PROXY_TRUST_STATE === undefined ? "no-trust-do" : "has-trust-do", env.REGISTRY_URL, env.CLAWDENTITY_REGISTRY_URL, env.ENVIRONMENT, - env.ALLOW_LIST, - env.ALLOWLIST_OWNERS, - env.ALLOWLIST_AGENTS, + env.ALLOW_ALL_VERIFIED, env.CRL_REFRESH_INTERVAL_MS, env.CRL_MAX_AGE_MS, env.CRL_STALE_BEHAVIOR, @@ -79,6 +80,10 @@ function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { const app = createProxyApp({ config, logger, + trustStore: + env.PROXY_TRUST_STATE !== undefined + ? 
createDurableProxyTrustStore(env.PROXY_TRUST_STATE) + : createInMemoryProxyTrustStore(), version: resolveProxyVersion(env), }); @@ -138,5 +143,5 @@ const worker = { }, }; -export { AgentRelaySession }; +export { AgentRelaySession, ProxyTrustState }; export default worker; diff --git a/apps/proxy/vitest.config.ts b/apps/proxy/vitest.config.ts index e2ec332..e371378 100644 --- a/apps/proxy/vitest.config.ts +++ b/apps/proxy/vitest.config.ts @@ -1,6 +1,20 @@ +import { fileURLToPath } from "node:url"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + alias: { + "@clawdentity/protocol": fileURLToPath( + new URL("../../packages/protocol/src/index.ts", import.meta.url), + ), + "@clawdentity/sdk/testing": fileURLToPath( + new URL("../../packages/sdk/src/testing/index.ts", import.meta.url), + ), + "@clawdentity/sdk": fileURLToPath( + new URL("../../packages/sdk/src/index.ts", import.meta.url), + ), + }, + }, test: { globals: true, }, diff --git a/apps/proxy/wrangler.jsonc b/apps/proxy/wrangler.jsonc index 2124e86..4f04180 100644 --- a/apps/proxy/wrangler.jsonc +++ b/apps/proxy/wrangler.jsonc @@ -9,6 +9,10 @@ { "name": "AGENT_RELAY_SESSION", "class_name": "AgentRelaySession" + }, + { + "name": "PROXY_TRUST_STATE", + "class_name": "ProxyTrustState" } ] }, @@ -16,6 +20,10 @@ { "tag": "v1-agent-relay-session", "new_sqlite_classes": ["AgentRelaySession"] + }, + { + "tag": "v2-proxy-trust-state", + "new_sqlite_classes": ["ProxyTrustState"] } ], "env": { @@ -26,6 +34,10 @@ { "name": "AGENT_RELAY_SESSION", "class_name": "AgentRelaySession" + }, + { + "name": "PROXY_TRUST_STATE", + "class_name": "ProxyTrustState" } ] }, @@ -33,6 +45,10 @@ { "tag": "v1-agent-relay-session", "new_sqlite_classes": ["AgentRelaySession"] + }, + { + "tag": "v2-proxy-trust-state", + "new_sqlite_classes": ["ProxyTrustState"] } ], "vars": { @@ -49,6 +65,10 @@ { "name": "AGENT_RELAY_SESSION", "class_name": "AgentRelaySession" + }, + { + "name": "PROXY_TRUST_STATE", + 
"class_name": "ProxyTrustState" } ] }, @@ -56,6 +76,10 @@ { "tag": "v1-agent-relay-session", "new_sqlite_classes": ["AgentRelaySession"] + }, + { + "tag": "v2-proxy-trust-state", + "new_sqlite_classes": ["ProxyTrustState"] } ], "vars": { @@ -71,6 +95,10 @@ { "name": "AGENT_RELAY_SESSION", "class_name": "AgentRelaySession" + }, + { + "name": "PROXY_TRUST_STATE", + "class_name": "ProxyTrustState" } ] }, @@ -78,6 +106,10 @@ { "tag": "v1-agent-relay-session", "new_sqlite_classes": ["AgentRelaySession"] + }, + { + "tag": "v2-proxy-trust-state", + "new_sqlite_classes": ["ProxyTrustState"] } ], "vars": { diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index dc26e25..920caa0 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -64,6 +64,12 @@ - Keep ordering deterministic (`id` descending) and compute `nextCursor` from the last item in the returned page. - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors`: generic 400 message in `production`, detailed `fieldErrors` in `development`/`test`. +## GET /v1/agents/:id/ownership Contract +- Require PAT auth via `createApiKeyAuth`. +- Validate `:id` as ULID and return `400 AGENT_OWNERSHIP_INVALID_PATH` for malformed IDs. +- Return `{ ownsAgent: true }` when the caller owns the agent and `{ ownsAgent: false }` for foreign or missing IDs. +- Keep this endpoint ownership-only; do not return agent metadata. + ## POST /v1/invites Contract - Require PAT auth via `createApiKeyAuth`. - Enforce admin-only access with explicit `403 INVITE_CREATE_FORBIDDEN` for authenticated non-admin callers. 
diff --git a/apps/registry/src/agent-ownership.ts b/apps/registry/src/agent-ownership.ts new file mode 100644 index 0000000..0c8ac89 --- /dev/null +++ b/apps/registry/src/agent-ownership.ts @@ -0,0 +1,53 @@ +import { parseUlid } from "@clawdentity/protocol"; +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +function invalidOwnershipPath(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_OWNERSHIP_INVALID_PATH", + message: exposeDetails + ? "Agent ownership path is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +export function parseAgentOwnershipPath(input: { + id: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): string { + const id = input.id.trim(); + if (id.length === 0) { + throw invalidOwnershipPath({ + environment: input.environment, + details: { + fieldErrors: { id: ["id is required"] }, + formErrors: [], + }, + }); + } + + try { + return parseUlid(id).value; + } catch { + throw invalidOwnershipPath({ + environment: input.environment, + details: { + fieldErrors: { id: ["id must be a valid ULID"] }, + formErrors: [], + }, + }); + } +} diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index e3eda92..9ba9bd3 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -3,7 +3,6 @@ import { AGENT_AUTH_REFRESH_PATH, AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, - type AitClaims, canonicalizeAgentRegistrationProof, encodeBase64url, generateUlid, @@ -23,6 +22,7 @@ import { verifyAIT, verifyCRL, } from "@clawdentity/sdk"; +import { buildTestAitClaims } from "@clawdentity/sdk/testing"; import { describe, expect, it } from "vitest"; 
import { DEFAULT_AGENT_LIST_LIMIT } from "./agent-list.js"; import { @@ -41,27 +41,18 @@ import { import { RESOLVE_RATE_LIMIT_MAX_REQUESTS } from "./rate-limit.js"; import app, { createRegistryApp } from "./server.js"; -function makeAitClaims(publicKey: Uint8Array): AitClaims { - const now = Math.floor(Date.now() / 1000); - return { - iss: "https://registry.clawdentity.dev", - sub: makeAgentDid(generateUlid(1700100000000)), - ownerDid: makeHumanDid(generateUlid(1700100001000)), +function makeAitClaims(publicKey: Uint8Array) { + return buildTestAitClaims({ + publicKeyX: encodeBase64url(publicKey), + issuer: "https://registry.clawdentity.dev", + nowSeconds: Math.floor(Date.now() / 1000), + ttlSeconds: 3600, + nbfSkewSeconds: 5, + seedMs: 1_700_100_000_000, name: "agent-registry-01", framework: "openclaw", description: "registry key publishing verification path", - cnf: { - jwk: { - kty: "OKP", - crv: "Ed25519", - x: encodeBase64url(publicKey), - }, - }, - iat: now, - nbf: now - 5, - exp: now + 3600, - jti: generateUlid(1700100002000), - }; + }); } type FakeD1Row = { @@ -4496,6 +4487,125 @@ describe("GET /v1/agents", () => { }); }); +describe("GET /v1/agents/:id/ownership", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700100017000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/ownership`, + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns ownsAgent=true when caller owns the agent", async () => { + const { token, authRow } = await makeValidPatContext(); + const ownedAgentId = generateUlid(1700100017100); + const { database } = createFakeDb( + [authRow], + [ + { + id: ownedAgentId, + did: makeAgentDid(ownedAgentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + 
expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${ownedAgentId}/ownership`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { ownsAgent: boolean }; + expect(body).toEqual({ ownsAgent: true }); + }); + + it("returns ownsAgent=false for non-owned or missing agent ids", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700100017200); + const missingAgentId = generateUlid(1700100017300); + const { database } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + ); + + const foreignRes = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}/ownership`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(foreignRes.status).toBe(200); + expect((await foreignRes.json()) as { ownsAgent: boolean }).toEqual({ + ownsAgent: false, + }); + + const missingRes = await createRegistryApp().request( + `/v1/agents/${missingAgentId}/ownership`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(missingRes.status).toBe(200); + expect((await missingRes.json()) as { ownsAgent: boolean }).toEqual({ + ownsAgent: false, + }); + }); + + it("returns path validation errors for invalid ids", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents/not-a-ulid/ownership", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + 
expect(res.status).toBe(400);
+    const body = (await res.json()) as {
+      error: {
+        code: string;
+        message: string;
+        details?: { fieldErrors?: Record<string, string[]> };
+      };
+    };
+    expect(body.error.code).toBe("AGENT_OWNERSHIP_INVALID_PATH");
+    expect(body.error.message).toBe("Agent ownership path is invalid");
+    expect(body.error.details?.fieldErrors).toMatchObject({
+      id: expect.any(Array),
+    });
+  });
+});
+
 describe("DELETE /v1/agents/:id", () => {
   it("returns 401 when PAT is missing", async () => {
     const agentId = generateUlid(1700200000000);
diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts
index cab1a1d..3cccc67 100644
--- a/apps/registry/src/server.ts
+++ b/apps/registry/src/server.ts
@@ -33,6 +33,7 @@ import {
   toAgentAuthResponse,
 } from "./agent-auth-lifecycle.js";
 import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js";
+import { parseAgentOwnershipPath } from "./agent-ownership.js";
 import {
   buildAgentRegistrationChallenge,
   buildAgentRegistrationFromParsed,
@@ -1322,6 +1323,28 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) {
     });
   });
 
+  app.get("/v1/agents/:id/ownership", createApiKeyAuth(), async (c) => {
+    const config = getConfig(c.env);
+    const agentId = parseAgentOwnershipPath({
+      id: c.req.param("id"),
+      environment: config.ENVIRONMENT,
+    });
+    const human = c.get("human");
+    const db = createDb(c.env.DB);
+
+    const rows = await db
+      .select({
+        id: agents.id,
+      })
+      .from(agents)
+      .where(and(eq(agents.owner_id, human.id), eq(agents.id, agentId)))
+      .limit(1);
+
+    return c.json({
+      ownsAgent: rows.length > 0,
+    });
+  });
+
   app.post(AGENT_REGISTRATION_CHALLENGE_PATH, createApiKeyAuth(), async (c) => {
     const config = getConfig(c.env);
     const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT);
diff --git a/apps/registry/vitest.config.ts b/apps/registry/vitest.config.ts
index a42bac4..e371378 100644
--- a/apps/registry/vitest.config.ts
+++ b/apps/registry/vitest.config.ts
@@ -7,6 +7,9 @@ export
default defineConfig({ "@clawdentity/protocol": fileURLToPath( new URL("../../packages/protocol/src/index.ts", import.meta.url), ), + "@clawdentity/sdk/testing": fileURLToPath( + new URL("../../packages/sdk/src/testing/index.ts", import.meta.url), + ), "@clawdentity/sdk": fileURLToPath( new URL("../../packages/sdk/src/index.ts", import.meta.url), ), diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index cf43aab..1f35ed8 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -17,6 +17,7 @@ - `http/sign` + `http/verify`: PoP request signing and verification that binds method, path+query, timestamp, nonce, and body hash. - `security/nonce-cache`: in-memory TTL nonce replay protection keyed by `agentDid + nonce`. - `agent-auth-client`: shared agent auth refresh client + retry orchestration (`executeWithAgentAuthRefreshRetry`) for CLI/runtime integrations. +- `testing/*`: shared deterministic test fixtures (e.g. AIT claims) for app/package tests. - Tests should prove tamper cases (payload change, header kid swap, signature corruption). ## Design Rules @@ -45,6 +46,7 @@ - Keep `agent-auth-client` runtime-portable (no Node-only filesystem APIs); delegate persistence/locking to callers. - Keep refresh retry policy strict: a single refresh attempt and a single request retry on retryable auth failures. - Keep per-agent refresh single-flight keyed by explicit caller-provided key to avoid duplicate refresh races. +- Keep shared test fixtures in `src/testing/*` and consume via `@clawdentity/sdk/testing` to avoid copy/paste helpers across apps. ## Testing Rules - Unit test each shared module. 
diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 799f425..0764d1a 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -9,6 +9,10 @@ ".": { "import": "./dist/index.js", "types": "./dist/index.d.ts" + }, + "./testing": { + "import": "./dist/testing/index.js", + "types": "./dist/testing/index.d.ts" } }, "scripts": { diff --git a/packages/sdk/src/testing/ait-fixtures.test.ts b/packages/sdk/src/testing/ait-fixtures.test.ts new file mode 100644 index 0000000..eead148 --- /dev/null +++ b/packages/sdk/src/testing/ait-fixtures.test.ts @@ -0,0 +1,44 @@ +import { parseDid, parseUlid } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { buildTestAitClaims } from "./ait-fixtures.js"; + +describe("buildTestAitClaims", () => { + it("builds deterministic claims from a fixed seed", () => { + const claims = buildTestAitClaims({ + publicKeyX: "test-public-key-x", + seedMs: 1_700_000_000_000, + nowSeconds: 1_700_000_000, + }); + + expect(claims.iss).toBe("https://api.clawdentity.com"); + expect(parseDid(claims.sub).kind).toBe("agent"); + expect(parseDid(claims.ownerDid).kind).toBe("human"); + expect(parseUlid(parseDid(claims.sub).ulid).timestampMs).toBe( + 1_700_000_000_010, + ); + expect(parseUlid(parseDid(claims.ownerDid).ulid).timestampMs).toBe( + 1_700_000_000_020, + ); + expect(parseUlid(claims.jti).timestampMs).toBe(1_700_000_000_030); + expect(claims.exp).toBe(1_700_000_600); + }); + + it("allows caller override fields", () => { + const claims = buildTestAitClaims({ + publicKeyX: "test-public-key-x", + issuer: "https://registry.clawdentity.dev", + name: "registry-agent", + framework: "custom", + description: "fixture", + ttlSeconds: 60, + nowSeconds: 1_700_000_000, + seedMs: 1_700_100_000_000, + }); + + expect(claims.iss).toBe("https://registry.clawdentity.dev"); + expect(claims.name).toBe("registry-agent"); + expect(claims.framework).toBe("custom"); + expect(claims.description).toBe("fixture"); + 
expect(claims.exp).toBe(1_700_000_060); + }); +}); diff --git a/packages/sdk/src/testing/ait-fixtures.ts b/packages/sdk/src/testing/ait-fixtures.ts new file mode 100644 index 0000000..80c933c --- /dev/null +++ b/packages/sdk/src/testing/ait-fixtures.ts @@ -0,0 +1,53 @@ +import { + type AitClaims, + generateUlid, + makeAgentDid, + makeHumanDid, +} from "@clawdentity/protocol"; + +export type BuildTestAitClaimsInput = { + publicKeyX: string; + issuer?: string; + nowSeconds?: number; + seedMs?: number; + ttlSeconds?: number; + nbfSkewSeconds?: number; + name?: string; + framework?: string; + description?: string; +}; + +const DEFAULT_SEED_MS = 1_700_000_000_000; +const DEFAULT_ISSUER = "https://api.clawdentity.com"; +const DEFAULT_NAME = "Proxy Agent"; +const DEFAULT_FRAMEWORK = "openclaw"; +const DEFAULT_DESCRIPTION = "test agent"; +const DEFAULT_TTL_SECONDS = 600; + +export function buildTestAitClaims(input: BuildTestAitClaimsInput): AitClaims { + const seedMs = input.seedMs ?? DEFAULT_SEED_MS; + const nowSeconds = + input.nowSeconds ?? Math.floor((input.seedMs ?? Date.now()) / 1000); + const ttlSeconds = input.ttlSeconds ?? DEFAULT_TTL_SECONDS; + const nbfSkewSeconds = input.nbfSkewSeconds ?? 5; + + return { + iss: input.issuer ?? DEFAULT_ISSUER, + sub: makeAgentDid(generateUlid(seedMs + 10)), + ownerDid: makeHumanDid(generateUlid(seedMs + 20)), + name: input.name ?? DEFAULT_NAME, + framework: input.framework ?? DEFAULT_FRAMEWORK, + description: input.description ?? 
DEFAULT_DESCRIPTION, + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: input.publicKeyX, + }, + }, + iat: nowSeconds, + nbf: nowSeconds - nbfSkewSeconds, + exp: nowSeconds + ttlSeconds, + jti: generateUlid(seedMs + 30), + }; +} diff --git a/packages/sdk/src/testing/index.ts b/packages/sdk/src/testing/index.ts new file mode 100644 index 0000000..37803a7 --- /dev/null +++ b/packages/sdk/src/testing/index.ts @@ -0,0 +1,2 @@ +export type { BuildTestAitClaimsInput } from "./ait-fixtures.js"; +export { buildTestAitClaims } from "./ait-fixtures.js"; diff --git a/packages/sdk/tsup.config.ts b/packages/sdk/tsup.config.ts index 7a3d66a..227c4d4 100644 --- a/packages/sdk/tsup.config.ts +++ b/packages/sdk/tsup.config.ts @@ -1,7 +1,10 @@ import { defineConfig } from "tsup"; export default defineConfig({ - entry: ["src/index.ts"], + entry: { + index: "src/index.ts", + "testing/index": "src/testing/index.ts", + }, format: ["esm"], dts: true, clean: true, diff --git a/tsconfig.base.json b/tsconfig.base.json index dead24d..05832e8 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -17,7 +17,8 @@ "paths": { "@clawdentity/connector": ["packages/connector/src/index.ts"], "@clawdentity/protocol": ["packages/protocol/src/index.ts"], - "@clawdentity/sdk": ["packages/sdk/src/index.ts"] + "@clawdentity/sdk": ["packages/sdk/src/index.ts"], + "@clawdentity/sdk/testing": ["packages/sdk/src/testing/index.ts"] } }, "exclude": ["node_modules", "dist"] From 997b8c7c3f84387ac6287163dffb05a7f289bcca Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 18:14:00 +0530 Subject: [PATCH 081/190] ci: build before tests in deploy-develop workflow --- .github/workflows/deploy-develop.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index fdc899b..f8bde83 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -53,12 +53,12 @@ jobs: - name: 
Typecheck run: pnpm -r typecheck - - name: Run tests - run: pnpm -r test - - name: Build run: pnpm -r build + - name: Run tests + run: pnpm -r test + - name: Capture pre-deploy rollback artifacts run: | mkdir -p artifacts From cc8db90d9303de0842760e338bbeee8390026cf4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 18:16:43 +0530 Subject: [PATCH 082/190] ci: use pnpm exec wrangler in deploy workflow --- .github/AGENTS.md | 17 +++++++++-------- .github/workflows/deploy-develop.yml | 20 ++++++++++---------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index ea8d206..c710c07 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -17,12 +17,13 @@ ## Deployment Rules (Develop) - `deploy-develop.yml` runs on pushes to `develop`. -- Run full quality gates before deployment: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build`. +- Run full quality gates before deployment: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r build`, `pnpm -r test`. - Deploy both workers in the same workflow: - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy - proxy (`apps/proxy`, env `development`) after registry health passes - Verify registry health at `https://dev.api.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. - Use workflow concurrency groups to prevent overlapping deploys for the same environment. +- Run Wrangler through workspace tooling (`pnpm exec wrangler`) in CI so commands work without a global Wrangler install on GitHub runners. ## Release Rules (CLI) - `publish-cli.yml` is manual (`workflow_dispatch`) and must accept explicit `version` + `dist_tag` inputs. 
@@ -44,13 +45,13 @@ ## Migration Rollback Strategy (Develop) - Capture pre-deploy artifacts: - - `wrangler --cwd apps/registry deployments list --env dev --json` - - `wrangler --cwd apps/proxy deployments list --env development --json || true` (non-blocking for first deploy before proxy Worker exists) - - `wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` - - `wrangler d1 export clawdentity-db-dev --remote --output ` + - `pnpm exec wrangler --cwd apps/registry deployments list --env dev --json` + - `pnpm exec wrangler --cwd apps/proxy deployments list --env development --json || true` (non-blocking for first deploy before proxy Worker exists) + - `pnpm exec wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` + - `pnpm exec wrangler d1 export clawdentity-db-dev --remote --output ` - Keep deploy snapshot collection non-blocking for Worker deployment listings (pre and post) so rollback artifact capture does not fail the workflow when a Worker has no prior deployment history. - Upload artifacts on every run for operator recovery. 
- On failed deploy: - - Registry rollback: `wrangler --cwd apps/registry rollback --env dev` - - Proxy rollback: `wrangler --cwd apps/proxy rollback --env development` - - DB rollback: `wrangler d1 time-travel restore clawdentity-db-dev --env dev --timestamp ` + - Registry rollback: `pnpm exec wrangler --cwd apps/registry rollback --env dev` + - Proxy rollback: `pnpm exec wrangler --cwd apps/proxy rollback --env development` + - DB rollback: `pnpm exec wrangler d1 time-travel restore clawdentity-db-dev --env dev --timestamp ` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index f8bde83..62b7c80 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -65,16 +65,16 @@ jobs: PREDEPLOY_TS=$(date -u +%Y-%m-%dT%H:%M:%SZ) echo "PREDEPLOY_TS=${PREDEPLOY_TS}" >> "${GITHUB_ENV}" printf "%s\n" "${PREDEPLOY_TS}" > artifacts/predeploy.timestamp - wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-pre.json + pnpm exec wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-pre.json # First proxy deploy may not have an existing Worker/deployments yet. 
- wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json || true - wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > artifacts/d1-time-travel-pre.json - wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql + pnpm exec wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json || true + pnpm exec wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > artifacts/d1-time-travel-pre.json + pnpm exec wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql - name: Apply registry dev migrations and deploy run: | - wrangler --cwd apps/registry d1 migrations apply clawdentity-db-dev --remote --env dev - wrangler --cwd apps/registry deploy --env dev --var APP_VERSION:${APP_VERSION} + pnpm exec wrangler --cwd apps/registry d1 migrations apply clawdentity-db-dev --remote --env dev + pnpm exec wrangler --cwd apps/registry deploy --env dev --var APP_VERSION:${APP_VERSION} - name: Verify registry health endpoint run: | @@ -105,7 +105,7 @@ jobs: run: | mkdir -p artifacts PROXY_DEPLOY_OUTPUT_FILE="artifacts/proxy-deploy-output.txt" - wrangler --cwd apps/proxy deploy --env development --var APP_VERSION:${APP_VERSION} 2>&1 | tee "${PROXY_DEPLOY_OUTPUT_FILE}" + pnpm exec wrangler --cwd apps/proxy deploy --env development --var APP_VERSION:${APP_VERSION} 2>&1 | tee "${PROXY_DEPLOY_OUTPUT_FILE}" PROXY_WORKERS_DEV_URL="$(grep -Eo 'https://[[:alnum:]._-]+\.workers\.dev' "${PROXY_DEPLOY_OUTPUT_FILE}" | head -n 1 || true)" PROXY_HEALTH_URL="" @@ -155,9 +155,9 @@ jobs: if: always() run: | mkdir -p artifacts - wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-post.json || true - wrangler --cwd 
apps/proxy deployments list --env development --json > artifacts/proxy-deployments-post.json || true - wrangler --cwd apps/registry d1 migrations list clawdentity-db-dev --remote --env dev > artifacts/d1-migrations-post.txt || true + pnpm exec wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-post.json || true + pnpm exec wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-post.json || true + pnpm exec wrangler --cwd apps/registry d1 migrations list clawdentity-db-dev --remote --env dev > artifacts/d1-migrations-post.txt || true - name: Rollback instructions on failure if: failure() From 288e0ccdc12e71d693dc4c2dfad02f101d1014a2 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 18:19:23 +0530 Subject: [PATCH 083/190] ci: fix d1 export artifact path in deploy workflow --- .github/AGENTS.md | 2 +- .github/workflows/deploy-develop.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index c710c07..3ee3b30 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -48,7 +48,7 @@ - `pnpm exec wrangler --cwd apps/registry deployments list --env dev --json` - `pnpm exec wrangler --cwd apps/proxy deployments list --env development --json || true` (non-blocking for first deploy before proxy Worker exists) - `pnpm exec wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` - - `pnpm exec wrangler d1 export clawdentity-db-dev --remote --output ` + - `pnpm exec wrangler d1 export clawdentity-db-dev --remote --output "${GITHUB_WORKSPACE}/artifacts/"` - Keep deploy snapshot collection non-blocking for Worker deployment listings (pre and post) so rollback artifact capture does not fail the workflow when a Worker has no prior deployment history. - Upload artifacts on every run for operator recovery. 
- On failed deploy: diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 62b7c80..a465830 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -69,7 +69,7 @@ jobs: # First proxy deploy may not have an existing Worker/deployments yet. pnpm exec wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json || true pnpm exec wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > artifacts/d1-time-travel-pre.json - pnpm exec wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output artifacts/d1-dev-predeploy.sql + pnpm exec wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output "${GITHUB_WORKSPACE}/artifacts/d1-dev-predeploy.sql" - name: Apply registry dev migrations and deploy run: | From fa260fa79fc4eb7e1df92b0e6084fa91323bca7c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 18:21:52 +0530 Subject: [PATCH 084/190] ci: retry deploy health checks to handle propagation --- .github/AGENTS.md | 1 + .github/workflows/deploy-develop.yml | 102 ++++++++++++++++++--------- 2 files changed, 71 insertions(+), 32 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 3ee3b30..3967d12 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -22,6 +22,7 @@ - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy - proxy (`apps/proxy`, env `development`) after registry health passes - Verify registry health at `https://dev.api.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. +- Health verification should use bounded retries (for example 3 minutes with 10-second polling) and `Cache-Control: no-cache` requests to tolerate short edge propagation delays after deploy. 
- Use workflow concurrency groups to prevent overlapping deploys for the same environment. - Run Wrangler through workspace tooling (`pnpm exec wrangler`) in CI so commands work without a global Wrangler install on GitHub runners. diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index a465830..e6dd057 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -79,26 +79,44 @@ jobs: - name: Verify registry health endpoint run: | python3 - <<'PY' - import json, os, sys, urllib.request, urllib.error + import json, os, sys, time, urllib.request, urllib.error + url = "https://dev.api.clawdentity.com/health" expected_version = os.environ.get("APP_VERSION", "") - try: - resp = urllib.request.urlopen(url, timeout=10) - except urllib.error.HTTPError as exc: - sys.stderr.write(f"health check failed ({exc.code} {exc.reason})\n") - sys.exit(1) - data = json.load(resp) - if data.get("status") != "ok" or data.get("environment") != "development": - raise SystemExit(f"unexpected health payload: {data}") if not expected_version: raise SystemExit("APP_VERSION was not set in workflow environment") - if data.get("version") == "0.0.0": - raise SystemExit(f"unexpected placeholder version after deploy: {data}") - if data.get("version") != expected_version: - raise SystemExit( - f"unexpected version: expected {expected_version}, got {data.get('version')}" - ) - print("healthcheck passed", data) + attempts = 18 + delay_seconds = 10 + last_error = None + + for attempt in range(1, attempts + 1): + try: + req = urllib.request.Request( + url, + headers={"Cache-Control": "no-cache"}, + ) + resp = urllib.request.urlopen(req, timeout=10) + data = json.load(resp) + if data.get("status") != "ok" or data.get("environment") != "development": + raise RuntimeError(f"unexpected health payload: {data}") + if data.get("version") == "0.0.0": + raise RuntimeError(f"unexpected placeholder version after deploy: {data}") + if 
data.get("version") != expected_version: + raise RuntimeError( + f"unexpected version: expected {expected_version}, got {data.get('version')}" + ) + print(f"healthcheck passed on attempt {attempt}", data) + break + except Exception as exc: + last_error = exc + sys.stderr.write( + f"registry health attempt {attempt}/{attempts} failed: {exc}\n" + ) + if attempt == attempts: + raise SystemExit( + f"registry health check failed after {attempts} attempts: {last_error}" + ) + time.sleep(delay_seconds) PY - name: Deploy proxy to development environment @@ -127,28 +145,48 @@ jobs: - name: Verify proxy health endpoint run: | python3 - <<'PY' - import json, os, sys, urllib.request, urllib.error + import json, os, sys, time, urllib.request, urllib.error + url = os.environ.get("PROXY_HEALTH_URL", "") expected_version = os.environ.get("APP_VERSION", "") if not url: raise SystemExit("PROXY_HEALTH_URL was not set") - try: - resp = urllib.request.urlopen(url, timeout=10) - except urllib.error.HTTPError as exc: - sys.stderr.write(f"proxy health check failed ({exc.code} {exc.reason})\n") - sys.exit(1) - data = json.load(resp) - if data.get("status") != "ok" or data.get("environment") != "development": - raise SystemExit(f"unexpected proxy health payload: {data}") if not expected_version: raise SystemExit("APP_VERSION was not set in workflow environment") - if data.get("version") == "0.0.0": - raise SystemExit(f"unexpected placeholder proxy version after deploy: {data}") - if data.get("version") != expected_version: - raise SystemExit( - f"unexpected proxy version: expected {expected_version}, got {data.get('version')}" - ) - print("proxy healthcheck passed", data) + attempts = 18 + delay_seconds = 10 + last_error = None + + for attempt in range(1, attempts + 1): + try: + req = urllib.request.Request( + url, + headers={"Cache-Control": "no-cache"}, + ) + resp = urllib.request.urlopen(req, timeout=10) + data = json.load(resp) + if data.get("status") != "ok" or data.get("environment") 
!= "development": + raise RuntimeError(f"unexpected proxy health payload: {data}") + if data.get("version") == "0.0.0": + raise RuntimeError( + f"unexpected placeholder proxy version after deploy: {data}" + ) + if data.get("version") != expected_version: + raise RuntimeError( + f"unexpected proxy version: expected {expected_version}, got {data.get('version')}" + ) + print(f"proxy healthcheck passed on attempt {attempt}", data) + break + except Exception as exc: + last_error = exc + sys.stderr.write( + f"proxy health attempt {attempt}/{attempts} failed: {exc}\n" + ) + if attempt == attempts: + raise SystemExit( + f"proxy health check failed after {attempts} attempts: {last_error}" + ) + time.sleep(delay_seconds) PY - name: Capture post-deploy state From 4f9831c14592f5151d3c86d7f0c45b9b8b7c9052 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 18:27:23 +0530 Subject: [PATCH 085/190] ci: set explicit headers for deploy health checks --- .github/AGENTS.md | 1 + .github/workflows/deploy-develop.yml | 12 ++++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 3967d12..a035d17 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -23,6 +23,7 @@ - proxy (`apps/proxy`, env `development`) after registry health passes - Verify registry health at `https://dev.api.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. - Health verification should use bounded retries (for example 3 minutes with 10-second polling) and `Cache-Control: no-cache` requests to tolerate short edge propagation delays after deploy. +- When using Python `urllib` for health checks, always set explicit request headers (`Accept: application/json` and a custom `User-Agent` such as `Clawdentity-CI/1.0`) because Cloudflare may return `403`/`1010` for the default `Python-urllib/*` user agent. 
- Use workflow concurrency groups to prevent overlapping deploys for the same environment. - Run Wrangler through workspace tooling (`pnpm exec wrangler`) in CI so commands work without a global Wrangler install on GitHub runners. diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index e6dd057..18320f6 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -93,7 +93,11 @@ jobs: try: req = urllib.request.Request( url, - headers={"Cache-Control": "no-cache"}, + headers={ + "Cache-Control": "no-cache", + "Accept": "application/json", + "User-Agent": "Clawdentity-CI/1.0", + }, ) resp = urllib.request.urlopen(req, timeout=10) data = json.load(resp) @@ -161,7 +165,11 @@ jobs: try: req = urllib.request.Request( url, - headers={"Cache-Control": "no-cache"}, + headers={ + "Cache-Control": "no-cache", + "Accept": "application/json", + "User-Agent": "Clawdentity-CI/1.0", + }, ) resp = urllib.request.urlopen(req, timeout=10) data = json.load(resp) From 9f3751daa49b914d60c35e5741088529676e8347 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 21:13:24 +0530 Subject: [PATCH 086/190] feat(pairing): add QR ticket flow and init registry overrides --- AGENTS.md | 51 ++ apps/cli/AGENTS.md | 1 + apps/cli/package.json | 14 +- .../openclaw-skill/skill/SKILL.md | 82 +- .../skill/references/clawdentity-protocol.md | 16 +- apps/cli/src/AGENTS.md | 1 + apps/cli/src/commands/AGENTS.md | 17 + apps/cli/src/commands/config.test.ts | 43 + apps/cli/src/commands/config.ts | 32 +- apps/cli/src/commands/pair.test.ts | 323 +++++++ apps/cli/src/commands/pair.ts | 816 ++++++++++++++++++ apps/cli/src/config/manager.test.ts | 19 + apps/cli/src/config/manager.ts | 11 +- apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/cli/src/types/jsqr.d.ts | 28 + apps/cli/tsup.config.ts | 1 + apps/openclaw-skill/AGENTS.md | 42 +- apps/openclaw-skill/skill/SKILL.md | 82 +- 
.../skill/references/clawdentity-protocol.md | 16 +- apps/proxy/.env.example | 3 +- apps/proxy/AGENTS.md | 18 +- apps/proxy/src/AGENTS.md | 7 +- apps/proxy/src/agent-hook-route.test.ts | 5 +- apps/proxy/src/auth-middleware.test.ts | 14 +- apps/proxy/src/config.test.ts | 80 +- apps/proxy/src/config.ts | 98 --- apps/proxy/src/index.test.ts | 7 +- apps/proxy/src/pairing-constants.ts | 4 +- apps/proxy/src/pairing-route.test.ts | 77 +- apps/proxy/src/pairing-route.ts | 316 +++++-- apps/proxy/src/pairing-ticket.ts | 161 ++++ apps/proxy/src/proxy-trust-state.test.ts | 40 +- apps/proxy/src/proxy-trust-state.ts | 217 +++-- apps/proxy/src/proxy-trust-store.test.ts | 65 +- apps/proxy/src/proxy-trust-store.ts | 160 ++-- apps/proxy/src/server.test.ts | 21 +- apps/proxy/src/worker.ts | 2 - pnpm-lock.yaml | 188 ++++ 39 files changed, 2429 insertions(+), 659 deletions(-) create mode 100644 apps/cli/src/commands/pair.test.ts create mode 100644 apps/cli/src/commands/pair.ts create mode 100644 apps/cli/src/types/jsqr.d.ts create mode 100644 apps/proxy/src/pairing-ticket.ts diff --git a/AGENTS.md b/AGENTS.md index a068018..0ea3757 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -85,6 +85,57 @@ - Hono apps are tested via `app.request()` (Hono's built-in test client) — no wrangler or miniflare needed for unit tests. - Pass mock bindings as the third argument: `app.request("/path", {}, { DB: {}, ENVIRONMENT: "test" })`. 
+## Dual OpenClaw Container Baseline (Skill E2E) +- Runtime stack for local dual-agent tests lives in sibling repo `~/Workdir/clawdbot`: + - Compose file: `docker-compose.dual.yml` + - Env file: `.env.dual` + - Containers: `clawdbot-agent-alpha-1` (`localhost:18789`), `clawdbot-agent-beta-1` (`localhost:19001`) +- Clean pre-skill baseline state is persisted as host snapshots: + - `~/.openclaw-baselines/alpha-kimi-preskill` + - `~/.openclaw-baselines/beta-kimi-preskill` +- Latest paired-and-approved baseline (saved on 2026-02-17) is: + - `~/.openclaw-baselines/alpha-kimi-preskill-device-approved-20260217-194756` + - `~/.openclaw-baselines/beta-kimi-preskill-device-approved-20260217-194756` + - stable aliases: + - `~/.openclaw-baselines/alpha-kimi-preskill-device-approved-latest` + - `~/.openclaw-baselines/beta-kimi-preskill-device-approved-latest` +- Current stable paired baseline (saved on 2026-02-17) is: + - `~/.openclaw-baselines/alpha-kimi-paired-stable-20260217-200909` + - `~/.openclaw-baselines/beta-kimi-paired-stable-20260217-200909` + - stable aliases: + - `~/.openclaw-baselines/alpha-kimi-paired-stable-latest` + - `~/.openclaw-baselines/beta-kimi-paired-stable-latest` +- Baseline contract: + - OpenClaw config exists (`~/.openclaw/openclaw.json`) with `agents.defaults.model.primary = "kimi-coding/k2p5"`. + - No Clawdentity relay skill artifacts are installed in workspace yet. + - This is the restore point for repeated “install skill + onboard + pairing” user-flow tests. +- Restore workflow before each skill test cycle: + - Stop dual compose stack. + - Replace `~/.openclaw-alpha` and `~/.openclaw-beta` contents from baseline snapshots. + - Start dual compose stack. + - Run skill-install/onboarding flow from that restored state. 
+ - Recommended fast restore command: + - `rsync -a --delete ~/.openclaw-baselines/alpha-kimi-paired-stable-latest/ ~/.openclaw-alpha/ && rsync -a --delete ~/.openclaw-baselines/beta-kimi-paired-stable-latest/ ~/.openclaw-beta/` +- Snapshot refresh workflow after reaching a new known-good state: + - Stop dual compose stack. + - Copy `~/.openclaw-alpha` and `~/.openclaw-beta` into new timestamped folders under `~/.openclaw-baselines`. + - Start dual compose stack. + - Update this section with the new snapshot folder names. +- Pairing issue runbook (`Disconnected (1008): pairing required` in UI): + - Cause: OpenClaw device approval is pending; this is gateway pairing, not Clawdentity peer trust pairing. + - Scope clarification: + - This error is unrelated to proxy trust bootstrap (`/pair/start` + `/pair/confirm`). + - Fixing this error only restores OpenClaw UI/device access. + - Clawdentity trust pairing is a separate step for inter-agent relay authorization. + - Check pending requests: + - `docker exec clawdbot-agent-alpha-1 sh -lc 'node openclaw.mjs devices list --json'` + - `docker exec clawdbot-agent-beta-1 sh -lc 'node openclaw.mjs devices list --json'` + - Approve each pending request ID: + - `docker exec clawdbot-agent-alpha-1 sh -lc 'node openclaw.mjs devices approve '` + - `docker exec clawdbot-agent-beta-1 sh -lc 'node openclaw.mjs devices approve '` + - Re-open UI: + - `http://localhost:18789/` and `http://localhost:19001/` + ## Scaffold Best Practices - Start by reviewing README, PRD, and the active execution tracker issue so documentation mirrors the execution model. - Define the workspace layout now: `apps/registry`, `apps/proxy`, `apps/cli`, `packages/sdk`, and `packages/protocol` (with shared tooling such as `pnpm-workspace.yaml`, `tsconfig.base.json`, and `biome.json`) so downstream tickets have a known structure. 
diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 2ddd47c..1f739bd 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -19,6 +19,7 @@ - Prefer `@clawdentity/sdk` helpers (`decodeAIT`) when surfacing agent metadata instead of parsing JWTs manually. - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. - Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. +- Keep publish artifacts ESM-compatible and avoid bundling CJS-only runtime deps that rely on dynamic `require` (for example `ws`); externalize them and declare them in CLI `dependencies` so installed binaries start cleanly. - npm `--skill` installer behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. - Keep `skill-bundle/openclaw-skill/` in sync with `apps/openclaw-skill` via `pnpm -F clawdentity run sync:skill-bundle` before build/pack so `postinstall --skill` works in clean installs. - Keep `skill-bundle/openclaw-skill/dist/relay-to-peer.mjs` tracked in git so clean-checkout tests and packaged installs have the required relay artifact before workspace builds run. 
diff --git a/apps/cli/package.json b/apps/cli/package.json index 30d0c7e..4c5f2e5 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,6 +1,6 @@ { "name": "clawdentity", - "version": "0.0.0", + "version": "0.0.2", "type": "module", "publishConfig": { "access": "public" @@ -8,7 +8,7 @@ "main": "./dist/index.js", "types": "./dist/index.d.ts", "bin": { - "clawdentity": "./dist/bin.js" + "clawdentity": "dist/bin.js" }, "exports": { ".": { @@ -32,12 +32,18 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "commander": "^13.1.0" + "commander": "^13.1.0", + "jsqr": "^1.4.0", + "pngjs": "^7.0.0", + "qrcode": "^1.5.4", + "ws": "^8.19.0" }, "devDependencies": { "@clawdentity/connector": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", - "@types/node": "^22.18.11" + "@types/node": "^22.18.11", + "@types/pngjs": "^6.0.5", + "@types/qrcode": "^1.5.6" } } diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md index 9466c2c..2bf3783 100644 --- a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md +++ b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md @@ -38,8 +38,9 @@ Use this skill when any of the following are requested: ## Invite Input Assumption -Provide a valid invite code string before running this skill. -Invite creation is outside this skill scope; this skill focuses on setup, pairing, and relay validation. +Invite creation is outside this skill scope. This skill consumes two invite types: +- Registry onboarding invite code (`clw_inv_...`) to obtain PAT when local API key is missing. +- OpenClaw peer relay invite code (`clawd1_...`) for `openclaw setup`. 
## Human + Agent Flow (strict user-style) @@ -59,14 +60,21 @@ Use these commands as the default execution path for skill utilization: - Initialize local CLI config: - `clawdentity config init` + - `clawdentity config init --registry-url ` (supports first-run registry URL override) + - or set env before init: `CLAWDENTITY_REGISTRY_URL=` (primary global override) + - compatible alias: `CLAWDENTITY_REGISTRY=` - Configure registry URL and API key when missing: - `clawdentity config set registryUrl ` +- Complete registry onboarding when API key is missing: + - `clawdentity invite redeem ` + - `clawdentity invite redeem --registry-url ` +- Configure API key only for non-invite fallback: - `clawdentity config set apiKey ` - Create and inspect local OpenClaw agent identity: - `clawdentity agent create --framework openclaw` - `clawdentity agent inspect ` - Apply OpenClaw invite setup: - - `clawdentity openclaw setup --invite-code ` + - `clawdentity openclaw setup --invite-code ` - Start connector runtime for relay handoff: - `clawdentity connector start ` - Optional persistent connector autostart: @@ -75,47 +83,58 @@ Use these commands as the default execution path for skill utilization: - `clawdentity openclaw doctor` - `clawdentity openclaw relay test --peer ` -Pairing bootstrap for trust policy is API-based in the current release (no dedicated pairing CLI command yet): +Pairing bootstrap uses CLI commands in the current release: - Owner/initiator starts pairing on initiator proxy: - - `POST /pair/start` - - Requires `Authorization: Claw ` and `x-claw-owner-pat` - - Body: `{"agentDid":""}` + - `clawdentity pair start --proxy-url --qr` + - Optionally pass explicit owner PAT: `--owner-pat ` - Responder confirms on responder proxy: - - `POST /pair/confirm` - - Requires `Authorization: Claw ` - - Body: `{"pairingCode":""}` + - `clawdentity pair confirm --qr-file --proxy-url ` + - optional global proxy URL env fallback: `CLAWDENTITY_PROXY_URL=` Successful confirm establishes 
mutual trust for the two agent DIDs. After confirm, both directions are allowed for trusted delivery. 1. Confirm prerequisites with the human. - Confirm `clawdentity` CLI is installed and runnable. -- Confirm API key exists for this agent (if missing, ask the human for it). +- Confirm local agent name. +- Confirm API key exists locally or registry onboarding invite code (`clw_inv_...`) is available. +- Confirm OpenClaw peer relay invite code (`clawd1_...`) is available for setup. +- Do not request API key and registry invite code in the same prompt. +- Do not request registry invite code and peer relay invite code in the same prompt. +- Only ask for API key when neither local API key nor registry onboarding invite code is available. - Confirm OpenClaw state directory path if non-default. - Confirm OpenClaw base URL if local endpoint is non-default. +- Confirm each side proxy URL for pairing command execution. 2. Confirm skill artifact exists in workspace skills directory. - Ensure `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` exists. - If missing, install/update skill package contents before setup. -3. Configure local Clawdentity identity for this OpenClaw agent. +3. Initialize local Clawdentity config. - Run `clawdentity config init`. -- If needed, ask the human for API key and run `clawdentity config set apiKey `. +- Use `clawdentity config init --registry-url ` when registry URL override is required. + +4. Complete registry onboarding auth before agent creation. +- If API key already exists, continue. +- Else redeem registry onboarding invite: + - `clawdentity invite redeem ` + - optional: `--registry-url ` +- If registry invite code is unavailable, fallback to API key path: + - ask human for API key + - run `clawdentity config set apiKey ` + +5. Configure local Clawdentity identity for this OpenClaw agent. - Create identity: `clawdentity agent create --framework openclaw`. - Verify identity: `clawdentity agent inspect `. -4. 
Ask the human for invite code. -- Prompt exactly for one invite code string. -- Do not ask for DID/proxy URL when invite code is present. - -5. Run automated setup from invite code. +6. Run automated setup from peer relay invite code. - Execute: - `clawdentity openclaw setup --invite-code ` + `clawdentity openclaw setup --invite-code ` - Use `--openclaw-dir ` when state directory is non-default. - Use `--openclaw-base-url ` when local OpenClaw HTTP endpoint is non-default. - Use `--peer-alias ` only when alias override is required. -6. Verify setup outputs. +7. Verify setup outputs. - Confirm setup reports: - peer alias - peer DID @@ -125,17 +144,19 @@ Successful confirm establishes mutual trust for the two agent DIDs. After confir - relay runtime config path - Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. -7. Start connector runtime for local relay handoff. +8. Start connector runtime for local relay handoff. - Run `clawdentity connector start `. - Optional: run `clawdentity connector service install ` for persistent autostart. -8. Complete trust pairing bootstrap. -- Run pairing start (`POST /pair/start`) from the owner/initiator side. -- Share returned one-time `pairingCode` with responder side. -- Run pairing confirm (`POST /pair/confirm`) from responder side. +9. Complete trust pairing bootstrap. +- Run pairing start from owner/initiator side: + - `clawdentity pair start --proxy-url --qr` +- Share the one-time QR image with responder side. +- Run pairing confirm from responder side: + - `clawdentity pair confirm --qr-file --proxy-url ` - Confirm pairing success before relay test. -9. Validate with user-style relay test. +10. Validate with user-style relay test. - Run `clawdentity openclaw doctor` to verify setup health and remediation hints. - Run `clawdentity openclaw relay test --peer ` to execute a probe. - Confirm probe success and connector-mediated delivery logs. 
@@ -144,10 +165,13 @@ Successful confirm establishes mutual trust for the two agent DIDs. After confir ## Required question policy Ask the human only when required inputs are missing: -- Missing Clawdentity API key. +- Missing local agent name. +- Missing peer relay invite code (`clawd1_...`). +- Missing registry onboarding invite code (`clw_inv_...`) when API key is absent. +- Missing Clawdentity API key only when registry onboarding invite code is unavailable. +- Missing initiator/responder proxy URLs for pairing commands. - Unclear OpenClaw state directory. - Non-default OpenClaw base URL. -- Missing invite code. - Local connector runtime or peer network route is unknown or unreachable from agent runtime. ## Failure Handling @@ -158,7 +182,7 @@ If setup or relay fails: - Ensure connector runtime is active (`clawdentity connector start `). - Re-run `clawdentity openclaw doctor`. - Re-run `clawdentity openclaw relay test --peer `. -- Re-run the same user-style flow from step 5 onward only after health checks pass. +- Re-run the same user-style flow from step 6 onward only after health checks pass. ## Bundled Resources diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md index 2c4c476..7e9635c 100644 --- a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md @@ -67,35 +67,37 @@ Rules: Relay delivery policy is trust-pair based on proxy side. Pairing must be completed before first cross-agent delivery. -Current pairing contract is API-based (no dedicated CLI pairing command): +Current pairing contract is ticket-based with CLI support: 1. 
Initiator owner starts pairing: - - `POST /pair/start` + - CLI: `clawdentity pair start --proxy-url --qr` + - proxy route: `POST /pair/start` - headers: - `Authorization: Claw ` - `x-claw-owner-pat: ` - - body: + - body (optional): ```json { - "agentDid": "did:claw:agent:01RESPONDER..." + "ttlSeconds": 900 } ``` 2. Responder confirms pairing: - - `POST /pair/confirm` + - CLI: `clawdentity pair confirm --qr-file --proxy-url ` + - proxy route: `POST /pair/confirm` - headers: - `Authorization: Claw ` - body: ```json { - "pairingCode": "01PAIRCODE..." + "ticket": "clwpair1_..." } ``` Rules: -- `pairingCode` is one-time and expires. +- `ticket` is one-time and expires (default 15 minutes). - Confirm establishes mutual trust for the initiator/responder pair. - Same-agent sender/recipient is allowed by policy without explicit pair entry. diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 2b75f32..6e5d6f4 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -18,6 +18,7 @@ - `invite` command routes must use endpoint constants from `@clawdentity/protocol` (`INVITES_PATH`, `INVITES_REDEEM_PATH`) instead of inline path literals. - Agent auth refresh state is stored per-agent at `~/.clawdentity/agents//registry-auth.json` and must be written with secure file permissions. - `agent auth refresh` must use `Authorization: Claw ` + PoP headers from local agent keys and must not require PAT config. +- `pair` command logic should stay in `commands/pair.ts`; keep proxy pairing bootstrap (`/pair/start`, `/pair/confirm`) CLI-driven with local AIT + PoP proof headers and one-time ticket QR support (`--qr`, `--qr-file`). - `connector start ` must validate local agent material (`identity.json`, `ait.jwt`, `secret.key`, `registry-auth.json`) before starting runtime and must fail with stable CLI errors when files are missing/invalid. - `connector start` must print the local outbound handoff endpoint so transform troubleshooting is deterministic. 
- `connector service install ` must install user-scoped autostart integration (`launchd` on macOS, `systemd --user` on Linux) so connector runtime survives host restarts. diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 0688eff..0c197c6 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -11,6 +11,11 @@ - For new command-domain errors, use SDK `AppError` with stable `code` values. - Normalize Commander option keys at the command boundary when helper/runtime option names differ (for example `--peer` -> `peerAlias`) so flags are never silently ignored. +## Config Command Rules +- `config init` must support first-run registry override from both `--registry-url` and environment variables. +- Precedence for initial registry URL is: CLI flag, then `CLAWDENTITY_REGISTRY_URL`, then `CLAWDENTITY_REGISTRY`, then default production URL. +- `config init` must stay non-destructive: if config file already exists, do not overwrite. + ## Verification Command Rules - `verify` must preserve the `✅`/`❌` output contract with explicit reasons. - Token argument can be either a raw token or file path; missing file paths should fall back to raw token mode. @@ -24,6 +29,9 @@ - `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. - `openclaw setup` must set `hooks.allowRequestSessionKey=false` by default and retain `hooks.allowedSessionKeyPrefixes` enforcement for safer `/hooks/agent` session routing. - Keep thrown command errors static (no interpolated runtime values); include variable context in error details/log fields. Diagnostic check output (`openclaw doctor`, `openclaw relay test`) may include concrete paths/aliases so operators can remediate quickly. 
+- Keep invite-type distinction explicit in output/docs: + - `clw_inv_...` = registry onboarding invite (`invite redeem`) + - `clawd1_...` = OpenClaw peer relay invite (`openclaw setup`) ## Connector Command Rules - `connector start ` is the runtime entrypoint for local relay handoff and must remain long-running when connector runtime provides a wait/closed primitive. @@ -83,3 +91,12 @@ - Relay probe must target local OpenClaw `POST /hooks/send-to-peer` with deterministic payload fields (`peer`, `sessionId`, `message`). - Relay test output must summarize endpoint, HTTP status, and remediation guidance when delivery fails. - `openclaw relay test --json` must emit a stable result envelope and include preflight details when preflight failed. + +## Pair Command Rules +- `pair start ` must call proxy `/pair/start` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. +- `pair start` must send owner PAT via `x-claw-owner-pat`, defaulting to configured API key unless explicitly overridden by `--owner-pat`. +- `pair start --qr` must generate a one-time local PNG QR containing the returned ticket and print the filesystem path. +- `pair confirm ` must call proxy `/pair/confirm` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. +- `pair confirm` must accept either `--qr-file ` (primary) or `--ticket ` (fallback), never both. +- `pair` commands must accept proxy URL via `--proxy-url` and fallback to env `CLAWDENTITY_PROXY_URL` when the flag is absent. +- `pair` commands must fail with deterministic operator messages for invalid ticket/QR input, missing local agent proof material, and proxy auth/state errors. 
diff --git a/apps/cli/src/commands/config.test.ts b/apps/cli/src/commands/config.test.ts index 8e99134..d24126b 100644 --- a/apps/cli/src/commands/config.test.ts +++ b/apps/cli/src/commands/config.test.ts @@ -40,6 +40,7 @@ const mockedWriteConfig = vi.mocked(writeConfig); const mockedSetConfigValue = vi.mocked(setConfigValue); const mockedGetConfigValue = vi.mocked(getConfigValue); const mockedResolveConfig = vi.mocked(resolveConfig); +const previousEnv = process.env; const buildErrnoError = (code: string): NodeJS.ErrnoException => { const error = new Error(code) as NodeJS.ErrnoException; @@ -97,6 +98,7 @@ const runConfigCommand = async (args: string[]) => { describe("config command", () => { beforeEach(() => { vi.clearAllMocks(); + process.env = { ...previousEnv }; mockedReadConfig.mockResolvedValue({ registryUrl: "https://api.clawdentity.com", @@ -107,6 +109,7 @@ describe("config command", () => { }); afterEach(() => { + process.env = previousEnv; process.exitCode = undefined; }); @@ -125,6 +128,46 @@ describe("config command", () => { expect(result.exitCode).toBeUndefined(); }); + it("initializes config with --registry-url override", async () => { + mockedAccess.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + await runConfigCommand([ + "init", + "--registry-url", + "https://dev.api.clawdentity.com", + ]); + + expect(mockedWriteConfig).toHaveBeenCalledWith({ + registryUrl: "https://dev.api.clawdentity.com", + }); + }); + + it("initializes config with env registry override", async () => { + mockedAccess.mockRejectedValueOnce(buildErrnoError("ENOENT")); + process.env.CLAWDENTITY_REGISTRY = "https://dev.api.clawdentity.com"; + + await runConfigCommand(["init"]); + + expect(mockedWriteConfig).toHaveBeenCalledWith({ + registryUrl: "https://dev.api.clawdentity.com", + }); + }); + + it("prefers --registry-url over env registry override", async () => { + mockedAccess.mockRejectedValueOnce(buildErrnoError("ENOENT")); + process.env.CLAWDENTITY_REGISTRY = 
"https://env.api.clawdentity.com"; + + await runConfigCommand([ + "init", + "--registry-url", + "https://flag.api.clawdentity.com", + ]); + + expect(mockedWriteConfig).toHaveBeenCalledWith({ + registryUrl: "https://flag.api.clawdentity.com", + }); + }); + it("skips init when config already exists", async () => { mockedAccess.mockResolvedValueOnce(undefined); diff --git a/apps/cli/src/commands/config.ts b/apps/cli/src/commands/config.ts index 29abc27..0737333 100644 --- a/apps/cli/src/commands/config.ts +++ b/apps/cli/src/commands/config.ts @@ -55,6 +55,21 @@ const getValidatedKey = (key: string): CliConfigKey | undefined => { return undefined; }; +interface ConfigInitOptions { + registryUrl?: string; +} + +const getEnvRegistryUrlOverride = (): string | undefined => { + const envCandidates = [ + process.env.CLAWDENTITY_REGISTRY_URL, + process.env.CLAWDENTITY_REGISTRY, + ]; + + return envCandidates.find((value): value is string => { + return typeof value === "string" && value.length > 0; + }); +}; + export const createConfigCommand = (): Command => { const configCommand = new Command("config").description( "Manage local CLI configuration", @@ -63,8 +78,9 @@ export const createConfigCommand = (): Command => { configCommand .command("init") .description("Initialize local config file") + .option("--registry-url ", "Initialize config with registry URL") .action( - withErrorHandling("config init", async () => { + withErrorHandling("config init", async (options: ConfigInitOptions) => { const configFilePath = getConfigFilePath(); try { @@ -78,10 +94,20 @@ export const createConfigCommand = (): Command => { } const config = await readConfig(); - await writeConfig(config); + const registryUrl = + options.registryUrl ?? + getEnvRegistryUrlOverride() ?? 
+ config.registryUrl; + + await writeConfig({ + ...config, + registryUrl, + }); writeStdoutLine(`Initialized config at ${configFilePath}`); - writeStdoutLine(JSON.stringify(maskApiKey(config), null, 2)); + writeStdoutLine( + JSON.stringify(maskApiKey({ ...config, registryUrl }), null, 2), + ); }), ); diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts new file mode 100644 index 0000000..b61505f --- /dev/null +++ b/apps/cli/src/commands/pair.test.ts @@ -0,0 +1,323 @@ +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, +} from "@clawdentity/sdk"; +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { confirmPairing, createPairCommand, startPairing } from "./pair.js"; + +const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +type PairFixture = { + ait: string; + secretKeyBase64url: string; +}; + +const createPairFixture = async (): Promise => { + const keypair = await generateEd25519Keypair(); + const encoded = encodeEd25519KeypairBase64url(keypair); + + return { + ait: "ey.mock.ait", + secretKeyBase64url: encoded.secretKey, + }; +}; + +const createReadFileMock = (fixture: PairFixture) => { + return vi.fn(async (filePath: string, encoding?: BufferEncoding) => { + if (filePath.endsWith("/ait.jwt")) { + return fixture.ait; + } + + if (filePath.endsWith("/secret.key")) { + return fixture.secretKeyBase64url; + } + + if (filePath.endsWith("pair.png")) { + if (encoding) { + return ""; + } + return new Uint8Array([1, 2, 3, 4]); + } + + throw buildErrnoError("ENOENT"); + }); +}; + +const previousEnv = process.env; + +describe("pair command helpers", () => { + beforeEach(() => { + vi.clearAllMocks(); + process.env = { ...previousEnv }; + }); + + afterEach(() => { + process.env = previousEnv; + }); + + it("starts pairing with local agent proof and 
configured owner PAT", async () => { + const fixture = await createPairFixture(); + const readFileImpl = createReadFileMock(fixture); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const fetchImpl = vi.fn(async (_url: string, _init?: RequestInit) => { + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + const result = await startPairing( + "alpha", + { + proxyUrl: "https://alpha.proxy.example", + ttlSeconds: "900", + qr: true, + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: + readFileImpl as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.api.clawdentity.com/", + apiKey: "clw_pat_configured", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.ticket).toBe("clwpair1_eyJ2IjoxfQ"); + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); + expect(result.qrPath).toContain( + "/tmp/.clawdentity/pairing/alpha-pair-1700000000.png", + ); + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(mkdirImpl).toHaveBeenCalledTimes(1); + const [, init] = fetchImpl.mock.calls[0] as [string, RequestInit]; + expect(init?.method).toBe("POST"); + const headers = new Headers(init?.headers); + expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); + expect(headers.get("x-claw-owner-pat")).toBe("clw_pat_configured"); + expect(headers.get("x-claw-proof")).toBeTruthy(); + expect(headers.get("x-claw-body-sha256")).toBeTruthy(); + 
expect(headers.get("x-claw-timestamp")).toBe("1700000000"); + expect(headers.get("x-claw-nonce")).toBe("nonce-start"); + expect(String(init?.body ?? "")).toContain("ttlSeconds"); + }); + + it("uses CLAWDENTITY_PROXY_URL when --proxy-url is omitted", async () => { + process.env.CLAWDENTITY_PROXY_URL = "https://env.proxy.example"; + const fixture = await createPairFixture(); + + const result = await startPairing( + "alpha", + { + ownerPat: "clw_pat_explicit", + }, + { + fetchImpl: (async () => + Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + )) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.api.clawdentity.com/", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.proxyUrl).toBe("https://env.proxy.example/"); + }); + + it("fails start when owner PAT is missing", async () => { + const fixture = await createPairFixture(); + + await expect( + startPairing( + "alpha", + { + proxyUrl: "https://alpha.proxy.example", + }, + { + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.api.clawdentity.com/", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ), + ).rejects.toMatchObject({ + message: expect.stringContaining("Owner PAT is required"), + }); + }); + + it("confirms pairing with qr-file ticket decode", async () => { + const fixture = await createPairFixture(); + const fetchImpl = vi.fn(async (_url: string, _init?: RequestInit) => { + return Response.json( + { + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + 
responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + }, + { status: 201 }, + ); + }); + + const result = await confirmPairing( + "beta", + { + proxyUrl: "https://beta.proxy.example", + qrFile: "/tmp/pair.png", + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + qrDecodeImpl: () => "clwpair1_ticket", + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.paired).toBe(true); + expect(result.proxyUrl).toBe("https://beta.proxy.example/"); + const [, init] = fetchImpl.mock.calls[0] as [string, RequestInit]; + const headers = new Headers(init?.headers); + expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); + expect(headers.get("x-claw-proof")).toBeTruthy(); + expect(headers.get("x-claw-body-sha256")).toBeTruthy(); + expect(headers.get("x-claw-owner-pat")).toBeNull(); + expect(headers.get("x-claw-timestamp")).toBe("1700000000"); + expect(headers.get("x-claw-nonce")).toBe("nonce-confirm"); + expect(String(init?.body ?? 
"")).toContain("clwpair1_ticket"); + }); +}); + +const runPairCommand = async ( + args: string[], + command = createPairCommand(), +): Promise<{ + exitCode: number | undefined; + stderr: string; + stdout: string; +}> => { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "pair", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +}; + +describe("pair command output", () => { + it("prints pairing ticket from pair start", async () => { + const fixture = await createPairFixture(); + const command = createPairCommand({ + fetchImpl: (async () => + Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + )) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: vi.fn( + async () => undefined, + ) as unknown as typeof 
import("node:fs/promises").writeFile, + mkdirImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").mkdir, + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.api.clawdentity.com/", + apiKey: "clw_pat_configured", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }); + + const result = await runPairCommand( + ["start", "alpha", "--proxy-url", "https://alpha.proxy.example", "--qr"], + command, + ); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Pairing ticket created"); + expect(result.stdout).toContain("Ticket: clwpair1_eyJ2IjoxfQ"); + expect(result.stdout).toContain("QR File: "); + }); +}); diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts new file mode 100644 index 0000000..9dce533 --- /dev/null +++ b/apps/cli/src/commands/pair.ts @@ -0,0 +1,816 @@ +import { randomBytes } from "node:crypto"; +import { mkdir, readFile, writeFile } from "node:fs/promises"; +import { dirname, join, resolve } from "node:path"; +import { decodeBase64url } from "@clawdentity/protocol"; +import { AppError, createLogger, signHttpRequest } from "@clawdentity/sdk"; +import { Command } from "commander"; +import jsQR from "jsqr"; +import { PNG } from "pngjs"; +import QRCode from "qrcode"; +import { + type CliConfig, + getConfigDir, + resolveConfig, +} from "../config/manager.js"; +import { writeStdoutLine } from "../io.js"; +import { assertValidAgentName } from "./agent-name.js"; +import { withErrorHandling } from "./helpers.js"; + +const logger = createLogger({ service: "cli", module: "pair" }); + +const AGENTS_DIR_NAME = "agents"; +const AIT_FILE_NAME = "ait.jwt"; +const SECRET_KEY_FILE_NAME = "secret.key"; +const PAIRING_QR_DIR_NAME = "pairing"; + +const PAIR_START_PATH = "/pair/start"; +const PAIR_CONFIRM_PATH = "/pair/confirm"; +const OWNER_PAT_HEADER = "x-claw-owner-pat"; +const NONCE_SIZE = 24; +const PAIRING_TICKET_PREFIX = "clwpair1_"; + +export type PairStartOptions = { + 
ownerPat?: string; + proxyUrl?: string; + ttlSeconds?: string; + qr?: boolean; + qrOutput?: string; +}; + +export type PairConfirmOptions = { + proxyUrl?: string; + qrFile?: string; + ticket?: string; +}; + +type PairRequestOptions = { + fetchImpl?: typeof fetch; + getConfigDirImpl?: typeof getConfigDir; + nowSecondsImpl?: () => number; + nonceFactoryImpl?: () => string; + readFileImpl?: typeof readFile; + writeFileImpl?: typeof writeFile; + mkdirImpl?: typeof mkdir; + resolveConfigImpl?: () => Promise; + qrEncodeImpl?: (ticket: string) => Promise; + qrDecodeImpl?: (imageBytes: Uint8Array) => string; +}; + +type PairCommandDependencies = PairRequestOptions; + +type PairStartResult = { + initiatorAgentDid: string; + ticket: string; + expiresAt: string; + proxyUrl: string; + qrPath?: string; +}; + +type PairConfirmResult = { + paired: boolean; + initiatorAgentDid: string; + responderAgentDid: string; + proxyUrl: string; +}; + +type RegistryErrorEnvelope = { + error?: { + code?: string; + message?: string; + }; +}; + +type LocalAgentProofMaterial = { + ait: string; + secretKey: Uint8Array; +}; + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +function createCliError(code: string, message: string): AppError { + return new AppError({ + code, + message, + status: 400, + }); +} + +function parseNonEmptyString(value: unknown): string { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +} + +function parsePairingTicket(value: unknown): string { + const ticket = parseNonEmptyString(value); + if (!ticket.startsWith(PAIRING_TICKET_PREFIX)) { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + return ticket; +} + +function parseTtlSeconds(value: string | undefined): number | undefined { + const raw = parseNonEmptyString(value); + if (raw.length === 0) { + return undefined; + } + + const parsed = Number.parseInt(raw, 10); + if 
(!Number.isInteger(parsed) || parsed < 1) { + throw createCliError( + "CLI_PAIR_START_INVALID_TTL", + "ttlSeconds must be a positive integer", + ); + } + + return parsed; +} + +function resolveProxyUrl(overrideProxyUrl: string | undefined): string { + const candidate = + parseNonEmptyString(overrideProxyUrl) || + parseNonEmptyString(process.env.CLAWDENTITY_PROXY_URL); + + if (candidate.length === 0) { + throw createCliError( + "CLI_PAIR_PROXY_URL_REQUIRED", + "Proxy URL is required. Pass --proxy-url or set CLAWDENTITY_PROXY_URL.", + ); + } + + try { + const parsed = new URL(candidate); + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + throw new Error("invalid protocol"); + } + + return parsed.toString(); + } catch { + throw createCliError("CLI_PAIR_INVALID_PROXY_URL", "Proxy URL is invalid"); + } +} + +function toProxyRequestUrl(proxyUrl: string, path: string): string { + const normalizedBase = proxyUrl.endsWith("/") ? proxyUrl : `${proxyUrl}/`; + return new URL(path.slice(1), normalizedBase).toString(); +} + +function toPathWithQuery(url: string): string { + const parsed = new URL(url); + return `${parsed.pathname}${parsed.search}`; +} + +function extractErrorCode(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.code !== "string") { + return undefined; + } + + const code = envelope.error.code.trim(); + return code.length > 0 ? code : undefined; +} + +function extractErrorMessage(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const message = envelope.error.message.trim(); + return message.length > 0 ? 
message : undefined; +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +async function executePairRequest(input: { + fetchImpl: typeof fetch; + init: RequestInit; + url: string; +}): Promise { + try { + return await input.fetchImpl(input.url, input.init); + } catch { + throw createCliError( + "CLI_PAIR_REQUEST_FAILED", + "Unable to connect to proxy URL. Check network access and proxyUrl.", + ); + } +} + +function mapStartPairError(status: number, payload: unknown): string { + const code = extractErrorCode(payload); + const message = extractErrorMessage(payload); + + if (code === "PROXY_PAIR_OWNER_PAT_INVALID" || status === 401) { + return message + ? `Owner PAT is invalid (401): ${message}` + : "Owner PAT is invalid or expired (401)."; + } + + if (code === "PROXY_PAIR_OWNER_PAT_FORBIDDEN" || status === 403) { + return message + ? `Owner PAT does not control initiator agent DID (403): ${message}` + : "Owner PAT does not control initiator agent DID (403)."; + } + + if (status === 400) { + return message + ? `Pair start request is invalid (400): ${message}` + : "Pair start request is invalid (400)."; + } + + if (status >= 500) { + return `Proxy pairing service is unavailable (${status}).`; + } + + if (message) { + return `Pair start failed (${status}): ${message}`; + } + + return `Pair start failed (${status})`; +} + +function mapConfirmPairError(status: number, payload: unknown): string { + const code = extractErrorCode(payload); + const message = extractErrorMessage(payload); + + if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { + return "Pairing ticket is invalid or expired"; + } + + if (code === "PROXY_PAIR_TICKET_EXPIRED" || status === 410) { + return "Pairing ticket has expired"; + } + + if (status === 400) { + return message + ? 
`Pair confirm request is invalid (400): ${message}` + : "Pair confirm request is invalid (400)."; + } + + if (status >= 500) { + return `Proxy pairing service is unavailable (${status}).`; + } + + if (message) { + return `Pair confirm failed (${status}): ${message}`; + } + + return `Pair confirm failed (${status})`; +} + +function parsePairStartResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_START_INVALID_RESPONSE", + "Pair start response is invalid", + ); + } + + const ticket = parsePairingTicket(payload.ticket); + const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); + const expiresAt = parseNonEmptyString(payload.expiresAt); + + if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { + throw createCliError( + "CLI_PAIR_START_INVALID_RESPONSE", + "Pair start response is invalid", + ); + } + + return { + ticket, + initiatorAgentDid, + expiresAt, + }; +} + +function parsePairConfirmResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_CONFIRM_INVALID_RESPONSE", + "Pair confirm response is invalid", + ); + } + + const paired = payload.paired === true; + const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); + const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); + + if ( + !paired || + initiatorAgentDid.length === 0 || + responderAgentDid.length === 0 + ) { + throw createCliError( + "CLI_PAIR_CONFIRM_INVALID_RESPONSE", + "Pair confirm response is invalid", + ); + } + + return { + paired, + initiatorAgentDid, + responderAgentDid, + }; +} + +async function readAgentProofMaterial( + agentName: string, + dependencies: PairRequestOptions, +): Promise { + const readFileImpl = dependencies.readFileImpl ?? readFile; + const getConfigDirImpl = dependencies.getConfigDirImpl ?? 
getConfigDir; + const normalizedAgentName = assertValidAgentName(agentName); + + const agentDir = join( + getConfigDirImpl(), + AGENTS_DIR_NAME, + normalizedAgentName, + ); + const aitPath = join(agentDir, AIT_FILE_NAME); + const secretKeyPath = join(agentDir, SECRET_KEY_FILE_NAME); + + let ait: string; + try { + ait = (await readFileImpl(aitPath, "utf-8")).trim(); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" is missing ${AIT_FILE_NAME}. Run agent create first.`, + ); + } + + throw error; + } + + if (ait.length === 0) { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" has an empty ${AIT_FILE_NAME}`, + ); + } + + let encodedSecretKey: string; + try { + encodedSecretKey = (await readFileImpl(secretKeyPath, "utf-8")).trim(); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" is missing ${SECRET_KEY_FILE_NAME}. 
Run agent create first.`, + ); + } + + throw error; + } + + if (encodedSecretKey.length === 0) { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" has an empty ${SECRET_KEY_FILE_NAME}`, + ); + } + + let secretKey: Uint8Array; + try { + secretKey = decodeBase64url(encodedSecretKey); + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" has invalid ${SECRET_KEY_FILE_NAME}`, + ); + } + + return { + ait, + secretKey, + }; +} + +function resolveOwnerPat(options: { + explicitOwnerPat: string | undefined; + config: CliConfig; +}): string { + const ownerPat = + parseNonEmptyString(options.explicitOwnerPat) || + parseNonEmptyString(options.config.apiKey); + + if (ownerPat.length > 0) { + return ownerPat; + } + + throw createCliError( + "CLI_PAIR_START_OWNER_PAT_REQUIRED", + "Owner PAT is required. Pass --owner-pat or configure API key with `clawdentity invite redeem` / `clawdentity config set apiKey `.", + ); +} + +async function buildSignedHeaders(input: { + bodyBytes?: Uint8Array; + method: string; + requestUrl: string; + secretKey: Uint8Array; + timestampSeconds: number; + nonce: string; +}): Promise> { + const signed = await signHttpRequest({ + method: input.method, + pathWithQuery: toPathWithQuery(input.requestUrl), + timestamp: String(input.timestampSeconds), + nonce: input.nonce, + body: input.bodyBytes, + secretKey: input.secretKey, + }); + + return signed.headers; +} + +async function encodeTicketQrPng(ticket: string): Promise { + const buffer = await QRCode.toBuffer(ticket, { + type: "png", + width: 512, + margin: 2, + errorCorrectionLevel: "M", + }); + return new Uint8Array(buffer); +} + +function decodeTicketFromPng(imageBytes: Uint8Array): string { + let decodedPng: PNG; + try { + decodedPng = PNG.sync.read(Buffer.from(imageBytes)); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_FILE_INVALID", + "QR image file is invalid or unsupported", + ); + } + + const imageData 
= new Uint8ClampedArray( + decodedPng.data.buffer, + decodedPng.data.byteOffset, + decodedPng.data.byteLength, + ); + + const decoded = jsQR(imageData, decodedPng.width, decodedPng.height); + if (!decoded || parseNonEmptyString(decoded.data).length === 0) { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_NOT_FOUND", + "No pairing QR code was found in the image", + ); + } + + return parsePairingTicket(decoded.data); +} + +async function persistPairingQr(input: { + agentName: string; + qrOutput: string | undefined; + ticket: string; + dependencies: PairRequestOptions; + nowSeconds: number; +}): Promise { + const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; + const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; + const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; + const qrEncodeImpl = input.dependencies.qrEncodeImpl ?? encodeTicketQrPng; + + const baseDir = join(getConfigDirImpl(), PAIRING_QR_DIR_NAME); + const outputPath = parseNonEmptyString(input.qrOutput) + ? resolve(input.qrOutput ?? 
"") + : join( + baseDir, + `${assertValidAgentName(input.agentName)}-pair-${input.nowSeconds}.png`, + ); + + await mkdirImpl(dirname(outputPath), { recursive: true }); + const imageBytes = await qrEncodeImpl(input.ticket); + await writeFileImpl(outputPath, imageBytes); + + return outputPath; +} + +function resolveConfirmTicketSource(options: PairConfirmOptions): { + ticket: string; + source: "ticket" | "qr-file"; + qrFilePath?: string; +} { + const inlineTicket = parseNonEmptyString(options.ticket); + const qrFile = parseNonEmptyString(options.qrFile); + + if (inlineTicket.length > 0 && qrFile.length > 0) { + throw createCliError( + "CLI_PAIR_CONFIRM_INPUT_CONFLICT", + "Provide either --ticket or --qr-file, not both", + ); + } + + if (inlineTicket.length > 0) { + return { + ticket: parsePairingTicket(inlineTicket), + source: "ticket", + }; + } + + if (qrFile.length > 0) { + return { + ticket: "", + source: "qr-file", + qrFilePath: resolve(qrFile), + }; + } + + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_REQUIRED", + "Pairing ticket is required. Pass --ticket or --qr-file .", + ); +} + +export async function startPairing( + agentName: string, + options: PairStartOptions, + dependencies: PairRequestOptions = {}, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; + const nowSecondsImpl = + dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nonceFactoryImpl = + dependencies.nonceFactoryImpl ?? 
+ (() => randomBytes(NONCE_SIZE).toString("base64url")); + + const ttlSeconds = parseTtlSeconds(options.ttlSeconds); + const proxyUrl = resolveProxyUrl(options.proxyUrl); + + const config = await resolveConfigImpl(); + const ownerPat = resolveOwnerPat({ + explicitOwnerPat: options.ownerPat, + config, + }); + + const { ait, secretKey } = await readAgentProofMaterial( + agentName, + dependencies, + ); + + const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_START_PATH); + const requestBody = JSON.stringify({ + ttlSeconds, + }); + const bodyBytes = new TextEncoder().encode(requestBody); + + const timestampSeconds = nowSecondsImpl(); + const nonce = nonceFactoryImpl(); + const signedHeaders = await buildSignedHeaders({ + method: "POST", + requestUrl, + bodyBytes, + secretKey, + timestampSeconds, + nonce, + }); + + const response = await executePairRequest({ + fetchImpl, + url: requestUrl, + init: { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + [OWNER_PAT_HEADER]: ownerPat, + ...signedHeaders, + }, + body: requestBody, + }, + }); + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw createCliError( + "CLI_PAIR_START_FAILED", + mapStartPairError(response.status, responseBody), + ); + } + + const parsed = parsePairStartResponse(responseBody); + const result: PairStartResult = { + ...parsed, + proxyUrl, + }; + + if (options.qr === true) { + result.qrPath = await persistPairingQr({ + agentName, + qrOutput: options.qrOutput, + ticket: parsed.ticket, + dependencies, + nowSeconds: timestampSeconds, + }); + } + + return result; +} + +export async function confirmPairing( + agentName: string, + options: PairConfirmOptions, + dependencies: PairRequestOptions = {}, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const nowSecondsImpl = + dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nonceFactoryImpl = + dependencies.nonceFactoryImpl ?? 
+ (() => randomBytes(NONCE_SIZE).toString("base64url")); + const readFileImpl = dependencies.readFileImpl ?? readFile; + const qrDecodeImpl = dependencies.qrDecodeImpl ?? decodeTicketFromPng; + + const ticketSource = resolveConfirmTicketSource(options); + const proxyUrl = resolveProxyUrl(options.proxyUrl); + + let ticket = ticketSource.ticket; + if (ticketSource.source === "qr-file") { + if (!ticketSource.qrFilePath) { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_FILE_REQUIRED", + "QR file path is required", + ); + } + + let imageBytes: Uint8Array; + try { + imageBytes = await readFileImpl(ticketSource.qrFilePath); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_FILE_NOT_FOUND", + `QR file not found: ${ticketSource.qrFilePath}`, + ); + } + + throw error; + } + + ticket = parsePairingTicket(qrDecodeImpl(new Uint8Array(imageBytes))); + } + + const { ait, secretKey } = await readAgentProofMaterial( + agentName, + dependencies, + ); + + const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_CONFIRM_PATH); + const requestBody = JSON.stringify({ ticket }); + const bodyBytes = new TextEncoder().encode(requestBody); + + const timestampSeconds = nowSecondsImpl(); + const nonce = nonceFactoryImpl(); + const signedHeaders = await buildSignedHeaders({ + method: "POST", + requestUrl, + bodyBytes, + secretKey, + timestampSeconds, + nonce, + }); + + const response = await executePairRequest({ + fetchImpl, + url: requestUrl, + init: { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signedHeaders, + }, + body: requestBody, + }, + }); + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw createCliError( + "CLI_PAIR_CONFIRM_FAILED", + mapConfirmPairError(response.status, responseBody), + ); + } + + const parsed = parsePairConfirmResponse(responseBody); + + return { + ...parsed, + 
proxyUrl, + }; +} + +export const createPairCommand = ( + dependencies: PairCommandDependencies = {}, +): Command => { + const pairCommand = new Command("pair").description( + "Manage proxy trust pairing between agents", + ); + + pairCommand + .command("start ") + .description("Start pairing and issue one-time pairing ticket") + .option( + "--proxy-url ", + "Initiator proxy base URL (or set CLAWDENTITY_PROXY_URL)", + ) + .option( + "--owner-pat ", + "Owner PAT override (defaults to configured API key)", + ) + .option("--ttl-seconds ", "Pairing ticket expiry in seconds") + .option("--qr", "Generate a local QR file for sharing") + .option("--qr-output ", "Write QR PNG to a specific file path") + .action( + withErrorHandling( + "pair start", + async (agentName: string, options: PairStartOptions) => { + const result = await startPairing(agentName, options, dependencies); + + logger.info("cli.pair_started", { + initiatorAgentDid: result.initiatorAgentDid, + proxyUrl: result.proxyUrl, + expiresAt: result.expiresAt, + qrPath: result.qrPath, + }); + + writeStdoutLine("Pairing ticket created"); + writeStdoutLine(`Ticket: ${result.ticket}`); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine(`Expires At: ${result.expiresAt}`); + if (result.qrPath) { + writeStdoutLine(`QR File: ${result.qrPath}`); + } + }, + ), + ); + + pairCommand + .command("confirm ") + .description("Confirm pairing using one-time pairing ticket") + .option("--ticket ", "One-time pairing ticket (clwpair1_...)") + .option("--qr-file ", "Path to pairing QR PNG file") + .option( + "--proxy-url ", + "Responder proxy base URL (or set CLAWDENTITY_PROXY_URL)", + ) + .action( + withErrorHandling( + "pair confirm", + async (agentName: string, options: PairConfirmOptions) => { + const result = await confirmPairing(agentName, options, dependencies); + + logger.info("cli.pair_confirmed", { + initiatorAgentDid: result.initiatorAgentDid, + responderAgentDid: 
result.responderAgentDid, + proxyUrl: result.proxyUrl, + }); + + writeStdoutLine("Pairing confirmed"); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); + writeStdoutLine(`Paired: ${result.paired ? "true" : "false"}`); + }, + ), + ); + + return pairCommand; +}; diff --git a/apps/cli/src/config/manager.test.ts b/apps/cli/src/config/manager.test.ts index cabcbfe..e977451 100644 --- a/apps/cli/src/config/manager.test.ts +++ b/apps/cli/src/config/manager.test.ts @@ -107,6 +107,25 @@ describe("config manager", () => { }); }); + it("applies CLAWDENTITY_REGISTRY when CLAWDENTITY_REGISTRY_URL is unset", async () => { + mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + process.env.CLAWDENTITY_REGISTRY = "http://legacy-env:8787"; + + await expect(resolveConfig()).resolves.toEqual({ + registryUrl: "http://legacy-env:8787", + }); + }); + + it("prefers CLAWDENTITY_REGISTRY_URL over CLAWDENTITY_REGISTRY", async () => { + mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + process.env.CLAWDENTITY_REGISTRY_URL = "http://primary-env:8787"; + process.env.CLAWDENTITY_REGISTRY = "http://legacy-env:8787"; + + await expect(resolveConfig()).resolves.toEqual({ + registryUrl: "http://primary-env:8787", + }); + }); + it("prefers env apiKey over config file", async () => { mockedReadFile.mockResolvedValueOnce('{"apiKey":"from-file"}'); process.env.CLAWDENTITY_API_KEY = "from-env"; diff --git a/apps/cli/src/config/manager.ts b/apps/cli/src/config/manager.ts index c017e14..c95e39f 100644 --- a/apps/cli/src/config/manager.ts +++ b/apps/cli/src/config/manager.ts @@ -21,6 +21,10 @@ const ENV_KEY_MAP: Record = { apiKey: "CLAWDENTITY_API_KEY", }; +const LEGACY_ENV_KEY_MAP: Partial> = { + registryUrl: ["CLAWDENTITY_REGISTRY"], +}; + const DEFAULT_CONFIG: CliConfig = { registryUrl: DEFAULT_REGISTRY_URL, }; @@ -85,7 +89,12 @@ export const resolveConfig = 
async (): Promise => { const config = await readConfig(); for (const key of Object.keys(ENV_KEY_MAP) as CliConfigKey[]) { - const envVar = process.env[ENV_KEY_MAP[key]]; + const envKeys = [ENV_KEY_MAP[key], ...(LEGACY_ENV_KEY_MAP[key] ?? [])]; + const envVar = envKeys + .map((envKey) => process.env[envKey]) + .find((value): value is string => { + return typeof value === "string" && value.length > 0; + }); if (typeof envVar === "string" && envVar.length > 0) { config[key] = envVar; diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 43a8b16..02eb11a 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -70,6 +70,14 @@ describe("cli", () => { expect(hasOpenclawCommand).toBe(true); }); + it("registers the pair command", () => { + const hasPairCommand = createProgram() + .commands.map((command) => command.name()) + .includes("pair"); + + expect(hasPairCommand).toBe(true); + }); + it("registers the invite command", () => { const hasInviteCommand = createProgram() .commands.map((command) => command.name()) diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 5c4e656..0d28ae1 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -7,6 +7,7 @@ import { createConfigCommand } from "./commands/config.js"; import { createConnectorCommand } from "./commands/connector.js"; import { createInviteCommand } from "./commands/invite.js"; import { createOpenclawCommand } from "./commands/openclaw.js"; +import { createPairCommand } from "./commands/pair.js"; import { createVerifyCommand } from "./commands/verify.js"; const require = createRequire(import.meta.url); @@ -37,5 +38,6 @@ export const createProgram = (): Command => { .addCommand(createConfigCommand()) .addCommand(createInviteCommand()) .addCommand(createOpenclawCommand()) + .addCommand(createPairCommand()) .addCommand(createVerifyCommand()); }; diff --git a/apps/cli/src/types/jsqr.d.ts b/apps/cli/src/types/jsqr.d.ts new file mode 100644 index 0000000..5ead872 
--- /dev/null +++ b/apps/cli/src/types/jsqr.d.ts @@ -0,0 +1,28 @@ +declare module "jsqr" { + type JsQrPoint = { + x: number; + y: number; + }; + + type JsQrCode = { + binaryData: number[]; + data: string; + chunks: unknown[]; + location: { + topRightCorner: JsQrPoint; + topLeftCorner: JsQrPoint; + bottomRightCorner: JsQrPoint; + bottomLeftCorner: JsQrPoint; + topRightFinderPattern: JsQrPoint; + topLeftFinderPattern: JsQrPoint; + bottomLeftFinderPattern: JsQrPoint; + bottomRightAlignmentPattern?: JsQrPoint; + }; + }; + + export default function jsQR( + data: Uint8ClampedArray, + width: number, + height: number, + ): JsQrCode | null; +} diff --git a/apps/cli/tsup.config.ts b/apps/cli/tsup.config.ts index df576e4..f51fab9 100644 --- a/apps/cli/tsup.config.ts +++ b/apps/cli/tsup.config.ts @@ -5,6 +5,7 @@ export default defineConfig({ format: ["esm"], bundle: true, splitting: false, + external: ["ws"], noExternal: [ "@clawdentity/connector", "@clawdentity/protocol", diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 3c671ba..14b4bca 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -28,8 +28,8 @@ - Keep relay behavior pure except for explicit dependencies (`fetch`, filesystem) so tests stay deterministic. - Prefer schema-first runtime validation over ad-hoc guards. - Keep skill docs aligned with connector architecture: do not document direct transform-to-peer-proxy signing. -- Keep `skill/SKILL.md` command utilization section explicit and executable with current CLI commands used by this skill (`config`, `agent`, `openclaw setup/doctor/relay test`, `connector start`, optional `connector service install`). -- Keep pairing prerequisite documented as API-based (`/pair/start`, `/pair/confirm`) until a dedicated CLI pairing command exists. 
+- Keep `skill/SKILL.md` command utilization section explicit and executable with current CLI commands used by this skill (`config`, `invite redeem`, `agent`, `openclaw setup/doctor/relay test`, `pair`, `connector start`, optional `connector service install`). +- Keep pairing flow documented as CLI-based (`clawdentity pair start`, `clawdentity pair confirm`), not raw proxy HTTP calls. - When `src/transforms/relay-to-peer.ts` relay envelope, endpoint defaults, or failure mapping changes, update: - `skill/SKILL.md` - `skill/references/clawdentity-protocol.md` @@ -42,5 +42,43 @@ ## Skill Runtime Behavior - Keep onboarding prompts input-focused (invite/API key/URLs) and let the skill decide command execution. +- For first-time onboarding, prefer registry invite redeem (`clw_inv_...`) before asking for API key. +- Disambiguate invite types in prompts: + - `clw_inv_...` = registry onboarding invite (yields PAT via `invite redeem`) + - `clawd1_...` = peer relay invite (used by `openclaw setup`) + - `clwpair1_...` = proxy trust pairing ticket (used by `pair start` / `pair confirm`) - The agent should run required npm/CLI/filesystem operations via tools and only ask the human for missing inputs. - Report deterministic completion outputs: local DID, peer alias, and generated filesystem paths. 
+ +## Dual Container Test State +- For local user-flow validation against two OpenClaw gateways, use: + - `clawdbot-agent-alpha-1` (host port `18789`) + - `clawdbot-agent-beta-1` (host port `19001`) +- Keep a reusable pre-skill snapshot where model is already configured: + - `~/.openclaw-baselines/alpha-kimi-preskill` + - `~/.openclaw-baselines/beta-kimi-preskill` +- Keep a reusable paired-and-approved snapshot for fast UI + skill install regression: + - `~/.openclaw-baselines/alpha-kimi-preskill-device-approved-20260217-194756` + - `~/.openclaw-baselines/beta-kimi-preskill-device-approved-20260217-194756` + - stable aliases: + - `~/.openclaw-baselines/alpha-kimi-preskill-device-approved-latest` + - `~/.openclaw-baselines/beta-kimi-preskill-device-approved-latest` +- Keep a reusable paired-stable snapshot for repeat tests without re-approving UI devices: + - `~/.openclaw-baselines/alpha-kimi-paired-stable-20260217-200909` + - `~/.openclaw-baselines/beta-kimi-paired-stable-20260217-200909` + - stable aliases: + - `~/.openclaw-baselines/alpha-kimi-paired-stable-latest` + - `~/.openclaw-baselines/beta-kimi-paired-stable-latest` +- Snapshot must represent: + - `openclaw.json` default model set to `kimi-coding/k2p5` + - no relay skill artifacts installed yet +- Use this snapshot as the starting point for every skill install regression run. +- Pairing troubleshooting: + - If UI shows `Disconnected (1008): pairing required`, OpenClaw device approval is pending. + - This is not Clawdentity proxy trust pairing (`/pair/start` + `/pair/confirm`); it is only OpenClaw UI/device approval. 
+ - Run: + - `docker exec clawdbot-agent-alpha-1 sh -lc 'node openclaw.mjs devices list --json'` + - `docker exec clawdbot-agent-beta-1 sh -lc 'node openclaw.mjs devices list --json'` + - Approve any pending request IDs: + - `docker exec clawdbot-agent-alpha-1 sh -lc 'node openclaw.mjs devices approve '` + - `docker exec clawdbot-agent-beta-1 sh -lc 'node openclaw.mjs devices approve '` diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index 9466c2c..2bf3783 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -38,8 +38,9 @@ Use this skill when any of the following are requested: ## Invite Input Assumption -Provide a valid invite code string before running this skill. -Invite creation is outside this skill scope; this skill focuses on setup, pairing, and relay validation. +Invite creation is outside this skill scope. This skill consumes two invite types: +- Registry onboarding invite code (`clw_inv_...`) to obtain PAT when local API key is missing. +- OpenClaw peer relay invite code (`clawd1_...`) for `openclaw setup`. 
## Human + Agent Flow (strict user-style) @@ -59,14 +60,21 @@ Use these commands as the default execution path for skill utilization: - Initialize local CLI config: - `clawdentity config init` + - `clawdentity config init --registry-url ` (supports first-run registry URL override) + - or set env before init: `CLAWDENTITY_REGISTRY_URL=` (primary global override) + - compatible alias: `CLAWDENTITY_REGISTRY=` - Configure registry URL and API key when missing: - `clawdentity config set registryUrl ` +- Complete registry onboarding when API key is missing: + - `clawdentity invite redeem ` + - `clawdentity invite redeem --registry-url ` +- Configure API key only for non-invite fallback: - `clawdentity config set apiKey ` - Create and inspect local OpenClaw agent identity: - `clawdentity agent create --framework openclaw` - `clawdentity agent inspect ` - Apply OpenClaw invite setup: - - `clawdentity openclaw setup --invite-code ` + - `clawdentity openclaw setup --invite-code ` - Start connector runtime for relay handoff: - `clawdentity connector start ` - Optional persistent connector autostart: @@ -75,47 +83,58 @@ Use these commands as the default execution path for skill utilization: - `clawdentity openclaw doctor` - `clawdentity openclaw relay test --peer ` -Pairing bootstrap for trust policy is API-based in the current release (no dedicated pairing CLI command yet): +Pairing bootstrap uses CLI commands in the current release: - Owner/initiator starts pairing on initiator proxy: - - `POST /pair/start` - - Requires `Authorization: Claw ` and `x-claw-owner-pat` - - Body: `{"agentDid":""}` + - `clawdentity pair start --proxy-url --qr` + - Optionally pass explicit owner PAT: `--owner-pat ` - Responder confirms on responder proxy: - - `POST /pair/confirm` - - Requires `Authorization: Claw ` - - Body: `{"pairingCode":""}` + - `clawdentity pair confirm --qr-file --proxy-url ` + - optional global proxy URL env fallback: `CLAWDENTITY_PROXY_URL=` Successful confirm establishes 
mutual trust for the two agent DIDs. After confirm, both directions are allowed for trusted delivery. 1. Confirm prerequisites with the human. - Confirm `clawdentity` CLI is installed and runnable. -- Confirm API key exists for this agent (if missing, ask the human for it). +- Confirm local agent name. +- Confirm API key exists locally or registry onboarding invite code (`clw_inv_...`) is available. +- Confirm OpenClaw peer relay invite code (`clawd1_...`) is available for setup. +- Do not request API key and registry invite code in the same prompt. +- Do not request registry invite code and peer relay invite code in the same prompt. +- Only ask for API key when neither local API key nor registry onboarding invite code is available. - Confirm OpenClaw state directory path if non-default. - Confirm OpenClaw base URL if local endpoint is non-default. +- Confirm each side proxy URL for pairing command execution. 2. Confirm skill artifact exists in workspace skills directory. - Ensure `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` exists. - If missing, install/update skill package contents before setup. -3. Configure local Clawdentity identity for this OpenClaw agent. +3. Initialize local Clawdentity config. - Run `clawdentity config init`. -- If needed, ask the human for API key and run `clawdentity config set apiKey `. +- Use `clawdentity config init --registry-url ` when registry URL override is required. + +4. Complete registry onboarding auth before agent creation. +- If API key already exists, continue. +- Else redeem registry onboarding invite: + - `clawdentity invite redeem ` + - optional: `--registry-url ` +- If registry invite code is unavailable, fallback to API key path: + - ask human for API key + - run `clawdentity config set apiKey ` + +5. Configure local Clawdentity identity for this OpenClaw agent. - Create identity: `clawdentity agent create --framework openclaw`. - Verify identity: `clawdentity agent inspect `. -4. 
Ask the human for invite code. -- Prompt exactly for one invite code string. -- Do not ask for DID/proxy URL when invite code is present. - -5. Run automated setup from invite code. +6. Run automated setup from peer relay invite code. - Execute: - `clawdentity openclaw setup --invite-code ` + `clawdentity openclaw setup --invite-code ` - Use `--openclaw-dir ` when state directory is non-default. - Use `--openclaw-base-url ` when local OpenClaw HTTP endpoint is non-default. - Use `--peer-alias ` only when alias override is required. -6. Verify setup outputs. +7. Verify setup outputs. - Confirm setup reports: - peer alias - peer DID @@ -125,17 +144,19 @@ Successful confirm establishes mutual trust for the two agent DIDs. After confir - relay runtime config path - Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. -7. Start connector runtime for local relay handoff. +8. Start connector runtime for local relay handoff. - Run `clawdentity connector start `. - Optional: run `clawdentity connector service install ` for persistent autostart. -8. Complete trust pairing bootstrap. -- Run pairing start (`POST /pair/start`) from the owner/initiator side. -- Share returned one-time `pairingCode` with responder side. -- Run pairing confirm (`POST /pair/confirm`) from responder side. +9. Complete trust pairing bootstrap. +- Run pairing start from owner/initiator side: + - `clawdentity pair start --proxy-url --qr` +- Share the one-time QR image with responder side. +- Run pairing confirm from responder side: + - `clawdentity pair confirm --qr-file --proxy-url ` - Confirm pairing success before relay test. -9. Validate with user-style relay test. +10. Validate with user-style relay test. - Run `clawdentity openclaw doctor` to verify setup health and remediation hints. - Run `clawdentity openclaw relay test --peer ` to execute a probe. - Confirm probe success and connector-mediated delivery logs. 
@@ -144,10 +165,13 @@ Successful confirm establishes mutual trust for the two agent DIDs. After confir ## Required question policy Ask the human only when required inputs are missing: -- Missing Clawdentity API key. +- Missing local agent name. +- Missing peer relay invite code (`clawd1_...`). +- Missing registry onboarding invite code (`clw_inv_...`) when API key is absent. +- Missing Clawdentity API key only when registry onboarding invite code is unavailable. +- Missing initiator/responder proxy URLs for pairing commands. - Unclear OpenClaw state directory. - Non-default OpenClaw base URL. -- Missing invite code. - Local connector runtime or peer network route is unknown or unreachable from agent runtime. ## Failure Handling @@ -158,7 +182,7 @@ If setup or relay fails: - Ensure connector runtime is active (`clawdentity connector start `). - Re-run `clawdentity openclaw doctor`. - Re-run `clawdentity openclaw relay test --peer `. -- Re-run the same user-style flow from step 5 onward only after health checks pass. +- Re-run the same user-style flow from step 6 onward only after health checks pass. ## Bundled Resources diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index 2c4c476..7e9635c 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -67,35 +67,37 @@ Rules: Relay delivery policy is trust-pair based on proxy side. Pairing must be completed before first cross-agent delivery. -Current pairing contract is API-based (no dedicated CLI pairing command): +Current pairing contract is ticket-based with CLI support: 1. 
Initiator owner starts pairing: - - `POST /pair/start` + - CLI: `clawdentity pair start --proxy-url --qr` + - proxy route: `POST /pair/start` - headers: - `Authorization: Claw ` - `x-claw-owner-pat: ` - - body: + - body (optional): ```json { - "agentDid": "did:claw:agent:01RESPONDER..." + "ttlSeconds": 900 } ``` 2. Responder confirms pairing: - - `POST /pair/confirm` + - CLI: `clawdentity pair confirm --qr-file --proxy-url ` + - proxy route: `POST /pair/confirm` - headers: - `Authorization: Claw ` - body: ```json { - "pairingCode": "01PAIRCODE..." + "ticket": "clwpair1_..." } ``` Rules: -- `pairingCode` is one-time and expires. +- `ticket` is one-time and expires (default 15 minutes). - Confirm establishes mutual trust for the initiator/responder pair. - Same-agent sender/recipient is allowed by policy without explicit pair entry. diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 4a1aa6a..cfb5883 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -1,7 +1,6 @@ # Proxy local/development template # For local Wrangler development, copy values into .dev.vars. -# OpenClaw vars are optional for relay-mode proxy operation. -# OPENCLAW_HOOK_TOKEN=optional-openclaw-hook-token +# OPENCLAW_BASE_URL is optional for relay-mode proxy operation. # OPENCLAW_BASE_URL=http://127.0.0.1:18789 # Runtime vars diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 67c2fda..15c3f7b 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -15,8 +15,8 @@ - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). - Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. - Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-on (`true`); disable only when operators need unchanged webhook `message` forwarding. 
-- Keep OpenClaw env inputs (`OPENCLAW_BASE_URL`, `OPENCLAW_HOOK_TOKEN`) optional for relay-mode startup. -- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw vars, and policy/rate-limit vars). +- Keep OpenClaw base URL input (`OPENCLAW_BASE_URL`) optional for relay-mode startup. +- Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw base URL, and policy/rate-limit vars). - Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) @@ -25,26 +25,26 @@ - Treat blank env values as unset for fallback resolution: - empty/whitespace values (and null-like values) in inherited env must not block `.env` or config-file fallbacks - dotenv merge semantics must match parser semantics (non-empty value wins). -- If hook token env vars are missing, resolve fallback token from `hooks.token` in `openclaw.json` (`OPENCLAW_CONFIG_PATH`, default `$OPENCLAW_STATE_DIR/openclaw.json`). +- Do not read or require `OPENCLAW_HOOK_TOKEN` in proxy runtime; that token is connector-side only. - Route relay sessions via Durable Objects: - `GET /v1/relay/connect` keys connector sessions by authenticated caller agent DID. - `POST /hooks/agent` keys recipient delivery by `x-claw-recipient-agent-did`. - Do not route sessions via `OWNER_AGENT_DID`. - Keep env input contract explicit for operator UX: - `LISTEN_PORT` or `PORT` - - `OPENCLAW_HOOK_TOKEN` + - `OPENCLAW_BASE_URL` - `REGISTRY_URL` or `CLAWDENTITY_REGISTRY_URL` - - `OPENCLAW_STATE_DIR`, `OPENCLAW_CONFIG_PATH` + - `OPENCLAW_STATE_DIR` ## Trust and Pairing - Keep trust state in Durable Objects (`ProxyTrustState`), not in static environment variables. 
- Do not add support for `ALLOW_LIST`, `ALLOWLIST_OWNERS`, or `ALLOWLIST_AGENTS`; trust is API-managed only. - Pairing is managed by API: - `POST /pair/start` (verified Claw auth + `x-claw-owner-pat` ownership check against registry `GET /v1/agents/:id/ownership`) - - `POST /pair/confirm` (verified Claw auth + one-time pairing code consume) -- Keep `/pair/confirm` as a single trust-store operation that establishes trust and consumes the code in one step (`confirmPairingCode`), never two separate calls. -- Confirming a valid pairing code must establish mutual trust for the initiator/responder agent pair. -- Keep pairing codes one-time and expiring; reject missing/expired/mismatched codes with explicit client errors. + - `POST /pair/confirm` (verified Claw auth + one-time pairing ticket consume) +- Keep `/pair/confirm` as a single trust-store operation that establishes trust and consumes the ticket in one step (`confirmPairingTicket`), never two separate calls. +- Confirming a valid pairing ticket must establish mutual trust for the initiator/responder agent pair. +- Keep pairing tickets one-time and expiring; reject missing/expired/malformed tickets with explicit client errors. - Reject deprecated `ALLOW_ALL_VERIFIED` at startup; never provide a global allow-all bypass for verified callers. ## Auth Verification diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index c41725e..b142ec6 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -11,13 +11,14 @@ - Keep inbound auth verification in `auth-middleware.ts` with focused helpers for token parsing, registry material loading, CRL checks, and replay protection. - Keep per-agent DID throttling in `agent-rate-limit-middleware.ts`; do not blend rate-limit state or counters into `auth-middleware.ts`. - Keep pre-auth public-route IP throttling in `public-rate-limit-middleware.ts`; do not blend unauthenticated probe controls into `auth-middleware.ts`. 
-- Keep `.env` fallback loading and OpenClaw config (`hooks.token`) fallback logic inside `config.ts` so runtime behavior is deterministic. +- Keep `.env` fallback loading inside `config.ts` so runtime behavior is deterministic. - Keep OpenClaw base URL fallback logic in `config.ts`: `OPENCLAW_BASE_URL` env -> `~/.clawdentity/openclaw-relay.json` -> default. -- Keep OpenClaw compatibility vars optional for relay-mode runtime; never require `OPENCLAW_BASE_URL` or hook token for cloud relay startup. +- Keep OpenClaw compatibility vars optional for relay-mode runtime; never require `OPENCLAW_BASE_URL` for cloud relay startup. +- Do not add `OPENCLAW_HOOK_TOKEN` handling to proxy runtime; hook token auth belongs to connector -> OpenClaw delivery path. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. - Keep static allowlist env vars removed (`ALLOW_LIST`, `ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`); trust must come from pairing state, not env. -- Keep `/pair/confirm` write path atomic at the trust-store API level: trust persistence and pairing-code consumption must happen in one operation (`confirmPairingCode`). +- Keep `/pair/confirm` write path atomic at the trust-store API level: trust persistence and one-time ticket consumption must happen in one operation (`confirmPairingTicket`). ## Config Error Handling - Convert parse failures to `ProxyConfigError` with code `CONFIG_VALIDATION_FAILED`. 
diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index cb269af..556361a 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -107,9 +107,8 @@ function createHookRouteApp(input: { now?: () => Date; }) { const trustStore: ProxyTrustStore = { - createPairingCode: vi.fn(), - consumePairingCode: vi.fn(), - confirmPairingCode: vi.fn(), + createPairingTicket: vi.fn(), + confirmPairingTicket: vi.fn(), isAgentKnown: vi.fn(async () => true), isPairAllowed: vi.fn( async (pair) => diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index 0f68c1e..d291738 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -313,7 +313,7 @@ describe("proxy auth middleware", () => { const harness = await createAuthHarness({ allowCurrentAgent: false, }); - const requestBody = JSON.stringify({ pairingCode: "missing-code" }); + const requestBody = JSON.stringify({ ticket: "clwpair1_missing-ticket" }); const headers = await harness.createSignedHeaders({ body: requestBody, nonce: "nonce-pair-confirm-bootstrap", @@ -326,9 +326,9 @@ describe("proxy auth middleware", () => { body: requestBody, }); - expect(response.status).toBe(404); + expect(response.status).toBe(400); const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_CODE_NOT_FOUND"); + expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); }); it("refreshes keyset and accepts valid AIT after registry key rotation", async () => { @@ -425,9 +425,7 @@ describe("proxy auth middleware", () => { }); const app = createProxyApp({ - config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", - }), + config: parseProxyConfig({}), trustStore, auth: { fetchImpl: fetchMock as typeof fetch, @@ -554,9 +552,7 @@ describe("proxy auth middleware", () => { }); const app = createProxyApp({ - config: 
parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "openclaw-hook-token", - }), + config: parseProxyConfig({}), trustStore, auth: { fetchImpl: fetchMock as typeof fetch, diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 8bc3f8a..5da3f55 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -17,16 +17,13 @@ import { parseProxyConfig, } from "./config.js"; -const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; - describe("proxy config", () => { - it("parses defaults without requiring OpenClaw token", () => { + it("parses defaults without requiring OpenClaw vars", () => { const config = parseProxyConfig({}); expect(config).toEqual({ listenPort: DEFAULT_PROXY_LISTEN_PORT, openclawBaseUrl: DEFAULT_OPENCLAW_BASE_URL, - openclawHookToken: undefined, registryUrl: DEFAULT_REGISTRY_URL, environment: DEFAULT_PROXY_ENVIRONMENT, crlRefreshIntervalMs: DEFAULT_CRL_REFRESH_INTERVAL_MS, @@ -42,7 +39,6 @@ describe("proxy config", () => { it("supports canonical proxy env inputs", () => { const config = parseProxyConfig({ PORT: "4100", - OPENCLAW_HOOK_TOKEN: "hooks-token", CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", @@ -52,7 +48,6 @@ describe("proxy config", () => { }); expect(config.listenPort).toBe(4100); - expect(config.openclawHookToken).toBe("hooks-token"); expect(config.registryUrl).toBe("https://registry.example.com"); expect(config.environment).toBe("local"); expect(config.crlStaleBehavior).toBe("fail-closed"); @@ -69,10 +64,6 @@ describe("proxy config", () => { expect(config.injectIdentityIntoMessage).toBe(false); }); - it("accepts missing hook token for relay-only startup", () => { - expect(() => parseProxyConfig({})).not.toThrow(); - }); - it("throws when deprecated ALLOW_ALL_VERIFIED is set", () => { expect(() => parseProxyConfig({ @@ -136,14 +127,14 @@ describe("proxy config loading", () => { [ "OPENCLAW_BASE_URL=https://cwd.example.com", 
"REGISTRY_URL=https://registry.cwd.example.com", - "OPENCLAW_HOOK_TOKEN=from-cwd-dotenv", ].join("\n"), ); writeFileSync( join(sandbox.stateDir, ".env"), - ["OPENCLAW_HOOK_TOKEN=from-state-dotenv", "LISTEN_PORT=4444"].join( - "\n", - ), + [ + "REGISTRY_URL=https://registry.state.example.com", + "LISTEN_PORT=4444", + ].join("\n"), ); const config = loadProxyConfig( @@ -157,7 +148,6 @@ describe("proxy config loading", () => { ); expect(config.openclawBaseUrl).toBe("https://env.example.com"); - expect(config.openclawHookToken).toBe("from-cwd-dotenv"); expect(config.listenPort).toBe(4444); expect(config.registryUrl).toBe("https://registry.cwd.example.com"); } finally { @@ -165,7 +155,7 @@ describe("proxy config loading", () => { } }); - it("allows loading config when no OpenClaw token fallback is present", () => { + it("loads config when optional OpenClaw vars are absent", () => { const sandbox = createSandbox(); try { const config = loadProxyConfig( @@ -176,8 +166,8 @@ describe("proxy config loading", () => { }, ); - expect(config.openclawHookToken).toBeUndefined(); expect(config.openclawBaseUrl).toBe(DEFAULT_OPENCLAW_BASE_URL); + expect(config.registryUrl).toBe(DEFAULT_REGISTRY_URL); } finally { sandbox.cleanup(); } @@ -188,10 +178,7 @@ describe("proxy config loading", () => { try { writeFileSync( join(sandbox.cwd, ".env"), - [ - "OPENCLAW_HOOK_TOKEN=from-cwd-dotenv", - "INJECT_IDENTITY_INTO_MESSAGE=true", - ].join("\n"), + "INJECT_IDENTITY_INTO_MESSAGE=true", ); const config = loadProxyConfig( @@ -213,12 +200,12 @@ describe("proxy config loading", () => { try { writeFileSync( join(sandbox.cwd, ".env"), - "OPENCLAW_HOOK_TOKEN=from-cwd-dotenv", + "REGISTRY_URL=https://registry.cwd.example.com", ); const config = loadProxyConfig( { - OPENCLAW_HOOK_TOKEN: "", + REGISTRY_URL: "", }, { cwd: sandbox.cwd, @@ -226,36 +213,7 @@ describe("proxy config loading", () => { }, ); - expect(config.openclawHookToken).toBe("from-cwd-dotenv"); - } finally { - sandbox.cleanup(); - } - 
}); - - it("falls back to hooks.token from openclaw.json (JSON5) when env token is missing", () => { - const sandbox = createSandbox(); - try { - writeFileSync( - join(sandbox.stateDir, OPENCLAW_CONFIG_FILENAME), - [ - "{", - " // JSON5 comment", - " hooks: {", - ' token: "token-from-openclaw-config",', - " },", - "}", - ].join("\n"), - ); - - const config = loadProxyConfig( - {}, - { - cwd: sandbox.cwd, - homeDir: sandbox.root, - }, - ); - - expect(config.openclawHookToken).toBe("token-from-openclaw-config"); + expect(config.registryUrl).toBe("https://registry.cwd.example.com"); } finally { sandbox.cleanup(); } @@ -277,9 +235,7 @@ describe("proxy config loading", () => { ); const config = loadProxyConfig( - { - OPENCLAW_HOOK_TOKEN: "token", - }, + {}, { cwd: sandbox.cwd, homeDir: sandbox.root, @@ -309,7 +265,6 @@ describe("proxy config loading", () => { const config = loadProxyConfig( { - OPENCLAW_HOOK_TOKEN: "token", OPENCLAW_BASE_URL: "http://127.0.0.1:19999", }, { @@ -330,10 +285,7 @@ describe("proxy config loading", () => { rmSync(sandbox.stateDir, { recursive: true, force: true }); const legacyStateDir = join(sandbox.root, ".clawdbot"); mkdirSync(legacyStateDir, { recursive: true }); - writeFileSync( - join(legacyStateDir, ".env"), - "OPENCLAW_HOOK_TOKEN=legacy-token", - ); + writeFileSync(join(legacyStateDir, ".env"), "LISTEN_PORT=4555"); const config = loadProxyConfig( {}, @@ -343,17 +295,17 @@ describe("proxy config loading", () => { }, ); - expect(config.openclawHookToken).toBe("legacy-token"); + expect(config.listenPort).toBe(4555); } finally { sandbox.cleanup(); } }); - it("throws when openclaw.json is invalid and token fallback is required", () => { + it("throws when openclaw-relay.json is invalid and base-url fallback is required", () => { const sandbox = createSandbox(); try { writeFileSync( - join(sandbox.stateDir, OPENCLAW_CONFIG_FILENAME), + join(sandbox.clawdentityDir, "openclaw-relay.json"), "{bad-json", ); diff --git a/apps/proxy/src/config.ts 
b/apps/proxy/src/config.ts index 12a2a77..1e35ae9 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -2,7 +2,6 @@ import { existsSync, readFileSync } from "node:fs"; import { homedir } from "node:os"; import { isAbsolute, join, resolve } from "node:path"; import dotenv from "dotenv"; -import JSON5 from "json5"; import { z } from "zod"; export type ProxyCrlStaleBehavior = "fail-open" | "fail-closed"; @@ -43,7 +42,6 @@ export class ProxyConfigError extends Error { } } -const OPENCLAW_CONFIG_FILENAME = "openclaw.json"; const CLAWDENTITY_CONFIG_DIR = ".clawdentity"; const OPENCLAW_RELAY_CONFIG_FILENAME = "openclaw-relay.json"; const LEGACY_STATE_DIR_NAMES = [".clawdbot", ".moldbot", ".moltbot"] as const; @@ -81,7 +79,6 @@ const proxyRuntimeEnvSchema = z.object({ .max(65535) .default(DEFAULT_PROXY_LISTEN_PORT), OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), - OPENCLAW_HOOK_TOKEN: z.string().trim().min(1).optional(), REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), ENVIRONMENT: z .enum(proxyEnvironmentValues) @@ -117,7 +114,6 @@ const proxyRuntimeEnvSchema = z.object({ export const proxyConfigSchema = z.object({ listenPort: z.number().int().min(1).max(65535), openclawBaseUrl: z.string().url(), - openclawHookToken: z.string().min(1).optional(), registryUrl: z.string().url(), environment: z.enum(proxyEnvironmentValues), crlRefreshIntervalMs: z.number().int().positive(), @@ -134,7 +130,6 @@ type RuntimeEnvInput = { LISTEN_PORT?: unknown; PORT?: unknown; OPENCLAW_BASE_URL?: unknown; - OPENCLAW_HOOK_TOKEN?: unknown; REGISTRY_URL?: unknown; CLAWDENTITY_REGISTRY_URL?: unknown; ENVIRONMENT?: unknown; @@ -146,7 +141,6 @@ type RuntimeEnvInput = { AGENT_RATE_LIMIT_WINDOW_MS?: unknown; INJECT_IDENTITY_INTO_MESSAGE?: unknown; OPENCLAW_STATE_DIR?: unknown; - OPENCLAW_CONFIG_PATH?: unknown; HOME?: unknown; USERPROFILE?: unknown; }; @@ -275,22 +269,6 @@ function resolveStateDir( return canonicalStateDir; } 
-function resolveOpenClawConfigPath( - env: RuntimeEnvInput, - options: ProxyConfigLoadOptions, -): string { - const cwd = options.cwd ?? resolveDefaultCwd(); - const home = resolveHomeDir(env, options.homeDir); - const stateDir = resolveStateDir(env, options); - const configPathOverride = firstNonEmptyString(env, ["OPENCLAW_CONFIG_PATH"]); - - if (configPathOverride !== undefined) { - return resolvePathWithHome(configPathOverride, cwd, home); - } - - return join(stateDir, OPENCLAW_CONFIG_FILENAME); -} - function resolveOpenclawRelayConfigPath( env: RuntimeEnvInput, options: ProxyConfigLoadOptions, @@ -355,60 +333,6 @@ function loadEnvWithDotEnvFallback( return mergedEnv; } -function resolveHookTokenFromOpenClawConfig( - env: RuntimeEnvInput, - options: ProxyConfigLoadOptions, -): string | undefined { - const configPath = resolveOpenClawConfigPath(env, options); - if (!existsSync(configPath)) { - return undefined; - } - - let parsed: unknown; - try { - parsed = JSON5.parse(readFileSync(configPath, "utf8")); - } catch (error) { - throw toConfigValidationError({ - fieldErrors: { - OPENCLAW_CONFIG_PATH: [ - `Unable to parse OpenClaw config at ${configPath}`, - ], - }, - formErrors: [ - error instanceof Error - ? error.message - : "Unknown OpenClaw config parse error", - ], - }); - } - - if (typeof parsed !== "object" || parsed === null) { - return undefined; - } - - const hooksValue = (parsed as Record).hooks; - if (typeof hooksValue !== "object" || hooksValue === null) { - return undefined; - } - - const tokenValue = (hooksValue as Record).token; - if (tokenValue === undefined || tokenValue === null) { - return undefined; - } - - if (typeof tokenValue !== "string") { - throw toConfigValidationError({ - fieldErrors: { - OPENCLAW_CONFIG_PATH: ["hooks.token must be a string when set"], - }, - formErrors: [], - }); - } - - const trimmedToken = tokenValue.trim(); - return trimmedToken.length > 0 ? 
trimmedToken : undefined; -} - function resolveBaseUrlFromRelayConfig( env: RuntimeEnvInput, options: ProxyConfigLoadOptions, @@ -496,7 +420,6 @@ function normalizeRuntimeEnv(input: unknown): Record { return { LISTEN_PORT: firstNonEmpty(env, ["LISTEN_PORT", "PORT"]), OPENCLAW_BASE_URL: firstNonEmpty(env, ["OPENCLAW_BASE_URL"]), - OPENCLAW_HOOK_TOKEN: firstNonEmpty(env, ["OPENCLAW_HOOK_TOKEN"]), REGISTRY_URL: firstNonEmpty(env, [ "REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL", @@ -535,25 +458,6 @@ function assertNoDeprecatedAllowAllVerified(env: RuntimeEnvInput): void { }); } -function loadHookTokenFromFallback( - env: MutableEnv, - options: ProxyConfigLoadOptions, -): void { - if ( - firstNonEmpty(env as RuntimeEnvInput, ["OPENCLAW_HOOK_TOKEN"]) !== undefined - ) { - return; - } - - const token = resolveHookTokenFromOpenClawConfig( - env as RuntimeEnvInput, - options, - ); - if (token !== undefined) { - env.OPENCLAW_HOOK_TOKEN = token; - } -} - function loadOpenclawBaseUrlFromFallback( env: MutableEnv, options: ProxyConfigLoadOptions, @@ -590,7 +494,6 @@ export function parseProxyConfig(env: unknown): ProxyConfig { const candidateConfig = { listenPort: parsedRuntimeEnv.data.LISTEN_PORT, openclawBaseUrl: parsedRuntimeEnv.data.OPENCLAW_BASE_URL, - openclawHookToken: parsedRuntimeEnv.data.OPENCLAW_HOOK_TOKEN, registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, environment: parsedRuntimeEnv.data.ENVIRONMENT, crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, @@ -620,6 +523,5 @@ export function loadProxyConfig( ): ProxyConfig { const mergedEnv = loadEnvWithDotEnvFallback(env, options); loadOpenclawBaseUrlFromFallback(mergedEnv, options); - loadHookTokenFromFallback(mergedEnv, options); return parseProxyConfig(mergedEnv); } diff --git a/apps/proxy/src/index.test.ts b/apps/proxy/src/index.test.ts index d2f08cf..a07dd45 100644 --- a/apps/proxy/src/index.test.ts +++ b/apps/proxy/src/index.test.ts @@ -12,12 +12,9 @@ describe("proxy", () => { }); it("initializes 
runtime with validated config", () => { - const runtime = initializeProxyRuntime({ - OPENCLAW_HOOK_TOKEN: "hook-token", - }); + const runtime = initializeProxyRuntime({}); expect(runtime.version).toBe(PROXY_VERSION); - expect(runtime.config.openclawHookToken).toBe("hook-token"); expect(runtime.config.listenPort).toBe(4000); }); @@ -31,7 +28,7 @@ describe("proxy", () => { const runtime = initializeProxyRuntime({}); expect(runtime.version).toBe(PROXY_VERSION); - expect(runtime.config.openclawHookToken).toBeUndefined(); + expect(runtime.config.openclawBaseUrl).toBe("http://127.0.0.1:18789"); }); it("prefers APP_VERSION for runtime version", () => { diff --git a/apps/proxy/src/pairing-constants.ts b/apps/proxy/src/pairing-constants.ts index c73c74d..88a790f 100644 --- a/apps/proxy/src/pairing-constants.ts +++ b/apps/proxy/src/pairing-constants.ts @@ -2,7 +2,7 @@ export const PAIR_START_PATH = "/pair/start"; export const PAIR_CONFIRM_PATH = "/pair/confirm"; export const OWNER_PAT_HEADER = "x-claw-owner-pat"; -export const DEFAULT_PAIRING_CODE_TTL_SECONDS = 300; -export const MAX_PAIRING_CODE_TTL_SECONDS = 900; +export const DEFAULT_PAIRING_TICKET_TTL_SECONDS = 900; +export const MAX_PAIRING_TICKET_TTL_SECONDS = 900; export const PROXY_TRUST_DO_NAME = "global-trust"; diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index 3a27501..202aac9 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -1,9 +1,9 @@ import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; +import { createPairingTicket } from "./pairing-ticket.js"; const INITIATOR_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_000)); const RESPONDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_100)); -const INTRUDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_300)); vi.mock("./auth-middleware.js", async () => { const { createMiddleware } = await 
import("hono/factory"); @@ -47,6 +47,7 @@ function createPairingApp(input?: { nowMs: input?.nowMs, }, confirm: { + fetchImpl: input?.fetchImpl, nowMs: input?.nowMs, }, }, @@ -60,7 +61,7 @@ function createPairingApp(input?: { } describe(`POST ${PAIR_START_PATH}`, () => { - it("creates a pairing code when owner PAT controls caller agent DID", async () => { + it("creates a pairing ticket when owner PAT controls caller agent DID", async () => { const fetchMock = vi.fn(async (_requestInput: unknown) => Response.json( { @@ -82,23 +83,19 @@ describe(`POST ${PAIR_START_PATH}`, () => { "content-type": "application/json", [OWNER_PAT_HEADER]: "clw_pat_owner_token", }, - body: JSON.stringify({ - agentDid: RESPONDER_AGENT_DID, - }), + body: JSON.stringify({}), }); expect(response.status).toBe(200); const body = (await response.json()) as { expiresAt: string; initiatorAgentDid: string; - pairingCode: string; - responderAgentDid: string; + ticket: string; }; - expect(body.pairingCode.length).toBeGreaterThan(0); + expect(body.ticket.startsWith("clwpair1_")).toBe(true); expect(body.initiatorAgentDid).toBe(INITIATOR_AGENT_DID); - expect(body.responderAgentDid).toBe(RESPONDER_AGENT_DID); - expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); + expect(body.expiresAt).toBe("2023-11-14T22:28:20.000Z"); expect(fetchImpl).toHaveBeenCalledTimes(1); const fetchCallUrl = String(fetchMock.mock.calls[0]?.[0] ?? 
""); expect(fetchCallUrl).toContain("/v1/agents/"); @@ -117,9 +114,7 @@ describe(`POST ${PAIR_START_PATH}`, () => { "content-type": "application/json", [OWNER_PAT_HEADER]: "clw_pat_invalid", }, - body: JSON.stringify({ - agentDid: RESPONDER_AGENT_DID, - }), + body: JSON.stringify({}), }); expect(response.status).toBe(401); @@ -144,9 +139,7 @@ describe(`POST ${PAIR_START_PATH}`, () => { "content-type": "application/json", [OWNER_PAT_HEADER]: "clw_pat_owner", }, - body: JSON.stringify({ - agentDid: RESPONDER_AGENT_DID, - }), + body: JSON.stringify({}), }); expect(response.status).toBe(403); @@ -156,15 +149,15 @@ describe(`POST ${PAIR_START_PATH}`, () => { }); describe(`POST ${PAIR_CONFIRM_PATH}`, () => { - it("consumes pairing code and enables mutual trust", async () => { + it("confirms local issuer tickets and enables mutual trust", async () => { const { app, trustStore } = createPairingApp({ nowMs: () => 1_700_000_000_000, }); - const pairingCode = await trustStore.createPairingCode({ + const ticket = await trustStore.createPairingTicket({ initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - ttlSeconds: 300, + issuerProxyUrl: "http://localhost", + ttlSeconds: 900, nowMs: 1_700_000_000_000, }); @@ -175,7 +168,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { "x-test-agent-did": RESPONDER_AGENT_DID, }, body: JSON.stringify({ - pairingCode: pairingCode.pairingCode, + ticket: ticket.ticket, }), }); @@ -206,15 +199,32 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { ).toBe(true); }); - it("rejects pair confirm when caller does not match target agent", async () => { + it("forwards confirm to issuer proxy when ticket issuer differs", async () => { + const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { + expect(String(url)).toBe("https://issuer.proxy.example/pair/confirm"); + const forwardedBody = JSON.parse(String(init?.body ?? 
"{}")) as { + ticket: string; + }; + expect(forwardedBody.ticket.startsWith("clwpair1_")).toBe(true); + + return Response.json( + { + paired: true, + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }, + { status: 201 }, + ); + }); + const { app, trustStore } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, nowMs: () => 1_700_000_000_000, }); - const pairingCode = await trustStore.createPairingCode({ - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - ttlSeconds: 300, + const created = createPairingTicket({ + issuerProxyUrl: "https://issuer.proxy.example", + expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, }); @@ -222,15 +232,20 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { method: "POST", headers: { "content-type": "application/json", - "x-test-agent-did": INTRUDER_AGENT_DID, + "x-test-agent-did": RESPONDER_AGENT_DID, }, body: JSON.stringify({ - pairingCode: pairingCode.pairingCode, + ticket: created.ticket, }), }); - expect(response.status).toBe(403); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_CODE_AGENT_MISMATCH"); + expect(response.status).toBe(201); + expect(forwardFetch).toHaveBeenCalledTimes(1); + expect( + await trustStore.isPairAllowed({ + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }), + ).toBe(true); }); }); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 0320641..a51ff23 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -3,12 +3,16 @@ import { AppError, type Logger } from "@clawdentity/sdk"; import type { Context } from "hono"; import type { ProxyRequestVariables } from "./auth-middleware.js"; import { - DEFAULT_PAIRING_CODE_TTL_SECONDS, - MAX_PAIRING_CODE_TTL_SECONDS, + DEFAULT_PAIRING_TICKET_TTL_SECONDS, + MAX_PAIRING_TICKET_TTL_SECONDS, OWNER_PAT_HEADER, 
PAIR_CONFIRM_PATH, PAIR_START_PATH, } from "./pairing-constants.js"; +import { + PairingTicketParseError, + parsePairingTicket, +} from "./pairing-ticket.js"; import { type ProxyTrustStore, ProxyTrustStoreError, @@ -34,6 +38,7 @@ type CreatePairStartHandlerOptions = PairStartRuntimeOptions & { }; export type PairConfirmRuntimeOptions = { + fetchImpl?: typeof fetch; nowMs?: () => number; }; @@ -60,37 +65,9 @@ function normalizeRegistryUrl(registryUrl: string): string { return new URL(baseUrl).toString(); } -function parseAgentDid(value: unknown, inputName: string): string { - if (typeof value !== "string" || value.trim().length === 0) { - throw new AppError({ - code: "PROXY_PAIR_INVALID_BODY", - message: `${inputName} is required`, - status: 400, - expose: true, - }); - } - - const candidate = value.trim(); - try { - const parsed = parseDid(candidate); - if (parsed.kind !== "agent") { - throw new Error("Invalid kind"); - } - } catch { - throw new AppError({ - code: "PROXY_PAIR_INVALID_BODY", - message: `${inputName} must be a valid agent DID`, - status: 400, - expose: true, - }); - } - - return candidate; -} - function parseTtlSeconds(value: unknown): number { if (value === undefined) { - return DEFAULT_PAIRING_CODE_TTL_SECONDS; + return DEFAULT_PAIRING_TICKET_TTL_SECONDS; } if (typeof value !== "number" || !Number.isInteger(value)) { @@ -102,10 +79,10 @@ function parseTtlSeconds(value: unknown): number { }); } - if (value < 1 || value > MAX_PAIRING_CODE_TTL_SECONDS) { + if (value < 1 || value > MAX_PAIRING_TICKET_TTL_SECONDS) { throw new AppError({ code: "PROXY_PAIR_INVALID_BODY", - message: `ttlSeconds must be between 1 and ${MAX_PAIRING_CODE_TTL_SECONDS}`, + message: `ttlSeconds must be between 1 and ${MAX_PAIRING_TICKET_TTL_SECONDS}`, status: 400, expose: true, }); @@ -222,7 +199,15 @@ async function assertPatOwnsInitiatorAgent(input: { }); } -function toPairingCodeAppError(error: unknown): AppError { +async function parseJsonResponse(response: Response): 
Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function toPairingStoreAppError(error: unknown): AppError { if (error instanceof ProxyTrustStoreError) { return new AppError({ code: error.code, @@ -240,6 +225,116 @@ function toPairingCodeAppError(error: unknown): AppError { }); } +function extractErrorCode(payload: unknown): string | undefined { + if (typeof payload !== "object" || payload === null) { + return undefined; + } + + const error = (payload as { error?: unknown }).error; + if (typeof error !== "object" || error === null) { + return undefined; + } + + return typeof (error as { code?: unknown }).code === "string" + ? (error as { code: string }).code + : undefined; +} + +function extractErrorMessage(payload: unknown): string | undefined { + if (typeof payload !== "object" || payload === null) { + return undefined; + } + + const error = (payload as { error?: unknown }).error; + if (typeof error !== "object" || error === null) { + return undefined; + } + + return typeof (error as { message?: unknown }).message === "string" + ? (error as { message: string }).message + : undefined; +} + +function normalizeProxyOrigin(value: string): string { + const parsed = new URL(value); + return parsed.origin; +} + +function mapForwardedPairConfirmError( + status: number, + payload: unknown, +): AppError { + const code = extractErrorCode(payload) ?? "PROXY_PAIR_CONFIRM_FAILED"; + const message = + extractErrorMessage(payload) ?? + (status >= 500 + ? 
"Issuer proxy pairing service is unavailable" + : "Issuer proxy rejected pairing confirm"); + + return new AppError({ + code, + message, + status, + expose: true, + }); +} + +function parsePairConfirmResponse(payload: unknown): { + paired: true; + initiatorAgentDid: string; + responderAgentDid: string; +} { + if (typeof payload !== "object" || payload === null) { + throw new AppError({ + code: "PROXY_PAIR_CONFIRM_INVALID_RESPONSE", + message: "Issuer proxy response is invalid", + status: 502, + expose: true, + }); + } + + const paired = (payload as { paired?: unknown }).paired === true; + const initiatorRaw = (payload as { initiatorAgentDid?: unknown }) + .initiatorAgentDid; + const responderRaw = (payload as { responderAgentDid?: unknown }) + .responderAgentDid; + const initiatorAgentDid = + typeof initiatorRaw === "string" ? initiatorRaw : ""; + const responderAgentDid = + typeof responderRaw === "string" ? responderRaw : ""; + + if (!paired) { + throw new AppError({ + code: "PROXY_PAIR_CONFIRM_INVALID_RESPONSE", + message: "Issuer proxy response is invalid", + status: 502, + expose: true, + }); + } + + try { + if (parseDid(initiatorAgentDid).kind !== "agent") { + throw new Error("invalid"); + } + if (parseDid(responderAgentDid).kind !== "agent") { + throw new Error("invalid"); + } + } catch { + throw new AppError({ + code: "PROXY_PAIR_CONFIRM_INVALID_RESPONSE", + message: "Issuer proxy response is invalid", + status: 502, + expose: true, + }); + } + + return { + paired: true, + initiatorAgentDid, + responderAgentDid, + }; +} + export function createPairStartHandler( options: CreatePairStartHandlerOptions, ): (c: PairingRouteContext) => Promise { @@ -258,20 +353,9 @@ export function createPairStartHandler( } const body = (await parseJsonBody(c)) as { - agentDid?: unknown; ttlSeconds?: unknown; }; - const responderAgentDid = parseAgentDid(body.agentDid, "agentDid"); - if (responderAgentDid === auth.agentDid) { - throw new AppError({ - code: 
"PROXY_PAIR_INVALID_BODY", - message: "agentDid must be different from caller agent DID", - status: 400, - expose: true, - }); - } - const ttlSeconds = parseTtlSeconds(body.ttlSeconds); const ownerPat = parseOwnerPatHeader(c.req.header(OWNER_PAT_HEADER)); @@ -282,29 +366,29 @@ export function createPairStartHandler( registryUrl, }); - const pairingCodeResult = await options.trustStore - .createPairingCode({ + const issuerProxyUrl = normalizeProxyOrigin(c.req.url); + const pairingTicketResult = await options.trustStore + .createPairingTicket({ initiatorAgentDid: auth.agentDid, - responderAgentDid, + issuerProxyUrl, ttlSeconds, nowMs: nowMs(), }) .catch((error: unknown) => { - throw toPairingCodeAppError(error); + throw toPairingStoreAppError(error); }); options.logger.info("proxy.pair.start", { requestId: c.get("requestId"), initiatorAgentDid: auth.agentDid, - responderAgentDid, - expiresAt: new Date(pairingCodeResult.expiresAtMs).toISOString(), + issuerProxyUrl: pairingTicketResult.issuerProxyUrl, + expiresAt: new Date(pairingTicketResult.expiresAtMs).toISOString(), }); return c.json({ - initiatorAgentDid: pairingCodeResult.initiatorAgentDid, - responderAgentDid: pairingCodeResult.responderAgentDid, - pairingCode: pairingCodeResult.pairingCode, - expiresAt: new Date(pairingCodeResult.expiresAtMs).toISOString(), + initiatorAgentDid: pairingTicketResult.initiatorAgentDid, + ticket: pairingTicketResult.ticket, + expiresAt: new Date(pairingTicketResult.expiresAtMs).toISOString(), }); }; } @@ -313,6 +397,7 @@ export function createPairConfirmHandler( options: CreatePairConfirmHandlerOptions, ): (c: PairingRouteContext) => Promise { const nowMs = options.nowMs ?? Date.now; + const fetchImpl = options.fetchImpl ?? 
fetch; return async (c) => { const auth = c.get("auth"); @@ -325,42 +410,135 @@ export function createPairConfirmHandler( } const body = (await parseJsonBody(c)) as { - pairingCode?: unknown; + ticket?: unknown; }; - if ( - typeof body.pairingCode !== "string" || - body.pairingCode.trim() === "" - ) { + if (typeof body.ticket !== "string" || body.ticket.trim() === "") { throw new AppError({ code: "PROXY_PAIR_INVALID_BODY", - message: "pairingCode is required", + message: "ticket is required", + status: 400, + expose: true, + }); + } + + const ticket = body.ticket.trim(); + + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(ticket); + } catch (error) { + if (error instanceof PairingTicketParseError) { + throw new AppError({ + code: error.code, + message: error.message, + status: 400, + expose: true, + }); + } + + throw new AppError({ + code: "PROXY_PAIR_TICKET_INVALID_FORMAT", + message: "Pairing ticket format is invalid", status: 400, expose: true, }); } - const consumedPairingCode = await options.trustStore - .confirmPairingCode({ - pairingCode: body.pairingCode.trim(), + const localProxyOrigin = normalizeProxyOrigin(c.req.url); + const ticketIssuerOrigin = normalizeProxyOrigin(parsedTicket.iss); + const isIssuerLocal = ticketIssuerOrigin === localProxyOrigin; + + if (!isIssuerLocal) { + const issuerConfirmUrl = new URL( + PAIR_CONFIRM_PATH, + ticketIssuerOrigin.endsWith("/") + ? ticketIssuerOrigin + : `${ticketIssuerOrigin}/`, + ).toString(); + + const forwardedResponse = await fetchImpl(issuerConfirmUrl, { + method: "POST", + headers: c.req.raw.headers, + body: JSON.stringify({ ticket }), + }).catch((error: unknown) => { + throw new AppError({ + code: "PROXY_PAIR_STATE_UNAVAILABLE", + message: "Issuer proxy pairing service is unavailable", + status: 503, + details: { + reason: error instanceof Error ? 
error.message : "unknown", + }, + expose: true, + }); + }); + + const forwardedBody = await parseJsonResponse(forwardedResponse); + if (!forwardedResponse.ok) { + throw mapForwardedPairConfirmError( + forwardedResponse.status, + forwardedBody, + ); + } + + const confirmed = parsePairConfirmResponse(forwardedBody); + if (confirmed.responderAgentDid !== auth.agentDid) { + throw new AppError({ + code: "PROXY_PAIR_CONFIRM_RESPONDER_MISMATCH", + message: "Issuer proxy response did not match caller responder DID", + status: 502, + expose: true, + }); + } + + await options.trustStore + .upsertPair({ + initiatorAgentDid: confirmed.initiatorAgentDid, + responderAgentDid: confirmed.responderAgentDid, + }) + .catch((error: unknown) => { + throw toPairingStoreAppError(error); + }); + + options.logger.info("proxy.pair.confirm.forwarded", { + requestId: c.get("requestId"), + initiatorAgentDid: confirmed.initiatorAgentDid, + responderAgentDid: confirmed.responderAgentDid, + issuerProxyUrl: ticketIssuerOrigin, + }); + + return c.json( + { + paired: true, + initiatorAgentDid: confirmed.initiatorAgentDid, + responderAgentDid: confirmed.responderAgentDid, + }, + 201, + ); + } + + const confirmedPairingTicket = await options.trustStore + .confirmPairingTicket({ + ticket, responderAgentDid: auth.agentDid, nowMs: nowMs(), }) .catch((error: unknown) => { - throw toPairingCodeAppError(error); + throw toPairingStoreAppError(error); }); options.logger.info("proxy.pair.confirm", { requestId: c.get("requestId"), - initiatorAgentDid: consumedPairingCode.initiatorAgentDid, - responderAgentDid: consumedPairingCode.responderAgentDid, + initiatorAgentDid: confirmedPairingTicket.initiatorAgentDid, + responderAgentDid: confirmedPairingTicket.responderAgentDid, + issuerProxyUrl: confirmedPairingTicket.issuerProxyUrl, }); return c.json( { paired: true, - initiatorAgentDid: consumedPairingCode.initiatorAgentDid, - responderAgentDid: consumedPairingCode.responderAgentDid, + initiatorAgentDid: 
confirmedPairingTicket.initiatorAgentDid, + responderAgentDid: confirmedPairingTicket.responderAgentDid, }, 201, ); diff --git a/apps/proxy/src/pairing-ticket.ts b/apps/proxy/src/pairing-ticket.ts new file mode 100644 index 0000000..e759359 --- /dev/null +++ b/apps/proxy/src/pairing-ticket.ts @@ -0,0 +1,161 @@ +import { + decodeBase64url, + encodeBase64url, + generateUlid, +} from "@clawdentity/protocol"; + +const PAIRING_TICKET_PREFIX = "clwpair1_"; +const PAIRING_TICKET_VERSION = 1; +const TICKET_NONCE_BYTES = 18; + +export type PairingTicketPayload = { + v: number; + iss: string; + kid: string; + nonce: string; + exp: number; +}; + +export class PairingTicketParseError extends Error { + readonly code: string; + + constructor(code: string, message: string) { + super(message); + this.name = "PairingTicketParseError"; + this.code = code; + } +} + +function utf8Encode(value: string): Uint8Array { + return new TextEncoder().encode(value); +} + +function utf8Decode(value: Uint8Array): string { + return new TextDecoder().decode(value); +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function assertHttpUrl(value: string): string { + let parsed: URL; + try { + parsed = new URL(value); + } catch { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_ISSUER", + "Pairing ticket issuer URL is invalid", + ); + } + + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_ISSUER", + "Pairing ticket issuer URL is invalid", + ); + } + + return parsed.origin; +} + +function createRandomNonce(): string { + const bytes = new Uint8Array(TICKET_NONCE_BYTES); + crypto.getRandomValues(bytes); + return encodeBase64url(bytes); +} + +export function createPairingTicket(input: { + issuerProxyUrl: string; + expiresAtMs: number; + nowMs: number; +}): { + ticket: string; + payload: PairingTicketPayload; +} { + const payload: 
PairingTicketPayload = { + v: PAIRING_TICKET_VERSION, + iss: assertHttpUrl(input.issuerProxyUrl), + kid: generateUlid(input.nowMs), + nonce: createRandomNonce(), + exp: Math.floor(input.expiresAtMs / 1000), + }; + + const encodedPayload = encodeBase64url(utf8Encode(JSON.stringify(payload))); + + return { + ticket: `${PAIRING_TICKET_PREFIX}${encodedPayload}`, + payload, + }; +} + +export function parsePairingTicket(ticket: string): PairingTicketPayload { + const trimmedTicket = ticket.trim(); + if (!trimmedTicket.startsWith(PAIRING_TICKET_PREFIX)) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + const encodedPayload = trimmedTicket.slice(PAIRING_TICKET_PREFIX.length); + if (encodedPayload.length === 0) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + let payload: unknown; + try { + payload = JSON.parse(utf8Decode(decodeBase64url(encodedPayload))); + } catch { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + if (!isRecord(payload)) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + if (payload.v !== PAIRING_TICKET_VERSION) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION", + "Pairing ticket version is not supported", + ); + } + + if (typeof payload.kid !== "string" || payload.kid.trim().length === 0) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + if (typeof payload.nonce !== "string" || payload.nonce.trim().length === 0) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + if (typeof payload.exp !== "number" || !Number.isInteger(payload.exp)) { + throw new 
PairingTicketParseError( + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + } + + return { + v: PAIRING_TICKET_VERSION, + iss: assertHttpUrl(payload.iss as string), + kid: payload.kid.trim(), + nonce: payload.nonce.trim(), + exp: payload.exp, + }; +} diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts index 7d04f53..a72fa37 100644 --- a/apps/proxy/src/proxy-trust-state.test.ts +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -68,40 +68,21 @@ describe("ProxyTrustState", () => { expect(harness.values.has("trust:agent-peers")).toBe(true); }); - it("does not treat pairs as known agents without agent-peer index", async () => { - const { proxyTrustState, harness } = createProxyTrustState({ - "trust:pairs": ["did:claw:agent:alice|did:claw:agent:bob"], - }); - - const knownResponse = await proxyTrustState.fetch( - makeRequest(TRUST_STORE_ROUTES.isAgentKnown, { - agentDid: "did:claw:agent:alice", - }), - ); - - expect(knownResponse.status).toBe(200); - expect((await knownResponse.json()) as { known: boolean }).toEqual({ - known: false, - }); - - expect(harness.values.get("trust:agent-peers")).toBeUndefined(); - }); - - it("confirms pairing code in one operation and persists trust", async () => { + it("confirms pairing ticket in one operation and persists trust", async () => { const { proxyTrustState } = createProxyTrustState(); - const codeResponse = await proxyTrustState.fetch( - makeRequest(TRUST_STORE_ROUTES.createPairingCode, { + const ticketResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { initiatorAgentDid: "did:claw:agent:alice", - responderAgentDid: "did:claw:agent:bob", + issuerProxyUrl: "https://proxy-a.example.com", ttlSeconds: 60, nowMs: 1_700_000_000_000, }), ); - const codeBody = (await codeResponse.json()) as { pairingCode: string }; + const ticketBody = (await ticketResponse.json()) as { ticket: string }; const confirmResponse = await 
proxyTrustState.fetch( - makeRequest(TRUST_STORE_ROUTES.confirmPairingCode, { - pairingCode: codeBody.pairingCode, + makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { + ticket: ticketBody.ticket, responderAgentDid: "did:claw:agent:bob", nowMs: 1_700_000_000_100, }), @@ -109,10 +90,15 @@ describe("ProxyTrustState", () => { expect(confirmResponse.status).toBe(200); expect( - (await confirmResponse.json()) as { initiatorAgentDid: string }, + (await confirmResponse.json()) as { + initiatorAgentDid: string; + responderAgentDid: string; + issuerProxyUrl: string; + }, ).toEqual({ initiatorAgentDid: "did:claw:agent:alice", responderAgentDid: "did:claw:agent:bob", + issuerProxyUrl: "https://proxy-a.example.com", }); const pairCheckResponse = await proxyTrustState.fetch( diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index 28b817d..93f8185 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -1,22 +1,26 @@ -import { generateUlid } from "@clawdentity/protocol"; import { - type PairingCodeConsumeInput, - type PairingCodeInput, + createPairingTicket, + PairingTicketParseError, + parsePairingTicket, +} from "./pairing-ticket.js"; +import { + type PairingTicketConfirmInput, + type PairingTicketInput, TRUST_STORE_ROUTES, } from "./proxy-trust-store.js"; -type StoredPairingCode = { +type StoredPairingTicket = { expiresAtMs: number; initiatorAgentDid: string; - responderAgentDid: string; + issuerProxyUrl: string; }; -type PairingCodeMap = Record; +type PairingTicketMap = Record; type AgentPeersIndex = Record; const PAIRS_STORAGE_KEY = "trust:pairs"; const AGENT_PEERS_STORAGE_KEY = "trust:agent-peers"; -const PAIRING_CODES_STORAGE_KEY = "trust:pairing-codes"; +const PAIRING_TICKETS_STORAGE_KEY = "trust:pairing-tickets"; function toPairKey( initiatorAgentDid: string, @@ -77,16 +81,12 @@ export class ProxyTrustState { return new Response("Not found", { status: 404 }); } - if (url.pathname === 
TRUST_STORE_ROUTES.createPairingCode) { - return this.handleCreatePairingCode(request); - } - - if (url.pathname === TRUST_STORE_ROUTES.consumePairingCode) { - return this.handleConsumePairingCode(request); + if (url.pathname === TRUST_STORE_ROUTES.createPairingTicket) { + return this.handleCreatePairingTicket(request); } - if (url.pathname === TRUST_STORE_ROUTES.confirmPairingCode) { - return this.handleConfirmPairingCode(request); + if (url.pathname === TRUST_STORE_ROUTES.confirmPairingTicket) { + return this.handleConfirmPairingTicket(request); } if (url.pathname === TRUST_STORE_ROUTES.upsertPair) { @@ -106,185 +106,164 @@ export class ProxyTrustState { async alarm(): Promise { const nowMs = Date.now(); - const pairingCodes = await this.loadPairingCodes(); + const pairingTickets = await this.loadPairingTickets(); let mutated = false; - for (const [pairingCode, details] of Object.entries(pairingCodes)) { + for (const [ticket, details] of Object.entries(pairingTickets)) { if (details.expiresAtMs <= nowMs) { - delete pairingCodes[pairingCode]; + delete pairingTickets[ticket]; mutated = true; } } if (mutated) { - await this.savePairingCodes(pairingCodes); + await this.savePairingTickets(pairingTickets); } - await this.scheduleNextCodeCleanup(pairingCodes); + await this.scheduleNextCodeCleanup(pairingTickets); } - private async handleCreatePairingCode(request: Request): Promise { + private async handleCreatePairingTicket(request: Request): Promise { const body = (await parseBody(request)) as - | Partial + | Partial | undefined; if ( !body || !isNonEmptyString(body.initiatorAgentDid) || - !isNonEmptyString(body.responderAgentDid) || + !isNonEmptyString(body.issuerProxyUrl) || typeof body.ttlSeconds !== "number" || !Number.isInteger(body.ttlSeconds) || body.ttlSeconds <= 0 ) { return toErrorResponse({ code: "PROXY_PAIR_START_INVALID_BODY", - message: "Pairing code create input is invalid", + message: "Pairing ticket create input is invalid", status: 400, }); } const 
nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); - const pairingCode = generateUlid(nowMs); const expiresAtMs = nowMs + body.ttlSeconds * 1000; - const pairingCodes = await this.loadPairingCodes(); - pairingCodes[pairingCode] = { + let created: ReturnType; + try { + created = createPairingTicket({ + issuerProxyUrl: body.issuerProxyUrl, + expiresAtMs, + nowMs, + }); + } catch (error) { + if (error instanceof PairingTicketParseError) { + return toErrorResponse({ + code: error.code, + message: error.message, + status: 400, + }); + } + + throw error; + } + + const pairingTickets = await this.loadPairingTickets(); + pairingTickets[created.ticket] = { initiatorAgentDid: body.initiatorAgentDid, - responderAgentDid: body.responderAgentDid, + issuerProxyUrl: created.payload.iss, expiresAtMs, }; - await this.savePairingCodes(pairingCodes); - await this.scheduleNextCodeCleanup(pairingCodes); + await this.savePairingTickets(pairingTickets); + await this.scheduleNextCodeCleanup(pairingTickets); return Response.json({ - pairingCode, + ticket: created.ticket, expiresAtMs, initiatorAgentDid: body.initiatorAgentDid, - responderAgentDid: body.responderAgentDid, + issuerProxyUrl: created.payload.iss, }); } - private async handleConsumePairingCode(request: Request): Promise { + private async handleConfirmPairingTicket( + request: Request, + ): Promise { const body = (await parseBody(request)) as - | Partial + | Partial | undefined; if ( !body || - !isNonEmptyString(body.pairingCode) || + !isNonEmptyString(body.ticket) || !isNonEmptyString(body.responderAgentDid) ) { return toErrorResponse({ code: "PROXY_PAIR_CONFIRM_INVALID_BODY", - message: "Pairing code consume input is invalid", + message: "Pairing ticket confirm input is invalid", status: 400, }); } - const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : Date.now(); - const pairingCodes = await this.loadPairingCodes(); - const stored = pairingCodes[body.pairingCode]; - - if (!stored) { - return toErrorResponse({ - code: "PROXY_PAIR_CODE_NOT_FOUND", - message: "Pairing code not found", - status: 404, - }); - } - - if (stored.expiresAtMs <= nowMs) { - delete pairingCodes[body.pairingCode]; - await this.savePairingCodes(pairingCodes); - await this.scheduleNextCodeCleanup(pairingCodes); - return toErrorResponse({ - code: "PROXY_PAIR_CODE_EXPIRED", - message: "Pairing code has expired", - status: 410, - }); - } - - if (stored.responderAgentDid !== body.responderAgentDid) { - return toErrorResponse({ - code: "PROXY_PAIR_CODE_AGENT_MISMATCH", - message: "Pairing code does not match caller agent DID", - status: 403, - }); - } - - delete pairingCodes[body.pairingCode]; - await this.savePairingCodes(pairingCodes); - await this.scheduleNextCodeCleanup(pairingCodes); - - return Response.json({ - initiatorAgentDid: stored.initiatorAgentDid, - responderAgentDid: stored.responderAgentDid, - }); - } + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(body.ticket); + } catch (error) { + if (error instanceof PairingTicketParseError) { + return toErrorResponse({ + code: error.code, + message: error.message, + status: 400, + }); + } - private async handleConfirmPairingCode(request: Request): Promise { - const body = (await parseBody(request)) as - | Partial - | undefined; - if ( - !body || - !isNonEmptyString(body.pairingCode) || - !isNonEmptyString(body.responderAgentDid) - ) { - return toErrorResponse({ - code: "PROXY_PAIR_CONFIRM_INVALID_BODY", - message: "Pairing code consume input is invalid", - status: 400, - }); + throw error; } const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : Date.now(); - const pairingCodes = await this.loadPairingCodes(); - const stored = pairingCodes[body.pairingCode]; + const pairingTickets = await this.loadPairingTickets(); + const stored = pairingTickets[body.ticket]; if (!stored) { return toErrorResponse({ - code: "PROXY_PAIR_CODE_NOT_FOUND", - message: "Pairing code not found", + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", status: 404, }); } - if (stored.expiresAtMs <= nowMs) { - delete pairingCodes[body.pairingCode]; - await this.savePairingCodes(pairingCodes); - await this.scheduleNextCodeCleanup(pairingCodes); + if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + delete pairingTickets[body.ticket]; + await this.savePairingTickets(pairingTickets); + await this.scheduleNextCodeCleanup(pairingTickets); return toErrorResponse({ - code: "PROXY_PAIR_CODE_EXPIRED", - message: "Pairing code has expired", + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", status: 410, }); } - if (stored.responderAgentDid !== body.responderAgentDid) { + if (stored.issuerProxyUrl !== parsedTicket.iss) { return toErrorResponse({ - code: "PROXY_PAIR_CODE_AGENT_MISMATCH", - message: "Pairing code does not match caller agent DID", - status: 403, + code: "PROXY_PAIR_TICKET_INVALID_ISSUER", + message: "Pairing ticket issuer URL is invalid", + status: 400, }); } const pairs = await this.loadPairs(); - pairs.add(toPairKey(stored.initiatorAgentDid, stored.responderAgentDid)); + pairs.add(toPairKey(stored.initiatorAgentDid, body.responderAgentDid)); const agentPeers = await this.loadAgentPeers(); - addPeer(agentPeers, stored.initiatorAgentDid, stored.responderAgentDid); - addPeer(agentPeers, stored.responderAgentDid, stored.initiatorAgentDid); + addPeer(agentPeers, stored.initiatorAgentDid, body.responderAgentDid); + addPeer(agentPeers, body.responderAgentDid, stored.initiatorAgentDid); await this.savePairs(pairs); await this.saveAgentPeers(agentPeers); - 
delete pairingCodes[body.pairingCode]; - await this.savePairingCodes(pairingCodes); - await this.scheduleNextCodeCleanup(pairingCodes); + delete pairingTickets[body.ticket]; + await this.savePairingTickets(pairingTickets); + await this.scheduleNextCodeCleanup(pairingTickets); return Response.json({ initiatorAgentDid: stored.initiatorAgentDid, - responderAgentDid: stored.responderAgentDid, + responderAgentDid: body.responderAgentDid, + issuerProxyUrl: stored.issuerProxyUrl, }); } @@ -404,9 +383,9 @@ export class ProxyTrustState { await this.state.storage.put(AGENT_PEERS_STORAGE_KEY, agentPeers); } - private async loadPairingCodes(): Promise { - const raw = await this.state.storage.get( - PAIRING_CODES_STORAGE_KEY, + private async loadPairingTickets(): Promise { + const raw = await this.state.storage.get( + PAIRING_TICKETS_STORAGE_KEY, ); if (typeof raw !== "object" || raw === null) { @@ -416,14 +395,16 @@ export class ProxyTrustState { return raw; } - private async savePairingCodes(pairingCodes: PairingCodeMap): Promise { - await this.state.storage.put(PAIRING_CODES_STORAGE_KEY, pairingCodes); + private async savePairingTickets( + pairingTickets: PairingTicketMap, + ): Promise { + await this.state.storage.put(PAIRING_TICKETS_STORAGE_KEY, pairingTickets); } private async scheduleNextCodeCleanup( - pairingCodes: PairingCodeMap, + pairingTickets: PairingTicketMap, ): Promise { - const expiryValues = Object.values(pairingCodes).map( + const expiryValues = Object.values(pairingTickets).map( (details) => details.expiresAtMs, ); diff --git a/apps/proxy/src/proxy-trust-store.test.ts b/apps/proxy/src/proxy-trust-store.test.ts index 9f044c1..26fa99e 100644 --- a/apps/proxy/src/proxy-trust-store.test.ts +++ b/apps/proxy/src/proxy-trust-store.test.ts @@ -48,81 +48,60 @@ describe("in-memory proxy trust store", () => { expect(await store.isAgentKnown("did:claw:agent:charlie")).toBe(false); }); - it("consumes one-time pairing codes", async () => { + it("confirms one-time pairing 
tickets and establishes trust", async () => { const store = createInMemoryProxyTrustStore(); - const code = await store.createPairingCode({ + const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", - responderAgentDid: "did:claw:agent:bob", + issuerProxyUrl: "https://proxy-a.example.com", ttlSeconds: 60, nowMs: 1_700_000_000_000, }); - const consumed = await store.consumePairingCode({ - pairingCode: code.pairingCode, + const confirmed = await store.confirmPairingTicket({ + ticket: ticket.ticket, responderAgentDid: "did:claw:agent:bob", nowMs: 1_700_000_000_100, }); - expect(consumed).toEqual({ + expect(confirmed).toEqual({ initiatorAgentDid: "did:claw:agent:alice", responderAgentDid: "did:claw:agent:bob", + issuerProxyUrl: "https://proxy-a.example.com", }); await expect( - store.consumePairingCode({ - pairingCode: code.pairingCode, + store.confirmPairingTicket({ + ticket: ticket.ticket, responderAgentDid: "did:claw:agent:bob", nowMs: 1_700_000_000_200, }), ).rejects.toMatchObject({ - code: "PROXY_PAIR_CODE_NOT_FOUND", + code: "PROXY_PAIR_TICKET_NOT_FOUND", status: 404, }); + + expect(await store.isAgentKnown("did:claw:agent:alice")).toBe(true); + expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(true); }); - it("confirms pairing code atomically and establishes trust", async () => { + it("rejects expired tickets", async () => { const store = createInMemoryProxyTrustStore(); - const code = await store.createPairingCode({ + const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", - responderAgentDid: "did:claw:agent:bob", - ttlSeconds: 60, + issuerProxyUrl: "https://proxy-a.example.com", + ttlSeconds: 1, nowMs: 1_700_000_000_000, }); - const confirmed = await store.confirmPairingCode({ - pairingCode: code.pairingCode, - responderAgentDid: "did:claw:agent:bob", - nowMs: 1_700_000_000_100, - }); - - expect(confirmed).toEqual({ - initiatorAgentDid: "did:claw:agent:alice", - 
responderAgentDid: "did:claw:agent:bob", - }); - expect(await store.isAgentKnown("did:claw:agent:alice")).toBe(true); - expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(true); - expect( - await store.isPairAllowed({ - initiatorAgentDid: "did:claw:agent:alice", - responderAgentDid: "did:claw:agent:bob", - }), - ).toBe(true); - expect( - await store.isPairAllowed({ - initiatorAgentDid: "did:claw:agent:bob", - responderAgentDid: "did:claw:agent:alice", - }), - ).toBe(true); - await expect( - store.consumePairingCode({ - pairingCode: code.pairingCode, + store.confirmPairingTicket({ + ticket: ticket.ticket, responderAgentDid: "did:claw:agent:bob", - nowMs: 1_700_000_000_200, + nowMs: 1_700_000_002_000, }), ).rejects.toMatchObject({ - code: "PROXY_PAIR_CODE_NOT_FOUND", - status: 404, + code: "PROXY_PAIR_TICKET_EXPIRED", + status: 410, }); }); }); diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index e53bb4d..75db178 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -1,29 +1,34 @@ -import { generateUlid } from "@clawdentity/protocol"; import { PROXY_TRUST_DO_NAME } from "./pairing-constants.js"; +import { + createPairingTicket, + PairingTicketParseError, + parsePairingTicket, +} from "./pairing-ticket.js"; -export type PairingCodeInput = { +export type PairingTicketInput = { initiatorAgentDid: string; - responderAgentDid: string; - nowMs?: number; + issuerProxyUrl: string; ttlSeconds: number; + nowMs?: number; }; -export type PairingCodeResult = { - pairingCode: string; +export type PairingTicketResult = { + ticket: string; expiresAtMs: number; initiatorAgentDid: string; - responderAgentDid: string; + issuerProxyUrl: string; }; -export type PairingCodeConsumeInput = { - pairingCode: string; +export type PairingTicketConfirmInput = { + ticket: string; responderAgentDid: string; nowMs?: number; }; -export type PairingCodeConsumeResult = { +export type PairingTicketConfirmResult = { 
initiatorAgentDid: string; responderAgentDid: string; + issuerProxyUrl: string; }; export type PairingInput = { @@ -32,13 +37,10 @@ export type PairingInput = { }; export interface ProxyTrustStore { - createPairingCode(input: PairingCodeInput): Promise; - consumePairingCode( - input: PairingCodeConsumeInput, - ): Promise; - confirmPairingCode( - input: PairingCodeConsumeInput, - ): Promise; + createPairingTicket(input: PairingTicketInput): Promise; + confirmPairingTicket( + input: PairingTicketConfirmInput, + ): Promise; isAgentKnown(agentDid: string): Promise; isPairAllowed(input: PairingInput): Promise; upsertPair(input: PairingInput): Promise; @@ -66,9 +68,8 @@ export class ProxyTrustStoreError extends Error { } export const TRUST_STORE_ROUTES = { - createPairingCode: "/pairing-codes/create", - consumePairingCode: "/pairing-codes/consume", - confirmPairingCode: "/pairing-codes/confirm", + createPairingTicket: "/pairing-tickets/create", + confirmPairingTicket: "/pairing-tickets/confirm", isAgentKnown: "/agents/known", isPairAllowed: "/pairs/check", upsertPair: "/pairs/upsert", @@ -159,24 +160,17 @@ export function createDurableProxyTrustStore( namespace: ProxyTrustStateNamespace, ): ProxyTrustStore { return { - async createPairingCode(input) { - return callDurableState( - namespace, - TRUST_STORE_ROUTES.createPairingCode, - input, - ); - }, - async consumePairingCode(input) { - return callDurableState( + async createPairingTicket(input) { + return callDurableState( namespace, - TRUST_STORE_ROUTES.consumePairingCode, + TRUST_STORE_ROUTES.createPairingTicket, input, ); }, - async confirmPairingCode(input) { - return callDurableState( + async confirmPairingTicket(input) { + return callDurableState( namespace, - TRUST_STORE_ROUTES.confirmPairingCode, + TRUST_STORE_ROUTES.confirmPairingTicket, input, ); }, @@ -209,19 +203,19 @@ export function createDurableProxyTrustStore( export function createInMemoryProxyTrustStore(): ProxyTrustStore { const pairKeys = new Set(); 
const agentPeers = new Map>(); - const pairingCodes = new Map< + const pairingTickets = new Map< string, { expiresAtMs: number; initiatorAgentDid: string; - responderAgentDid: string; + issuerProxyUrl: string; } >(); function cleanup(nowMs: number): void { - for (const [pairingCode, details] of pairingCodes.entries()) { + for (const [ticket, details] of pairingTickets.entries()) { if (details.expiresAtMs <= nowMs) { - pairingCodes.delete(pairingCode); + pairingTickets.delete(ticket); } } } @@ -232,88 +226,102 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { agentPeers.set(leftAgentDid, peers); } - function resolveConsumablePairingCode( - input: PairingCodeConsumeInput, - ): PairingCodeConsumeResult { + function resolveConfirmablePairingTicket( + input: PairingTicketConfirmInput, + ): PairingTicketConfirmResult { const nowMs = input.nowMs ?? Date.now(); - cleanup(nowMs); - const pairing = pairingCodes.get(input.pairingCode); - if (!pairing) { + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(input.ticket); + } catch (error) { + if (error instanceof PairingTicketParseError) { + throw new ProxyTrustStoreError({ + code: error.code, + message: error.message, + status: 400, + }); + } + + throw error; + } + + const stored = pairingTickets.get(input.ticket); + if (!stored) { throw new ProxyTrustStoreError({ - code: "PROXY_PAIR_CODE_NOT_FOUND", - message: "Pairing code not found", + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", status: 404, }); } - if (pairing.expiresAtMs <= nowMs) { - pairingCodes.delete(input.pairingCode); + if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + pairingTickets.delete(input.ticket); throw new ProxyTrustStoreError({ - code: "PROXY_PAIR_CODE_EXPIRED", - message: "Pairing code has expired", + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", status: 410, }); } - if (pairing.responderAgentDid !== input.responderAgentDid) { + 
if (stored.issuerProxyUrl !== parsedTicket.iss) { throw new ProxyTrustStoreError({ - code: "PROXY_PAIR_CODE_AGENT_MISMATCH", - message: "Pairing code does not match caller agent DID", - status: 403, + code: "PROXY_PAIR_TICKET_INVALID_ISSUER", + message: "Pairing ticket issuer URL is invalid", + status: 400, }); } return { - initiatorAgentDid: pairing.initiatorAgentDid, - responderAgentDid: pairing.responderAgentDid, + initiatorAgentDid: stored.initiatorAgentDid, + responderAgentDid: input.responderAgentDid, + issuerProxyUrl: stored.issuerProxyUrl, }; } return { - async createPairingCode(input) { + async createPairingTicket(input) { const nowMs = input.nowMs ?? Date.now(); cleanup(nowMs); - const pairingCode = generateUlid(nowMs); const expiresAtMs = nowMs + input.ttlSeconds * 1000; + const created = createPairingTicket({ + issuerProxyUrl: input.issuerProxyUrl, + expiresAtMs, + nowMs, + }); - pairingCodes.set(pairingCode, { + pairingTickets.set(created.ticket, { initiatorAgentDid: input.initiatorAgentDid, - responderAgentDid: input.responderAgentDid, + issuerProxyUrl: created.payload.iss, expiresAtMs, }); return { - pairingCode, + ticket: created.ticket, expiresAtMs, initiatorAgentDid: input.initiatorAgentDid, - responderAgentDid: input.responderAgentDid, + issuerProxyUrl: created.payload.iss, }; }, - async consumePairingCode(input) { - const consumedPair = resolveConsumablePairingCode(input); - pairingCodes.delete(input.pairingCode); - return consumedPair; - }, - async confirmPairingCode(input) { - const consumedPair = resolveConsumablePairingCode(input); + async confirmPairingTicket(input) { + const confirmedPair = resolveConfirmablePairingTicket(input); pairKeys.add( toPairKey( - consumedPair.initiatorAgentDid, - consumedPair.responderAgentDid, + confirmedPair.initiatorAgentDid, + confirmedPair.responderAgentDid, ), ); upsertPeer( - consumedPair.initiatorAgentDid, - consumedPair.responderAgentDid, + confirmedPair.initiatorAgentDid, + 
confirmedPair.responderAgentDid, ); upsertPeer( - consumedPair.responderAgentDid, - consumedPair.initiatorAgentDid, + confirmedPair.responderAgentDid, + confirmedPair.initiatorAgentDid, ); - pairingCodes.delete(input.pairingCode); - return consumedPair; + pairingTickets.delete(input.ticket); + return confirmedPair; }, async isAgentKnown(agentDid) { return (agentPeers.get(agentDid)?.size ?? 0) > 0; diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts index 3611cfc..c3b3616 100644 --- a/apps/proxy/src/server.test.ts +++ b/apps/proxy/src/server.test.ts @@ -12,9 +12,7 @@ import { createProxyApp } from "./server.js"; describe("proxy server", () => { it("returns health response with status, version, and environment", async () => { const app = createProxyApp({ - config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", - }), + config: parseProxyConfig({}), }); const res = await app.request("/health"); @@ -36,7 +34,6 @@ describe("proxy server", () => { it("uses ENVIRONMENT from config for health payload", async () => { const app = createProxyApp({ config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", ENVIRONMENT: "local", }), }); @@ -50,9 +47,7 @@ describe("proxy server", () => { it("uses provided app version when supplied by runtime", async () => { const app = createProxyApp({ - config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", - }), + config: parseProxyConfig({}), version: "sha-123456", }); @@ -67,9 +62,7 @@ describe("proxy server", () => { const logSpy = vi.spyOn(console, "info").mockImplementation(() => {}); try { const app = createProxyApp({ - config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", - }), + config: parseProxyConfig({}), }); const res = await app.request("/health"); @@ -100,9 +93,7 @@ describe("proxy server", () => { it("returns 429 for repeated unauthenticated probes on /hooks/agent from same IP", async () => { const app = createProxyApp({ - config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", - }), + 
config: parseProxyConfig({}), rateLimit: { publicIpMaxRequests: 2, publicIpWindowMs: 60_000, @@ -137,9 +128,7 @@ describe("proxy server", () => { it("returns 429 for repeated unauthenticated probes on relay connect from same IP", async () => { const app = createProxyApp({ - config: parseProxyConfig({ - OPENCLAW_HOOK_TOKEN: "token", - }), + config: parseProxyConfig({}), rateLimit: { publicIpMaxRequests: 2, publicIpWindowMs: 60_000, diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 77b195b..7a2e045 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -21,7 +21,6 @@ export type ProxyWorkerBindings = { LISTEN_PORT?: string; PORT?: string; OPENCLAW_BASE_URL?: string; - OPENCLAW_HOOK_TOKEN?: string; AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; PROXY_TRUST_STATE?: ProxyTrustStateNamespace; REGISTRY_URL?: string; @@ -51,7 +50,6 @@ let cachedRuntime: CachedProxyRuntime | undefined; function toCacheKey(env: ProxyWorkerBindings): string { const keyParts = [ env.OPENCLAW_BASE_URL, - env.OPENCLAW_HOOK_TOKEN, env.PROXY_TRUST_STATE === undefined ? 
"no-trust-do" : "has-trust-do", env.REGISTRY_URL, env.CLAWDENTITY_REGISTRY_URL, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0f6b94e..ba5e04d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -38,6 +38,18 @@ importers: commander: specifier: ^13.1.0 version: 13.1.0 + jsqr: + specifier: ^1.4.0 + version: 1.4.0 + pngjs: + specifier: ^7.0.0 + version: 7.0.0 + qrcode: + specifier: ^1.5.4 + version: 1.5.4 + ws: + specifier: ^8.19.0 + version: 8.19.0 devDependencies: '@clawdentity/connector': specifier: workspace:* @@ -51,6 +63,12 @@ importers: '@types/node': specifier: ^22.18.11 version: 22.19.11 + '@types/pngjs': + specifier: ^6.0.5 + version: 6.0.5 + '@types/qrcode': + specifier: ^1.5.6 + version: 1.5.6 apps/openclaw-skill: dependencies: @@ -1142,6 +1160,12 @@ packages: '@types/node@22.19.11': resolution: {integrity: sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==} + '@types/pngjs@6.0.5': + resolution: {integrity: sha512-0k5eKfrA83JOZPppLtS2C7OUtyNAl2wKNxfyYl9Q5g9lPkgBl/9hNyAu6HuEH2J4XmIv2znEpkDd0SaZVxW6iQ==} + + '@types/qrcode@1.5.6': + resolution: {integrity: sha512-te7NQcV2BOvdj2b1hCAHzAoMNuj65kNBMz0KBaxM6c3VGBOhU0dURQKOtH8CFNI/dsKkwlv32p26qYQTWoB5bw==} + '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} @@ -1279,6 +1303,10 @@ packages: resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} engines: {node: '>= 0.4'} + camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + chai@6.2.2: resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} engines: {node: '>=18'} @@ -1311,6 +1339,9 @@ packages: resolution: {integrity: 
sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==} engines: {node: '>=18'} + cliui@6.0.0: + resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==} + cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} @@ -1365,6 +1396,10 @@ packages: supports-color: optional: true + decamelize@1.2.0: + resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} + engines: {node: '>=0.10.0'} + defaults@1.0.4: resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} @@ -1380,6 +1415,9 @@ packages: resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} + dijkstrajs@1.0.3: + resolution: {integrity: sha512-qiSlmBq9+BCdCA/L46dw8Uy93mloxsPSbwnm5yrKn2vMPiy8KyAskTF6zuV/j5BMsmOGZDPs7KjU+mjb670kfA==} + dotenv-expand@11.0.7: resolution: {integrity: sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA==} engines: {node: '>=12'} @@ -1603,6 +1641,10 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} + find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + fix-dts-default-cjs-exports@1.0.1: resolution: {integrity: sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==} @@ -1771,6 +1813,9 @@ packages: jsonc-parser@3.2.0: resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} + jsqr@1.4.0: + resolution: {integrity: 
sha512-dxLob7q65Xg2DvstYkRpkYtmKm2sPJ9oFhrhmudT1dZvNFFTlroai3AWSpLey/w5vMcLBXRgOJsbXpdN9HzU/A==} + kleur@4.1.5: resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} @@ -1799,6 +1844,10 @@ packages: resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + log-symbols@4.1.0: resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} engines: {node: '>=10'} @@ -1924,6 +1973,22 @@ packages: resolution: {integrity: sha512-zAKMgGXUim0Jyd6CXK9lraBnD3H5yPGBPPOkC23a2BG6hsm4Zu6OQSjQuEtV0BHDf4aKHcUFvJiGRrFuW3MG8g==} engines: {node: '>=10'} + p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + + p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + + p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + path-key@3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} @@ -1961,6 +2026,14 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + pngjs@5.0.0: + 
resolution: {integrity: sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==} + engines: {node: '>=10.13.0'} + + pngjs@7.0.0: + resolution: {integrity: sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==} + engines: {node: '>=14.19.0'} + postcss-load-config@6.0.1: resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} engines: {node: '>= 18'} @@ -1990,6 +2063,11 @@ packages: proxy-from-env@1.1.0: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + qrcode@1.5.4: + resolution: {integrity: sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg==} + engines: {node: '>=10.13.0'} + hasBin: true + react-is@18.3.1: resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} @@ -2005,6 +2083,9 @@ packages: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} + require-main-filename@2.0.0: + resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} + resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} @@ -2040,6 +2121,9 @@ packages: engines: {node: '>=10'} hasBin: true + set-blocking@2.0.0: + resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} + sharp@0.34.5: resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} @@ -2310,6 +2394,9 @@ packages: wcwidth@1.0.1: resolution: {integrity: 
sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + which-module@2.0.1: + resolution: {integrity: sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==} + which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} @@ -2335,6 +2422,10 @@ packages: '@cloudflare/workers-types': optional: true + wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} @@ -2370,6 +2461,9 @@ packages: utf-8-validate: optional: true + y18n@4.0.3: + resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==} + y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} @@ -2379,10 +2473,18 @@ packages: engines: {node: '>= 14.6'} hasBin: true + yargs-parser@18.1.3: + resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} + engines: {node: '>=6'} + yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} + yargs@15.4.1: + resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==} + engines: {node: '>=8'} + yargs@17.7.2: resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} @@ -2994,6 +3096,14 @@ snapshots: dependencies: undici-types: 6.21.0 + '@types/pngjs@6.0.5': 
+ dependencies: + '@types/node': 22.19.11 + + '@types/qrcode@1.5.6': + dependencies: + '@types/node': 22.19.11 + '@types/ws@8.18.1': dependencies: '@types/node': 22.19.11 @@ -3129,6 +3239,8 @@ snapshots: es-errors: 1.3.0 function-bind: 1.1.2 + camelcase@5.3.1: {} + chai@6.2.2: {} chalk@4.1.2: @@ -3157,6 +3269,12 @@ snapshots: slice-ansi: 5.0.0 string-width: 7.2.0 + cliui@6.0.0: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 6.2.0 + cliui@8.0.1: dependencies: string-width: 4.2.3 @@ -3197,6 +3315,8 @@ snapshots: dependencies: ms: 2.1.3 + decamelize@1.2.0: {} + defaults@1.0.4: dependencies: clone: 1.0.4 @@ -3207,6 +3327,8 @@ snapshots: detect-libc@2.1.2: {} + dijkstrajs@1.0.3: {} + dotenv-expand@11.0.7: dependencies: dotenv: 16.4.7 @@ -3403,6 +3525,11 @@ snapshots: dependencies: to-regex-range: 5.0.1 + find-up@4.1.0: + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + fix-dts-default-cjs-exports@1.0.1: dependencies: magic-string: 0.30.21 @@ -3536,6 +3663,8 @@ snapshots: jsonc-parser@3.2.0: {} + jsqr@1.4.0: {} + kleur@4.1.5: {} lilconfig@3.1.3: {} @@ -3570,6 +3699,10 @@ snapshots: load-tsconfig@0.2.5: {} + locate-path@5.0.0: + dependencies: + p-locate: 4.1.0 + log-symbols@4.1.0: dependencies: chalk: 4.1.2 @@ -3746,6 +3879,18 @@ snapshots: strip-ansi: 6.0.1 wcwidth: 1.0.1 + p-limit@2.3.0: + dependencies: + p-try: 2.2.0 + + p-locate@4.1.0: + dependencies: + p-limit: 2.3.0 + + p-try@2.2.0: {} + + path-exists@4.0.0: {} + path-key@3.1.1: {} path-key@4.0.0: {} @@ -3770,6 +3915,10 @@ snapshots: mlly: 1.8.0 pathe: 2.0.3 + pngjs@5.0.0: {} + + pngjs@7.0.0: {} + postcss-load-config@6.0.1(postcss@8.5.6)(yaml@2.8.2): dependencies: lilconfig: 3.1.3 @@ -3791,6 +3940,12 @@ snapshots: proxy-from-env@1.1.0: {} + qrcode@1.5.4: + dependencies: + dijkstrajs: 1.0.3 + pngjs: 5.0.0 + yargs: 15.4.1 + react-is@18.3.1: {} readable-stream@3.6.2: @@ -3803,6 +3958,8 @@ snapshots: require-directory@2.1.1: {} + require-main-filename@2.0.0: {} + resolve-from@5.0.0: {} 
resolve-pkg-maps@1.0.0: {} @@ -3856,6 +4013,8 @@ snapshots: semver@7.7.4: {} + set-blocking@2.0.0: {} + sharp@0.34.5: dependencies: '@img/colour': 1.0.0 @@ -4117,6 +4276,8 @@ snapshots: dependencies: defaults: 1.0.4 + which-module@2.0.1: {} + which@2.0.2: dependencies: isexe: 2.0.0 @@ -4151,6 +4312,12 @@ snapshots: - bufferutil - utf-8-validate + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi@7.0.0: dependencies: ansi-styles: 4.3.0 @@ -4169,12 +4336,33 @@ snapshots: ws@8.19.0: {} + y18n@4.0.3: {} + y18n@5.0.8: {} yaml@2.8.2: {} + yargs-parser@18.1.3: + dependencies: + camelcase: 5.3.1 + decamelize: 1.2.0 + yargs-parser@21.1.1: {} + yargs@15.4.1: + dependencies: + cliui: 6.0.0 + decamelize: 1.2.0 + find-up: 4.1.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + require-main-filename: 2.0.0 + set-blocking: 2.0.0 + string-width: 4.2.3 + which-module: 2.0.1 + y18n: 4.0.3 + yargs-parser: 18.1.3 + yargs@17.7.2: dependencies: cliui: 8.0.1 From 2a796ef9e00f1e393e91f246ba66b48e96147db6 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 23:19:12 +0530 Subject: [PATCH 087/190] fix(proxy): block unsafe pair confirm forwarding targets by default --- apps/proxy/AGENTS.md | 1 + apps/proxy/src/AGENTS.md | 2 + apps/proxy/src/pairing-route.test.ts | 81 +++++++++ apps/proxy/src/pairing-route.ts | 261 ++++++++++++++++++++++++++- 4 files changed, 343 insertions(+), 2 deletions(-) diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 15c3f7b..097e05b 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -42,6 +42,7 @@ - Pairing is managed by API: - `POST /pair/start` (verified Claw auth + `x-claw-owner-pat` ownership check against registry `GET /v1/agents/:id/ownership`) - `POST /pair/confirm` (verified Claw auth + one-time pairing ticket consume) +- Cross-proxy `/pair/confirm` forwarding must enforce built-in SSRF protections (block localhost/private/reserved destinations for non-local 
proxy origins). - Keep `/pair/confirm` as a single trust-store operation that establishes trust and consumes the ticket in one step (`confirmPairingTicket`), never two separate calls. - Confirming a valid pairing ticket must establish mutual trust for the initiator/responder agent pair. - Keep pairing tickets one-time and expiring; reject missing/expired/malformed tickets with explicit client errors. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index b142ec6..78f7c80 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -39,6 +39,8 @@ - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-trusted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep pairing bootstrap explicit: `/pair/start` and `/pair/confirm` must bypass known-agent gate in auth middleware. - Keep `/pair/start` ownership validation against registry `GET /v1/agents/:id/ownership` using `x-claw-owner-pat`, and map dependency failures to `503`. +- Keep cross-proxy `/pair/confirm` forwarding SSRF-safe by default: reject localhost/private/reserved issuer origins when the current proxy origin is non-local. +- Preserve the original request JSON bytes when forwarding `/pair/confirm` so forwarded PoP/body-signature headers remain valid. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. - Keep `/hooks/agent` trust check explicit: sender/recipient pair must be authorized by trust state before relay delivery. 
diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index 202aac9..c46615e 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -248,4 +248,85 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }), ).toBe(true); }); + + it("rejects forwarding to blocked issuer origin for non-local proxy origins", async () => { + const forwardFetch = vi.fn(async () => { + throw new Error("forward fetch should not be called"); + }); + + const { app } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + + const created = createPairingTicket({ + issuerProxyUrl: "http://127.0.0.1:8787", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request( + "https://proxy.public.example/pair/confirm", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: created.ticket, + }), + }, + ); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_TICKET_ISSUER_BLOCKED"); + expect(forwardFetch).not.toHaveBeenCalled(); + }); + + it("preserves original signed JSON body when forwarding to issuer proxy", async () => { + let expectedBody = ""; + const forwardFetch = vi.fn(async (_url: unknown, init?: RequestInit) => { + expect(String(init?.body ?? 
"")).toBe(expectedBody); + + return Response.json( + { + paired: true, + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }, + { status: 201 }, + ); + }); + + const { app } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + + const created = createPairingTicket({ + issuerProxyUrl: "https://issuer.proxy.example", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + const bodyRaw = `{ "ticket":"${created.ticket}", "extra":"value" }`; + expectedBody = bodyRaw; + + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: bodyRaw, + }); + + expect(response.status).toBe(201); + expect(forwardFetch).toHaveBeenCalledTimes(1); + const forwardedBody = String( + (forwardFetch.mock.calls[0]?.[1] as RequestInit | undefined)?.body ?? "", + ); + expect(forwardedBody).toBe(bodyRaw); + }); }); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index a51ff23..b51c4dd 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -104,6 +104,27 @@ async function parseJsonBody(c: PairingRouteContext): Promise { } } +async function parseRawJsonBody(c: PairingRouteContext): Promise<{ + rawBody: string; + json: unknown; +}> { + const rawBody = await c.req.raw.clone().text(); + + try { + return { + rawBody, + json: JSON.parse(rawBody) as unknown, + }; + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "Request body must be valid JSON", + status: 400, + expose: true, + }); + } +} + async function parseRegistryOwnershipResponse(response: Response): Promise<{ ownsAgent: boolean; }> { @@ -260,6 +281,226 @@ function normalizeProxyOrigin(value: string): string { return parsed.origin; } +function normalizeHostName(value: string): string { + const lowered = 
value.trim().toLowerCase(); + return lowered.endsWith(".") ? lowered.slice(0, -1) : lowered; +} + +function parseIpv4Literal( + hostname: string, +): [number, number, number, number] | null { + const parts = hostname.split("."); + if (parts.length !== 4) { + return null; + } + + const bytes: number[] = []; + for (const part of parts) { + if (!/^\d+$/.test(part)) { + return null; + } + + const value = Number(part); + if (!Number.isInteger(value) || value < 0 || value > 255) { + return null; + } + + bytes.push(value); + } + + return bytes as [number, number, number, number]; +} + +function isBlockedIpv4Literal(hostname: string): boolean { + const ipv4 = parseIpv4Literal(hostname); + if (ipv4 === null) { + return false; + } + + const [a, b, c, d] = ipv4; + + if (a === 10) return true; + if (a === 127) return true; + if (a === 0) return true; + if (a === 169 && b === 254) return true; + if (a === 172 && b >= 16 && b <= 31) return true; + if (a === 192 && b === 168) return true; + if (a === 100 && b >= 64 && b <= 127) return true; + if (a === 192 && b === 0 && c === 0) return true; + if (a === 192 && b === 0 && c === 2) return true; + if (a === 198 && b === 18) return true; + if (a === 198 && b === 19) return true; + if (a === 198 && b === 51 && c === 100) return true; + if (a === 203 && b === 0 && c === 113) return true; + if (a >= 224) return true; + if (a === 255 && b === 255 && c === 255 && d === 255) return true; + + return false; +} + +function parseIpv6Literal(hostname: string): number[] | null { + const raw = + hostname.startsWith("[") && hostname.endsWith("]") + ? hostname.slice(1, -1) + : hostname; + const noZoneId = raw.split("%")[0] ?? 
raw; + if (!noZoneId.includes(":")) { + return null; + } + + const parts = noZoneId.split("::"); + if (parts.length > 2) { + return null; + } + + const parseGroupList = (value: string): number[] | null => { + if (value.length === 0) { + return []; + } + + const groups = value.split(":"); + const words: number[] = []; + for (const group of groups) { + if ( + group.length === 0 || + group.length > 4 || + !/^[0-9a-f]+$/i.test(group) + ) { + return null; + } + + words.push(Number.parseInt(group, 16)); + } + + return words; + }; + + const left = parseGroupList(parts[0] ?? ""); + const right = parseGroupList(parts[1] ?? ""); + if (left === null || right === null) { + return null; + } + + if (parts.length === 1) { + if (left.length !== 8) { + return null; + } + + return left; + } + + const missing = 8 - (left.length + right.length); + if (missing < 1) { + return null; + } + + return [...left, ...new Array(missing).fill(0), ...right]; +} + +function isBlockedIpv6Literal(hostname: string): boolean { + const ipv6 = parseIpv6Literal(hostname); + if (ipv6 === null) { + return false; + } + + const [a, b, c, d, e, f, g, h] = ipv6; + + const isUnspecified = + a === 0 && + b === 0 && + c === 0 && + d === 0 && + e === 0 && + f === 0 && + g === 0 && + h === 0; + if (isUnspecified) { + return true; + } + + const isLoopback = + a === 0 && + b === 0 && + c === 0 && + d === 0 && + e === 0 && + f === 0 && + g === 0 && + h === 1; + if (isLoopback) { + return true; + } + + if ((a & 0xfe00) === 0xfc00) { + return true; + } + + if ((a & 0xffc0) === 0xfe80) { + return true; + } + + if ((a & 0xff00) === 0xff00) { + return true; + } + + if (a === 0x2001 && b === 0x0db8) { + return true; + } + + const isIpv4Mapped = + a === 0 && + b === 0 && + c === 0 && + d === 0 && + e === 0 && + (f === 0xffff || f === 0); + + if (isIpv4Mapped) { + const mappedA = g >> 8; + const mappedB = g & 0xff; + const mappedC = h >> 8; + const mappedD = h & 0xff; + return 
isBlockedIpv4Literal(`${mappedA}.${mappedB}.${mappedC}.${mappedD}`); + } + + return false; +} + +function isLocalLikeHostname(hostname: string): boolean { + if (hostname === "localhost" || hostname.endsWith(".localhost")) { + return true; + } + + if (hostname.endsWith(".local") || hostname.endsWith(".internal")) { + return true; + } + + if (!hostname.includes(".") && parseIpv4Literal(hostname) === null) { + return true; + } + + return false; +} + +function isBlockedForwardOrigin(origin: string): boolean { + const parsed = new URL(origin); + const hostname = normalizeHostName(parsed.hostname); + + if (isLocalLikeHostname(hostname)) { + return true; + } + + if (isBlockedIpv4Literal(hostname)) { + return true; + } + + if (isBlockedIpv6Literal(hostname)) { + return true; + } + + return false; +} + function mapForwardedPairConfirmError( status: number, payload: unknown, @@ -409,7 +650,8 @@ export function createPairConfirmHandler( }); } - const body = (await parseJsonBody(c)) as { + const parsedBody = await parseRawJsonBody(c); + const body = parsedBody.json as { ticket?: unknown; }; @@ -450,6 +692,21 @@ export function createPairConfirmHandler( const isIssuerLocal = ticketIssuerOrigin === localProxyOrigin; if (!isIssuerLocal) { + const localProxyAllowsPrivateForwarding = + isBlockedForwardOrigin(localProxyOrigin); + + if ( + !localProxyAllowsPrivateForwarding && + isBlockedForwardOrigin(ticketIssuerOrigin) + ) { + throw new AppError({ + code: "PROXY_PAIR_TICKET_ISSUER_BLOCKED", + message: "Pairing ticket issuer origin is blocked", + status: 403, + expose: true, + }); + } + const issuerConfirmUrl = new URL( PAIR_CONFIRM_PATH, ticketIssuerOrigin.endsWith("/") @@ -460,7 +717,7 @@ export function createPairConfirmHandler( const forwardedResponse = await fetchImpl(issuerConfirmUrl, { method: "POST", headers: c.req.raw.headers, - body: JSON.stringify({ ticket }), + body: parsedBody.rawBody, }).catch((error: unknown) => { throw new AppError({ code: 
"PROXY_PAIR_STATE_UNAVAILABLE", From 01c4fc5edd7ce35a4db074108c9c6a34a29307a4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Tue, 17 Feb 2026 23:49:58 +0530 Subject: [PATCH 088/190] refactor(proxy): remove legacy state-dir fallback and support pairing issuer override --- apps/cli/package.json | 2 +- apps/proxy/AGENTS.md | 3 ++- apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/config.test.ts | 32 ++++++++----------------- apps/proxy/src/config.ts | 21 +++++++--------- apps/proxy/src/pairing-route.test.ts | 36 +++++++++++++++++++++++++++- apps/proxy/src/pairing-route.ts | 8 ++++++- apps/proxy/src/server.ts | 1 + 8 files changed, 65 insertions(+), 39 deletions(-) diff --git a/apps/cli/package.json b/apps/cli/package.json index 4c5f2e5..9647557 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,6 +1,6 @@ { "name": "clawdentity", - "version": "0.0.2", + "version": "0.0.3", "type": "module", "publishConfig": { "access": "public" diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 097e05b..5f54b72 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -19,7 +19,7 @@ - Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw base URL, and policy/rate-limit vars). - Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory - - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`, with legacy fallback to existing `~/.clawdbot` / `~/.moldbot` / `~/.moltbot`) + - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`) - existing environment variables always win over `.env` values. - If `OPENCLAW_BASE_URL` is still missing after env loading, fallback to `~/.clawdentity/openclaw-relay.json` (`openclawBaseUrl`) before applying the built-in default. 
- Treat blank env values as unset for fallback resolution: @@ -34,6 +34,7 @@ - `LISTEN_PORT` or `PORT` - `OPENCLAW_BASE_URL` - `REGISTRY_URL` or `CLAWDENTITY_REGISTRY_URL` + - `PAIRING_ISSUER_URL` (optional stable issuer origin used in pairing tickets) - `OPENCLAW_STATE_DIR` ## Trust and Pairing diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 78f7c80..0434f3a 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -39,6 +39,7 @@ - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-trusted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. - Keep pairing bootstrap explicit: `/pair/start` and `/pair/confirm` must bypass known-agent gate in auth middleware. - Keep `/pair/start` ownership validation against registry `GET /v1/agents/:id/ownership` using `x-claw-owner-pat`, and map dependency failures to `503`. +- Allow optional `PAIRING_ISSUER_URL` override for `/pair/start` ticket issuer origin so cross-proxy forwarding can work when inbound hostnames differ from proxy-to-proxy reachability hostnames. - Keep cross-proxy `/pair/confirm` forwarding SSRF-safe by default: reject localhost/private/reserved issuer origins when the current proxy origin is non-local. - Preserve the original request JSON bytes when forwarding `/pair/confirm` so forwarded PoP/body-signature headers remain valid. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. 
diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 5da3f55..bd39126 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -40,6 +40,7 @@ describe("proxy config", () => { const config = parseProxyConfig({ PORT: "4100", CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", + PAIRING_ISSUER_URL: "https://proxy.example.com", ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: "75", @@ -49,6 +50,7 @@ describe("proxy config", () => { expect(config.listenPort).toBe(4100); expect(config.registryUrl).toBe("https://registry.example.com"); + expect(config.pairingIssuerUrl).toBe("https://proxy.example.com"); expect(config.environment).toBe("local"); expect(config.crlStaleBehavior).toBe("fail-closed"); expect(config.agentRateLimitRequestsPerMinute).toBe(75); @@ -100,6 +102,14 @@ describe("proxy config", () => { }), ).toThrow(ProxyConfigError); }); + + it("throws on invalid pairing issuer URL", () => { + expect(() => + parseProxyConfig({ + PAIRING_ISSUER_URL: "not-a-url", + }), + ).toThrow(ProxyConfigError); + }); }); describe("proxy config loading", () => { @@ -279,28 +289,6 @@ describe("proxy config loading", () => { } }); - it("uses legacy state directory when canonical .openclaw does not exist", () => { - const sandbox = createSandbox(); - try { - rmSync(sandbox.stateDir, { recursive: true, force: true }); - const legacyStateDir = join(sandbox.root, ".clawdbot"); - mkdirSync(legacyStateDir, { recursive: true }); - writeFileSync(join(legacyStateDir, ".env"), "LISTEN_PORT=4555"); - - const config = loadProxyConfig( - {}, - { - cwd: sandbox.cwd, - homeDir: sandbox.root, - }, - ); - - expect(config.listenPort).toBe(4555); - } finally { - sandbox.cleanup(); - } - }); - it("throws when openclaw-relay.json is invalid and base-url fallback is required", () => { const sandbox = createSandbox(); try { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 
1e35ae9..af678fe 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -44,7 +44,6 @@ export class ProxyConfigError extends Error { const CLAWDENTITY_CONFIG_DIR = ".clawdentity"; const OPENCLAW_RELAY_CONFIG_FILENAME = "openclaw-relay.json"; -const LEGACY_STATE_DIR_NAMES = [".clawdbot", ".moldbot", ".moltbot"] as const; const envBooleanSchema = z.preprocess((value) => { if (typeof value === "string") { @@ -80,6 +79,7 @@ const proxyRuntimeEnvSchema = z.object({ .default(DEFAULT_PROXY_LISTEN_PORT), OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), + PAIRING_ISSUER_URL: z.string().trim().url().optional(), ENVIRONMENT: z .enum(proxyEnvironmentValues) .default(DEFAULT_PROXY_ENVIRONMENT), @@ -115,6 +115,7 @@ export const proxyConfigSchema = z.object({ listenPort: z.number().int().min(1).max(65535), openclawBaseUrl: z.string().url(), registryUrl: z.string().url(), + pairingIssuerUrl: z.string().url().optional(), environment: z.enum(proxyEnvironmentValues), crlRefreshIntervalMs: z.number().int().positive(), crlMaxAgeMs: z.number().int().positive(), @@ -132,6 +133,7 @@ type RuntimeEnvInput = { OPENCLAW_BASE_URL?: unknown; REGISTRY_URL?: unknown; CLAWDENTITY_REGISTRY_URL?: unknown; + PAIRING_ISSUER_URL?: unknown; ENVIRONMENT?: unknown; ALLOW_ALL_VERIFIED?: unknown; CRL_REFRESH_INTERVAL_MS?: unknown; @@ -255,17 +257,6 @@ function resolveStateDir( } const canonicalStateDir = join(home, ".openclaw"); - if (existsSync(canonicalStateDir)) { - return canonicalStateDir; - } - - for (const legacyDirName of LEGACY_STATE_DIR_NAMES) { - const legacyStateDir = join(home, legacyDirName); - if (existsSync(legacyStateDir)) { - return legacyStateDir; - } - } - return canonicalStateDir; } @@ -424,6 +415,7 @@ function normalizeRuntimeEnv(input: unknown): Record { "REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL", ]), + PAIRING_ISSUER_URL: firstNonEmpty(env, ["PAIRING_ISSUER_URL"]), 
ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), @@ -491,7 +483,7 @@ export function parseProxyConfig(env: unknown): ProxyConfig { }); } - const candidateConfig = { + const candidateConfig: Record = { listenPort: parsedRuntimeEnv.data.LISTEN_PORT, openclawBaseUrl: parsedRuntimeEnv.data.OPENCLAW_BASE_URL, registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, @@ -505,6 +497,9 @@ export function parseProxyConfig(env: unknown): ProxyConfig { injectIdentityIntoMessage: parsedRuntimeEnv.data.INJECT_IDENTITY_INTO_MESSAGE, }; + if (parsedRuntimeEnv.data.PAIRING_ISSUER_URL !== undefined) { + candidateConfig.pairingIssuerUrl = parsedRuntimeEnv.data.PAIRING_ISSUER_URL; + } const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); if (parsedConfig.success) { diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index c46615e..60c774f 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -1,6 +1,6 @@ import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; -import { createPairingTicket } from "./pairing-ticket.js"; +import { createPairingTicket, parsePairingTicket } from "./pairing-ticket.js"; const INITIATOR_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_000)); const RESPONDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_100)); @@ -35,11 +35,13 @@ import { createProxyApp } from "./server.js"; function createPairingApp(input?: { fetchImpl?: typeof fetch; nowMs?: () => number; + pairingIssuerUrl?: string; }) { const trustStore = createInMemoryProxyTrustStore(); const app = createProxyApp({ config: parseProxyConfig({ REGISTRY_URL: "https://registry.example.com", + PAIRING_ISSUER_URL: input?.pairingIssuerUrl, }), pairing: { start: { @@ -146,6 +148,38 @@ describe(`POST ${PAIR_START_PATH}`, () => { 
const body = (await response.json()) as { error: { code: string } }; expect(body.error.code).toBe("PROXY_PAIR_OWNER_PAT_FORBIDDEN"); }); + + it("uses configured pairing issuer URL when creating ticket", async () => { + const fetchImpl = vi.fn(async (_requestInput: unknown) => + Response.json( + { + ownsAgent: true, + }, + { status: 200 }, + ), + ) as unknown as typeof fetch; + const { app } = createPairingApp({ + fetchImpl, + nowMs: () => 1_700_000_000_000, + pairingIssuerUrl: "http://127.0.0.1:8788", + }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + [OWNER_PAT_HEADER]: "clw_pat_owner_token", + }, + body: JSON.stringify({}), + }); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + ticket: string; + }; + const parsedTicket = parsePairingTicket(body.ticket); + expect(parsedTicket.iss).toBe("http://127.0.0.1:8788"); + }); }); describe(`POST ${PAIR_CONFIRM_PATH}`, () => { diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index b51c4dd..54bd6a7 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -35,6 +35,7 @@ type CreatePairStartHandlerOptions = PairStartRuntimeOptions & { logger: Logger; registryUrl: string; trustStore: ProxyTrustStore; + issuerProxyUrl?: string; }; export type PairConfirmRuntimeOptions = { @@ -582,6 +583,10 @@ export function createPairStartHandler( const fetchImpl = options.fetchImpl ?? fetch; const nowMs = options.nowMs ?? Date.now; const registryUrl = normalizeRegistryUrl(options.registryUrl); + const configuredIssuerProxyUrl = + typeof options.issuerProxyUrl === "string" + ? 
normalizeProxyOrigin(options.issuerProxyUrl) + : undefined; return async (c) => { const auth = c.get("auth"); @@ -607,7 +612,8 @@ export function createPairStartHandler( registryUrl, }); - const issuerProxyUrl = normalizeProxyOrigin(c.req.url); + const issuerProxyUrl = + configuredIssuerProxyUrl ?? normalizeProxyOrigin(c.req.url); const pairingTicketResult = await options.trustStore .createPairingTicket({ initiatorAgentDid: auth.agentDid, diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index d8cec1b..e9ab595 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -148,6 +148,7 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { createPairStartHandler({ logger, registryUrl: options.config.registryUrl, + issuerProxyUrl: options.config.pairingIssuerUrl, trustStore, ...options.pairing?.start, }), From c5ce140b614399ec797c329dfc95d3e96ba33cd0 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Wed, 18 Feb 2026 09:10:16 +0530 Subject: [PATCH 089/190] fix(proxy): enforce secure pair-confirm forwarding and confirm-time ticket cleanup --- apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/pairing-route.test.ts | 82 ++++++++++++++++++++++++ apps/proxy/src/pairing-route.ts | 17 +++-- apps/proxy/src/proxy-trust-store.test.ts | 35 ++++++++++ apps/proxy/src/proxy-trust-store.ts | 7 +- 5 files changed, 137 insertions(+), 5 deletions(-) diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 0434f3a..4b4b15c 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -41,6 +41,7 @@ - Keep `/pair/start` ownership validation against registry `GET /v1/agents/:id/ownership` using `x-claw-owner-pat`, and map dependency failures to `503`. - Allow optional `PAIRING_ISSUER_URL` override for `/pair/start` ticket issuer origin so cross-proxy forwarding can work when inbound hostnames differ from proxy-to-proxy reachability hostnames. 
- Keep cross-proxy `/pair/confirm` forwarding SSRF-safe by default: reject localhost/private/reserved issuer origins when the current proxy origin is non-local. +- Enforce that forwarded `/pair/confirm` issuer origins use HTTPS once the proxy origin is non-local, while continuing to allow HTTP when both the proxy and issuer are on local/dev hosts. - Preserve the original request JSON bytes when forwarding `/pair/confirm` so forwarded PoP/body-signature headers remain valid. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index 60c774f..c603260 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -319,6 +319,88 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { expect(forwardFetch).not.toHaveBeenCalled(); }); + it("rejects HTTP issuer origin when proxy is non-local", async () => { + const forwardFetch = vi.fn(async () => { + throw new Error("forward fetch should not be called"); + }); + + const { app } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + + const created = createPairingTicket({ + issuerProxyUrl: "http://issuer.proxy.example", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request( + "https://proxy.public.example/pair/confirm", + { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: created.ticket, + }), + }, + ); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + 
expect(body.error.code).toBe("PROXY_PAIR_CONFIRM_ISSUER_INSECURE"); + expect(forwardFetch).not.toHaveBeenCalled(); + }); + + it("allows HTTP issuer origin when both proxy and issuer are local", async () => { + const forwardFetch = vi.fn(async (url: unknown) => { + expect(String(url)).toBe("http://127.0.0.1:8787/pair/confirm"); + + return Response.json( + { + paired: true, + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }, + { status: 201 }, + ); + }); + + const { app, trustStore } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + + const created = createPairingTicket({ + issuerProxyUrl: "http://127.0.0.1:8787", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request("http://localhost/pair/confirm", { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: created.ticket, + }), + }); + + expect(response.status).toBe(201); + expect(forwardFetch).toHaveBeenCalledTimes(1); + expect( + await trustStore.isPairAllowed({ + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }), + ).toBe(true); + }); + it("preserves original signed JSON body when forwarding to issuer proxy", async () => { let expectedBody = ""; const forwardFetch = vi.fn(async (_url: unknown, init?: RequestInit) => { diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 54bd6a7..a53ad95 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -700,11 +700,20 @@ export function createPairConfirmHandler( if (!isIssuerLocal) { const localProxyAllowsPrivateForwarding = isBlockedForwardOrigin(localProxyOrigin); + const issuerOriginUrl = new URL(ticketIssuerOrigin); + const issuerOriginIsBlocked = isBlockedForwardOrigin(ticketIssuerOrigin); - if ( - 
!localProxyAllowsPrivateForwarding && - isBlockedForwardOrigin(ticketIssuerOrigin) - ) { + if (!issuerOriginIsBlocked && issuerOriginUrl.protocol !== "https:") { + throw new AppError({ + code: "PROXY_PAIR_CONFIRM_ISSUER_INSECURE", + message: + "Forwarded issuer proxy pairing origin must use HTTPS outside of local hosts", + status: 403, + expose: true, + }); + } + + if (!localProxyAllowsPrivateForwarding && issuerOriginIsBlocked) { throw new AppError({ code: "PROXY_PAIR_TICKET_ISSUER_BLOCKED", message: "Pairing ticket issuer origin is blocked", diff --git a/apps/proxy/src/proxy-trust-store.test.ts b/apps/proxy/src/proxy-trust-store.test.ts index 26fa99e..7e625dd 100644 --- a/apps/proxy/src/proxy-trust-store.test.ts +++ b/apps/proxy/src/proxy-trust-store.test.ts @@ -104,4 +104,39 @@ describe("in-memory proxy trust store", () => { status: 410, }); }); + + it("cleans up unrelated expired tickets during confirm lookups", async () => { + const store = createInMemoryProxyTrustStore(); + + const expiredTicket = await store.createPairingTicket({ + initiatorAgentDid: "did:claw:agent:alice", + issuerProxyUrl: "https://proxy-a.example.com", + ttlSeconds: 1, + nowMs: 1_700_000_000_000, + }); + + const validTicket = await store.createPairingTicket({ + initiatorAgentDid: "did:claw:agent:alice", + issuerProxyUrl: "https://proxy-a.example.com", + ttlSeconds: 60, + nowMs: 1_700_000_000_000, + }); + + await store.confirmPairingTicket({ + ticket: validTicket.ticket, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_002_000, + }); + + await expect( + store.confirmPairingTicket({ + ticket: expiredTicket.ticket, + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_002_100, + }), + ).rejects.toMatchObject({ + code: "PROXY_PAIR_TICKET_NOT_FOUND", + status: 404, + }); + }); }); diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 75db178..425b7b7 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ 
b/apps/proxy/src/proxy-trust-store.ts @@ -212,8 +212,12 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { } >(); - function cleanup(nowMs: number): void { + function cleanup(nowMs: number, skipTicket?: string): void { for (const [ticket, details] of pairingTickets.entries()) { + if (skipTicket === ticket) { + continue; + } + if (details.expiresAtMs <= nowMs) { pairingTickets.delete(ticket); } @@ -230,6 +234,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { input: PairingTicketConfirmInput, ): PairingTicketConfirmResult { const nowMs = input.nowMs ?? Date.now(); + cleanup(nowMs, input.ticket); let parsedTicket: ReturnType; try { From d8f3ccecb049c4df2a1d82e9708ad97655f9902a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Wed, 18 Feb 2026 09:50:50 +0530 Subject: [PATCH 090/190] proxy: enforce trust store backend by environment --- apps/proxy/.env.example | 3 ++ apps/proxy/AGENTS.md | 3 ++ apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/node-server.ts | 36 ++++++++++++- apps/proxy/src/server.test.ts | 18 +++++++ apps/proxy/src/trust-store-backend.ts | 78 +++++++++++++++++++++++++++ apps/proxy/src/worker.test.ts | 73 +++++++++++++++++++++++-- apps/proxy/src/worker.ts | 24 +++++---- 8 files changed, 222 insertions(+), 14 deletions(-) create mode 100644 apps/proxy/src/trust-store-backend.ts diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index cfb5883..0ad3051 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -7,6 +7,9 @@ ENVIRONMENT=local REGISTRY_URL=https://dev.api.clawdentity.com INJECT_IDENTITY_INTO_MESSAGE=true +# Trust backend policy: +# - local: in-memory trust fallback is allowed when PROXY_TRUST_STATE is unavailable. +# - development/production: PROXY_TRUST_STATE is required; startup fails when missing. # Pairing/trust state is managed dynamically via /pair/start + /pair/confirm. # No static allowlist environment variables are supported. 
diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 5f54b72..fd83afb 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -14,6 +14,9 @@ - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). - Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. +- Keep trust-store backend policy environment-scoped: + - `local`: allow in-memory trust-store fallback when `PROXY_TRUST_STATE` binding is unavailable. + - `development` and `production`: require `PROXY_TRUST_STATE`; fail startup when missing. - Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-on (`true`); disable only when operators need unchanged webhook `message` forwarding. - Keep OpenClaw base URL input (`OPENCLAW_BASE_URL`) optional for relay-mode startup. - Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw base URL, and policy/rate-limit vars). diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 4b4b15c..136d4d0 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -17,6 +17,7 @@ - Do not add `OPENCLAW_HOOK_TOKEN` handling to proxy runtime; hook token auth belongs to connector -> OpenClaw delivery path. - Keep fallback semantics consistent across merge + parse stages: empty/whitespace env values are treated as missing, so non-empty `.env`/file values can be used. - Do not derive runtime environment from `NODE_ENV`; use validated `ENVIRONMENT` from proxy config. 
+- Keep trust-store backend policy explicit: only `local` may fallback to in-memory trust when `PROXY_TRUST_STATE` binding is absent; `development` and `production` must fail startup without durable trust binding. - Keep static allowlist env vars removed (`ALLOW_LIST`, `ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`); trust must come from pairing state, not env. - Keep `/pair/confirm` write path atomic at the trust-store API level: trust persistence and one-time ticket consumption must happen in one operation (`confirmPairingTicket`). diff --git a/apps/proxy/src/node-server.ts b/apps/proxy/src/node-server.ts index 6a8145c..e233720 100644 --- a/apps/proxy/src/node-server.ts +++ b/apps/proxy/src/node-server.ts @@ -4,6 +4,7 @@ import type { ProxyConfig } from "./config.js"; import { loadProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; import { createProxyApp, type ProxyApp } from "./server.js"; +import { resolveNodeTrustStore } from "./trust-store-backend.js"; type StartProxyServerOptions = { env?: unknown; @@ -23,14 +24,46 @@ function resolveLogger(logger?: Logger): Logger { return logger ?? createLogger({ service: "proxy" }); } +function resolveDefaultNodeEnv(): unknown { + const nodeProcess = (globalThis as { process?: { env?: unknown } }).process; + const processEnv = + typeof nodeProcess?.env === "object" && nodeProcess.env !== null + ? (nodeProcess.env as Record) + : {}; + + if ( + typeof processEnv.ENVIRONMENT === "string" && + processEnv.ENVIRONMENT.trim().length > 0 + ) { + return processEnv; + } + + return { + ...processEnv, + ENVIRONMENT: "local", + }; +} + export function startProxyServer( options: StartProxyServerOptions = {}, ): StartedProxyServer { - const config = options.config ?? loadProxyConfig(options.env); + const config = + options.config ?? loadProxyConfig(options.env ?? 
resolveDefaultNodeEnv()); const logger = resolveLogger(options.logger); + const trustStoreResolution = resolveNodeTrustStore({ + environment: config.environment, + }); + if (trustStoreResolution.backend === "memory") { + logger.warn("proxy.trust_store.memory_fallback", { + environment: config.environment, + runtime: "node", + reason: "Node runtime has no Durable Object trust binding", + }); + } const app = createProxyApp({ config, logger, + trustStore: trustStoreResolution.trustStore, }); const port = options.port ?? config.listenPort; const server = serve({ @@ -42,6 +75,7 @@ export function startProxyServer( port, version: PROXY_VERSION, environment: config.environment, + trustStoreBackend: trustStoreResolution.backend, }); return { diff --git a/apps/proxy/src/server.test.ts b/apps/proxy/src/server.test.ts index c3b3616..3a2c49a 100644 --- a/apps/proxy/src/server.test.ts +++ b/apps/proxy/src/server.test.ts @@ -91,6 +91,24 @@ describe("proxy server", () => { ).toThrow(ProxyConfigError); }); + it("fails node runtime startup for non-local environments", () => { + expect(() => + startProxyServer({ + config: parseProxyConfig({ + ENVIRONMENT: "development", + }), + }), + ).toThrow(ProxyConfigError); + + expect(() => + startProxyServer({ + config: parseProxyConfig({ + ENVIRONMENT: "production", + }), + }), + ).toThrow(ProxyConfigError); + }); + it("returns 429 for repeated unauthenticated probes on /hooks/agent from same IP", async () => { const app = createProxyApp({ config: parseProxyConfig({}), diff --git a/apps/proxy/src/trust-store-backend.ts b/apps/proxy/src/trust-store-backend.ts new file mode 100644 index 0000000..35bdf84 --- /dev/null +++ b/apps/proxy/src/trust-store-backend.ts @@ -0,0 +1,78 @@ +import { ProxyConfigError, type ProxyEnvironment } from "./config.js"; +import { + createDurableProxyTrustStore, + createInMemoryProxyTrustStore, + type ProxyTrustStateNamespace, + type ProxyTrustStore, +} from "./proxy-trust-store.js"; + +export type 
ProxyTrustStoreBackend = "durable" | "memory"; + +type RuntimeTarget = "worker" | "node"; + +type ProxyTrustStoreResolution = { + backend: ProxyTrustStoreBackend; + trustStore: ProxyTrustStore; +}; + +function requiresDurableTrustStore(environment: ProxyEnvironment): boolean { + return environment === "development" || environment === "production"; +} + +function toMissingDurableTrustStoreError(input: { + environment: ProxyEnvironment; + runtime: RuntimeTarget; +}): ProxyConfigError { + const runtimeHint = + input.runtime === "worker" + ? "Ensure PROXY_TRUST_STATE Durable Object binding is configured for this environment." + : "Node runtime supports local in-memory trust only. Use Worker runtime with PROXY_TRUST_STATE for non-local environments."; + return new ProxyConfigError("Proxy configuration is invalid", { + fieldErrors: { + PROXY_TRUST_STATE: [ + `PROXY_TRUST_STATE is required when ENVIRONMENT is '${input.environment}'. ${runtimeHint}`, + ], + }, + formErrors: [], + }); +} + +export function resolveWorkerTrustStore(input: { + environment: ProxyEnvironment; + trustStateNamespace?: ProxyTrustStateNamespace; +}): ProxyTrustStoreResolution { + if (input.trustStateNamespace !== undefined) { + return { + backend: "durable", + trustStore: createDurableProxyTrustStore(input.trustStateNamespace), + }; + } + + if (requiresDurableTrustStore(input.environment)) { + throw toMissingDurableTrustStoreError({ + environment: input.environment, + runtime: "worker", + }); + } + + return { + backend: "memory", + trustStore: createInMemoryProxyTrustStore(), + }; +} + +export function resolveNodeTrustStore(input: { + environment: ProxyEnvironment; +}): ProxyTrustStoreResolution { + if (requiresDurableTrustStore(input.environment)) { + throw toMissingDurableTrustStoreError({ + environment: input.environment, + runtime: "node", + }); + } + + return { + backend: "memory", + trustStore: createInMemoryProxyTrustStore(), + }; +} diff --git a/apps/proxy/src/worker.test.ts 
b/apps/proxy/src/worker.test.ts index 405b983..a8ad685 100644 --- a/apps/proxy/src/worker.test.ts +++ b/apps/proxy/src/worker.test.ts @@ -2,6 +2,20 @@ import { describe, expect, it, vi } from "vitest"; import { PROXY_VERSION } from "./index.js"; import worker, { type ProxyWorkerBindings } from "./worker.js"; +function createTrustStateNamespace(): NonNullable< + ProxyWorkerBindings["PROXY_TRUST_STATE"] +> { + return { + idFromName: vi.fn( + (name: string) => + ({ toString: () => name }) as unknown as DurableObjectId, + ), + get: vi.fn(() => ({ + fetch: vi.fn(async () => new Response(null, { status: 204 })), + })), + }; +} + function createExecutionContext(): ExecutionContext { return { waitUntil: vi.fn(), @@ -34,10 +48,12 @@ describe("proxy worker", () => { }); }); - it("allows startup with empty bindings for relay mode", async () => { + it("allows local startup without trust DO binding", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - {} satisfies ProxyWorkerBindings, + { + ENVIRONMENT: "local", + } satisfies ProxyWorkerBindings, createExecutionContext(), ); @@ -49,14 +65,15 @@ describe("proxy worker", () => { }; expect(payload.status).toBe("ok"); expect(payload.version).toBe(PROXY_VERSION); - expect(payload.environment).toBe("development"); + expect(payload.environment).toBe("local"); }); - it("accepts deployed env without OpenClaw vars in relay mode", async () => { + it("allows development startup when trust DO binding exists", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), { ENVIRONMENT: "development", + PROXY_TRUST_STATE: createTrustStateNamespace(), } satisfies ProxyWorkerBindings, createExecutionContext(), ); @@ -70,6 +87,54 @@ describe("proxy worker", () => { expect(payload.environment).toBe("development"); }); + it("fails startup in development when trust DO binding is missing", async () => { + const response = await worker.fetch( + new 
Request("https://proxy.example.test/health"), + { + ENVIRONMENT: "development", + } satisfies ProxyWorkerBindings, + createExecutionContext(), + ); + + expect(response.status).toBe(500); + const payload = (await response.json()) as { + error: { + code: string; + details: { + fieldErrors?: Record; + }; + }; + }; + expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(payload.error.details.fieldErrors?.PROXY_TRUST_STATE?.[0]).toContain( + "ENVIRONMENT is 'development'", + ); + }); + + it("fails startup in production when trust DO binding is missing", async () => { + const response = await worker.fetch( + new Request("https://proxy.example.test/health"), + { + ENVIRONMENT: "production", + } satisfies ProxyWorkerBindings, + createExecutionContext(), + ); + + expect(response.status).toBe(500); + const payload = (await response.json()) as { + error: { + code: string; + details: { + fieldErrors?: Record; + }; + }; + }; + expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(payload.error.details.fieldErrors?.PROXY_TRUST_STATE?.[0]).toContain( + "ENVIRONMENT is 'production'", + ); + }); + it("returns config validation error for malformed OPENCLAW_BASE_URL", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 7a2e045..55c095f 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -10,12 +10,9 @@ import { } from "./config.js"; import { resolveProxyVersion } from "./index.js"; import { ProxyTrustState } from "./proxy-trust-state.js"; -import { - createDurableProxyTrustStore, - createInMemoryProxyTrustStore, - type ProxyTrustStateNamespace, -} from "./proxy-trust-store.js"; +import type { ProxyTrustStateNamespace } from "./proxy-trust-store.js"; import { createProxyApp, type ProxyApp } from "./server.js"; +import { resolveWorkerTrustStore } from "./trust-store-backend.js"; export type ProxyWorkerBindings = { 
LISTEN_PORT?: string; @@ -25,6 +22,7 @@ export type ProxyWorkerBindings = { PROXY_TRUST_STATE?: ProxyTrustStateNamespace; REGISTRY_URL?: string; CLAWDENTITY_REGISTRY_URL?: string; + PAIRING_ISSUER_URL?: string; ENVIRONMENT?: string; ALLOW_ALL_VERIFIED?: string; CRL_REFRESH_INTERVAL_MS?: string; @@ -53,6 +51,7 @@ function toCacheKey(env: ProxyWorkerBindings): string { env.PROXY_TRUST_STATE === undefined ? "no-trust-do" : "has-trust-do", env.REGISTRY_URL, env.CLAWDENTITY_REGISTRY_URL, + env.PAIRING_ISSUER_URL, env.ENVIRONMENT, env.ALLOW_ALL_VERIFIED, env.CRL_REFRESH_INTERVAL_MS, @@ -75,13 +74,20 @@ function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { } const config = parseProxyConfig(env); + const trustStoreResolution = resolveWorkerTrustStore({ + environment: config.environment, + trustStateNamespace: env.PROXY_TRUST_STATE, + }); + if (trustStoreResolution.backend === "memory") { + logger.warn("proxy.trust_store.memory_fallback", { + environment: config.environment, + reason: "PROXY_TRUST_STATE binding is unavailable", + }); + } const app = createProxyApp({ config, logger, - trustStore: - env.PROXY_TRUST_STATE !== undefined - ? 
createDurableProxyTrustStore(env.PROXY_TRUST_STATE) - : createInMemoryProxyTrustStore(), + trustStore: trustStoreResolution.trustStore, version: resolveProxyVersion(env), }); From d3f2b54d6a4b64ef66183796e90ebba86bf6d38a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Wed, 18 Feb 2026 10:08:15 +0530 Subject: [PATCH 091/190] cli: generate skill bundle at build time only --- .github/workflows/publish-cli.yml | 10 +- .gitignore | 3 +- apps/cli/AGENTS.md | 8 +- apps/cli/package.json | 3 +- apps/cli/scripts/AGENTS.md | 2 + apps/cli/scripts/sync-skill-bundle.mjs | 28 +- apps/cli/scripts/verify-skill-bundle.mjs | 42 +++ apps/cli/skill-bundle/AGENTS.md | 3 +- .../openclaw-skill/dist/relay-to-peer.mjs | 256 ------------------ .../openclaw-skill/skill/SKILL.md | 194 ------------- .../skill/references/clawdentity-protocol.md | 177 ------------ apps/cli/src/AGENTS.md | 1 + apps/cli/src/install-skill-mode.test.ts | 26 +- apps/openclaw-skill/AGENTS.md | 2 +- 14 files changed, 82 insertions(+), 673 deletions(-) create mode 100644 apps/cli/scripts/verify-skill-bundle.mjs delete mode 100644 apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs delete mode 100644 apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md delete mode 100644 apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index d2c151b..4897929 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -74,6 +74,9 @@ jobs: - name: Build CLI package run: pnpm -F clawdentity build + - name: Verify generated skill bundle + run: pnpm -F clawdentity verify:skill-bundle + - name: Set package version for release run: npm --prefix apps/cli pkg set version=${{ inputs.version }} @@ -107,7 +110,12 @@ jobs: NODE - name: Dry-run package contents - run: npm --prefix apps/cli pack --dry-run + run: | + PACK_OUTPUT="$(npm --prefix apps/cli pack --dry-run)" + printf "%s\n" "$PACK_OUTPUT" + printf 
"%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/skill/SKILL.md" + printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md" + printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/dist/relay-to-peer.mjs" - name: Publish package run: npm --prefix apps/cli publish --access public --provenance --tag ${{ inputs.dist_tag }} diff --git a/.gitignore b/.gitignore index 6e04fb7..3744b8c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ node_modules/ dist/ -!apps/cli/skill-bundle/openclaw-skill/dist/ -!apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs +apps/cli/skill-bundle/openclaw-skill/ .nx/ nx nx.bat diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 1f739bd..02734b8 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -21,9 +21,11 @@ - Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. - Keep publish artifacts ESM-compatible and avoid bundling CJS-only runtime deps that rely on dynamic `require` (for example `ws`); externalize them and declare them in CLI `dependencies` so installed binaries start cleanly. - npm `--skill` installer behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. -- Keep `skill-bundle/openclaw-skill/` in sync with `apps/openclaw-skill` via `pnpm -F clawdentity run sync:skill-bundle` before build/pack so `postinstall --skill` works in clean installs. -- Keep `skill-bundle/openclaw-skill/dist/relay-to-peer.mjs` tracked in git so clean-checkout tests and packaged installs have the required relay artifact before workspace builds run. 
-- When running the CLI test suite (`pnpm -F clawdentity test`), build `@clawdentity/openclaw-skill` and resync the skill bundle first so `relay-to-peer.mjs` exists on clean checkout and tests pass with deterministic artifacts. +- Keep `skill-bundle/openclaw-skill/` generated from `apps/openclaw-skill` only; do not hand-edit bundled files. +- Keep generated bundle policy strict: `sync-skill-bundle` must copy from `apps/openclaw-skill/dist/relay-to-peer.mjs` and fail if source build artifacts are missing. +- Keep generated bundle files out of git; rely on `build`/`prepack` to rebuild `skill-bundle` before `npm pack`/`npm publish`. +- Keep `verify:skill-bundle` aligned with required install artifacts so CI validates tarball readiness before publish. +- Keep CLI tests independent from repo-committed bundle artifacts by using sandbox skill roots or explicit `CLAWDENTITY_SKILL_PACKAGE_ROOT` overrides. - Keep runtime dependencies publish-safe: avoid `workspace:*` entries in published runtime deps (`dependencies`, `peerDependencies`, `optionalDependencies`), and bundle internal packages into CLI dist. - Keep release automation in `.github/workflows/publish-cli.yml` manual-only with explicit semver input and npm provenance. 
diff --git a/apps/cli/package.json b/apps/cli/package.json index 9647557..ce5b067 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -22,12 +22,13 @@ "skill-bundle" ], "scripts": { - "build": "pnpm run sync:skill-bundle && tsup", + "build": "pnpm -F @clawdentity/openclaw-skill build && pnpm run sync:skill-bundle && pnpm run verify:skill-bundle && tsup", "format": "biome format .", "lint": "biome lint .", "prepack": "pnpm run build", "postinstall": "node ./postinstall.mjs", "sync:skill-bundle": "node ./scripts/sync-skill-bundle.mjs", + "verify:skill-bundle": "node ./scripts/verify-skill-bundle.mjs", "test": "vitest run", "typecheck": "tsc --noEmit" }, diff --git a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md index 5ed7530..03b4ab5 100644 --- a/apps/cli/scripts/AGENTS.md +++ b/apps/cli/scripts/AGENTS.md @@ -5,6 +5,8 @@ ## Rules - `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. +- `sync-skill-bundle.mjs` must copy only from built source artifacts (`apps/openclaw-skill/dist/relay-to-peer.mjs`) and never fallback to stale bundled copies. +- `verify-skill-bundle.mjs` must validate the exact artifacts required by npm `--skill` install flow. - Scripts must fail with actionable errors when required source artifacts are missing. - Keep script output concise and stable for CI/release logs. - Do not add install-time network fetches to packaging scripts. 
diff --git a/apps/cli/scripts/sync-skill-bundle.mjs b/apps/cli/scripts/sync-skill-bundle.mjs index 72789c1..df81478 100644 --- a/apps/cli/scripts/sync-skill-bundle.mjs +++ b/apps/cli/scripts/sync-skill-bundle.mjs @@ -1,5 +1,5 @@ import { constants } from "node:fs"; -import { access, cp, mkdir, readFile, rm, writeFile } from "node:fs/promises"; +import { access, copyFile, cp, mkdir, rm } from "node:fs/promises"; import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; @@ -21,26 +21,9 @@ async function assertReadable(path, label) { } } -async function tryRead(path) { - try { - return await readFile(path); - } catch { - return undefined; - } -} - async function main() { await assertReadable(sourceSkillDirectory, "skill directory"); - - const sourceRelayContent = await tryRead(sourceRelayModule); - const bundledRelayContent = await tryRead(targetRelayModule); - const relayModuleContent = sourceRelayContent ?? bundledRelayContent; - - if (relayModuleContent === undefined) { - throw new Error( - `[sync-skill-bundle] Missing required relay module at ${sourceRelayModule}. 
Build @clawdentity/openclaw-skill first.`, - ); - } + await assertReadable(sourceRelayModule, "relay module"); await rm(targetSkillRoot, { recursive: true, force: true }); await mkdir(join(targetSkillRoot, "dist"), { recursive: true }); @@ -48,16 +31,11 @@ async function main() { await cp(sourceSkillDirectory, join(targetSkillRoot, "skill"), { recursive: true, }); - await writeFile(targetRelayModule, relayModuleContent); + await copyFile(sourceRelayModule, targetRelayModule); process.stdout.write( `[sync-skill-bundle] Bundled skill assets into ${targetSkillRoot}\n`, ); - if (sourceRelayContent === undefined) { - process.stdout.write( - "[sync-skill-bundle] Source relay build missing; reused existing bundled relay artifact.\n", - ); - } } main().catch((error) => { diff --git a/apps/cli/scripts/verify-skill-bundle.mjs b/apps/cli/scripts/verify-skill-bundle.mjs new file mode 100644 index 0000000..6904f0e --- /dev/null +++ b/apps/cli/scripts/verify-skill-bundle.mjs @@ -0,0 +1,42 @@ +import { constants } from "node:fs"; +import { access } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; + +const scriptDir = dirname(fileURLToPath(import.meta.url)); +const cliRoot = join(scriptDir, ".."); +const targetSkillRoot = join(cliRoot, "skill-bundle", "openclaw-skill"); + +const requiredPaths = [ + join(targetSkillRoot, "skill", "SKILL.md"), + join(targetSkillRoot, "skill", "references", "clawdentity-protocol.md"), + join(targetSkillRoot, "dist", "relay-to-peer.mjs"), +]; + +async function main() { + const missingPaths = []; + for (const path of requiredPaths) { + try { + await access(path, constants.R_OK); + } catch { + missingPaths.push(path); + } + } + + if (missingPaths.length > 0) { + const renderedPaths = missingPaths.map((path) => `- ${path}`).join("\n"); + throw new Error( + `[verify-skill-bundle] Missing required bundled artifacts:\n${renderedPaths}\nRun: pnpm -F @clawdentity/openclaw-skill build && pnpm -F 
clawdentity run sync:skill-bundle`, + ); + } + + process.stdout.write( + `[verify-skill-bundle] Verified ${requiredPaths.length} bundled artifacts in ${targetSkillRoot}\n`, + ); +} + +main().catch((error) => { + const message = error instanceof Error ? error.message : String(error); + process.stderr.write(`${message}\n`); + process.exitCode = 1; +}); diff --git a/apps/cli/skill-bundle/AGENTS.md b/apps/cli/skill-bundle/AGENTS.md index 65a0819..dc8a7ba 100644 --- a/apps/cli/skill-bundle/AGENTS.md +++ b/apps/cli/skill-bundle/AGENTS.md @@ -5,7 +5,8 @@ ## Rules - Treat this folder as generated release input; do not hand-edit bundled files. -- Regenerate by running `pnpm -F clawdentity run sync:skill-bundle` after changes in `apps/openclaw-skill`. +- Keep `openclaw-skill/` generated-only and gitignored; commit only this `AGENTS.md`. +- Regenerate by running `pnpm -F @clawdentity/openclaw-skill build && pnpm -F clawdentity run sync:skill-bundle`. - Required bundled files: - `openclaw-skill/skill/SKILL.md` - `openclaw-skill/skill/references/*` diff --git a/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs b/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs deleted file mode 100644 index 4dd38d6..0000000 --- a/apps/cli/skill-bundle/openclaw-skill/dist/relay-to-peer.mjs +++ /dev/null @@ -1,256 +0,0 @@ -// src/transforms/peers-config.ts -import { chmod, mkdir, readFile, writeFile } from "fs/promises"; -import { homedir } from "os"; -import { dirname, join } from "path"; -var CLAWDENTITY_DIR = ".clawdentity"; -var PEERS_FILENAME = "peers.json"; -var PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; -function isRecord(value) { - return typeof value === "object" && value !== null; -} -function getErrorCode(error) { - if (!isRecord(error)) { - return void 0; - } - return typeof error.code === "string" ? 
error.code : void 0; -} -function parseNonEmptyString(value, label) { - if (typeof value !== "string") { - throw new Error(`${label} must be a string`); - } - const trimmed = value.trim(); - if (trimmed.length === 0) { - throw new Error(`${label} must not be empty`); - } - return trimmed; -} -function parsePeerAlias(value) { - const alias = parseNonEmptyString(value, "peer alias"); - if (alias.length > 128) { - throw new Error("peer alias must be at most 128 characters"); - } - if (!PEER_ALIAS_PATTERN.test(alias)) { - throw new Error( - "peer alias must use only letters, numbers, dot, underscore, or hyphen" - ); - } - return alias; -} -function parseDid(value) { - const did = parseNonEmptyString(value, "did"); - if (!did.startsWith("did:")) { - throw new Error("did must start with 'did:'"); - } - return did; -} -function parseProxyUrl(value) { - const candidate = parseNonEmptyString(value, "proxyUrl"); - try { - return new URL(candidate).toString(); - } catch { - throw new Error("proxyUrl must be a valid URL"); - } -} -function parsePeerName(value) { - if (value === void 0) { - return void 0; - } - return parseNonEmptyString(value, "name"); -} -function parsePeerEntry(value) { - if (!isRecord(value)) { - throw new Error("peer entry must be an object"); - } - const did = parseDid(value.did); - const proxyUrl = parseProxyUrl(value.proxyUrl); - const name = parsePeerName(value.name); - if (name === void 0) { - return { did, proxyUrl }; - } - return { did, proxyUrl, name }; -} -function parsePeersConfig(value, source) { - if (!isRecord(value)) { - throw new Error( - `Peer config validation failed at ${source}: root must be an object` - ); - } - const peersRaw = value.peers; - if (peersRaw === void 0) { - return { peers: {} }; - } - if (!isRecord(peersRaw)) { - throw new Error( - `Peer config validation failed at ${source}: peers must be an object` - ); - } - const peers = {}; - for (const [alias, peerValue] of Object.entries(peersRaw)) { - const normalizedAlias = 
parsePeerAlias(alias); - try { - peers[normalizedAlias] = parsePeerEntry(peerValue); - } catch (error) { - const reason = error instanceof Error ? error.message : String(error); - throw new Error( - `Peer config validation failed at ${source}: peers.${normalizedAlias}: ${reason}` - ); - } - } - return { peers }; -} -function resolvePeersConfigPath(options = {}) { - if (typeof options.configPath === "string" && options.configPath.trim().length > 0) { - return options.configPath.trim(); - } - if (typeof options.configDir === "string" && options.configDir.trim().length > 0) { - return join(options.configDir.trim(), PEERS_FILENAME); - } - const home = typeof options.homeDir === "string" && options.homeDir.trim().length > 0 ? options.homeDir.trim() : homedir(); - return join(home, CLAWDENTITY_DIR, PEERS_FILENAME); -} -async function loadPeersConfig(options = {}) { - const configPath = resolvePeersConfigPath(options); - let rawJson; - try { - rawJson = await readFile(configPath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return { peers: {} }; - } - throw error; - } - let parsed; - try { - parsed = JSON.parse(rawJson); - } catch { - throw new Error(`Peer config at ${configPath} is not valid JSON`); - } - return parsePeersConfig(parsed, configPath); -} - -// src/transforms/relay-to-peer.ts -var DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; -var DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; -function isRecord2(value) { - return typeof value === "object" && value !== null; -} -function parseRequiredString(value) { - if (typeof value !== "string") { - throw new Error("Input value must be a string"); - } - const trimmed = value.trim(); - if (trimmed.length === 0) { - throw new Error("Input value must not be empty"); - } - return trimmed; -} -function removePeerField(payload) { - const outbound = {}; - for (const [key, value] of Object.entries(payload)) { - if (key !== "peer") { - outbound[key] = value; - } - } - return outbound; -} 
-function resolveRelayFetch(fetchImpl) { - const resolved = fetchImpl ?? globalThis.fetch; - if (typeof resolved !== "function") { - throw new Error("fetch implementation is required"); - } - return resolved; -} -function parseConnectorBaseUrl(value) { - let parsed; - try { - parsed = new URL(value); - } catch { - throw new Error("Connector base URL is invalid"); - } - if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { - throw new Error("Connector base URL is invalid"); - } - if (parsed.pathname === "/" && parsed.search.length === 0 && parsed.hash.length === 0) { - return parsed.origin; - } - return parsed.toString(); -} -function normalizeConnectorPath(value) { - const trimmed = value.trim(); - if (trimmed.length === 0) { - throw new Error("Connector outbound path is invalid"); - } - return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; -} -function resolveConnectorEndpoint(options) { - const baseUrlInput = options.connectorBaseUrl ?? process.env.CLAWDENTITY_CONNECTOR_BASE_URL ?? DEFAULT_CONNECTOR_BASE_URL; - const pathInput = options.connectorPath ?? process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH ?? 
DEFAULT_CONNECTOR_OUTBOUND_PATH; - const baseUrl = parseConnectorBaseUrl(baseUrlInput.trim()); - const path = normalizeConnectorPath(pathInput.trim()); - return new URL(path, baseUrl).toString(); -} -function mapConnectorFailure(status) { - if (status === 404) { - return new Error("Local connector outbound endpoint is unavailable"); - } - if (status === 409) { - return new Error("Peer alias is not configured"); - } - if (status === 400 || status === 422) { - return new Error("Local connector rejected outbound relay payload"); - } - return new Error("Local connector outbound relay request failed"); -} -async function postToConnector(endpoint, payload, fetchImpl) { - let response; - try { - response = await fetchImpl(endpoint, { - method: "POST", - headers: { - "Content-Type": "application/json" - }, - body: JSON.stringify(payload) - }); - } catch { - throw new Error("Local connector outbound relay request failed"); - } - if (!response.ok) { - throw mapConnectorFailure(response.status); - } -} -async function relayPayloadToPeer(payload, options = {}) { - if (!isRecord2(payload)) { - return payload; - } - const peerAliasValue = payload.peer; - if (peerAliasValue === void 0) { - return payload; - } - const peerAlias = parseRequiredString(peerAliasValue); - const peersConfig = await loadPeersConfig(options); - const peerEntry = peersConfig.peers[peerAlias]; - if (!peerEntry) { - throw new Error("Peer alias is not configured"); - } - const connectorEndpoint = resolveConnectorEndpoint(options); - const fetchImpl = resolveRelayFetch(options.fetchImpl); - const outboundPayload = removePeerField(payload); - await postToConnector( - connectorEndpoint, - { - peer: peerAlias, - peerDid: peerEntry.did, - peerProxyUrl: peerEntry.proxyUrl, - payload: outboundPayload - }, - fetchImpl - ); - return null; -} -async function relayToPeer(ctx) { - return relayPayloadToPeer(ctx?.payload); -} -export { - relayToPeer as default, - relayPayloadToPeer -}; diff --git 
a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md b/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md deleted file mode 100644 index 2bf3783..0000000 --- a/apps/cli/skill-bundle/openclaw-skill/skill/SKILL.md +++ /dev/null @@ -1,194 +0,0 @@ ---- -name: clawdentity_openclaw_relay -description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication through the local Clawdentity connector runtime. -version: 0.1.0 ---- - -# Clawdentity OpenClaw Relay Skill - -This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through the local Clawdentity connector runtime using a single invite code. - -## Trigger Conditions - -Use this skill when any of the following are requested: -- Install relay support for OpenClaw peer communication. -- Complete first-time setup from an invite code. -- Repair broken relay setup after config drift. -- Verify invite-code onboarding and peer mapping. 
- -## Filesystem Truth (must be used exactly) - -### OpenClaw state files -- OpenClaw state root (default): `~/.openclaw` -- OpenClaw config: `~/.openclaw/openclaw.json` -- Transform target path: `~/.openclaw/hooks/transforms/relay-to-peer.mjs` -- Workspace skill location: `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` -- Default transform source expected by CLI setup: - `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` - -### Clawdentity identity files -- Clawdentity root: `~/.clawdentity` -- Agent config: `~/.clawdentity/config.json` -- Agent identity directory: `~/.clawdentity/agents//` -- Agent private key: `~/.clawdentity/agents//secret.key` -- Agent AIT token: `~/.clawdentity/agents//ait.jwt` -- Peer map: `~/.clawdentity/peers.json` -- Local selected agent marker: `~/.clawdentity/openclaw-agent-name` -- Relay runtime config: `~/.clawdentity/openclaw-relay.json` - -## Invite Input Assumption - -Invite creation is outside this skill scope. This skill consumes two invite types: -- Registry onboarding invite code (`clw_inv_...`) to obtain PAT when local API key is missing. -- OpenClaw peer relay invite code (`clawd1_...`) for `openclaw setup`. - -## Human + Agent Flow (strict user-style) - -Follow this order. Do not skip steps. - -## Tool Execution Contract (Agent) - -This skill is operational. The agent must execute side effects via tools. -- Use the `exec` tool for npm/CLI/filesystem actions needed by onboarding. -- Do not ask the human to run shell commands that the agent can run itself. -- Ask the human only for missing secrets/inputs (for example API key or invite code). -- Report final status with concrete outputs (local DID, peer alias, written paths). 
- -## CLI Command Utilization (required) - -Use these commands as the default execution path for skill utilization: - -- Initialize local CLI config: - - `clawdentity config init` - - `clawdentity config init --registry-url ` (supports first-run registry URL override) - - or set env before init: `CLAWDENTITY_REGISTRY_URL=` (primary global override) - - compatible alias: `CLAWDENTITY_REGISTRY=` -- Configure registry URL and API key when missing: - - `clawdentity config set registryUrl ` -- Complete registry onboarding when API key is missing: - - `clawdentity invite redeem ` - - `clawdentity invite redeem --registry-url ` -- Configure API key only for non-invite fallback: - - `clawdentity config set apiKey ` -- Create and inspect local OpenClaw agent identity: - - `clawdentity agent create --framework openclaw` - - `clawdentity agent inspect ` -- Apply OpenClaw invite setup: - - `clawdentity openclaw setup --invite-code ` -- Start connector runtime for relay handoff: - - `clawdentity connector start ` -- Optional persistent connector autostart: - - `clawdentity connector service install ` -- Validate health and delivery: - - `clawdentity openclaw doctor` - - `clawdentity openclaw relay test --peer ` - -Pairing bootstrap uses CLI commands in the current release: - -- Owner/initiator starts pairing on initiator proxy: - - `clawdentity pair start --proxy-url --qr` - - Optionally pass explicit owner PAT: `--owner-pat ` -- Responder confirms on responder proxy: - - `clawdentity pair confirm --qr-file --proxy-url ` - - optional global proxy URL env fallback: `CLAWDENTITY_PROXY_URL=` - -Successful confirm establishes mutual trust for the two agent DIDs. After confirm, both directions are allowed for trusted delivery. - -1. Confirm prerequisites with the human. -- Confirm `clawdentity` CLI is installed and runnable. -- Confirm local agent name. -- Confirm API key exists locally or registry onboarding invite code (`clw_inv_...`) is available. 
-- Confirm OpenClaw peer relay invite code (`clawd1_...`) is available for setup. -- Do not request API key and registry invite code in the same prompt. -- Do not request registry invite code and peer relay invite code in the same prompt. -- Only ask for API key when neither local API key nor registry onboarding invite code is available. -- Confirm OpenClaw state directory path if non-default. -- Confirm OpenClaw base URL if local endpoint is non-default. -- Confirm each side proxy URL for pairing command execution. - -2. Confirm skill artifact exists in workspace skills directory. -- Ensure `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` exists. -- If missing, install/update skill package contents before setup. - -3. Initialize local Clawdentity config. -- Run `clawdentity config init`. -- Use `clawdentity config init --registry-url ` when registry URL override is required. - -4. Complete registry onboarding auth before agent creation. -- If API key already exists, continue. -- Else redeem registry onboarding invite: - - `clawdentity invite redeem ` - - optional: `--registry-url ` -- If registry invite code is unavailable, fallback to API key path: - - ask human for API key - - run `clawdentity config set apiKey ` - -5. Configure local Clawdentity identity for this OpenClaw agent. -- Create identity: `clawdentity agent create --framework openclaw`. -- Verify identity: `clawdentity agent inspect `. - -6. Run automated setup from peer relay invite code. -- Execute: - `clawdentity openclaw setup --invite-code ` -- Use `--openclaw-dir ` when state directory is non-default. -- Use `--openclaw-base-url ` when local OpenClaw HTTP endpoint is non-default. -- Use `--peer-alias ` only when alias override is required. - -7. Verify setup outputs. 
-- Confirm setup reports: - - peer alias - - peer DID - - updated OpenClaw config path - - installed transform path - - OpenClaw base URL - - relay runtime config path -- Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. - -8. Start connector runtime for local relay handoff. -- Run `clawdentity connector start `. -- Optional: run `clawdentity connector service install ` for persistent autostart. - -9. Complete trust pairing bootstrap. -- Run pairing start from owner/initiator side: - - `clawdentity pair start --proxy-url --qr` -- Share the one-time QR image with responder side. -- Run pairing confirm from responder side: - - `clawdentity pair confirm --qr-file --proxy-url ` -- Confirm pairing success before relay test. - -10. Validate with user-style relay test. -- Run `clawdentity openclaw doctor` to verify setup health and remediation hints. -- Run `clawdentity openclaw relay test --peer ` to execute a probe. -- Confirm probe success and connector-mediated delivery logs. -- Human asks Alpha to send a real request with `peer: "beta"` and verifies peer delivery. - -## Required question policy - -Ask the human only when required inputs are missing: -- Missing local agent name. -- Missing peer relay invite code (`clawd1_...`). -- Missing registry onboarding invite code (`clw_inv_...`) when API key is absent. -- Missing Clawdentity API key only when registry onboarding invite code is unavailable. -- Missing initiator/responder proxy URLs for pairing commands. -- Unclear OpenClaw state directory. -- Non-default OpenClaw base URL. -- Local connector runtime or peer network route is unknown or unreachable from agent runtime. - -## Failure Handling - -If setup or relay fails: -- Report precise missing file/path/value. -- Fix only the failing config/input. -- Ensure connector runtime is active (`clawdentity connector start `). -- Re-run `clawdentity openclaw doctor`. -- Re-run `clawdentity openclaw relay test --peer `. 
-- Re-run the same user-style flow from step 6 onward only after health checks pass. - -## Bundled Resources - -### References -| File | Purpose | -|------|---------| -| `references/clawdentity-protocol.md` | Invite format, peer map schema, connector handoff envelope, and runtime failure mapping | - -Directive: read the reference file before troubleshooting relay contract or connector handoff failures. diff --git a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md deleted file mode 100644 index 7e9635c..0000000 --- a/apps/cli/skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md +++ /dev/null @@ -1,177 +0,0 @@ -# Clawdentity Relay Protocol Reference - -## Purpose - -Define the exact runtime contract used by `relay-to-peer.mjs`. - -## Filesystem Paths - -### OpenClaw files -- `~/.openclaw/openclaw.json` -- `~/.openclaw/hooks/transforms/relay-to-peer.mjs` -- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` - -### Clawdentity files -- `~/.clawdentity/config.json` -- `~/.clawdentity/agents//secret.key` -- `~/.clawdentity/agents//ait.jwt` -- `~/.clawdentity/peers.json` -- `~/.clawdentity/openclaw-agent-name` -- `~/.clawdentity/openclaw-relay.json` - -## Invite Code Contract - -Invite codes are prefixed with `clawd1_` and contain base64url JSON: - -```json -{ - "v": 1, - "issuedAt": "2026-02-15T20:00:00.000Z", - "did": "did:claw:agent:01H...", - "proxyUrl": "https://beta-proxy.example.com/hooks/agent", - "alias": "beta", - "name": "Beta Agent" -} -``` - -Rules: -- `v` must be `1`. -- `issuedAt` is ISO-8601 UTC timestamp. -- `did` must be an agent DID. -- `proxyUrl` must be absolute `http` or `https`. -- `alias` is optional but preferred for zero-question setup. 
- -## Peer Map Schema - -`~/.clawdentity/peers.json` must be valid JSON: - -```json -{ - "peers": { - "beta": { - "did": "did:claw:agent:01H...", - "proxyUrl": "https://beta-proxy.example.com/hooks/agent", - "name": "Beta Agent" - } - } -} -``` - -Rules: -- peer alias key uses `[a-zA-Z0-9._-]` -- `did` required and must begin with `did:` -- `proxyUrl` required and must be a valid absolute URL -- `name` optional - -## Proxy Pairing Prerequisite - -Relay delivery policy is trust-pair based on proxy side. Pairing must be completed before first cross-agent delivery. - -Current pairing contract is ticket-based with CLI support: - -1. Initiator owner starts pairing: - - CLI: `clawdentity pair start --proxy-url --qr` - - proxy route: `POST /pair/start` - - headers: - - `Authorization: Claw ` - - `x-claw-owner-pat: ` - - body (optional): - -```json -{ - "ttlSeconds": 900 -} -``` - -2. Responder confirms pairing: - - CLI: `clawdentity pair confirm --qr-file --proxy-url ` - - proxy route: `POST /pair/confirm` - - headers: - - `Authorization: Claw ` - - body: - -```json -{ - "ticket": "clwpair1_..." -} -``` - -Rules: -- `ticket` is one-time and expires (default 15 minutes). -- Confirm establishes mutual trust for the initiator/responder pair. -- Same-agent sender/recipient is allowed by policy without explicit pair entry. - -## Relay Input Contract - -The OpenClaw transform reads `ctx.payload`. - -- If `payload.peer` is absent: - - return payload unchanged - - do not relay -- If `payload.peer` exists: - - resolve peer from `peers.json` - - remove `peer` from forwarded body - - send JSON POST to local connector outbound endpoint - - return `null` to skip local handling - -## Relay Agent Selection Contract - -Relay resolves local agent name in this order: -1. transform option `agentName` -2. `CLAWDENTITY_AGENT_NAME` -3. `~/.clawdentity/openclaw-agent-name` -4. 
single local agent fallback from `~/.clawdentity/agents/` - -## Local OpenClaw Base URL Contract - -`~/.clawdentity/openclaw-relay.json` stores the OpenClaw upstream base URL used by local proxy runtime fallback: - -```json -{ - "openclawBaseUrl": "http://127.0.0.1:18789", - "updatedAt": "2026-02-15T20:00:00.000Z" -} -``` - -Rules: -- `openclawBaseUrl` must be absolute `http` or `https`. -- `updatedAt` is ISO-8601 UTC timestamp. -- Proxy runtime precedence is: `OPENCLAW_BASE_URL` env first, then `openclaw-relay.json`, then built-in default. - -## Connector Handoff Contract - -The transform does not send directly to the peer proxy. It posts to the local connector runtime: -- Default endpoint: `http://127.0.0.1:19400/v1/outbound` -- Optional overrides: - - `CLAWDENTITY_CONNECTOR_BASE_URL` - - `CLAWDENTITY_CONNECTOR_OUTBOUND_PATH` - -Outbound JSON body sent by transform: - -```json -{ - "peer": "beta", - "peerDid": "did:claw:agent:01H...", - "peerProxyUrl": "https://beta-proxy.example.com/hooks/agent", - "payload": { - "event": "agent.message" - } -} -``` - -Rules: -- `payload.peer` is removed before creating the `payload` object above. -- Transform sends `Content-Type: application/json` only. -- Connector runtime is responsible for Clawdentity auth headers and request signing when calling peer proxy. - -## Error Conditions - -Relay fails when: -- no selected local agent can be resolved -- peer alias missing from config -- local connector outbound endpoint is unavailable (`404`) -- local connector reports unknown peer alias (`409`) -- local connector rejects payload (`400` or `422`) -- local connector outbound request fails (network/other non-2xx) - -Error messages should include file/path context but never print secret content. 
diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 6e5d6f4..fa76a8e 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -30,6 +30,7 @@ - Resolve skill artifacts in this order: explicit override, bundled `skill-bundle/openclaw-skill`, installed `@clawdentity/openclaw-skill`, then workspace fallback. - Skill install must copy `SKILL.md`, `references/*`, and `relay-to-peer.mjs` into OpenClaw runtime paths under `~/.openclaw` and must fail with actionable errors when source artifacts are missing. - Installer logs must be deterministic and explicit (`installed`, `updated`, `unchanged`) so automated skill tests can assert outcomes reliably. +- Keep installer tests independent from repo-committed bundle artifacts by using sandbox roots and `CLAWDENTITY_SKILL_PACKAGE_ROOT` overrides where needed. ## Verification Flow Contract - `verify` must support both raw token input and file-path input without requiring extra flags. diff --git a/apps/cli/src/install-skill-mode.test.ts b/apps/cli/src/install-skill-mode.test.ts index a1b44eb..c8e1d2b 100644 --- a/apps/cli/src/install-skill-mode.test.ts +++ b/apps/cli/src/install-skill-mode.test.ts @@ -47,7 +47,7 @@ function createSkillSandbox(): SkillSandbox { ); writeFileSync( join(skillPackageRoot, "dist", "relay-to-peer.mjs"), - "export default async function relayToPeer(){ return null; }\n", + "// relay-to-peer transform\nexport default async function relayToPeer(){ return null; }\n", "utf8", ); @@ -132,7 +132,7 @@ describe("installOpenclawSkillArtifacts", () => { expect(readFileSync(skillPath, "utf8")).toContain("Clawdentity"); expect(readFileSync(workspaceRelayPath, "utf8")).toContain("relayToPeer"); - expect(readFileSync(hooksRelayPath, "utf8")).toContain("relayToPeer"); + expect(readFileSync(hooksRelayPath, "utf8")).toContain("relay-to-peer"); expect(readFileSync(referencePath, "utf8")).toContain("Protocol"); const secondRun = await installOpenclawSkillArtifacts({ @@ -212,17 +212,19 @@ 
describe("runNpmSkillInstall", () => { expect(result.skipped).toBe(true); }); - it("installs bundled skill artifacts when --skill is set", async () => { - const root = mkdtempSync(join(tmpdir(), "clawdentity-skill-bundle-")); - const openclawDir = join(root, ".openclaw"); + it("installs skill artifacts when --skill is set", async () => { + const sandbox = createSkillSandbox(); const stdout: string[] = []; const stderr: string[] = []; try { const result = await runNpmSkillInstall({ - env: { npm_config_skill: "true" }, - homeDir: root, - openclawDir, + env: { + npm_config_skill: "true", + CLAWDENTITY_SKILL_PACKAGE_ROOT: sandbox.skillPackageRoot, + }, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, writeStdout: (line) => stdout.push(line), writeStderr: (line) => stderr.push(line), }); @@ -234,23 +236,23 @@ describe("runNpmSkillInstall", () => { ); const skillPath = join( - openclawDir, + sandbox.openclawDir, "workspace", "skills", "clawdentity-openclaw-relay", "SKILL.md", ); const hooksRelayPath = join( - openclawDir, + sandbox.openclawDir, "hooks", "transforms", "relay-to-peer.mjs", ); expect(readFileSync(skillPath, "utf8")).toContain("OpenClaw Relay"); - expect(readFileSync(hooksRelayPath, "utf8")).toContain("relay-to-peer"); + expect(readFileSync(hooksRelayPath, "utf8")).toContain("relayToPeer"); } finally { - rmSync(root, { recursive: true, force: true }); + sandbox.cleanup(); } }); }); diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 14b4bca..3246d74 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -33,7 +33,7 @@ - When `src/transforms/relay-to-peer.ts` relay envelope, endpoint defaults, or failure mapping changes, update: - `skill/SKILL.md` - `skill/references/clawdentity-protocol.md` - - bundled copies in `apps/cli/skill-bundle/openclaw-skill/skill/*` + - regenerate CLI bundle via `pnpm -F @clawdentity/openclaw-skill build && pnpm -F clawdentity run sync:skill-bundle` ## Validation 
Commands
- `pnpm -F @clawdentity/openclaw-skill typecheck`

From 2d7356ea4bf169249ded62610530a311ac92417f Mon Sep 17 00:00:00 2001
From: vrknetha
Date: Wed, 18 Feb 2026 12:59:32 +0530
Subject: [PATCH 092/190] feat(pairing): harden proxy pairing flow and trust state

---
 AGENTS.md | 4 +
 apps/cli/src/commands/AGENTS.md | 2 +
 apps/cli/src/commands/pair.test.ts | 20 +
 apps/cli/src/commands/pair.ts | 63 ++-
 .../skill/references/clawdentity-protocol.md | 4 +-
 apps/proxy/src/AGENTS.md | 6 +-
 apps/proxy/src/auth-middleware.test.ts | 43 ++
 apps/proxy/src/auth-middleware.ts | 15 +-
 apps/proxy/src/pairing-constants.ts | 2 +-
 apps/proxy/src/pairing-route.test.ts | 388 +++++++++++++++---
 apps/proxy/src/pairing-route.ts | 342 ++++++++++++---
 apps/proxy/src/pairing-ticket.ts | 205 +++++++--
 apps/proxy/src/proxy-trust-state.test.ts | 88 +++-
 apps/proxy/src/proxy-trust-state.ts | 109 +++--
 apps/proxy/src/proxy-trust-store.test.ts | 101 ++++-
 apps/proxy/src/proxy-trust-store.ts | 91 ++--
 apps/proxy/src/server.ts | 1 +
 .../drizzle/0003_proxy_pairing_keys.sql | 14 +
 apps/registry/src/AGENTS.md | 7 +
 apps/registry/src/db/AGENTS.md | 2 +
 apps/registry/src/db/schema.ts | 22 +
 apps/registry/src/server.ts | 208 +++++++++-
 packages/protocol/AGENTS.md | 1 +
 packages/protocol/src/endpoints.ts | 2 +
 packages/protocol/src/index.test.ts | 6 +
 packages/protocol/src/index.ts | 2 +
 26 files changed, 1539 insertions(+), 209 deletions(-)
 create mode 100644 apps/registry/drizzle/0003_proxy_pairing_keys.sql

diff --git a/AGENTS.md b/AGENTS.md
index 0ea3757..8672509 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -3,6 +3,10 @@
 ## Purpose
 - Define repository-wide engineering and documentation guardrails for Clawdentity.
 - Keep product docs and issue governance in sync with the active GitHub tracker.
+- When shipping features, the user experience (UX) is the most important aspect.
+- Remember that users run clawdentity on machines that are not exposed to the internet.
+- The location of OpenClaw (aka clawdbot) is /Users/dev/Workdir/clawdbot, which is the runtime we are currently building against.
+- Based on the changes made to the CLI, always plan for corresponding changes in skills as well; both go together.

 ## Core Rules
 - Ship maintainable, non-duplicative changes.
diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md
index 0c197c6..e3e870b 100644
--- a/apps/cli/src/commands/AGENTS.md
+++ b/apps/cli/src/commands/AGENTS.md
@@ -96,7 +96,9 @@
 - `pair start ` must call proxy `/pair/start` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`.
 - `pair start` must send owner PAT via `x-claw-owner-pat`, defaulting to configured API key unless explicitly overridden by `--owner-pat`.
 - `pair start --qr` must generate a one-time local PNG QR containing the returned ticket and print the filesystem path.
+- `pair start --qr` must sweep expired QR artifacts in `~/.clawdentity/pairing` before writing a new file.
 - `pair confirm ` must call proxy `/pair/confirm` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`.
 - `pair confirm` must accept either `--qr-file ` (primary) or `--ticket ` (fallback), never both.
+- `pair confirm --qr-file` must delete the consumed QR file after successful confirm (best effort, non-fatal on cleanup failure).
 - `pair` commands must accept proxy URL via `--proxy-url` and fallback to env `CLAWDENTITY_PROXY_URL` when the flag is absent.
 - `pair` commands must fail with deterministic operator messages for invalid ticket/QR input, missing local agent proof material, and proxy auth/state errors.
diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts index b61505f..811f59a 100644 --- a/apps/cli/src/commands/pair.test.ts +++ b/apps/cli/src/commands/pair.test.ts @@ -63,6 +63,12 @@ describe("pair command helpers", () => { it("starts pairing with local agent proof and configured owner PAT", async () => { const fixture = await createPairFixture(); const readFileImpl = createReadFileMock(fixture); + const readdirImpl = vi.fn(async () => [ + "alpha-pair-1699999000.png", + "alpha-pair-1699999500.png", + "notes.txt", + ]); + const unlinkImpl = vi.fn(async () => undefined); const writeFileImpl = vi.fn(async () => undefined); const mkdirImpl = vi.fn(async () => undefined); const fetchImpl = vi.fn(async (_url: string, _init?: RequestInit) => { @@ -93,6 +99,10 @@ describe("pair command helpers", () => { writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, mkdirImpl: mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + readdirImpl: + readdirImpl as unknown as typeof import("node:fs/promises").readdir, + unlinkImpl: + unlinkImpl as unknown as typeof import("node:fs/promises").unlink, qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), resolveConfigImpl: async () => ({ registryUrl: "https://dev.api.clawdentity.com/", @@ -107,6 +117,11 @@ describe("pair command helpers", () => { expect(result.qrPath).toContain( "/tmp/.clawdentity/pairing/alpha-pair-1700000000.png", ); + expect(readdirImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/alpha-pair-1699999000.png", + ); expect(writeFileImpl).toHaveBeenCalledTimes(1); expect(mkdirImpl).toHaveBeenCalledTimes(1); const [, init] = fetchImpl.mock.calls[0] as [string, RequestInit]; @@ -181,6 +196,7 @@ describe("pair command helpers", () => { it("confirms pairing with qr-file ticket decode", async () => { const fixture = await createPairFixture(); + const unlinkImpl = 
vi.fn(async () => undefined); const fetchImpl = vi.fn(async (_url: string, _init?: RequestInit) => { return Response.json( { @@ -205,6 +221,8 @@ describe("pair command helpers", () => { readFileImpl: createReadFileMock( fixture, ) as unknown as typeof import("node:fs/promises").readFile, + unlinkImpl: + unlinkImpl as unknown as typeof import("node:fs/promises").unlink, qrDecodeImpl: () => "clwpair1_ticket", getConfigDirImpl: () => "/tmp/.clawdentity", }, @@ -221,6 +239,8 @@ describe("pair command helpers", () => { expect(headers.get("x-claw-timestamp")).toBe("1700000000"); expect(headers.get("x-claw-nonce")).toBe("nonce-confirm"); expect(String(init?.body ?? "")).toContain("clwpair1_ticket"); + expect(unlinkImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).toHaveBeenCalledWith("/tmp/pair.png"); }); }); diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 9dce533..1554bfe 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -1,5 +1,5 @@ import { randomBytes } from "node:crypto"; -import { mkdir, readFile, writeFile } from "node:fs/promises"; +import { mkdir, readdir, readFile, unlink, writeFile } from "node:fs/promises"; import { dirname, join, resolve } from "node:path"; import { decodeBase64url } from "@clawdentity/protocol"; import { AppError, createLogger, signHttpRequest } from "@clawdentity/sdk"; @@ -28,6 +28,8 @@ const PAIR_CONFIRM_PATH = "/pair/confirm"; const OWNER_PAT_HEADER = "x-claw-owner-pat"; const NONCE_SIZE = 24; const PAIRING_TICKET_PREFIX = "clwpair1_"; +const PAIRING_QR_MAX_AGE_SECONDS = 900; +const PAIRING_QR_FILENAME_PATTERN = /-pair-(\d+)\.png$/; export type PairStartOptions = { ownerPat?: string; @@ -51,6 +53,8 @@ type PairRequestOptions = { readFileImpl?: typeof readFile; writeFileImpl?: typeof writeFile; mkdirImpl?: typeof mkdir; + readdirImpl?: typeof readdir; + unlinkImpl?: typeof unlink; resolveConfigImpl?: () => Promise; qrEncodeImpl?: (ticket: string) => Promise; qrDecodeImpl?: 
(imageBytes: Uint8Array) => string; @@ -501,6 +505,8 @@ async function persistPairingQr(input: { nowSeconds: number; }): Promise { const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; + const readdirImpl = input.dependencies.readdirImpl ?? readdir; + const unlinkImpl = input.dependencies.unlinkImpl ?? unlink; const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; const qrEncodeImpl = input.dependencies.qrEncodeImpl ?? encodeTicketQrPng; @@ -513,6 +519,44 @@ async function persistPairingQr(input: { `${assertValidAgentName(input.agentName)}-pair-${input.nowSeconds}.png`, ); + const existingFiles = await readdirImpl(baseDir).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return [] as string[]; + } + + throw error; + }); + for (const fileName of existingFiles) { + if (typeof fileName !== "string") { + continue; + } + + const match = PAIRING_QR_FILENAME_PATTERN.exec(fileName); + if (!match) { + continue; + } + + const issuedAtSeconds = Number.parseInt(match[1] ?? "", 10); + if (!Number.isInteger(issuedAtSeconds)) { + continue; + } + + if (issuedAtSeconds + PAIRING_QR_MAX_AGE_SECONDS > input.nowSeconds) { + continue; + } + + const stalePath = join(baseDir, fileName); + await unlinkImpl(stalePath).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + throw error; + }); + } + await mkdirImpl(dirname(outputPath), { recursive: true }); const imageBytes = await qrEncodeImpl(input.ticket); await writeFileImpl(outputPath, imageBytes); @@ -731,6 +775,23 @@ export async function confirmPairing( } const parsed = parsePairConfirmResponse(responseBody); + if (ticketSource.source === "qr-file" && ticketSource.qrFilePath) { + const unlinkImpl = dependencies.unlinkImpl ?? 
unlink; + await unlinkImpl(ticketSource.qrFilePath).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + logger.warn("cli.pair.confirm.qr_cleanup_failed", { + path: ticketSource.qrFilePath, + reason: + error instanceof Error && error.message.length > 0 + ? error.message + : "unknown", + }); + }); + } return { ...parsed, diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index 7e9635c..ab0a9d9 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -79,7 +79,7 @@ Current pairing contract is ticket-based with CLI support: ```json { - "ttlSeconds": 900 + "ttlSeconds": 300 } ``` @@ -97,7 +97,7 @@ Current pairing contract is ticket-based with CLI support: ``` Rules: -- `ticket` is one-time and expires (default 15 minutes). +- `ticket` is one-time and expires (default 5 minutes, max 15 minutes). - Confirm establishes mutual trust for the initiator/responder pair. - Same-agent sender/recipient is allowed by policy without explicit pair entry. diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 136d4d0..ee56711 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -41,9 +41,12 @@ - Keep pairing bootstrap explicit: `/pair/start` and `/pair/confirm` must bypass known-agent gate in auth middleware. - Keep `/pair/start` ownership validation against registry `GET /v1/agents/:id/ownership` using `x-claw-owner-pat`, and map dependency failures to `503`. - Allow optional `PAIRING_ISSUER_URL` override for `/pair/start` ticket issuer origin so cross-proxy forwarding can work when inbound hostnames differ from proxy-to-proxy reachability hostnames. 
+- Keep pairing tickets issuer-authenticated: `/pair/start` must sign each ticket and register the signing public key in registry (`/v1/proxy-pairing-keys`) before returning ticket data. - Keep cross-proxy `/pair/confirm` forwarding SSRF-safe by default: reject localhost/private/reserved issuer origins when the current proxy origin is non-local. - Enforce that forwarded `/pair/confirm` issuer origins use HTTPS once the proxy origin is non-local, while continuing to allow HTTP when both the proxy and issuer are on local/dev hosts. -- Preserve the original request JSON bytes when forwarding `/pair/confirm` so forwarded PoP/body-signature headers remain valid. +- Before cross-proxy forwarding, resolve issuer signing key from registry (`/v1/proxy-pairing-keys/resolve`) and reject unverified tickets with `403` fail-closed behavior. +- Preserve the original request JSON bytes when forwarding `/pair/confirm`; issuer-side confirmation must validate the ticket payload, not responder PoP headers. +- Forward only minimal `/pair/confirm` headers (`content-type`); never forward responder `Authorization`/PoP headers or arbitrary inbound headers to issuer proxy. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. - Keep `/hooks/agent` trust check explicit: sender/recipient pair must be authorized by trust state before relay delivery. @@ -59,5 +62,6 @@ - Keep relay delivery failure mapping explicit for `/hooks/agent`: DO delivery/RPC failures -> `502`, unavailable DO namespace -> `503`. - Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. 
- Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. +- Index pairing tickets by ticket `kid` in both in-memory and Durable Object stores; persist the original full ticket string alongside each entry and require exact ticket match on confirm. - Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. - When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index d291738..b633968 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -331,6 +331,49 @@ describe("proxy auth middleware", () => { expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); }); + it("allows forwarded /pair/confirm without Authorization when responder DID query is present", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + + const response = await harness.app.request( + `${PAIR_CONFIRM_PATH}?responderAgentDid=${encodeURIComponent(KNOWN_PEER_DID)}`, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + ticket: "clwpair1_missing-ticket", + }), + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); + }); + + it("rejects /pair/confirm without Authorization when responder DID query is missing", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + + const response = await harness.app.request(PAIR_CONFIRM_PATH, 
{ + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + ticket: "clwpair1_missing-ticket", + }), + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_MISSING_TOKEN"); + }); + it("refreshes keyset and accepts valid AIT after registry key rotation", async () => { const oldKid = "registry-old-kid"; const newKid = "registry-new-kid"; diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index b7fd6cd..6627d9c 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -481,7 +481,20 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { return; } - const token = parseClawAuthorizationHeader(c.req.header("authorization")); + const authorizationHeader = c.req.header("authorization"); + const forwardedResponderDid = c.req.query("responderAgentDid"); + const isAnonymousForwardedPairConfirm = + c.req.path === PAIR_CONFIRM_PATH && + (typeof authorizationHeader !== "string" || + authorizationHeader.trim().length === 0) && + typeof forwardedResponderDid === "string" && + forwardedResponderDid.trim().length > 0; + if (isAnonymousForwardedPairConfirm) { + await next(); + return; + } + + const token = parseClawAuthorizationHeader(authorizationHeader); const claims = await verifyAitClaims(token); const timestampHeader = c.req.header("x-claw-timestamp"); diff --git a/apps/proxy/src/pairing-constants.ts b/apps/proxy/src/pairing-constants.ts index 88a790f..2075716 100644 --- a/apps/proxy/src/pairing-constants.ts +++ b/apps/proxy/src/pairing-constants.ts @@ -2,7 +2,7 @@ export const PAIR_START_PATH = "/pair/start"; export const PAIR_CONFIRM_PATH = "/pair/confirm"; export const OWNER_PAT_HEADER = "x-claw-owner-pat"; -export const DEFAULT_PAIRING_TICKET_TTL_SECONDS = 900; +export const DEFAULT_PAIRING_TICKET_TTL_SECONDS = 300; export const 
MAX_PAIRING_TICKET_TTL_SECONDS = 900; export const PROXY_TRUST_DO_NAME = "global-trust"; diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index c603260..6aab68e 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -1,6 +1,10 @@ import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; -import { createPairingTicket, parsePairingTicket } from "./pairing-ticket.js"; +import { + createPairingTicket, + createPairingTicketSigningKey, + parsePairingTicket, +} from "./pairing-ticket.js"; const INITIATOR_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_000)); const RESPONDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_100)); @@ -32,6 +36,31 @@ import { import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; import { createProxyApp } from "./server.js"; +async function createSignedTicketFixture(input: { + issuerProxyUrl: string; + nowMs: number; + expiresAtMs: number; +}) { + const signingKey = await createPairingTicketSigningKey({ + nowMs: input.nowMs, + }); + const created = await createPairingTicket({ + issuerProxyUrl: input.issuerProxyUrl, + expiresAtMs: input.expiresAtMs, + nowMs: input.nowMs, + signingKey: { + pkid: signingKey.pkid, + privateKey: signingKey.privateKey, + }, + }); + + return { + ticket: created.ticket, + publicKeyX: signingKey.publicKeyX, + pkid: signingKey.pkid, + }; +} + function createPairingApp(input?: { fetchImpl?: typeof fetch; nowMs?: () => number; @@ -64,14 +93,23 @@ function createPairingApp(input?: { describe(`POST ${PAIR_START_PATH}`, () => { it("creates a pairing ticket when owner PAT controls caller agent DID", async () => { - const fetchMock = vi.fn(async (_requestInput: unknown) => - Response.json( - { - ownsAgent: true, - }, - { status: 200 }, - ), - ); + const fetchMock = vi.fn(async (requestInput: unknown) => { + const url = String(requestInput); + if 
(url.includes("/ownership")) { + return Response.json( + { + ownsAgent: true, + }, + { status: 200 }, + ); + } + + if (url.includes("/v1/proxy-pairing-keys")) { + return Response.json({ ok: true }, { status: 201 }); + } + + throw new Error(`Unexpected URL: ${url}`); + }); const fetchImpl = fetchMock as unknown as typeof fetch; const { app } = createPairingApp({ @@ -97,11 +135,61 @@ describe(`POST ${PAIR_START_PATH}`, () => { expect(body.ticket.startsWith("clwpair1_")).toBe(true); expect(body.initiatorAgentDid).toBe(INITIATOR_AGENT_DID); - expect(body.expiresAt).toBe("2023-11-14T22:28:20.000Z"); - expect(fetchImpl).toHaveBeenCalledTimes(1); - const fetchCallUrl = String(fetchMock.mock.calls[0]?.[0] ?? ""); - expect(fetchCallUrl).toContain("/v1/agents/"); - expect(fetchCallUrl).toContain("/ownership"); + expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); + expect(fetchImpl).toHaveBeenCalledTimes(2); + const ownershipCallUrl = String(fetchMock.mock.calls[0]?.[0] ?? ""); + expect(ownershipCallUrl).toContain("/v1/agents/"); + expect(ownershipCallUrl).toContain("/ownership"); + const keyRegisterCallUrl = String(fetchMock.mock.calls[1]?.[0] ?? 
""); + expect(keyRegisterCallUrl).toContain("/v1/proxy-pairing-keys"); + }); + + it("normalizes pairing ticket expiry to whole seconds", async () => { + const fetchMock = vi.fn( + async (requestInput: unknown, _requestInit?: RequestInit) => { + const url = String(requestInput); + if (url.includes("/ownership")) { + return Response.json({ ownsAgent: true }, { status: 200 }); + } + + if (url.includes("/v1/proxy-pairing-keys")) { + return Response.json({ ok: true }, { status: 201 }); + } + + throw new Error(`Unexpected URL: ${url}`); + }, + ); + const fetchImpl = fetchMock as unknown as typeof fetch; + + const { app } = createPairingApp({ + fetchImpl, + nowMs: () => 1_700_000_000_123, + }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + [OWNER_PAT_HEADER]: "clw_pat_owner_token", + }, + body: JSON.stringify({}), + }); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + expiresAt: string; + }; + expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); + + const keyRegisterInit = fetchMock.mock.calls[1]?.[1] as + | RequestInit + | undefined; + const keyRegisterBody = JSON.parse( + String(keyRegisterInit?.body ?? 
"{}"), + ) as { + expiresAt?: string; + }; + expect(keyRegisterBody.expiresAt).toBe("2023-11-14T22:18:20.000Z"); }); it("returns 401 when owner PAT is invalid", async () => { @@ -150,14 +238,18 @@ describe(`POST ${PAIR_START_PATH}`, () => { }); it("uses configured pairing issuer URL when creating ticket", async () => { - const fetchImpl = vi.fn(async (_requestInput: unknown) => - Response.json( - { - ownsAgent: true, - }, - { status: 200 }, - ), - ) as unknown as typeof fetch; + const fetchImpl = vi.fn(async (requestInput: unknown) => { + const url = String(requestInput); + if (url.includes("/ownership")) { + return Response.json({ ownsAgent: true }, { status: 200 }); + } + + if (url.includes("/v1/proxy-pairing-keys")) { + return Response.json({ ok: true }, { status: 201 }); + } + + throw new Error(`Unexpected URL: ${url}`); + }) as unknown as typeof fetch; const { app } = createPairingApp({ fetchImpl, nowMs: () => 1_700_000_000_000, @@ -188,10 +280,16 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { nowMs: () => 1_700_000_000_000, }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); const ticket = await trustStore.createPairingTicket({ initiatorAgentDid: INITIATOR_AGENT_DID, issuerProxyUrl: "http://localhost", - ttlSeconds: 900, + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, }); @@ -234,8 +332,28 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }); it("forwards confirm to issuer proxy when ticket issuer differs", async () => { + const created = await createSignedTicketFixture({ + issuerProxyUrl: "https://issuer.proxy.example", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { - expect(String(url)).toBe("https://issuer.proxy.example/pair/confirm"); + const urlString = String(url); + if 
(urlString.includes("/v1/proxy-pairing-keys/resolve")) { + return Response.json( + { + key: { + publicKeyX: created.publicKeyX, + }, + }, + { status: 200 }, + ); + } + + expect(urlString).toBe( + `https://issuer.proxy.example/pair/confirm?responderAgentDid=${encodeURIComponent(RESPONDER_AGENT_DID)}`, + ); const forwardedBody = JSON.parse(String(init?.body ?? "{}")) as { ticket: string; }; @@ -256,12 +374,6 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { nowMs: () => 1_700_000_000_000, }); - const created = createPairingTicket({ - issuerProxyUrl: "https://issuer.proxy.example", - expiresAtMs: 1_700_000_900_000, - nowMs: 1_700_000_000_000, - }); - const response = await app.request(PAIR_CONFIRM_PATH, { method: "POST", headers: { @@ -274,7 +386,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }); expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(1); + expect(forwardFetch).toHaveBeenCalledTimes(2); expect( await trustStore.isPairAllowed({ initiatorAgentDid: INITIATOR_AGENT_DID, @@ -283,20 +395,69 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { ).toBe(true); }); - it("rejects forwarding to blocked issuer origin for non-local proxy origins", async () => { - const forwardFetch = vi.fn(async () => { - throw new Error("forward fetch should not be called"); + it("rejects forwarded confirm when issuer key cannot be resolved", async () => { + const created = await createSignedTicketFixture({ + issuerProxyUrl: "https://issuer.proxy.example", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, }); + const fetchImpl = vi.fn(async (url: unknown) => { + const urlString = String(url); + if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { + return new Response(null, { status: 404 }); + } + + throw new Error(`Unexpected URL: ${urlString}`); + }) as unknown as typeof fetch; + const { app } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, + fetchImpl, nowMs: () => 1_700_000_000_000, }); - const created = 
createPairingTicket({ + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: created.ticket, + }), + }); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_TICKET_UNTRUSTED_ISSUER"); + }); + + it("rejects forwarding to blocked issuer origin for non-local proxy origins", async () => { + const created = await createSignedTicketFixture({ issuerProxyUrl: "http://127.0.0.1:8787", - expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + + const forwardFetch = vi.fn(async (url: unknown) => { + const urlString = String(url); + if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { + return Response.json( + { + key: { + publicKeyX: created.publicKeyX, + }, + }, + { status: 200 }, + ); + } + + throw new Error("forward fetch should not be called"); + }); + + const { app } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, }); const response = await app.request( @@ -316,11 +477,29 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { expect(response.status).toBe(403); const body = (await response.json()) as { error: { code: string } }; expect(body.error.code).toBe("PROXY_PAIR_TICKET_ISSUER_BLOCKED"); - expect(forwardFetch).not.toHaveBeenCalled(); + expect(forwardFetch).toHaveBeenCalledTimes(1); }); it("rejects HTTP issuer origin when proxy is non-local", async () => { - const forwardFetch = vi.fn(async () => { + const created = await createSignedTicketFixture({ + issuerProxyUrl: "http://issuer.proxy.example", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + + const forwardFetch = vi.fn(async (url: unknown) => { + const urlString = String(url); + if (urlString.includes("/v1/proxy-pairing-keys/resolve")) 
{ + return Response.json( + { + key: { + publicKeyX: created.publicKeyX, + }, + }, + { status: 200 }, + ); + } + throw new Error("forward fetch should not be called"); }); @@ -329,12 +508,6 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { nowMs: () => 1_700_000_000_000, }); - const created = createPairingTicket({ - issuerProxyUrl: "http://issuer.proxy.example", - expiresAtMs: 1_700_000_900_000, - nowMs: 1_700_000_000_000, - }); - const response = await app.request( "https://proxy.public.example/pair/confirm", { @@ -352,12 +525,32 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { expect(response.status).toBe(403); const body = (await response.json()) as { error: { code: string } }; expect(body.error.code).toBe("PROXY_PAIR_CONFIRM_ISSUER_INSECURE"); - expect(forwardFetch).not.toHaveBeenCalled(); + expect(forwardFetch).toHaveBeenCalledTimes(1); }); it("allows HTTP issuer origin when both proxy and issuer are local", async () => { + const created = await createSignedTicketFixture({ + issuerProxyUrl: "http://127.0.0.1:8787", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + const forwardFetch = vi.fn(async (url: unknown) => { - expect(String(url)).toBe("http://127.0.0.1:8787/pair/confirm"); + const urlString = String(url); + if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { + return Response.json( + { + key: { + publicKeyX: created.publicKeyX, + }, + }, + { status: 200 }, + ); + } + + expect(urlString).toBe( + `http://127.0.0.1:8787/pair/confirm?responderAgentDid=${encodeURIComponent(RESPONDER_AGENT_DID)}`, + ); return Response.json( { @@ -374,12 +567,6 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { nowMs: () => 1_700_000_000_000, }); - const created = createPairingTicket({ - issuerProxyUrl: "http://127.0.0.1:8787", - expiresAtMs: 1_700_000_900_000, - nowMs: 1_700_000_000_000, - }); - const response = await app.request("http://localhost/pair/confirm", { method: "POST", headers: { @@ -392,7 +579,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () 
=> { }); expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(1); + expect(forwardFetch).toHaveBeenCalledTimes(2); expect( await trustStore.isPairAllowed({ initiatorAgentDid: INITIATOR_AGENT_DID, @@ -402,10 +589,27 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }); it("preserves original signed JSON body when forwarding to issuer proxy", async () => { + const created = await createSignedTicketFixture({ + issuerProxyUrl: "https://issuer.proxy.example", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + let expectedBody = ""; - const forwardFetch = vi.fn(async (_url: unknown, init?: RequestInit) => { - expect(String(init?.body ?? "")).toBe(expectedBody); + const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { + const urlString = String(url); + if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { + return Response.json( + { + key: { + publicKeyX: created.publicKeyX, + }, + }, + { status: 200 }, + ); + } + expect(String(init?.body ?? "")).toBe(expectedBody); return Response.json( { paired: true, @@ -421,11 +625,6 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { nowMs: () => 1_700_000_000_000, }); - const created = createPairingTicket({ - issuerProxyUrl: "https://issuer.proxy.example", - expiresAtMs: 1_700_000_900_000, - nowMs: 1_700_000_000_000, - }); const bodyRaw = `{ "ticket":"${created.ticket}", "extra":"value" }`; expectedBody = bodyRaw; @@ -439,10 +638,73 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }); expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(1); + expect(forwardFetch).toHaveBeenCalledTimes(2); const forwardedBody = String( - (forwardFetch.mock.calls[0]?.[1] as RequestInit | undefined)?.body ?? "", + (forwardFetch.mock.calls[1]?.[1] as RequestInit | undefined)?.body ?? 
"", ); expect(forwardedBody).toBe(bodyRaw); }); + + it("forwards only required confirmation headers", async () => { + const created = await createSignedTicketFixture({ + issuerProxyUrl: "https://issuer.proxy.example", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + + const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { + const urlString = String(url); + if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { + return Response.json( + { + key: { + publicKeyX: created.publicKeyX, + }, + }, + { status: 200 }, + ); + } + + const headers = new Headers(init?.headers); + expect(headers.get("authorization")).toBeNull(); + expect(headers.get("x-claw-proof")).toBeNull(); + expect(headers.get("x-claw-body-sha256")).toBeNull(); + expect(headers.get("x-claw-timestamp")).toBeNull(); + expect(headers.get("x-claw-nonce")).toBeNull(); + expect(headers.get("content-type")).toBe("application/json"); + expect(headers.get("x-forwarded-for")).toBeNull(); + + return Response.json( + { + paired: true, + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }, + { status: 201 }, + ); + }); + + const { app } = createPairingApp({ + fetchImpl: forwardFetch as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + authorization: "Claw test-token", + "content-type": "application/json", + "x-claw-proof": "proof", + "x-claw-body-sha256": "sha", + "x-claw-timestamp": "1700000000", + "x-claw-nonce": "nonce", + "x-forwarded-for": "10.0.0.1", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ ticket: created.ticket }), + }); + + expect(response.status).toBe(201); + expect(forwardFetch).toHaveBeenCalledTimes(2); + }); }); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index a53ad95..284bd98 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts 
@@ -1,4 +1,8 @@ -import { parseDid } from "@clawdentity/protocol"; +import { + PROXY_PAIRING_KEYS_PATH, + PROXY_PAIRING_KEYS_RESOLVE_PATH, + parseDid, +} from "@clawdentity/protocol"; import { AppError, type Logger } from "@clawdentity/sdk"; import type { Context } from "hono"; import type { ProxyRequestVariables } from "./auth-middleware.js"; @@ -10,8 +14,11 @@ import { PAIR_START_PATH, } from "./pairing-constants.js"; import { + createPairingTicket, + createPairingTicketSigningKey, PairingTicketParseError, parsePairingTicket, + verifyPairingTicketSignature, } from "./pairing-ticket.js"; import { type ProxyTrustStore, @@ -46,6 +53,7 @@ export type PairConfirmRuntimeOptions = { type CreatePairConfirmHandlerOptions = PairConfirmRuntimeOptions & { logger: Logger; trustStore: ProxyTrustStore; + registryUrl: string; }; function parseOwnerPatHeader(headerValue: string | undefined): string { @@ -146,6 +154,44 @@ async function parseRegistryOwnershipResponse(response: Response): Promise<{ }; } +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function extractErrorCode(payload: unknown): string | undefined { + if (typeof payload !== "object" || payload === null) { + return undefined; + } + + const error = (payload as { error?: unknown }).error; + if (typeof error !== "object" || error === null) { + return undefined; + } + + return typeof (error as { code?: unknown }).code === "string" + ? (error as { code: string }).code + : undefined; +} + +function extractErrorMessage(payload: unknown): string | undefined { + if (typeof payload !== "object" || payload === null) { + return undefined; + } + + const error = (payload as { error?: unknown }).error; + if (typeof error !== "object" || error === null) { + return undefined; + } + + return typeof (error as { message?: unknown }).message === "string" + ? 
(error as { message: string }).message + : undefined; +} + async function assertPatOwnsInitiatorAgent(input: { fetchImpl: typeof fetch; initiatorAgentDid: string; @@ -221,14 +267,6 @@ async function assertPatOwnsInitiatorAgent(input: { }); } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - function toPairingStoreAppError(error: unknown): AppError { if (error instanceof ProxyTrustStoreError) { return new AppError({ @@ -247,36 +285,6 @@ function toPairingStoreAppError(error: unknown): AppError { }); } -function extractErrorCode(payload: unknown): string | undefined { - if (typeof payload !== "object" || payload === null) { - return undefined; - } - - const error = (payload as { error?: unknown }).error; - if (typeof error !== "object" || error === null) { - return undefined; - } - - return typeof (error as { code?: unknown }).code === "string" - ? (error as { code: string }).code - : undefined; -} - -function extractErrorMessage(payload: unknown): string | undefined { - if (typeof payload !== "object" || payload === null) { - return undefined; - } - - const error = (payload as { error?: unknown }).error; - if (typeof error !== "object" || error === null) { - return undefined; - } - - return typeof (error as { message?: unknown }).message === "string" - ? 
(error as { message: string }).message - : undefined; -} - function normalizeProxyOrigin(value: string): string { const parsed = new URL(value); return parsed.origin; @@ -577,6 +585,165 @@ function parsePairConfirmResponse(payload: unknown): { }; } +function buildForwardedConfirmHeaders(source: Headers): Headers { + const headers = new Headers(); + const contentType = source.get("content-type"); + if (contentType !== null) { + headers.set("content-type", contentType); + } + + return headers; +} + +function parseResponderDidFromQuery( + responderDidQuery: string | undefined, +): string { + if ( + typeof responderDidQuery !== "string" || + responderDidQuery.trim().length === 0 + ) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "responderAgentDid query parameter is required", + status: 400, + expose: true, + }); + } + + const responderAgentDid = responderDidQuery.trim(); + try { + const parsedResponderDid = parseDid(responderAgentDid); + if (parsedResponderDid.kind !== "agent") { + throw new Error("invalid responder did kind"); + } + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "responderAgentDid must be a valid agent DID", + status: 400, + expose: true, + }); + } + + return responderAgentDid; +} + +async function registerPairingKey(input: { + fetchImpl: typeof fetch; + ownerPat: string; + registryUrl: string; + issuerOrigin: string; + pkid: string; + publicKeyX: string; + expiresAtMs: number; +}): Promise { + let response: Response; + try { + response = await input.fetchImpl( + new URL(PROXY_PAIRING_KEYS_PATH, input.registryUrl), + { + method: "POST", + headers: { + authorization: `Bearer ${input.ownerPat}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + issuerOrigin: input.issuerOrigin, + pkid: input.pkid, + publicKeyX: input.publicKeyX, + expiresAt: new Date(input.expiresAtMs).toISOString(), + }), + }, + ); + } catch { + throw new AppError({ + code: 
"PROXY_PAIR_TICKET_SIGNING_UNAVAILABLE", + message: "Registry pairing-key registration is unavailable", + status: 503, + expose: true, + }); + } + + if (response.status === 401) { + throw new AppError({ + code: "PROXY_PAIR_OWNER_PAT_INVALID", + message: "Owner PAT is invalid or expired", + status: 401, + expose: true, + }); + } + + if (!response.ok) { + throw new AppError({ + code: "PROXY_PAIR_TICKET_SIGNING_UNAVAILABLE", + message: "Registry pairing-key registration is unavailable", + status: 503, + expose: true, + }); + } +} + +async function resolvePairingKey(input: { + fetchImpl: typeof fetch; + registryUrl: string; + issuerOrigin: string; + pkid: string; +}): Promise<{ publicKeyX: string }> { + const resolveUrl = new URL( + PROXY_PAIRING_KEYS_RESOLVE_PATH, + input.registryUrl, + ); + resolveUrl.searchParams.set("issuerOrigin", input.issuerOrigin); + resolveUrl.searchParams.set("pkid", input.pkid); + + let response: Response; + try { + response = await input.fetchImpl(resolveUrl, { + method: "GET", + }); + } catch { + throw new AppError({ + code: "PROXY_PAIR_TICKET_VERIFY_UNAVAILABLE", + message: "Registry pairing-key lookup is unavailable", + status: 503, + expose: true, + }); + } + + if (response.status === 404) { + throw new AppError({ + code: "PROXY_PAIR_TICKET_UNTRUSTED_ISSUER", + message: "Pairing ticket issuer could not be verified", + status: 403, + expose: true, + }); + } + + if (!response.ok) { + throw new AppError({ + code: "PROXY_PAIR_TICKET_VERIFY_UNAVAILABLE", + message: "Registry pairing-key lookup is unavailable", + status: 503, + expose: true, + }); + } + + const payload = (await parseJsonResponse(response)) as { + key?: { publicKeyX?: unknown }; + }; + const publicKeyX = payload?.key?.publicKeyX; + if (typeof publicKeyX !== "string" || publicKeyX.trim().length === 0) { + throw new AppError({ + code: "PROXY_PAIR_TICKET_VERIFY_UNAVAILABLE", + message: "Registry pairing-key lookup payload is invalid", + status: 503, + expose: true, + }); + } + + 
return { publicKeyX: publicKeyX.trim() }; +} + export function createPairStartHandler( options: CreatePairStartHandlerOptions, ): (c: PairingRouteContext) => Promise { @@ -612,14 +779,57 @@ export function createPairStartHandler( registryUrl, }); + const issuedAtMs = nowMs(); + const requestedExpiresAtMs = issuedAtMs + ttlSeconds * 1000; const issuerProxyUrl = configuredIssuerProxyUrl ?? normalizeProxyOrigin(c.req.url); + + const signingKey = await createPairingTicketSigningKey({ + nowMs: issuedAtMs, + }).catch(() => { + throw new AppError({ + code: "PROXY_PAIR_TICKET_SIGNING_UNAVAILABLE", + message: "Pairing ticket signing is unavailable", + status: 503, + expose: true, + }); + }); + + const createdTicket = await createPairingTicket({ + issuerProxyUrl, + expiresAtMs: requestedExpiresAtMs, + nowMs: issuedAtMs, + signingKey: { + pkid: signingKey.pkid, + privateKey: signingKey.privateKey, + }, + }).catch(() => { + throw new AppError({ + code: "PROXY_PAIR_TICKET_SIGNING_UNAVAILABLE", + message: "Pairing ticket signing is unavailable", + status: 503, + expose: true, + }); + }); + const expiresAtMs = createdTicket.payload.exp * 1000; + + await registerPairingKey({ + fetchImpl, + ownerPat, + registryUrl, + issuerOrigin: issuerProxyUrl, + pkid: signingKey.pkid, + publicKeyX: signingKey.publicKeyX, + expiresAtMs, + }); + const pairingTicketResult = await options.trustStore .createPairingTicket({ initiatorAgentDid: auth.agentDid, issuerProxyUrl, - ttlSeconds, - nowMs: nowMs(), + ticket: createdTicket.ticket, + expiresAtMs, + nowMs: issuedAtMs, }) .catch((error: unknown) => { throw toPairingStoreAppError(error); @@ -630,6 +840,7 @@ export function createPairStartHandler( initiatorAgentDid: auth.agentDid, issuerProxyUrl: pairingTicketResult.issuerProxyUrl, expiresAt: new Date(pairingTicketResult.expiresAtMs).toISOString(), + pkid: signingKey.pkid, }); return c.json({ @@ -645,16 +856,10 @@ export function createPairConfirmHandler( ): (c: PairingRouteContext) => Promise { const 
nowMs = options.nowMs ?? Date.now; const fetchImpl = options.fetchImpl ?? fetch; + const registryUrl = normalizeRegistryUrl(options.registryUrl); return async (c) => { const auth = c.get("auth"); - if (auth === undefined) { - throw new AppError({ - code: "PROXY_PAIR_AUTH_CONTEXT_MISSING", - message: "Verified auth context is required", - status: 500, - }); - } const parsedBody = await parseRawJsonBody(c); const body = parsedBody.json as { @@ -698,6 +903,36 @@ export function createPairConfirmHandler( const isIssuerLocal = ticketIssuerOrigin === localProxyOrigin; if (!isIssuerLocal) { + if (auth === undefined) { + throw new AppError({ + code: "PROXY_PAIR_AUTH_REQUIRED", + message: "Authorization is required for cross-proxy confirm", + status: 401, + expose: true, + }); + } + + const resolvedKey = await resolvePairingKey({ + fetchImpl, + registryUrl, + issuerOrigin: ticketIssuerOrigin, + pkid: parsedTicket.pkid, + }); + + const verified = await verifyPairingTicketSignature({ + payload: parsedTicket, + publicKeyX: resolvedKey.publicKeyX, + }).catch(() => false); + + if (!verified) { + throw new AppError({ + code: "PROXY_PAIR_TICKET_UNTRUSTED_ISSUER", + message: "Pairing ticket issuer could not be verified", + status: 403, + expose: true, + }); + } + const localProxyAllowsPrivateForwarding = isBlockedForwardOrigin(localProxyOrigin); const issuerOriginUrl = new URL(ticketIssuerOrigin); @@ -727,11 +962,12 @@ export function createPairConfirmHandler( ticketIssuerOrigin.endsWith("/") ? 
ticketIssuerOrigin : `${ticketIssuerOrigin}/`, - ).toString(); + ); + issuerConfirmUrl.searchParams.set("responderAgentDid", auth.agentDid); const forwardedResponse = await fetchImpl(issuerConfirmUrl, { method: "POST", - headers: c.req.raw.headers, + headers: buildForwardedConfirmHeaders(c.req.raw.headers), body: parsedBody.rawBody, }).catch((error: unknown) => { throw new AppError({ @@ -789,10 +1025,14 @@ export function createPairConfirmHandler( ); } + const responderAgentDid = + auth?.agentDid ?? + parseResponderDidFromQuery(c.req.query("responderAgentDid")); + const confirmedPairingTicket = await options.trustStore .confirmPairingTicket({ ticket, - responderAgentDid: auth.agentDid, + responderAgentDid, nowMs: nowMs(), }) .catch((error: unknown) => { diff --git a/apps/proxy/src/pairing-ticket.ts b/apps/proxy/src/pairing-ticket.ts index e759359..abd80f0 100644 --- a/apps/proxy/src/pairing-ticket.ts +++ b/apps/proxy/src/pairing-ticket.ts @@ -5,15 +5,26 @@ import { } from "@clawdentity/protocol"; const PAIRING_TICKET_PREFIX = "clwpair1_"; -const PAIRING_TICKET_VERSION = 1; +const PAIRING_TICKET_VERSION = 2; const TICKET_NONCE_BYTES = 18; -export type PairingTicketPayload = { +type PairingTicketUnsignedPayload = { v: number; iss: string; kid: string; nonce: string; exp: number; + pkid: string; +}; + +export type PairingTicketPayload = PairingTicketUnsignedPayload & { + sig: string; +}; + +export type PairingTicketSigningKey = { + pkid: string; + privateKey: CryptoKey; + publicKeyX: string; }; export class PairingTicketParseError extends Error { @@ -65,27 +76,131 @@ function createRandomNonce(): string { return encodeBase64url(bytes); } -export function createPairingTicket(input: { +function canonicalizePairingTicketPayload( + payload: PairingTicketUnsignedPayload, +): string { + return JSON.stringify({ + v: payload.v, + iss: payload.iss, + kid: payload.kid, + nonce: payload.nonce, + exp: payload.exp, + pkid: payload.pkid, + }); +} + +function toUnsignedPayload( + 
payload: PairingTicketPayload, +): PairingTicketUnsignedPayload { + return { + v: payload.v, + iss: payload.iss, + kid: payload.kid, + nonce: payload.nonce, + exp: payload.exp, + pkid: payload.pkid, + }; +} + +function normalizeNonEmptyString( + value: unknown, + code: string, + message: string, +): string { + if (typeof value !== "string" || value.trim().length === 0) { + throw new PairingTicketParseError(code, message); + } + + return value.trim(); +} + +async function importVerifyKeyFromX(publicKeyX: string): Promise { + return crypto.subtle.importKey( + "jwk", + { + kty: "OKP", + crv: "Ed25519", + x: publicKeyX, + }, + { + name: "Ed25519", + }, + false, + ["verify"], + ); +} + +export async function createPairingTicketSigningKey(input: { + nowMs: number; +}): Promise { + const generated = (await crypto.subtle.generateKey( + { + name: "Ed25519", + }, + true, + ["sign", "verify"], + )) as CryptoKeyPair; + const publicJwk = (await crypto.subtle.exportKey( + "jwk", + generated.publicKey, + )) as JsonWebKey; + if (typeof publicJwk.x !== "string" || publicJwk.x.trim().length === 0) { + throw new PairingTicketParseError( + "PROXY_PAIR_TICKET_KEY_EXPORT_FAILED", + "Pairing ticket signing key export failed", + ); + } + + return { + pkid: generateUlid(input.nowMs), + privateKey: generated.privateKey, + publicKeyX: publicJwk.x, + }; +} + +export async function createPairingTicket(input: { issuerProxyUrl: string; expiresAtMs: number; nowMs: number; -}): { + signingKey: { + pkid: string; + privateKey: CryptoKey; + }; +}): Promise<{ ticket: string; payload: PairingTicketPayload; -} { - const payload: PairingTicketPayload = { +}> { + const payload: PairingTicketUnsignedPayload = { v: PAIRING_TICKET_VERSION, iss: assertHttpUrl(input.issuerProxyUrl), kid: generateUlid(input.nowMs), nonce: createRandomNonce(), exp: Math.floor(input.expiresAtMs / 1000), + pkid: normalizeNonEmptyString( + input.signingKey.pkid, + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is 
invalid", + ), }; - const encodedPayload = encodeBase64url(utf8Encode(JSON.stringify(payload))); + const signatureBuffer = await crypto.subtle.sign( + { + name: "Ed25519", + }, + input.signingKey.privateKey, + utf8Encode(canonicalizePairingTicketPayload(payload)), + ); + const signedPayload: PairingTicketPayload = { + ...payload, + sig: encodeBase64url(new Uint8Array(signatureBuffer)), + }; + const encodedPayload = encodeBase64url( + utf8Encode(JSON.stringify(signedPayload)), + ); return { ticket: `${PAIRING_TICKET_PREFIX}${encodedPayload}`, - payload, + payload: signedPayload, }; } @@ -130,20 +245,27 @@ export function parsePairingTicket(ticket: string): PairingTicketPayload { ); } - if (typeof payload.kid !== "string" || payload.kid.trim().length === 0) { - throw new PairingTicketParseError( - "PROXY_PAIR_TICKET_INVALID_FORMAT", - "Pairing ticket format is invalid", - ); - } - - if (typeof payload.nonce !== "string" || payload.nonce.trim().length === 0) { - throw new PairingTicketParseError( - "PROXY_PAIR_TICKET_INVALID_FORMAT", - "Pairing ticket format is invalid", - ); - } - + const iss = assertHttpUrl(String(payload.iss ?? 
"")); + const kid = normalizeNonEmptyString( + payload.kid, + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + const nonce = normalizeNonEmptyString( + payload.nonce, + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + const pkid = normalizeNonEmptyString( + payload.pkid, + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); + const sig = normalizeNonEmptyString( + payload.sig, + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ); if (typeof payload.exp !== "number" || !Number.isInteger(payload.exp)) { throw new PairingTicketParseError( "PROXY_PAIR_TICKET_INVALID_FORMAT", @@ -153,9 +275,42 @@ export function parsePairingTicket(ticket: string): PairingTicketPayload { return { v: PAIRING_TICKET_VERSION, - iss: assertHttpUrl(payload.iss as string), - kid: payload.kid.trim(), - nonce: payload.nonce.trim(), + iss, + kid, + nonce, exp: payload.exp, + pkid, + sig, }; } + +export async function verifyPairingTicketSignature(input: { + payload: PairingTicketPayload; + publicKeyX: string; +}): Promise { + const verifyKey = await importVerifyKeyFromX( + normalizeNonEmptyString( + input.publicKeyX, + "PROXY_PAIR_TICKET_INVALID_FORMAT", + "Pairing ticket format is invalid", + ), + ); + + let signature: Uint8Array; + try { + signature = decodeBase64url(input.payload.sig); + } catch { + return false; + } + + return crypto.subtle.verify( + { + name: "Ed25519", + }, + verifyKey, + signature, + utf8Encode( + canonicalizePairingTicketPayload(toUnsignedPayload(input.payload)), + ), + ); +} diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts index a72fa37..f0f447d 100644 --- a/apps/proxy/src/proxy-trust-state.test.ts +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -1,7 +1,29 @@ +import { decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; import { describe, expect, it, vi } from "vitest"; +import { + 
createPairingTicket, + createPairingTicketSigningKey, +} from "./pairing-ticket.js"; import { ProxyTrustState } from "./proxy-trust-state.js"; import { TRUST_STORE_ROUTES } from "./proxy-trust-store.js"; +function tamperTicketNonce(ticket: string): string { + const prefix = "clwpair1_"; + if (!ticket.startsWith(prefix)) { + throw new Error("invalid test ticket format"); + } + const encodedPayload = ticket.slice(prefix.length); + + const payload = JSON.parse( + new TextDecoder().decode(decodeBase64url(encodedPayload)), + ) as { + nonce?: string; + }; + payload.nonce = "tampered-nonce"; + + return `${prefix}${encodeBase64url(new TextEncoder().encode(JSON.stringify(payload)))}`; +} + function createStorageHarness(initial: Record = {}) { const values = new Map(Object.entries(initial)); @@ -42,6 +64,25 @@ function makeRequest(path: string, body: unknown): Request { }); } +async function createSignedTicket(input: { + issuerProxyUrl: string; + nowMs: number; + expiresAtMs: number; +}) { + const signingKey = await createPairingTicketSigningKey({ + nowMs: input.nowMs, + }); + return createPairingTicket({ + issuerProxyUrl: input.issuerProxyUrl, + expiresAtMs: input.expiresAtMs, + nowMs: input.nowMs, + signingKey: { + pkid: signingKey.pkid, + privateKey: signingKey.privateKey, + }, + }); +} + describe("ProxyTrustState", () => { it("persists and answers known-agent checks via agent peer index", async () => { const { proxyTrustState, harness } = createProxyTrustState(); @@ -70,11 +111,18 @@ describe("ProxyTrustState", () => { it("confirms pairing ticket in one operation and persists trust", async () => { const { proxyTrustState } = createProxyTrustState(); + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + const ticketResponse = await proxyTrustState.fetch( makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { initiatorAgentDid: "did:claw:agent:alice", 
issuerProxyUrl: "https://proxy-a.example.com", - ttlSeconds: 60, + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }), ); @@ -111,4 +159,42 @@ describe("ProxyTrustState", () => { allowed: true, }); }); + + it("rejects tampered ticket text when kid matches stored entry", async () => { + const { proxyTrustState } = createProxyTrustState(); + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + + const ticketResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { + initiatorAgentDid: "did:claw:agent:alice", + issuerProxyUrl: "https://proxy-a.example.com", + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_060_000, + nowMs: 1_700_000_000_000, + }), + ); + const ticketBody = (await ticketResponse.json()) as { ticket: string }; + + const confirmResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { + ticket: tamperTicketNonce(ticketBody.ticket), + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_100, + }), + ); + + expect(confirmResponse.status).toBe(404); + expect( + (await confirmResponse.json()) as { error: { code: string } }, + ).toEqual({ + error: { + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", + }, + }); + }); }); diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index 93f8185..78a46d3 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -1,5 +1,4 @@ import { - createPairingTicket, PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; @@ -10,6 +9,7 @@ import { } from "./proxy-trust-store.js"; type StoredPairingTicket = { + ticket: string; expiresAtMs: number; initiatorAgentDid: string; issuerProxyUrl: string; @@ -109,9 +109,9 @@ export class ProxyTrustState { const pairingTickets = 
await this.loadPairingTickets(); let mutated = false; - for (const [ticket, details] of Object.entries(pairingTickets)) { + for (const [ticketKid, details] of Object.entries(pairingTickets)) { if (details.expiresAtMs <= nowMs) { - delete pairingTickets[ticket]; + delete pairingTickets[ticketKid]; mutated = true; } } @@ -131,9 +131,10 @@ export class ProxyTrustState { !body || !isNonEmptyString(body.initiatorAgentDid) || !isNonEmptyString(body.issuerProxyUrl) || - typeof body.ttlSeconds !== "number" || - !Number.isInteger(body.ttlSeconds) || - body.ttlSeconds <= 0 + !isNonEmptyString(body.ticket) || + typeof body.expiresAtMs !== "number" || + !Number.isInteger(body.expiresAtMs) || + body.expiresAtMs <= 0 ) { return toErrorResponse({ code: "PROXY_PAIR_START_INVALID_BODY", @@ -143,15 +144,9 @@ export class ProxyTrustState { } const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); - const expiresAtMs = nowMs + body.ttlSeconds * 1000; - - let created: ReturnType; + let parsedTicket: ReturnType; try { - created = createPairingTicket({ - issuerProxyUrl: body.issuerProxyUrl, - expiresAtMs, - nowMs, - }); + parsedTicket = parsePairingTicket(body.ticket); } catch (error) { if (error instanceof PairingTicketParseError) { return toErrorResponse({ @@ -164,21 +159,46 @@ export class ProxyTrustState { throw error; } + if (parsedTicket.iss !== body.issuerProxyUrl) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_INVALID_ISSUER", + message: "Pairing ticket issuer URL is invalid", + status: 400, + }); + } + + if (parsedTicket.exp * 1000 !== body.expiresAtMs) { + return toErrorResponse({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket expiry is invalid", + status: 400, + }); + } + + if (body.expiresAtMs <= nowMs) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + const pairingTickets = await this.loadPairingTickets(); - pairingTickets[created.ticket] = { + 
pairingTickets[parsedTicket.kid] = { + ticket: body.ticket, initiatorAgentDid: body.initiatorAgentDid, - issuerProxyUrl: created.payload.iss, - expiresAtMs, + issuerProxyUrl: parsedTicket.iss, + expiresAtMs: body.expiresAtMs, }; await this.savePairingTickets(pairingTickets); await this.scheduleNextCodeCleanup(pairingTickets); return Response.json({ - ticket: created.ticket, - expiresAtMs, + ticket: body.ticket, + expiresAtMs: body.expiresAtMs, initiatorAgentDid: body.initiatorAgentDid, - issuerProxyUrl: created.payload.iss, + issuerProxyUrl: parsedTicket.iss, }); } @@ -217,9 +237,9 @@ export class ProxyTrustState { const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); const pairingTickets = await this.loadPairingTickets(); - const stored = pairingTickets[body.ticket]; + const stored = pairingTickets[parsedTicket.kid]; - if (!stored) { + if (!stored || stored.ticket !== body.ticket) { return toErrorResponse({ code: "PROXY_PAIR_TICKET_NOT_FOUND", message: "Pairing ticket not found", @@ -228,7 +248,7 @@ export class ProxyTrustState { } if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { - delete pairingTickets[body.ticket]; + delete pairingTickets[parsedTicket.kid]; await this.savePairingTickets(pairingTickets); await this.scheduleNextCodeCleanup(pairingTickets); return toErrorResponse({ @@ -256,7 +276,7 @@ export class ProxyTrustState { await this.savePairs(pairs); await this.saveAgentPeers(agentPeers); - delete pairingTickets[body.ticket]; + delete pairingTickets[parsedTicket.kid]; await this.savePairingTickets(pairingTickets); await this.scheduleNextCodeCleanup(pairingTickets); @@ -392,7 +412,46 @@ export class ProxyTrustState { return {}; } - return raw; + const normalized: PairingTicketMap = {}; + for (const [entryKey, value] of Object.entries(raw)) { + if (typeof value !== "object" || value === null) { + continue; + } + + const entry = value as { + ticket?: unknown; + expiresAtMs?: unknown; + initiatorAgentDid?: unknown; + 
issuerProxyUrl?: unknown; + }; + if ( + !isNonEmptyString(entry.initiatorAgentDid) || + !isNonEmptyString(entry.issuerProxyUrl) || + typeof entry.expiresAtMs !== "number" || + !Number.isInteger(entry.expiresAtMs) + ) { + continue; + } + + const ticketCandidate = isNonEmptyString(entry.ticket) + ? entry.ticket + : entryKey; + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(ticketCandidate); + } catch { + continue; + } + + normalized[parsedTicket.kid] = { + ticket: ticketCandidate, + expiresAtMs: entry.expiresAtMs, + initiatorAgentDid: entry.initiatorAgentDid, + issuerProxyUrl: parsedTicket.iss, + }; + } + + return normalized; } private async savePairingTickets( diff --git a/apps/proxy/src/proxy-trust-store.test.ts b/apps/proxy/src/proxy-trust-store.test.ts index 7e625dd..78a4e4b 100644 --- a/apps/proxy/src/proxy-trust-store.test.ts +++ b/apps/proxy/src/proxy-trust-store.test.ts @@ -1,6 +1,48 @@ +import { decodeBase64url, encodeBase64url } from "@clawdentity/protocol"; import { describe, expect, it } from "vitest"; +import { + createPairingTicket, + createPairingTicketSigningKey, +} from "./pairing-ticket.js"; import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; +function tamperTicketNonce(ticket: string): string { + const prefix = "clwpair1_"; + if (!ticket.startsWith(prefix)) { + throw new Error("invalid test ticket format"); + } + const encodedPayload = ticket.slice(prefix.length); + + const payload = JSON.parse( + new TextDecoder().decode(decodeBase64url(encodedPayload)), + ) as { + nonce?: string; + }; + payload.nonce = "tampered-nonce"; + + return `${prefix}${encodeBase64url(new TextEncoder().encode(JSON.stringify(payload)))}`; +} + +async function createSignedTicket(input: { + issuerProxyUrl: string; + nowMs: number; + expiresAtMs: number; +}) { + const signingKey = await createPairingTicketSigningKey({ + nowMs: input.nowMs, + }); + + return createPairingTicket({ + issuerProxyUrl: input.issuerProxyUrl, + 
expiresAtMs: input.expiresAtMs, + nowMs: input.nowMs, + signingKey: { + pkid: signingKey.pkid, + privateKey: signingKey.privateKey, + }, + }); +} + describe("in-memory proxy trust store", () => { it("allows same-agent sender and recipient without explicit pair entry", async () => { const store = createInMemoryProxyTrustStore(); @@ -50,10 +92,16 @@ describe("in-memory proxy trust store", () => { it("confirms one-time pairing tickets and establishes trust", async () => { const store = createInMemoryProxyTrustStore(); + const created = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", issuerProxyUrl: "https://proxy-a.example.com", - ttlSeconds: 60, + ticket: created.ticket, + expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }); @@ -84,12 +132,45 @@ describe("in-memory proxy trust store", () => { expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(true); }); + it("rejects tampered ticket text when kid matches stored entry", async () => { + const store = createInMemoryProxyTrustStore(); + const created = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + const ticket = await store.createPairingTicket({ + initiatorAgentDid: "did:claw:agent:alice", + issuerProxyUrl: "https://proxy-a.example.com", + ticket: created.ticket, + expiresAtMs: 1_700_000_060_000, + nowMs: 1_700_000_000_000, + }); + + await expect( + store.confirmPairingTicket({ + ticket: tamperTicketNonce(ticket.ticket), + responderAgentDid: "did:claw:agent:bob", + nowMs: 1_700_000_000_100, + }), + ).rejects.toMatchObject({ + code: "PROXY_PAIR_TICKET_NOT_FOUND", + status: 404, + }); + }); + it("rejects expired tickets", async () => { const store = createInMemoryProxyTrustStore(); + const created = await 
createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_001_000, + }); const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", issuerProxyUrl: "https://proxy-a.example.com", - ttlSeconds: 1, + ticket: created.ticket, + expiresAtMs: 1_700_000_001_000, nowMs: 1_700_000_000_000, }); @@ -108,17 +189,29 @@ describe("in-memory proxy trust store", () => { it("cleans up unrelated expired tickets during confirm lookups", async () => { const store = createInMemoryProxyTrustStore(); + const expired = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_001_000, + }); const expiredTicket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", issuerProxyUrl: "https://proxy-a.example.com", - ttlSeconds: 1, + ticket: expired.ticket, + expiresAtMs: 1_700_000_001_000, nowMs: 1_700_000_000_000, }); + const valid = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); const validTicket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", issuerProxyUrl: "https://proxy-a.example.com", - ttlSeconds: 60, + ticket: valid.ticket, + expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }); diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 425b7b7..42d5811 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -1,6 +1,5 @@ import { PROXY_TRUST_DO_NAME } from "./pairing-constants.js"; import { - createPairingTicket, PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; @@ -8,7 +7,8 @@ import { export type PairingTicketInput = { initiatorAgentDid: string; issuerProxyUrl: string; - ttlSeconds: number; + ticket: string; + expiresAtMs: number; nowMs?: number; }; @@ -206,20 
+206,21 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { const pairingTickets = new Map< string, { + ticket: string; expiresAtMs: number; initiatorAgentDid: string; issuerProxyUrl: string; } >(); - function cleanup(nowMs: number, skipTicket?: string): void { - for (const [ticket, details] of pairingTickets.entries()) { - if (skipTicket === ticket) { + function cleanup(nowMs: number, skipTicketKid?: string): void { + for (const [ticketKid, details] of pairingTickets.entries()) { + if (skipTicketKid === ticketKid) { continue; } if (details.expiresAtMs <= nowMs) { - pairingTickets.delete(ticket); + pairingTickets.delete(ticketKid); } } } @@ -230,15 +231,12 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { agentPeers.set(leftAgentDid, peers); } - function resolveConfirmablePairingTicket( - input: PairingTicketConfirmInput, - ): PairingTicketConfirmResult { - const nowMs = input.nowMs ?? Date.now(); - cleanup(nowMs, input.ticket); - + function parseStoredTicket( + inputTicket: string, + ): ReturnType { let parsedTicket: ReturnType; try { - parsedTicket = parsePairingTicket(input.ticket); + parsedTicket = parsePairingTicket(inputTicket); } catch (error) { if (error instanceof PairingTicketParseError) { throw new ProxyTrustStoreError({ @@ -251,8 +249,19 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { throw error; } - const stored = pairingTickets.get(input.ticket); - if (!stored) { + return parsedTicket; + } + + function resolveConfirmablePairingTicket(input: PairingTicketConfirmInput): { + pair: PairingTicketConfirmResult; + ticketKid: string; + } { + const nowMs = input.nowMs ?? 
Date.now(); + const parsedTicket = parseStoredTicket(input.ticket); + cleanup(nowMs, parsedTicket.kid); + + const stored = pairingTickets.get(parsedTicket.kid); + if (!stored || stored.ticket !== input.ticket) { throw new ProxyTrustStoreError({ code: "PROXY_PAIR_TICKET_NOT_FOUND", message: "Pairing ticket not found", @@ -261,7 +270,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { } if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { - pairingTickets.delete(input.ticket); + pairingTickets.delete(parsedTicket.kid); throw new ProxyTrustStoreError({ code: "PROXY_PAIR_TICKET_EXPIRED", message: "Pairing ticket has expired", @@ -278,9 +287,12 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { } return { - initiatorAgentDid: stored.initiatorAgentDid, - responderAgentDid: input.responderAgentDid, - issuerProxyUrl: stored.issuerProxyUrl, + pair: { + initiatorAgentDid: stored.initiatorAgentDid, + responderAgentDid: input.responderAgentDid, + issuerProxyUrl: stored.issuerProxyUrl, + }, + ticketKid: parsedTicket.kid, }; } @@ -289,28 +301,41 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { const nowMs = input.nowMs ?? 
Date.now(); cleanup(nowMs); - const expiresAtMs = nowMs + input.ttlSeconds * 1000; - const created = createPairingTicket({ - issuerProxyUrl: input.issuerProxyUrl, - expiresAtMs, - nowMs, - }); + const parsedTicket = parseStoredTicket(input.ticket); + + if (parsedTicket.iss !== input.issuerProxyUrl) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_INVALID_ISSUER", + message: "Pairing ticket issuer URL is invalid", + status: 400, + }); + } + + if (parsedTicket.exp * 1000 !== input.expiresAtMs) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket expiry is invalid", + status: 400, + }); + } - pairingTickets.set(created.ticket, { + pairingTickets.set(parsedTicket.kid, { + ticket: input.ticket, initiatorAgentDid: input.initiatorAgentDid, - issuerProxyUrl: created.payload.iss, - expiresAtMs, + issuerProxyUrl: parsedTicket.iss, + expiresAtMs: input.expiresAtMs, }); return { - ticket: created.ticket, - expiresAtMs, + ticket: input.ticket, + expiresAtMs: input.expiresAtMs, initiatorAgentDid: input.initiatorAgentDid, - issuerProxyUrl: created.payload.iss, + issuerProxyUrl: parsedTicket.iss, }; }, async confirmPairingTicket(input) { - const confirmedPair = resolveConfirmablePairingTicket(input); + const { pair: confirmedPair, ticketKid } = + resolveConfirmablePairingTicket(input); pairKeys.add( toPairKey( confirmedPair.initiatorAgentDid, @@ -325,7 +350,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { confirmedPair.responderAgentDid, confirmedPair.initiatorAgentDid, ); - pairingTickets.delete(input.ticket); + pairingTickets.delete(ticketKid); return confirmedPair; }, async isAgentKnown(agentDid) { diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index e9ab595..94c5979 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -157,6 +157,7 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { PAIR_CONFIRM_PATH, 
createPairConfirmHandler({ logger, + registryUrl: options.config.registryUrl, trustStore, ...options.pairing?.confirm, }), diff --git a/apps/registry/drizzle/0003_proxy_pairing_keys.sql b/apps/registry/drizzle/0003_proxy_pairing_keys.sql new file mode 100644 index 0000000..5cc3ec4 --- /dev/null +++ b/apps/registry/drizzle/0003_proxy_pairing_keys.sql @@ -0,0 +1,14 @@ +CREATE TABLE `proxy_pairing_keys` ( + `id` text PRIMARY KEY NOT NULL, + `issuer_origin` text NOT NULL, + `pkid` text NOT NULL, + `public_key_x` text NOT NULL, + `created_by` text NOT NULL, + `expires_at` text NOT NULL, + `created_at` text NOT NULL, + FOREIGN KEY (`created_by`) REFERENCES `humans`(`id`) ON UPDATE no action ON DELETE no action +); +--> statement-breakpoint +CREATE UNIQUE INDEX `idx_proxy_pairing_keys_issuer_pkid` ON `proxy_pairing_keys` (`issuer_origin`,`pkid`); +--> statement-breakpoint +CREATE INDEX `idx_proxy_pairing_keys_expires_at` ON `proxy_pairing_keys` (`expires_at`); diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 920caa0..ea470e3 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -45,6 +45,13 @@ - For unknown IDs, return `404 AGENT_NOT_FOUND` with no ownership-leak variants. - Keep framework output stable as a non-empty string for legacy rows missing `framework`. +## Proxy Pairing Key Contracts +- `POST /v1/proxy-pairing-keys` requires PAT auth (`createApiKeyAuth`) and stores issuer-scoped pairing signing keys for proxy ticket verification. +- Validate payload strictly: `issuerOrigin` must be URL origin (`http`/`https`), `pkid` non-empty, `publicKeyX` non-empty, `expiresAt` valid future ISO timestamp. +- Keep writes idempotent on (`issuer_origin`, `pkid`) and update key material/expiry when repeated registration arrives. +- `GET /v1/proxy-pairing-keys/resolve` is public and returns only active (non-expired) key metadata needed for proxy ticket verification. 
+- For unknown/expired keys, return `404 PROXY_PAIRING_KEY_NOT_FOUND`; do not leak extra owner data. + ## Validation - Run `pnpm -F @clawdentity/registry run test` after changing routes or config loading. - Run `pnpm -F @clawdentity/registry run typecheck` before commit. diff --git a/apps/registry/src/db/AGENTS.md b/apps/registry/src/db/AGENTS.md index 19bd6ed..2dff257 100644 --- a/apps/registry/src/db/AGENTS.md +++ b/apps/registry/src/db/AGENTS.md @@ -10,10 +10,12 @@ ## Baseline Requirements - Required tables: `humans`, `agents`, `revocations`, `api_keys`, `agent_auth_sessions`, `agent_auth_events`. +- Pairing-key trust table: `proxy_pairing_keys` for issuer-origin + key-id lookups used by cross-proxy pairing verification. - Required index: `idx_agents_owner_status` on `agents(owner_id, status)`. - Revocation `jti` lookup can be unique or non-unique; current baseline uses `revocations_jti_unique`. - Agent auth refresh lookups require prefix indexes on `agent_auth_sessions.refresh_key_prefix` and `agent_auth_sessions.access_key_prefix`. - One session per agent is enforced by `agent_auth_sessions_agent_id_unique`. +- Proxy pairing key lookups require unique (`issuer_origin`, `pkid`) index and expiry index on `proxy_pairing_keys.expires_at`. ## Query Rules - Prefer Drizzle (`createDb`) for application reads/writes. 
diff --git a/apps/registry/src/db/schema.ts b/apps/registry/src/db/schema.ts index cdc36da..4aeb86b 100644 --- a/apps/registry/src/db/schema.ts +++ b/apps/registry/src/db/schema.ts @@ -175,3 +175,25 @@ export const invites = sqliteTable("invites", { expires_at: text("expires_at"), created_at: text("created_at").notNull(), }); + +export const proxy_pairing_keys = sqliteTable( + "proxy_pairing_keys", + { + id: text("id").primaryKey(), + issuer_origin: text("issuer_origin").notNull(), + pkid: text("pkid").notNull(), + public_key_x: text("public_key_x").notNull(), + created_by: text("created_by") + .notNull() + .references(() => humans.id), + expires_at: text("expires_at").notNull(), + created_at: text("created_at").notNull(), + }, + (table) => [ + uniqueIndex("idx_proxy_pairing_keys_issuer_pkid").on( + table.issuer_origin, + table.pkid, + ), + index("idx_proxy_pairing_keys_expires_at").on(table.expires_at), + ], +); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 3cccc67..46288a3 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -8,6 +8,8 @@ import { INVITES_REDEEM_PATH, ME_API_KEYS_PATH, makeHumanDid, + PROXY_PAIRING_KEYS_PATH, + PROXY_PAIRING_KEYS_RESOLVE_PATH, } from "@clawdentity/protocol"; import { AppError, @@ -22,7 +24,7 @@ import { signAIT, signCRL, } from "@clawdentity/sdk"; -import { and, desc, eq, isNull, lt } from "drizzle-orm"; +import { and, desc, eq, gt, isNull, lt } from "drizzle-orm"; import { Hono } from "hono"; import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; import { @@ -85,6 +87,7 @@ import { api_keys, humans, invites, + proxy_pairing_keys, revocations, } from "./db/schema.js"; import { @@ -496,6 +499,121 @@ function parseAgentAccessHeaderToken(token: string | undefined): string { } } +function parseIssuerOrigin(value: unknown): string { + if (typeof value !== "string") { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is 
invalid", + status: 400, + expose: true, + }); + } + + let parsed: URL; + try { + parsed = new URL(value.trim()); + } catch { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + return parsed.origin; +} + +function parseProxyPairingKeyRegisterPayload(payload: unknown): { + issuerOrigin: string; + pkid: string; + publicKeyX: string; + expiresAt: string; +} { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + const pkid = typeof value.pkid === "string" ? value.pkid.trim() : ""; + const publicKeyX = + typeof value.publicKeyX === "string" ? value.publicKeyX.trim() : ""; + const expiresAt = + typeof value.expiresAt === "string" ? 
value.expiresAt.trim() : ""; + const issuerOrigin = parseIssuerOrigin(value.issuerOrigin); + + if (pkid.length === 0 || publicKeyX.length === 0 || expiresAt.length === 0) { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + const expiresAtMillis = Date.parse(expiresAt); + if (!Number.isFinite(expiresAtMillis)) { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + if (expiresAtMillis <= Date.now()) { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + return { + issuerOrigin, + pkid, + publicKeyX, + expiresAt: new Date(expiresAtMillis).toISOString(), + }; +} + +function parseProxyPairingKeyResolveQuery(requestUrl: string): { + issuerOrigin: string; + pkid: string; +} { + const url = new URL(requestUrl); + const issuerOrigin = parseIssuerOrigin(url.searchParams.get("issuerOrigin")); + const pkid = url.searchParams.get("pkid")?.trim() ?? 
""; + if (pkid.length === 0) { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key query is invalid", + status: 400, + expose: true, + }); + } + + return { + issuerOrigin, + pkid, + }; +} + async function insertAgentAuthEvent(input: { db: ReturnType; agentId: string; @@ -922,6 +1040,94 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { return c.json(mapResolvedAgentRow(row)); }); + app.post(PROXY_PAIRING_KEYS_PATH, createApiKeyAuth(), async (c) => { + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "PROXY_PAIRING_KEY_INVALID", + message: "Pairing key payload is invalid", + status: 400, + expose: true, + }); + } + + const parsed = parseProxyPairingKeyRegisterPayload(payload); + const human = c.get("human"); + const db = createDb(c.env.DB); + const createdAt = nowIso(); + + await db + .insert(proxy_pairing_keys) + .values({ + id: generateUlid(Date.now()), + issuer_origin: parsed.issuerOrigin, + pkid: parsed.pkid, + public_key_x: parsed.publicKeyX, + created_by: human.id, + expires_at: parsed.expiresAt, + created_at: createdAt, + }) + .onConflictDoUpdate({ + target: [proxy_pairing_keys.issuer_origin, proxy_pairing_keys.pkid], + set: { + public_key_x: parsed.publicKeyX, + created_by: human.id, + expires_at: parsed.expiresAt, + created_at: createdAt, + }, + }); + + return c.json( + { + key: { + issuerOrigin: parsed.issuerOrigin, + pkid: parsed.pkid, + expiresAt: parsed.expiresAt, + }, + }, + 201, + ); + }); + + app.get(PROXY_PAIRING_KEYS_RESOLVE_PATH, async (c) => { + const query = parseProxyPairingKeyResolveQuery(c.req.url); + const db = createDb(c.env.DB); + const now = nowIso(); + + const rows = await db + .select({ + issuerOrigin: proxy_pairing_keys.issuer_origin, + pkid: proxy_pairing_keys.pkid, + publicKeyX: proxy_pairing_keys.public_key_x, + expiresAt: proxy_pairing_keys.expires_at, + }) + .from(proxy_pairing_keys) + .where( + and( + 
eq(proxy_pairing_keys.issuer_origin, query.issuerOrigin), + eq(proxy_pairing_keys.pkid, query.pkid), + gt(proxy_pairing_keys.expires_at, now), + ), + ) + .limit(1); + + const row = rows[0]; + if (!row) { + throw new AppError({ + code: "PROXY_PAIRING_KEY_NOT_FOUND", + message: "Pairing key is not available", + status: 404, + expose: true, + }); + } + + return c.json({ + key: row, + }); + }); + app.get("/v1/me", createApiKeyAuth(), (c) => { return c.json({ human: c.get("human") }); }); diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index 9e2c021..af30a9f 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -19,6 +19,7 @@ - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). - Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. - Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `AGENT_AUTH_REFRESH_PATH`, `AGENT_AUTH_VALIDATE_PATH`, `ME_API_KEYS_PATH`) so registry, proxy, and CLI stay contract-synchronized. +- Keep proxy pairing key route constants in protocol exports (`PROXY_PAIRING_KEYS_PATH`, `PROXY_PAIRING_KEYS_RESOLVE_PATH`) so registry write/read APIs and proxy verification paths remain synchronized. - Keep relay contract constants in protocol exports (`RELAY_CONNECT_PATH`, `RELAY_RECIPIENT_AGENT_DID_HEADER`) so connector and hook routing stay synchronized across apps. - Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. - Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. 
diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 9cd5eda..1a1ec4e 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -5,5 +5,7 @@ export const AGENT_AUTH_VALIDATE_PATH = "/v1/agents/auth/validate"; export const INVITES_PATH = "/v1/invites"; export const INVITES_REDEEM_PATH = "/v1/invites/redeem"; export const ME_API_KEYS_PATH = "/v1/me/api-keys"; +export const PROXY_PAIRING_KEYS_PATH = "/v1/proxy-pairing-keys"; +export const PROXY_PAIRING_KEYS_RESOLVE_PATH = "/v1/proxy-pairing-keys/resolve"; export const RELAY_CONNECT_PATH = "/v1/relay/connect"; export const RELAY_RECIPIENT_AGENT_DID_HEADER = "x-claw-recipient-agent-did"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index 3d99db8..00dcade 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -23,6 +23,8 @@ import { makeAgentDid, makeHumanDid, PROTOCOL_VERSION, + PROXY_PAIRING_KEYS_PATH, + PROXY_PAIRING_KEYS_RESOLVE_PATH, ProtocolParseError, parseAitClaims, parseCrlClaims, @@ -46,6 +48,10 @@ describe("protocol", () => { expect(INVITES_PATH).toBe("/v1/invites"); expect(INVITES_REDEEM_PATH).toBe("/v1/invites/redeem"); expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); + expect(PROXY_PAIRING_KEYS_PATH).toBe("/v1/proxy-pairing-keys"); + expect(PROXY_PAIRING_KEYS_RESOLVE_PATH).toBe( + "/v1/proxy-pairing-keys/resolve", + ); expect(RELAY_CONNECT_PATH).toBe("/v1/relay/connect"); expect(RELAY_RECIPIENT_AGENT_DID_HEADER).toBe("x-claw-recipient-agent-did"); }); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 0aa6e18..4a974be 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -28,6 +28,8 @@ export { INVITES_PATH, INVITES_REDEEM_PATH, ME_API_KEYS_PATH, + PROXY_PAIRING_KEYS_PATH, + PROXY_PAIRING_KEYS_RESOLVE_PATH, RELAY_CONNECT_PATH, RELAY_RECIPIENT_AGENT_DID_HEADER, } from "./endpoints.js"; From 
1df224d093591d577f290bb05422024ab757c3ed Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 18:57:39 +0530 Subject: [PATCH 093/190] feat: streamline OpenClaw onboarding, pairing, and relay reliability --- .github/AGENTS.md | 8 +- .github/workflows/deploy-develop.yml | 21 +- .gitignore | 1 + AGENTS.md | 17 +- PRD.md | 8 +- README.md | 52 +- apps/cli/AGENTS.md | 8 +- apps/cli/package.json | 2 +- apps/cli/postinstall.mjs | 35 +- apps/cli/sample-config.json | 3 +- apps/cli/scripts/AGENTS.md | 11 +- .../scripts/openclaw-relay-docker-ready.sh | 263 ++ apps/cli/skill-bundle/AGENTS.md | 2 +- apps/cli/src/AGENTS.md | 15 +- apps/cli/src/commands/AGENTS.md | 25 +- apps/cli/src/commands/agent.test.ts | 24 +- apps/cli/src/commands/api-key.test.ts | 20 +- apps/cli/src/commands/config.test.ts | 70 +- apps/cli/src/commands/config.ts | 34 +- apps/cli/src/commands/connector.test.ts | 73 +- apps/cli/src/commands/connector.ts | 205 +- apps/cli/src/commands/invite.test.ts | 99 +- apps/cli/src/commands/invite.ts | 96 +- apps/cli/src/commands/openclaw.test.ts | 1114 +++++++- apps/cli/src/commands/openclaw.ts | 2411 +++++++++++++++-- apps/cli/src/commands/pair.test.ts | 577 +++- apps/cli/src/commands/pair.ts | 1187 +++++++- apps/cli/src/commands/skill.test.ts | 169 ++ apps/cli/src/commands/skill.ts | 104 + apps/cli/src/commands/verify.test.ts | 14 +- apps/cli/src/commands/verify.ts | 8 +- apps/cli/src/config/AGENTS.md | 15 + apps/cli/src/config/manager.test.ts | 32 +- apps/cli/src/config/manager.ts | 14 +- apps/cli/src/config/registry-metadata.test.ts | 120 + apps/cli/src/config/registry-metadata.ts | 193 ++ apps/cli/src/index.test.ts | 8 + apps/cli/src/index.ts | 2 + apps/cli/src/install-skill-mode.test.ts | 98 +- apps/cli/src/install-skill-mode.ts | 135 +- apps/cli/src/postinstall.ts | 9 +- apps/cli/src/test-env.ts | 17 + apps/openclaw-skill/AGENTS.md | 49 +- apps/openclaw-skill/skill/SKILL.md | 453 ++-- .../skill/references/clawdentity-protocol.md | 148 +- 
.../skill/references/clawdentity-registry.md | 175 ++ apps/openclaw-skill/src/AGENTS.md | 10 +- .../src/transforms/peers-config.test.ts | 6 +- .../src/transforms/peers-config.ts | 17 +- .../src/transforms/relay-to-peer.test.ts | 3 +- .../src/transforms/relay-to-peer.ts | 258 +- apps/proxy/.env.example | 16 +- apps/proxy/AGENTS.md | 17 +- apps/proxy/package.json | 7 +- apps/proxy/src/AGENTS.md | 24 +- apps/proxy/src/agent-hook-route.test.ts | 72 +- apps/proxy/src/agent-hook-route.ts | 53 +- .../src/agent-rate-limit-middleware.test.ts | 2 +- apps/proxy/src/agent-relay-session.test.ts | 188 +- apps/proxy/src/agent-relay-session.ts | 670 ++++- apps/proxy/src/auth-middleware.test.ts | 61 +- apps/proxy/src/auth-middleware.ts | 34 +- apps/proxy/src/config.test.ts | 61 +- apps/proxy/src/config.ts | 122 +- apps/proxy/src/index.test.ts | 19 +- apps/proxy/src/pairing-constants.ts | 2 +- apps/proxy/src/pairing-route.test.ts | 612 ++--- apps/proxy/src/pairing-route.ts | 1005 ++----- apps/proxy/src/proxy-trust-keys.ts | 10 + apps/proxy/src/proxy-trust-state.test.ts | 110 + apps/proxy/src/proxy-trust-state.ts | 392 ++- apps/proxy/src/proxy-trust-store.test.ts | 108 + apps/proxy/src/proxy-trust-store.ts | 185 +- apps/proxy/src/server.ts | 22 +- apps/proxy/src/worker.ts | 18 +- apps/proxy/wrangler.jsonc | 46 +- apps/registry/.env.example | 5 +- apps/registry/AGENTS.md | 9 +- .../drizzle/0004_internal_services.sql | 22 + apps/registry/package.json | 4 +- apps/registry/src/AGENTS.md | 11 +- apps/registry/src/agent-registration.ts | 15 +- apps/registry/src/auth/AGENTS.md | 7 + apps/registry/src/auth/agent-claw-auth.ts | 2 +- .../src/auth/internal-service-scopes.ts | 61 + apps/registry/src/auth/service-auth.ts | 176 ++ apps/registry/src/db/schema.ts | 25 +- apps/registry/src/server.test.ts | 83 +- apps/registry/src/server.ts | 684 ++++- apps/registry/wrangler.jsonc | 30 +- package.json | 4 +- packages/connector/AGENTS.md | 9 +- packages/connector/src/AGENTS.md | 25 + 
packages/connector/src/client.test.ts | 91 + packages/connector/src/client.ts | 85 +- packages/connector/src/constants.ts | 8 + packages/connector/src/inbound-inbox.test.ts | 215 ++ packages/connector/src/inbound-inbox.ts | 514 ++++ packages/connector/src/index.ts | 18 +- packages/connector/src/runtime.ts | 526 +++- packages/protocol/AGENTS.md | 3 +- packages/protocol/src/endpoints.ts | 6 +- packages/protocol/src/index.test.ts | 12 +- packages/protocol/src/index.ts | 5 +- packages/sdk/AGENTS.md | 3 + packages/sdk/src/config.test.ts | 62 + packages/sdk/src/config.ts | 4 + packages/sdk/src/event-bus.test.ts | 63 + packages/sdk/src/event-bus.ts | 119 + packages/sdk/src/index.test.ts | 25 + packages/sdk/src/index.ts | 19 + .../sdk/src/registry-identity-client.test.ts | 114 + packages/sdk/src/registry-identity-client.ts | 233 ++ packages/sdk/src/testing/ait-fixtures.test.ts | 2 +- packages/sdk/src/testing/ait-fixtures.ts | 2 +- 115 files changed, 12882 insertions(+), 2713 deletions(-) create mode 100755 apps/cli/scripts/openclaw-relay-docker-ready.sh create mode 100644 apps/cli/src/commands/skill.test.ts create mode 100644 apps/cli/src/commands/skill.ts create mode 100644 apps/cli/src/config/AGENTS.md create mode 100644 apps/cli/src/config/registry-metadata.test.ts create mode 100644 apps/cli/src/config/registry-metadata.ts create mode 100644 apps/cli/src/test-env.ts create mode 100644 apps/openclaw-skill/skill/references/clawdentity-registry.md create mode 100644 apps/proxy/src/proxy-trust-keys.ts create mode 100644 apps/registry/drizzle/0004_internal_services.sql create mode 100644 apps/registry/src/auth/internal-service-scopes.ts create mode 100644 apps/registry/src/auth/service-auth.ts create mode 100644 packages/connector/src/AGENTS.md create mode 100644 packages/connector/src/inbound-inbox.test.ts create mode 100644 packages/connector/src/inbound-inbox.ts create mode 100644 packages/sdk/src/event-bus.test.ts create mode 100644 packages/sdk/src/event-bus.ts create 
mode 100644 packages/sdk/src/registry-identity-client.test.ts create mode 100644 packages/sdk/src/registry-identity-client.ts diff --git a/.github/AGENTS.md b/.github/AGENTS.md index a035d17..b40e5fb 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -20,8 +20,8 @@ - Run full quality gates before deployment: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r build`, `pnpm -r test`. - Deploy both workers in the same workflow: - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy - - proxy (`apps/proxy`, env `development`) after registry health passes -- Verify registry health at `https://dev.api.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. + - proxy (`apps/proxy`, env `dev`) after registry health passes +- Verify registry health at `https://dev.registry.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. - Health verification should use bounded retries (for example 3 minutes with 10-second polling) and `Cache-Control: no-cache` requests to tolerate short edge propagation delays after deploy. - When using Python `urllib` for health checks, always set explicit request headers (`Accept: application/json` and a custom `User-Agent` such as `Clawdentity-CI/1.0`) because Cloudflare may return `403`/`1010` for the default `Python-urllib/*` user agent. - Use workflow concurrency groups to prevent overlapping deploys for the same environment. 
@@ -48,12 +48,12 @@ ## Migration Rollback Strategy (Develop) - Capture pre-deploy artifacts: - `pnpm exec wrangler --cwd apps/registry deployments list --env dev --json` - - `pnpm exec wrangler --cwd apps/proxy deployments list --env development --json || true` (non-blocking for first deploy before proxy Worker exists) + - `pnpm exec wrangler --cwd apps/proxy deployments list --env dev --json || true` (non-blocking for first deploy before proxy Worker exists) - `pnpm exec wrangler d1 time-travel info clawdentity-db-dev --timestamp --json` - `pnpm exec wrangler d1 export clawdentity-db-dev --remote --output "${GITHUB_WORKSPACE}/artifacts/"` - Keep deploy snapshot collection non-blocking for Worker deployment listings (pre and post) so rollback artifact capture does not fail the workflow when a Worker has no prior deployment history. - Upload artifacts on every run for operator recovery. - On failed deploy: - Registry rollback: `pnpm exec wrangler --cwd apps/registry rollback --env dev` - - Proxy rollback: `pnpm exec wrangler --cwd apps/proxy rollback --env development` + - Proxy rollback: `pnpm exec wrangler --cwd apps/proxy rollback --env dev` - DB rollback: `pnpm exec wrangler d1 time-travel restore clawdentity-db-dev --env dev --timestamp ` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 18320f6..0bc545d 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -23,6 +23,7 @@ jobs: CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} APP_VERSION: ${{ github.sha }} PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} + REGISTRY_INTERNAL_SERVICE_TOKEN: ${{ secrets.REGISTRY_INTERNAL_SERVICE_TOKEN }} steps: - name: Checkout uses: actions/checkout@v4 @@ -43,6 +44,12 @@ jobs: run: | test -n "${CLOUDFLARE_API_TOKEN}" test -n "${CLOUDFLARE_ACCOUNT_ID}" + test -n "${REGISTRY_INTERNAL_SERVICE_TOKEN}" + + - name: Sync internal service auth secret (registry + proxy) + run: | + printf 
"%s" "${REGISTRY_INTERNAL_SERVICE_TOKEN}" | pnpm exec wrangler --cwd apps/registry secret put REGISTRY_INTERNAL_SERVICE_TOKEN --env dev + printf "%s" "${REGISTRY_INTERNAL_SERVICE_TOKEN}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_TOKEN --env dev - name: Install dependencies run: pnpm install --frozen-lockfile @@ -67,7 +74,7 @@ jobs: printf "%s\n" "${PREDEPLOY_TS}" > artifacts/predeploy.timestamp pnpm exec wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-pre.json # First proxy deploy may not have an existing Worker/deployments yet. - pnpm exec wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-pre.json || true + pnpm exec wrangler --cwd apps/proxy deployments list --env dev --json > artifacts/proxy-deployments-pre.json || true pnpm exec wrangler --cwd apps/registry d1 time-travel info clawdentity-db-dev --env dev --timestamp "${PREDEPLOY_TS}" --json > artifacts/d1-time-travel-pre.json pnpm exec wrangler --cwd apps/registry d1 export clawdentity-db-dev --remote --env dev --output "${GITHUB_WORKSPACE}/artifacts/d1-dev-predeploy.sql" @@ -81,7 +88,7 @@ jobs: python3 - <<'PY' import json, os, sys, time, urllib.request, urllib.error - url = "https://dev.api.clawdentity.com/health" + url = "https://dev.registry.clawdentity.com/health" expected_version = os.environ.get("APP_VERSION", "") if not expected_version: raise SystemExit("APP_VERSION was not set in workflow environment") @@ -123,11 +130,11 @@ jobs: time.sleep(delay_seconds) PY - - name: Deploy proxy to development environment + - name: Deploy proxy to dev environment run: | mkdir -p artifacts PROXY_DEPLOY_OUTPUT_FILE="artifacts/proxy-deploy-output.txt" - pnpm exec wrangler --cwd apps/proxy deploy --env development --var APP_VERSION:${APP_VERSION} 2>&1 | tee "${PROXY_DEPLOY_OUTPUT_FILE}" + pnpm exec wrangler --cwd apps/proxy deploy --env dev --var APP_VERSION:${APP_VERSION} 2>&1 | tee 
"${PROXY_DEPLOY_OUTPUT_FILE}" PROXY_WORKERS_DEV_URL="$(grep -Eo 'https://[[:alnum:]._-]+\.workers\.dev' "${PROXY_DEPLOY_OUTPUT_FILE}" | head -n 1 || true)" PROXY_HEALTH_URL="" @@ -202,7 +209,7 @@ jobs: run: | mkdir -p artifacts pnpm exec wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-post.json || true - pnpm exec wrangler --cwd apps/proxy deployments list --env development --json > artifacts/proxy-deployments-post.json || true + pnpm exec wrangler --cwd apps/proxy deployments list --env dev --json > artifacts/proxy-deployments-post.json || true pnpm exec wrangler --cwd apps/registry d1 migrations list clawdentity-db-dev --remote --env dev > artifacts/d1-migrations-post.txt || true - name: Rollback instructions on failure @@ -213,8 +220,8 @@ jobs: echo " wrangler --cwd apps/registry rollback --env dev -y -m \"ci rollback\"" echo "" echo "Proxy Worker rollback:" - echo " wrangler --cwd apps/proxy deployments list --env development --json" - echo " wrangler --cwd apps/proxy rollback --env development -y -m \"ci rollback\"" + echo " wrangler --cwd apps/proxy deployments list --env dev --json" + echo " wrangler --cwd apps/proxy rollback --env dev -y -m \"ci rollback\"" echo "" echo "D1 rollback:" echo " wrangler --cwd apps/registry d1 time-travel restore clawdentity-db-dev --env dev --timestamp \"${PREDEPLOY_TS}\"" diff --git a/.gitignore b/.gitignore index 3744b8c..a31d036 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ nx.bat .env.* !.env.example .dev.vars +.pnpm-store/ \ No newline at end of file diff --git a/AGENTS.md b/AGENTS.md index 8672509..89d2de5 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -5,7 +5,7 @@ - Keep product docs and issue governance in sync with the active GitHub tracker. - When shipping features, UX of the user is most important aspect. - Remember users run clawdentity in the machines which are not exposed to internet. 
-- The location of the openclaw aka clawdbot is here at /Users/dev/Workdir/clawdbot which is what we are building the current +- The location of the openclaw is here at /Users/dev/Workdir/openclaw which is what we are building the current - Based on the changes made to the cli, always plan for changes in skills as well. Both go together ## Core Rules @@ -49,13 +49,13 @@ - **Environment separation** via wrangler environments in `apps/registry/wrangler.jsonc`: - `--env dev` for development (Worker: `clawdentity-registry-dev`, D1: `clawdentity-db-dev`) - `--env production` for production (Worker: `clawdentity-registry`, D1: `clawdentity-db`) -- **Local dev** uses `wrangler dev --env dev` with local SQLite. Override vars via `apps/registry/.dev.vars` (gitignored). +- **Local dev** uses `wrangler dev --env dev` with local SQLite. Override vars via per-worker `.env` files (for example `apps/registry/.env`). - Use `pnpm -F @clawdentity/registry run dev:local` (or root alias `pnpm dev:registry:local`) to apply local D1 migrations before starting dev server. - **One-touch deploy** scripts in `apps/registry/package.json`: - `deploy:dev` — migrates remote dev D1 + deploys dev Worker - `deploy:production` — migrates remote prod D1 + deploys prod Worker - **Secrets** are set via `wrangler secret put --env `, never committed. -- `.dev.vars` is for local development overrides only. It is gitignored. +- Per-worker `.env` files are for local development overrides only. They are gitignored. ## Database & Migrations - ORM: **Drizzle** with SQLite dialect targeting Cloudflare D1. @@ -90,9 +90,8 @@ - Pass mock bindings as the third argument: `app.request("/path", {}, { DB: {}, ENVIRONMENT: "test" })`. 
## Dual OpenClaw Container Baseline (Skill E2E) -- Runtime stack for local dual-agent tests lives in sibling repo `~/Workdir/clawdbot`: +- Runtime stack for local dual-agent tests lives in sibling repo `~/Workdir/openclaw`: - Compose file: `docker-compose.dual.yml` - - Env file: `.env.dual` - Containers: `clawdbot-agent-alpha-1` (`localhost:18789`), `clawdbot-agent-beta-1` (`localhost:19001`) - Clean pre-skill baseline state is persisted as host snapshots: - `~/.openclaw-baselines/alpha-kimi-preskill` @@ -109,6 +108,12 @@ - stable aliases: - `~/.openclaw-baselines/alpha-kimi-paired-stable-latest` - `~/.openclaw-baselines/beta-kimi-paired-stable-latest` +- Current env-enabled clean baseline (saved on 2026-02-18) is: + - `~/.openclaw-baselines/alpha-kimi-env-enabled-20260218-155534` + - `~/.openclaw-baselines/beta-kimi-env-enabled-20260218-155534` + - stable aliases: + - `~/.openclaw-baselines/alpha-kimi-env-enabled-latest` + - `~/.openclaw-baselines/beta-kimi-env-enabled-latest` - Baseline contract: - OpenClaw config exists (`~/.openclaw/openclaw.json`) with `agents.defaults.model.primary = "kimi-coding/k2p5"`. - No Clawdentity relay skill artifacts are installed in workspace yet. @@ -125,6 +130,8 @@ - Copy `~/.openclaw-alpha` and `~/.openclaw-beta` into new timestamped folders under `~/.openclaw-baselines`. - Start dual compose stack. - Update this section with the new snapshot folder names. +- Env-enabled baseline restore (for prompt-only runs needing provider auth): + - `rsync -a --delete ~/.openclaw-baselines/alpha-kimi-env-enabled-latest/ ~/.openclaw-alpha/ && rsync -a --delete ~/.openclaw-baselines/beta-kimi-env-enabled-latest/ ~/.openclaw-beta/` - Pairing issue runbook (`Disconnected (1008): pairing required` in UI): - Cause: OpenClaw device approval is pending; this is gateway pairing, not Clawdentity peer trust pairing. 
- Scope clarification: diff --git a/PRD.md b/PRD.md index 5ba3e6b..4d4dac0 100644 --- a/PRD.md +++ b/PRD.md @@ -73,7 +73,7 @@ Because OpenClaw requires `hooks.token` and expects Bearer/token auth for `/hook - Verify token (`claw verify`) - Personal PAT lifecycle (`clawdentity api-key create|list|revoke`) - Share contact card (`claw share`) - - npm-first skill install path (`npm install clawdentity --skill`) that prepares OpenClaw relay skill artifacts automatically + - CLI skill install path (`clawdentity skill install`) that prepares OpenClaw relay skill artifacts automatically - **Proxy** - Verify inbound Clawdentity headers @@ -157,12 +157,12 @@ Verifier must enforce: - Revoked PATs must fail auth with `401 API_KEY_REVOKED`. - Unrelated active PATs must continue to authenticate after targeted key revocation. -### 6.8 npm-first OpenClaw skill install -- Installer detects npm skill mode via install-time npm config/environment. +### 6.8 OpenClaw skill install command +- Installer runs explicitly via `clawdentity skill install`. - Installer must prepare these artifacts without manual copy steps: - `SKILL.md` - `references/*` - - `relay-to-peer.mjs` in workspace skill path and hooks transform path + - `relay-to-peer.mjs` in managed skill path and hooks transform path - Runtime installs must not depend on sibling workspace packages; required skill assets are bundled with the CLI package. - Re-running install must be idempotent and safe. - Missing source artifacts must fail with actionable errors. diff --git a/README.md b/README.md index 0ba8d69..00ddf89 100644 --- a/README.md +++ b/README.md @@ -233,28 +233,20 @@ CLI (operator's machine) Registry | `iss` | Registry URL — who vouches for this identity | | `exp` | Expiry — credential lifetime (1-90 days) | -### Step 3: Peer Discovery (Out-of-Band Invite) +### Step 3: Peer Routing Setup (Out-of-Band Metadata) -Alice creates an invite code for Bob. No secrets are exchanged — only a DID and endpoint. 
+Operators exchange peer metadata out-of-band (alias, DID, proxy URL). No relay invite code is required. ``` Alice's Operator Bob's Operator │ │ - │ clawdentity openclaw invite create │ - │ → Encodes: { │ - │ did: "did:claw:agent:...", │ - │ proxyUrl: "https://alice-proxy/ │ - │ hooks/agent", │ - │ alias: "bob", │ - │ name: "Bob Agent" │ - │ } │ - │ → Base64url invite code │ - │ │ - │ Shares code out-of-band ─────────────►│ - │ (email, QR, chat, etc.) │ + │ Shares metadata out-of-band ─────────►│ + │ alias, DID, proxy URL │ │ │ │ │ clawdentity openclaw setup - │ │ bob --invite-code + │ │ bob --peer-alias alice + │ │ --peer-did did:claw:agent:... + │ │ --peer-proxy-url https://alice-proxy/hooks/agent │ │ │ │ Stores peer in peers.json: │ │ { "alice": { @@ -266,7 +258,7 @@ Alice's Operator Bob's Operator │ │ Configures OpenClaw hooks ``` -**Security:** The invite contains only public information (DID + proxy URL). No keys, tokens, or secrets are exchanged. Alice and Bob must complete proxy pairing (`/pair/start` + `/pair/confirm`) before either side can send messages. +**Security:** Setup uses only public peer metadata (DID + proxy URL + alias). No keys, tokens, or secrets are exchanged. Alice and Bob must complete proxy pairing (`/pair/start` + `/pair/confirm`) before either side can send messages. 
### Step 4: First Message (Bob → Alice) @@ -465,22 +457,27 @@ clawdentity/ ### Proxy Worker local runs -- Local env (`ENVIRONMENT=local`): `pnpm dev:proxy` -- Development env (`ENVIRONMENT=development`): `pnpm dev:proxy:development` +- Development env (`ENVIRONMENT=development`): `pnpm dev:proxy` +- Local env (`ENVIRONMENT=local`): `pnpm dev:proxy:local` - Fresh deploy-like env: `pnpm dev:proxy:fresh` - Development deploy command: `pnpm -F @clawdentity/proxy run deploy:dev` - Production deploy command: `pnpm -F @clawdentity/proxy run deploy:production` - Environment intent: `local` is local Wrangler development only; `development` and `production` are cloud deployment environments. +### Registry Worker local runs + +- Development env (`ENVIRONMENT=development`): `pnpm dev:registry` +- Development env with local D1 migration apply: `pnpm dev:registry:local` + ### Develop deployment automation - GitHub workflow: `.github/workflows/deploy-develop.yml` - Trigger: push to `develop` - Runs full quality gates, then deploys: - registry (`apps/registry`, env `dev`) with D1 migrations - - proxy (`apps/proxy`, env `development`) + - proxy (`apps/proxy`, env `dev`) - Health checks must pass with `version == $GITHUB_SHA` for: - - `https://dev.api.clawdentity.com/health` + - `https://dev.registry.clawdentity.com/health` - deployed proxy `/health` URL (workers.dev URL extracted from wrangler output, or optional `PROXY_HEALTH_URL` secret override) - Required GitHub secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID` @@ -497,6 +494,7 @@ clawdentity/ - `clawdentity connector start ` to run local relay connector runtime. - `clawdentity connector service install ` to configure connector autostart after reboot/login (`launchd` on macOS, `systemd --user` on Linux). - `clawdentity connector service uninstall ` to remove connector autostart service. +- `clawdentity skill install` to install/update OpenClaw relay skill artifacts under `~/.openclaw`. 
### 5) Onboarding and control model @@ -514,18 +512,18 @@ clawdentity/ --- -## OpenClaw skill install (npm-first) +## OpenClaw skill install (CLI command) -Expected operator flow starts from npm: +Expected operator flow starts from the CLI command: ```bash -npm install clawdentity --skill +clawdentity skill install ``` -When `--skill` mode is detected, installer logic prepares OpenClaw runtime artifacts automatically: -- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` -- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/references/*` -- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` +Installer logic prepares OpenClaw runtime artifacts automatically: +- `~/.openclaw/skills/clawdentity-openclaw-relay/SKILL.md` +- `~/.openclaw/skills/clawdentity-openclaw-relay/references/*` +- `~/.openclaw/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` - `~/.openclaw/hooks/transforms/relay-to-peer.mjs` Install is idempotent and logs deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index 02734b8..bb5af3b 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -7,11 +7,11 @@ ## Command Architecture - Keep `src/index.ts` as a pure program builder (`createProgram()`); no side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). -- Keep `src/postinstall.ts` as a thin install entrypoint only; it should detect npm `--skill` mode and call shared installer helpers without mutating runtime CLI command wiring. -- Keep package identity clear: workspace package name is `clawdentity` and published install entrypoint remains `npm install clawdentity --skill`. +- Keep `src/postinstall.ts` as a no-op compatibility shim; skill installation is command-driven via `clawdentity skill install`. +- Keep package identity clear: workspace package name is `clawdentity`. 
- Keep runtime version parity: source `CLI_VERSION` from the package metadata (`package.json`) at runtime, never from a hardcoded literal in `src/index.ts`. - Implement command groups under `src/commands/*` and register them from `createProgram()`. -- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`, `connector`) so automation and docs do not drift. +- Keep top-level command contracts stable (`config`, `agent`, `admin`, `api-key`, `invite`, `verify`, `openclaw`, `connector`, `skill`) so automation and docs do not drift. - Reuse shared command helpers from `src/commands/helpers.ts` (especially `withErrorHandling`) instead of duplicating command-level try/catch blocks. - Use `process.exitCode` instead of `process.exit()`. - Use `@clawdentity/sdk` `createLogger` for runtime logging; avoid direct `console.*` calls in CLI app code. @@ -20,7 +20,7 @@ - Reject agent names that are only `.` or `..` before resolving directories or files to prevent accidental traversal of home config directories. - Keep published CLI artifacts standalone-installable: bundle runtime imports into `dist/*` and avoid `workspace:*` runtime dependencies in published `package.json`. - Keep publish artifacts ESM-compatible and avoid bundling CJS-only runtime deps that rely on dynamic `require` (for example `ws`); externalize them and declare them in CLI `dependencies` so installed binaries start cleanly. -- npm `--skill` installer behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. +- `skill install` behavior must be idempotent and deterministic: reruns should only report `installed`, `updated`, or `unchanged` per artifact with stable output ordering. - Keep `skill-bundle/openclaw-skill/` generated from `apps/openclaw-skill` only; do not hand-edit bundled files. 
- Keep generated bundle policy strict: `sync-skill-bundle` must copy from `apps/openclaw-skill/dist/relay-to-peer.mjs` and fail if source build artifacts are missing. - Keep generated bundle files out of git; rely on `build`/`prepack` to rebuild `skill-bundle` before `npm pack`/`npm publish`. diff --git a/apps/cli/package.json b/apps/cli/package.json index ce5b067..3b08f18 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,6 +1,6 @@ { "name": "clawdentity", - "version": "0.0.3", + "version": "0.0.20", "type": "module", "publishConfig": { "access": "public" diff --git a/apps/cli/postinstall.mjs b/apps/cli/postinstall.mjs index 5f051a6..7ca79f1 100644 --- a/apps/cli/postinstall.mjs +++ b/apps/cli/postinstall.mjs @@ -1,46 +1,13 @@ -import { constants } from "node:fs"; -import { access } from "node:fs/promises"; import { dirname, join } from "node:path"; import { fileURLToPath, pathToFileURL } from "node:url"; -function parseBooleanFlag(value) { - if (typeof value !== "string") { - return undefined; - } - - const normalized = value.trim().toLowerCase(); - if ( - normalized === "" || - normalized === "1" || - normalized === "true" || - normalized === "yes" - ) { - return true; - } - - if (normalized === "0" || normalized === "false" || normalized === "no") { - return false; - } - - return undefined; -} - const packageRoot = dirname(fileURLToPath(import.meta.url)); const bundledPostinstallPath = join(packageRoot, "dist", "postinstall.js"); -const skillRequested = parseBooleanFlag(process.env.npm_config_skill) === true; try { - await access(bundledPostinstallPath, constants.R_OK); await import(pathToFileURL(bundledPostinstallPath).href); } catch (error) { - if (error && typeof error === "object" && error.code === "ENOENT") { - if (skillRequested) { - process.stderr.write( - `[clawdentity] skill install failed: build artifact not found at ${bundledPostinstallPath}\n`, - ); - process.exitCode = 1; - } - } else { + if (!(error && typeof error === "object" 
&& error.code === "ENOENT")) { const message = error instanceof Error ? error.message : String(error); process.stderr.write(`[clawdentity] postinstall failed: ${message}\n`); process.exitCode = 1; diff --git a/apps/cli/sample-config.json b/apps/cli/sample-config.json index 3f58dde..2194f07 100644 --- a/apps/cli/sample-config.json +++ b/apps/cli/sample-config.json @@ -1,3 +1,4 @@ { - "registryUrl": "https://api.clawdentity.com" + "registryUrl": "https://registry.clawdentity.com", + "proxyUrl": "https://proxy.clawdentity.com" } diff --git a/apps/cli/scripts/AGENTS.md b/apps/cli/scripts/AGENTS.md index 03b4ab5..e98617e 100644 --- a/apps/cli/scripts/AGENTS.md +++ b/apps/cli/scripts/AGENTS.md @@ -6,7 +6,16 @@ ## Rules - `sync-skill-bundle.mjs` is the source of truth for copying OpenClaw skill assets into `apps/cli/skill-bundle/`. - `sync-skill-bundle.mjs` must copy only from built source artifacts (`apps/openclaw-skill/dist/relay-to-peer.mjs`) and never fallback to stale bundled copies. -- `verify-skill-bundle.mjs` must validate the exact artifacts required by npm `--skill` install flow. +- `verify-skill-bundle.mjs` must validate the exact artifacts required by `clawdentity skill install`. +- `openclaw-relay-docker-ready.sh` is the only Docker local-test entrypoint: + - Restore alpha/beta profiles from pre-skill baselines (`alpha-kimi-preskill`, `beta-kimi-preskill` by default). + - Preserve existing `.env` files by default (`PRESERVE_ENV=1`) so model API keys remain configured. + - Enforce gateway safety defaults (`gateway.mode=local`, `bind=lan`, `controlUi.allowInsecureAuth=true`) and ensure auth tokens exist. + - Resolve UI tokenized URLs with env-first precedence (`OPENCLAW_GATEWAY_TOKEN` from profile `.env`, then `openclaw.json`) to avoid token drift. + - Persist generated gateway token back into each profile `.env` when missing so restarts remain deterministic. 
+ - Always upsert `CLAWDENTITY_REGISTRY_URL` and `CLAWDENTITY_PROXY_URL` into each profile `.env` for container runtime (`host.docker.internal` defaults) so invite redemption and setup do not fall back to production endpoints. + - UI readiness must use HTTP success probes (not brittle HTML marker matching) with container-log diagnostics on timeout. + - Remove any `clawdentity` package/skill residue from workspace plus clear sessions and memory DB, so testing always starts at skill installation. - Scripts must fail with actionable errors when required source artifacts are missing. - Keep script output concise and stable for CI/release logs. - Do not add install-time network fetches to packaging scripts. diff --git a/apps/cli/scripts/openclaw-relay-docker-ready.sh b/apps/cli/scripts/openclaw-relay-docker-ready.sh new file mode 100755 index 0000000..59333a1 --- /dev/null +++ b/apps/cli/scripts/openclaw-relay-docker-ready.sh @@ -0,0 +1,263 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." 
&& pwd)" +CLAWDENTITY_ENV_FILE="${CLAWDENTITY_ENV_FILE:-$REPO_ROOT/.env}" + +load_dotenv() { + local env_file="$1" + [[ -f "$env_file" ]] || return 0 + set -a + set +u + # shellcheck disable=SC1090 + source "$env_file" + set -u + set +a +} + +load_dotenv "$CLAWDENTITY_ENV_FILE" + +DOCKER_COMPOSE_FILE="${DOCKER_COMPOSE_FILE:-/Users/dev/Workdir/openclaw/docker-compose.dual.yml}" + +OPENCLAW_ALPHA_HOME="${OPENCLAW_ALPHA_HOME:-$HOME/.openclaw-alpha}" +OPENCLAW_BETA_HOME="${OPENCLAW_BETA_HOME:-$HOME/.openclaw-beta}" + +BASELINE_ALPHA="${BASELINE_ALPHA:-$HOME/.openclaw-baselines/alpha-kimi-preskill}" +BASELINE_BETA="${BASELINE_BETA:-$HOME/.openclaw-baselines/beta-kimi-preskill}" + +PRESERVE_ENV="${PRESERVE_ENV:-1}" +WAIT_TIMEOUT_SECONDS="${WAIT_TIMEOUT_SECONDS:-90}" +ALPHA_CONTAINER="${ALPHA_CONTAINER:-clawdbot-agent-alpha-1}" +BETA_CONTAINER="${BETA_CONTAINER:-clawdbot-agent-beta-1}" +DOCKER_REGISTRY_URL="${DOCKER_REGISTRY_URL:-${CLAWDENTITY_REGISTRY_URL:-http://host.docker.internal:8788}}" +DOCKER_PROXY_URL="${DOCKER_PROXY_URL:-${CLAWDENTITY_PROXY_URL:-http://host.docker.internal:8787}}" + +log() { + printf '[openclaw-relay-ready] %s\n' "$*" +} + +fail() { + printf '[openclaw-relay-ready] ERROR: %s\n' "$*" >&2 + exit 1 +} + +require_command() { + command -v "$1" >/dev/null 2>&1 || fail "Missing required command: $1" +} + +require_dir() { + local path="$1" + [[ -d "$path" ]] || fail "Directory not found: $path" +} + +docker_compose_dual() { + [[ -f "$DOCKER_COMPOSE_FILE" ]] || fail "docker compose file not found: $DOCKER_COMPOSE_FILE" + docker compose -f "$DOCKER_COMPOSE_FILE" "$@" +} + +wait_for_ui() { + local port="$1" + local container="$2" + local waited=0 + + while true; do + if curl -fsS --max-time 2 "http://127.0.0.1:${port}/" >/dev/null 2>&1; then + log "Port ${port}: UI ready" + return + fi + if docker exec "$container" sh -lc "curl -fsS --max-time 2 http://127.0.0.1:18789/ >/dev/null 2>&1"; then + log "Port ${port}: UI ready (container-local probe)" + return + 
fi + + waited=$((waited + 1)) + if [[ "$waited" -ge "$WAIT_TIMEOUT_SECONDS" ]]; then + docker logs --tail 120 "$container" >&2 || true + fail "Port ${port}: UI readiness timeout (${WAIT_TIMEOUT_SECONDS}s)" + fi + sleep 1 + done +} + +write_gateway_defaults() { + node -e ' + const fs = require("fs"); + const crypto = require("crypto"); + const paths = process.argv.slice(1, 3); + const registryUrl = process.argv[3]; + const proxyUrl = process.argv[4]; + + const readEnvFile = (envPath) => (fs.existsSync(envPath) ? fs.readFileSync(envPath, "utf8") : ""); + + const readEnvToken = (raw) => { + for (const line of raw.split(/\r?\n/)) { + const match = line.match(/^\s*OPENCLAW_GATEWAY_TOKEN\s*=\s*(.+)\s*$/); + if (!match) { + continue; + } + const value = match[1].trim().replace(/^"+|"+$/g, ""); + if (value.length > 0) { + return value; + } + } + return null; + }; + + const upsertEnvValue = (raw, key, value) => { + const line = `${key}=${value}`; + const keyPattern = new RegExp(`^\\s*${key}\\s*=.*$`, "m"); + if (keyPattern.test(raw)) { + const next = raw.replace(keyPattern, line); + return next.endsWith("\n") ? next : `${next}\n`; + } + if (raw.trim().length === 0) { + return `${line}\n`; + } + return raw.endsWith("\n") ? `${raw}${line}\n` : `${raw}\n${line}\n`; + }; + + for (const configPath of paths) { + const envPath = configPath.replace(/openclaw\.json$/, ".env"); + const envRaw = readEnvFile(envPath); + const cfg = JSON.parse(fs.readFileSync(configPath, "utf8")); + cfg.gateway = cfg.gateway || {}; + cfg.gateway.mode = "local"; + cfg.gateway.bind = "lan"; + cfg.gateway.controlUi = { + ...(cfg.gateway.controlUi || {}), + allowInsecureAuth: true, + }; + if (typeof cfg.gateway.auth !== "object" || cfg.gateway.auth === null) { + cfg.gateway.auth = {}; + } + const envToken = readEnvToken(envRaw); + const configToken = + typeof cfg.gateway.auth.token === "string" && cfg.gateway.auth.token.trim().length > 0 + ? 
cfg.gateway.auth.token.trim() + : null; + const token = envToken || configToken || crypto.randomBytes(24).toString("hex"); + cfg.gateway.auth.token = token; + fs.writeFileSync(configPath, `${JSON.stringify(cfg, null, 2)}\n`); + let nextEnvRaw = upsertEnvValue(envRaw, "OPENCLAW_GATEWAY_TOKEN", token); + nextEnvRaw = upsertEnvValue(nextEnvRaw, "CLAWDENTITY_REGISTRY_URL", registryUrl); + nextEnvRaw = upsertEnvValue(nextEnvRaw, "CLAWDENTITY_PROXY_URL", proxyUrl); + fs.writeFileSync(envPath, nextEnvRaw); + } + ' \ + "$OPENCLAW_ALPHA_HOME/openclaw.json" \ + "$OPENCLAW_BETA_HOME/openclaw.json" \ + "$DOCKER_REGISTRY_URL" \ + "$DOCKER_PROXY_URL" +} + +remove_skill_artifacts() { + local profile_path="$1" + rm -rf \ + "$profile_path/skills/clawdentity-openclaw-relay" \ + "$profile_path/workspace/skills/clawdentity-openclaw-relay" \ + "$profile_path/workspace/node_modules/clawdentity" + rm -f "$profile_path/hooks/transforms/relay-to-peer.mjs" +} + +clear_runtime_state() { + local profile_path="$1" + rm -f "$profile_path/memory/main.sqlite" + if [[ -d "$profile_path/agents/main/sessions" ]]; then + find "$profile_path/agents/main/sessions" -type f -delete + fi +} + +print_urls() { + node -e ' + const fs = require("fs"); + const alphaHome = process.argv[1]; + const betaHome = process.argv[2]; + + const tokenFromEnvFile = (profileHome) => { + const envPath = `${profileHome}/.env`; + if (!fs.existsSync(envPath)) return null; + const raw = fs.readFileSync(envPath, "utf8"); + for (const line of raw.split(/\r?\n/)) { + const match = line.match(/^\s*OPENCLAW_GATEWAY_TOKEN\s*=\s*(.+)\s*$/); + if (!match) continue; + const value = match[1].trim().replace(/^"+|"+$/g, ""); + if (value.length > 0) return value; + } + return null; + }; + + const tokenFromConfig = (profileHome) => { + const cfg = JSON.parse(fs.readFileSync(`${profileHome}/openclaw.json`, "utf8")); + const token = cfg?.gateway?.auth?.token; + return typeof token === "string" && token.trim().length > 0 ? 
token.trim() : ""; + }; + + const alphaToken = tokenFromEnvFile(alphaHome) || tokenFromConfig(alphaHome); + const betaToken = tokenFromEnvFile(betaHome) || tokenFromConfig(betaHome); + console.log(`alpha_url=http://localhost:18789/#token=${alphaToken}`); + console.log(`beta_url=http://localhost:19001/#token=${betaToken}`); + ' \ + "$OPENCLAW_ALPHA_HOME" \ + "$OPENCLAW_BETA_HOME" +} + +run() { + require_command docker + require_command rsync + require_command node + require_command curl + require_dir "$BASELINE_ALPHA" + require_dir "$BASELINE_BETA" + + local tmp_dir + tmp_dir="$(mktemp -d)" + trap 'rm -rf "${tmp_dir:-}"' EXIT + + if [[ "$PRESERVE_ENV" == "1" ]]; then + [[ -f "$OPENCLAW_ALPHA_HOME/.env" ]] || fail "Missing .env: $OPENCLAW_ALPHA_HOME/.env" + [[ -f "$OPENCLAW_BETA_HOME/.env" ]] || fail "Missing .env: $OPENCLAW_BETA_HOME/.env" + cp "$OPENCLAW_ALPHA_HOME/.env" "$tmp_dir/alpha.env" + cp "$OPENCLAW_BETA_HOME/.env" "$tmp_dir/beta.env" + fi + + log "Stopping dual OpenClaw stack" + docker_compose_dual down --remove-orphans + + log "Restoring factory baseline profiles" + rsync -a --delete "$BASELINE_ALPHA/" "$OPENCLAW_ALPHA_HOME/" + rsync -a --delete "$BASELINE_BETA/" "$OPENCLAW_BETA_HOME/" + + if [[ "$PRESERVE_ENV" == "1" ]]; then + log "Restoring preserved .env API configuration" + cp "$tmp_dir/alpha.env" "$OPENCLAW_ALPHA_HOME/.env" + cp "$tmp_dir/beta.env" "$OPENCLAW_BETA_HOME/.env" + fi + + log "Applying gateway defaults + clearing runtime state" + write_gateway_defaults + clear_runtime_state "$OPENCLAW_ALPHA_HOME" + clear_runtime_state "$OPENCLAW_BETA_HOME" + remove_skill_artifacts "$OPENCLAW_ALPHA_HOME" + remove_skill_artifacts "$OPENCLAW_BETA_HOME" + + log "Starting dual OpenClaw stack" + docker_compose_dual up -d + + wait_for_ui 18789 "$ALPHA_CONTAINER" + wait_for_ui 19001 "$BETA_CONTAINER" + + print_urls + + printf 'alpha_sessions=%s\n' "$(find "$OPENCLAW_ALPHA_HOME/agents/main/sessions" -type f 2>/dev/null | wc -l | tr -d ' ')" + printf 
'beta_sessions=%s\n' "$(find "$OPENCLAW_BETA_HOME/agents/main/sessions" -type f 2>/dev/null | wc -l | tr -d ' ')" + [[ -d "$OPENCLAW_ALPHA_HOME/skills/clawdentity-openclaw-relay" ]] && echo "alpha_skill_present=1" || echo "alpha_skill_present=0" + [[ -d "$OPENCLAW_BETA_HOME/skills/clawdentity-openclaw-relay" ]] && echo "beta_skill_present=1" || echo "beta_skill_present=0" + [[ -d "$OPENCLAW_ALPHA_HOME/workspace/node_modules/clawdentity" ]] && echo "alpha_pkg_present=1" || echo "alpha_pkg_present=0" + [[ -d "$OPENCLAW_BETA_HOME/workspace/node_modules/clawdentity" ]] && echo "beta_pkg_present=1" || echo "beta_pkg_present=0" + + log "Ready state complete" +} + +run diff --git a/apps/cli/skill-bundle/AGENTS.md b/apps/cli/skill-bundle/AGENTS.md index dc8a7ba..652d9f4 100644 --- a/apps/cli/skill-bundle/AGENTS.md +++ b/apps/cli/skill-bundle/AGENTS.md @@ -1,7 +1,7 @@ # AGENTS.md (apps/cli/skill-bundle) ## Purpose -- Store bundled skill artifacts shipped with the CLI package for npm `--skill` postinstall. +- Store bundled skill artifacts shipped with the CLI package for `clawdentity skill install`. ## Rules - Treat this folder as generated release input; do not hand-edit bundled files. diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index fa76a8e..62f2af3 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -13,20 +13,23 @@ - API-key lifecycle command logic should stay in `commands/api-key.ts`; keep create/list/revoke request mapping explicit and keep token exposure limited to create output only. - Connector runtime command logic should stay in `commands/connector.ts`; keep startup orchestration deterministic and avoid embedding connector runtime implementation details in the CLI. - Keep connector runtime import bundled at build time (from `@clawdentity/connector`) so published `clawdentity` installs do not depend on unpublished workspace runtime packages. 
-- Registry invite lifecycle command logic should stay in `commands/invite.ts`; keep it strictly scoped to registry onboarding invites and separate from `commands/openclaw.ts` peer-relay invite codes. -- `invite redeem` must print the returned PAT once, then persist config in deterministic order (`registryUrl`, then `apiKey`) so bootstrap/onboarding state is predictable. +- Registry invite lifecycle command logic should stay in `commands/invite.ts`; keep it strictly scoped to registry onboarding invites. +- `invite redeem` must require `--display-name `, print the returned PAT once, then persist config in deterministic order (`registryUrl`, then `apiKey`, then resolved `proxyUrl`, then `humanName`) so bootstrap/onboarding state is predictable. - `invite` command routes must use endpoint constants from `@clawdentity/protocol` (`INVITES_PATH`, `INVITES_REDEEM_PATH`) instead of inline path literals. - Agent auth refresh state is stored per-agent at `~/.clawdentity/agents//registry-auth.json` and must be written with secure file permissions. - `agent auth refresh` must use `Authorization: Claw ` + PoP headers from local agent keys and must not require PAT config. - `pair` command logic should stay in `commands/pair.ts`; keep proxy pairing bootstrap (`/pair/start`, `/pair/confirm`) CLI-driven with local AIT + PoP proof headers and one-time ticket QR support (`--qr`, `--qr-file`). +- `pair start`/`pair confirm` must send profile metadata (`initiatorProfile`/`responderProfile`) with both `agentName` and `humanName`. +- Pairing must fail fast with `CLI_PAIR_HUMAN_NAME_MISSING` when local config does not include `humanName`. +- Pairing peer persistence must write explicit peer metadata (`agentName`, `humanName`) in `~/.clawdentity/peers.json`; do not collapse profile metadata into a single `name` field. +- `openclaw setup` peers snapshot sync must preserve `agentName`/`humanName` fields from `~/.clawdentity/peers.json`. 
- `connector start ` must validate local agent material (`identity.json`, `ait.jwt`, `secret.key`, `registry-auth.json`) before starting runtime and must fail with stable CLI errors when files are missing/invalid. - `connector start` must print the local outbound handoff endpoint so transform troubleshooting is deterministic. - `connector service install ` must install user-scoped autostart integration (`launchd` on macOS, `systemd --user` on Linux) so connector runtime survives host restarts. - `connector service uninstall ` must be idempotent and remove the generated service file even when the service is already stopped/unloaded. -## Skill Install Mode -- Keep npm skill-install logic in shared helpers (`install-skill-mode.ts`) and invoke it from `postinstall.ts`; do not embed installer logic inside command factories. -- Detect install mode via npm environment (`npm_config_skill` and npm argv fallback) so non-skill installs remain unaffected. +## Skill Install +- Keep skill-install logic in shared helpers (`install-skill-mode.ts`) and invoke it through `commands/skill.ts`; do not hide installer side effects in package install hooks. - Resolve skill artifacts in this order: explicit override, bundled `skill-bundle/openclaw-skill`, installed `@clawdentity/openclaw-skill`, then workspace fallback. - Skill install must copy `SKILL.md`, `references/*`, and `relay-to-peer.mjs` into OpenClaw runtime paths under `~/.openclaw` and must fail with actionable errors when source artifacts are missing. - Installer logs must be deterministic and explicit (`installed`, `updated`, `unchanged`) so automated skill tests can assert outcomes reliably. @@ -46,6 +49,6 @@ ## Testing Rules - Command tests must capture `stdout`/`stderr` and assert exit-code behavior. - Include success, revoked, invalid token, keyset failure, CRL failure, and cache-hit scenarios for `verify`. 
-- For OpenClaw invite/setup flow, cover invite encode/decode, config patch idempotency, and missing-file validation. +- For OpenClaw setup flow, cover self-setup behavior, config patch idempotency, and missing-file validation. - For registry invite flow, cover admin-auth create path, public redeem path, config persistence failures, and command exit-code behavior. - Keep tests deterministic by mocking network and filesystem dependencies. diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index e3e870b..b96ed54 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -26,15 +26,20 @@ - `openclaw invite` must generate self-contained invite code from admin-provided DID + proxy URL. - `openclaw setup` must be idempotent for relay mapping updates and peer map writes. - `openclaw setup` must persist/update `~/.clawdentity/openclaw-relay.json` with the resolved `openclawBaseUrl` so downstream proxy runtime can boot without manual env edits. +- `openclaw setup` is the primary self-ready command: it must also bring connector runtime online (unless `--no-runtime-start`) and verify websocket connectivity before returning success. +- `openclaw setup --runtime-mode ` controls runtime startup policy; `auto` should prefer service mode and fallback to detached when service tooling is unavailable. - `openclaw setup --openclaw-base-url` should only be needed when OpenClaw is not reachable on the default `http://127.0.0.1:18789`. - `openclaw setup` must set `hooks.allowRequestSessionKey=false` by default and retain `hooks.allowedSessionKeyPrefixes` enforcement for safer `/hooks/agent` session routing. +- `openclaw setup` must treat `hooks.defaultSessionKey` as an OpenClaw request session key (`main`, `global`, `subagent:*`), not a canonical `agent::...` store key. 
+- `openclaw setup` must normalize legacy canonical defaults (`agent::`) to request-key format (``) before writing config, so hook runs route to the expected UI session. +- When deriving fallback hook session routing, follow OpenClaw runtime semantics (`session.scope=global` -> `global`; otherwise `session.mainKey` with fallback `main`). - Keep thrown command errors static (no interpolated runtime values); include variable context in error details/log fields. Diagnostic check output (`openclaw doctor`, `openclaw relay test`) may include concrete paths/aliases so operators can remediate quickly. - Keep invite-type distinction explicit in output/docs: - `clw_inv_...` = registry onboarding invite (`invite redeem`) - - `clawd1_...` = OpenClaw peer relay invite (`openclaw setup`) + - `clawd1_...` = OpenClaw peer relay invite (`openclaw invite`) ## Connector Command Rules -- `connector start ` is the runtime entrypoint for local relay handoff and must remain long-running when connector runtime provides a wait/closed primitive. +- `connector start ` is the advanced/manual runtime entrypoint for local relay handoff and must remain long-running when connector runtime provides a wait/closed primitive. - Validate agent local state before start (`identity.json`, `ait.jwt`, `secret.key`, `registry-auth.json`) and fail early with deterministic operator-facing errors. - Keep connector startup wiring behind dependency-injected helpers so tests can mock module loading/runtime behavior without requiring a live connector package. - Print resolved outbound endpoint and proxy websocket URL (when provided by runtime) so operators can verify local handoff and upstream connectivity. @@ -78,13 +83,20 @@ - Mock network and filesystem dependencies in command tests. - Include success and failure scenarios for external calls, parsing, and cache behavior. - Assert exit code behavior in addition to stdout/stderr text. 
+- Keep command tests hermetic by sanitizing host `CLAWDENTITY_*` env overrides in `beforeEach`; tests should explicitly set only the env vars needed for that case. ## OpenClaw Diagnostic Command Rules -- `openclaw doctor` must stay read-only and validate required local state: resolved CLI config (`registryUrl` + `apiKey`), selected agent marker, local agent credentials, peers map integrity (and requested `--peer` alias), transform presence, hook mapping, and OpenClaw base URL resolution. +- `openclaw doctor` must stay read-only and validate required local state: resolved CLI config (`registryUrl` + `apiKey` + `proxyUrl` unless env override), selected agent marker, local agent credentials, peers map integrity (and requested `--peer` alias), transform presence, hook mapping, OpenClaw base URL resolution, and connector runtime websocket readiness. +- `openclaw doctor` must validate hook-session safety invariants (`hooks.defaultSessionKey`, `hooks.allowRequestSessionKey=false`, required `hooks.allowedSessionKeyPrefixes`) and fail with deterministic remediation when drifted. +- `openclaw doctor` must validate pending OpenClaw gateway device approvals (`/devices/pending.json`) so `pairing required` conditions are surfaced before relay tests. +- `openclaw doctor` must validate connector inbound durability state (`state.connectorInboundInbox`) from connector `/v1/status` so queued local replay backlog is visible to operators. +- `openclaw doctor` must validate local OpenClaw hook replay health (`state.openclawHookHealth`) from connector `/v1/status` and fail when connector reports replay failures with pending inbox backlog. +- `openclaw setup` must attempt automatic recovery for pending OpenClaw gateway device approvals before failing checklist validation, so normal onboarding does not require manual `openclaw devices approve` steps. 
- `openclaw doctor` must treat malformed/unreadable CLI config as a failed diagnostic check, not a thrown exception, so full per-check output remains available. - Relay hook mapping validation must require the expected mapping path (`send-to-peer`) and only accept optional `id` when it matches `clawdentity-send-to-peer`. - `openclaw doctor` must print deterministic check IDs and actionable fix hints for each failed check. - `openclaw doctor --json` must emit a stable machine-readable envelope with overall status + per-check results for CI scripting. +- `openclaw setup` must run an internal post-setup checklist (doctor-style, without registry config dependency) and fail fast when local hook/runtime/device prerequisites are not healthy. ## OpenClaw Relay Test Command Rules - `openclaw relay test --peer ` must run doctor-style preflight checks before sending the probe payload. @@ -94,11 +106,14 @@ ## Pair Command Rules - `pair start ` must call proxy `/pair/start` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. -- `pair start` must send owner PAT via `x-claw-owner-pat`, defaulting to configured API key unless explicitly overridden by `--owner-pat`. +- `pair start` must rely on local Claw agent auth + PoP headers only; ownership is validated server-side via proxy-to-registry internal service auth. - `pair start --qr` must generate a one-time local PNG QR containing the returned ticket and print the filesystem path. - `pair start --qr` must sweep expired QR artifacts in `~/.clawdentity/pairing` before writing a new file. - `pair confirm ` must call proxy `/pair/confirm` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. - `pair confirm` must accept either `--qr-file ` (primary) or `--ticket ` (fallback), never both. - `pair confirm --qr-file` must delete the consumed QR file after successful confirm (best effort, non-fatal on cleanup failure). 
-- `pair` commands must accept proxy URL via `--proxy-url` and fallback to env `CLAWDENTITY_PROXY_URL` when the flag is absent. +- `pair status --ticket ` must poll `/pair/status` and persist peers locally when status transitions to `confirmed`. +- After peer persistence, pair flows must best-effort sync OpenClaw transform peer snapshot (`hooks/transforms/clawdentity-peers.json`) when `~/.clawdentity/openclaw-relay.json` provides `relayTransformPeersPath`, so relay delivery works without manual file copying. +- `pair start --wait` should use `/pair/status` polling and auto-save the responder peer locally so reverse pairing is not required. +- `pair` commands must resolve proxy URL automatically from CLI config/registry metadata, with `CLAWDENTITY_PROXY_URL` env override support. - `pair` commands must fail with deterministic operator messages for invalid ticket/QR input, missing local agent proof material, and proxy auth/state errors. diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index d4b2d2d..df7fbe7 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -144,7 +144,7 @@ describe("agent create command", () => { vi.stubGlobal("fetch", mockFetch); mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "pat_123", }); @@ -217,7 +217,7 @@ describe("agent create command", () => { Uint8Array.from([1, 2, 3]), ); expect(mockFetch).toHaveBeenCalledWith( - "https://api.clawdentity.com/v1/agents/challenge", + "https://registry.clawdentity.com/v1/agents/challenge", expect.objectContaining({ method: "POST", headers: expect.objectContaining({ @@ -227,7 +227,7 @@ describe("agent create command", () => { }), ); expect(mockFetch).toHaveBeenCalledWith( - "https://api.clawdentity.com/v1/agents", + "https://registry.clawdentity.com/v1/agents", expect.objectContaining({ method: "POST", headers: 
expect.objectContaining({ @@ -275,7 +275,7 @@ describe("agent create command", () => { it("fails when API key is missing", async () => { mockedResolveConfig.mockResolvedValueOnce({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); const result = await runAgentCommand(["create", "agent-01"]); @@ -409,7 +409,7 @@ describe("agent auth refresh command", () => { if (filePath.endsWith("/identity.json")) { return JSON.stringify({ did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); } if (filePath.endsWith("/secret.key")) { @@ -447,7 +447,7 @@ describe("agent auth refresh command", () => { expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( expect.objectContaining({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", ait: "ait.jwt.value", refreshToken: "clw_rft_old_refresh", }), @@ -484,7 +484,7 @@ describe("agent auth refresh command", () => { if (filePath.endsWith("/identity.json")) { return JSON.stringify({ did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); } if (filePath.endsWith("/secret.key")) { @@ -510,7 +510,7 @@ describe("agent auth refresh command", () => { if (filePath.endsWith("/identity.json")) { return JSON.stringify({ did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - registryUrl: "https://api.clawdentity.com/registry", + registryUrl: "https://registry.clawdentity.com/registry", }); } if (filePath.endsWith("/secret.key")) { @@ -533,7 +533,7 @@ describe("agent auth refresh command", () => { expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( expect.objectContaining({ - registryUrl: "https://api.clawdentity.com/registry", + registryUrl: "https://registry.clawdentity.com/registry", }), ); }); @@ -549,7 +549,7 @@ describe("agent revoke command", 
() => { vi.stubGlobal("fetch", mockFetch); mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "pat_123", }); @@ -579,7 +579,7 @@ describe("agent revoke command", () => { "utf-8", ); expect(mockFetch).toHaveBeenCalledWith( - `https://api.clawdentity.com/v1/agents/${agentId}`, + `https://registry.clawdentity.com/v1/agents/${agentId}`, expect.objectContaining({ method: "DELETE", headers: expect.objectContaining({ @@ -610,7 +610,7 @@ describe("agent revoke command", () => { it("fails when API key is missing", async () => { mockedResolveConfig.mockResolvedValueOnce({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); const result = await runAgentCommand(["revoke", "agent-01"]); diff --git a/apps/cli/src/commands/api-key.test.ts b/apps/cli/src/commands/api-key.test.ts index 0f8e01c..a1e2355 100644 --- a/apps/cli/src/commands/api-key.test.ts +++ b/apps/cli/src/commands/api-key.test.ts @@ -48,7 +48,7 @@ async function runApiKeyCommand(args: string[]) { const command = createApiKeyCommand({ fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }), }); @@ -84,7 +84,7 @@ describe("api-key command helpers", () => { mockFetch.mockReset(); mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }); }); @@ -112,7 +112,7 @@ describe("api-key command helpers", () => { { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }), }, @@ -120,7 +120,7 @@ describe("api-key command helpers", () => { 
expect(result.apiKey.token).toBe("clw_pat_created"); expect(mockFetch).toHaveBeenCalledWith( - "https://api.clawdentity.com/v1/me/api-keys", + "https://registry.clawdentity.com/v1/me/api-keys", expect.objectContaining({ method: "POST", headers: expect.objectContaining({ @@ -157,7 +157,7 @@ describe("api-key command helpers", () => { { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }), }, @@ -177,7 +177,7 @@ describe("api-key command helpers", () => { { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }), }, @@ -185,7 +185,7 @@ describe("api-key command helpers", () => { expect(result.apiKeyId).toBe("01KJ8E2A4F8B10V8R8A6T8XKZ9"); expect(mockFetch).toHaveBeenCalledWith( - "https://api.clawdentity.com/v1/me/api-keys/01KJ8E2A4F8B10V8R8A6T8XKZ9", + "https://registry.clawdentity.com/v1/me/api-keys/01KJ8E2A4F8B10V8R8A6T8XKZ9", expect.objectContaining({ method: "DELETE", headers: expect.objectContaining({ @@ -197,7 +197,7 @@ describe("api-key command helpers", () => { it("fails create when local API key is not configured", async () => { mockedResolveConfig.mockResolvedValueOnce({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); await expect( @@ -222,7 +222,7 @@ describe("api-key command helpers", () => { { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }), }, @@ -251,7 +251,7 @@ describe("api-key command helpers", () => { { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + 
registryUrl: "https://registry.clawdentity.com", apiKey: "clw_pat_local", }), }, diff --git a/apps/cli/src/commands/config.test.ts b/apps/cli/src/commands/config.test.ts index d24126b..65f0930 100644 --- a/apps/cli/src/commands/config.test.ts +++ b/apps/cli/src/commands/config.test.ts @@ -15,6 +15,13 @@ vi.mock("../config/manager.js", () => ({ writeConfig: vi.fn(), })); +vi.mock("../config/registry-metadata.js", () => ({ + fetchRegistryMetadata: vi.fn(), + normalizeRegistryUrl: vi.fn((value: string) => + value.endsWith("/") ? value : `${value}/`, + ), +})); + vi.mock("@clawdentity/sdk", () => ({ createLogger: vi.fn(() => ({ child: vi.fn(), @@ -32,6 +39,8 @@ import { setConfigValue, writeConfig, } from "../config/manager.js"; +import { fetchRegistryMetadata } from "../config/registry-metadata.js"; +import { resetClawdentityEnv } from "../test-env.js"; import { createConfigCommand } from "./config.js"; const mockedAccess = vi.mocked(access); @@ -40,6 +49,7 @@ const mockedWriteConfig = vi.mocked(writeConfig); const mockedSetConfigValue = vi.mocked(setConfigValue); const mockedGetConfigValue = vi.mocked(getConfigValue); const mockedResolveConfig = vi.mocked(resolveConfig); +const mockedFetchRegistryMetadata = vi.mocked(fetchRegistryMetadata); const previousEnv = process.env; const buildErrnoError = (code: string): NodeJS.ErrnoException => { @@ -98,13 +108,17 @@ const runConfigCommand = async (args: string[]) => { describe("config command", () => { beforeEach(() => { vi.clearAllMocks(); - process.env = { ...previousEnv }; + process.env = resetClawdentityEnv(previousEnv); mockedReadConfig.mockResolvedValue({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", + }); + mockedFetchRegistryMetadata.mockResolvedValue({ + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: 
"https://proxy.clawdentity.com/", }); }); @@ -120,7 +134,8 @@ describe("config command", () => { expect(mockedReadConfig).toHaveBeenCalled(); expect(mockedWriteConfig).toHaveBeenCalledWith({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://proxy.clawdentity.com/", }); expect(result.stdout).toContain( "Initialized config at /mock-home/.clawdentity/config.json", @@ -134,38 +149,53 @@ describe("config command", () => { await runConfigCommand([ "init", "--registry-url", - "https://dev.api.clawdentity.com", + "https://dev.registry.clawdentity.com", ]); expect(mockedWriteConfig).toHaveBeenCalledWith({ - registryUrl: "https://dev.api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://proxy.clawdentity.com/", }); + expect(mockedFetchRegistryMetadata).toHaveBeenCalledWith( + "https://dev.registry.clawdentity.com/", + expect.any(Object), + ); }); it("initializes config with env registry override", async () => { mockedAccess.mockRejectedValueOnce(buildErrnoError("ENOENT")); - process.env.CLAWDENTITY_REGISTRY = "https://dev.api.clawdentity.com"; + process.env.CLAWDENTITY_REGISTRY = "https://dev.registry.clawdentity.com"; await runConfigCommand(["init"]); expect(mockedWriteConfig).toHaveBeenCalledWith({ - registryUrl: "https://dev.api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://proxy.clawdentity.com/", }); + expect(mockedFetchRegistryMetadata).toHaveBeenCalledWith( + "https://dev.registry.clawdentity.com/", + expect.any(Object), + ); }); it("prefers --registry-url over env registry override", async () => { mockedAccess.mockRejectedValueOnce(buildErrnoError("ENOENT")); - process.env.CLAWDENTITY_REGISTRY = "https://env.api.clawdentity.com"; + process.env.CLAWDENTITY_REGISTRY = "https://env.registry.clawdentity.com"; await runConfigCommand([ "init", "--registry-url", - "https://flag.api.clawdentity.com", + 
"https://flag.registry.clawdentity.com", ]); expect(mockedWriteConfig).toHaveBeenCalledWith({ - registryUrl: "https://flag.api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://proxy.clawdentity.com/", }); + expect(mockedFetchRegistryMetadata).toHaveBeenCalledWith( + "https://flag.registry.clawdentity.com/", + expect.any(Object), + ); }); it("skips init when config already exists", async () => { @@ -174,6 +204,7 @@ describe("config command", () => { const result = await runConfigCommand(["init"]); expect(mockedWriteConfig).not.toHaveBeenCalled(); + expect(mockedFetchRegistryMetadata).not.toHaveBeenCalled(); expect(result.stdout).toContain( "Config already exists at /mock-home/.clawdentity/config.json", ); @@ -188,6 +219,21 @@ describe("config command", () => { ); }); + it("sets proxy url", async () => { + await runConfigCommand(["set", "proxyUrl", "http://localhost:8787"]); + + expect(mockedSetConfigValue).toHaveBeenCalledWith( + "proxyUrl", + "http://localhost:8787", + ); + }); + + it("sets human name", async () => { + await runConfigCommand(["set", "humanName", "Ravi"]); + + expect(mockedSetConfigValue).toHaveBeenCalledWith("humanName", "Ravi"); + }); + it("masks apiKey output when setting", async () => { const result = await runConfigCommand(["set", "apiKey", "super-secret"]); @@ -223,11 +269,13 @@ describe("config command", () => { mockedResolveConfig.mockResolvedValueOnce({ registryUrl: "http://localhost:8787", apiKey: "super-secret", + humanName: "Ravi", }); const result = await runConfigCommand(["show"]); expect(result.stdout).toContain("http://localhost:8787"); expect(result.stdout).toContain('"apiKey": "********"'); + expect(result.stdout).toContain('"humanName": "Ravi"'); }); }); diff --git a/apps/cli/src/commands/config.ts b/apps/cli/src/commands/config.ts index 0737333..05707a8 100644 --- a/apps/cli/src/commands/config.ts +++ b/apps/cli/src/commands/config.ts @@ -11,6 +11,10 @@ import { setConfigValue, 
writeConfig, } from "../config/manager.js"; +import { + fetchRegistryMetadata, + normalizeRegistryUrl, +} from "../config/registry-metadata.js"; import { writeStderrLine, writeStdoutLine } from "../io.js"; import { withErrorHandling } from "./helpers.js"; @@ -18,7 +22,9 @@ const logger = createLogger({ service: "cli", module: "config" }); const VALID_KEYS = [ "registryUrl", + "proxyUrl", "apiKey", + "humanName", ] as const satisfies readonly CliConfigKey[]; const isValidConfigKey = (value: string): value is CliConfigKey => { @@ -59,6 +65,10 @@ interface ConfigInitOptions { registryUrl?: string; } +type ConfigCommandDependencies = { + fetchImpl?: typeof fetch; +}; + const getEnvRegistryUrlOverride = (): string | undefined => { const envCandidates = [ process.env.CLAWDENTITY_REGISTRY_URL, @@ -70,7 +80,9 @@ const getEnvRegistryUrlOverride = (): string | undefined => { }); }; -export const createConfigCommand = (): Command => { +export const createConfigCommand = ( + dependencies: ConfigCommandDependencies = {}, +): Command => { const configCommand = new Command("config").description( "Manage local CLI configuration", ); @@ -94,19 +106,33 @@ export const createConfigCommand = (): Command => { } const config = await readConfig(); - const registryUrl = + const requestedRegistryUrl = options.registryUrl ?? getEnvRegistryUrlOverride() ?? 
config.registryUrl; + const normalizedRegistryUrl = + normalizeRegistryUrl(requestedRegistryUrl); + const metadata = await fetchRegistryMetadata(normalizedRegistryUrl, { + fetchImpl: dependencies.fetchImpl, + }); await writeConfig({ ...config, - registryUrl, + registryUrl: metadata.registryUrl, + proxyUrl: metadata.proxyUrl, }); writeStdoutLine(`Initialized config at ${configFilePath}`); writeStdoutLine( - JSON.stringify(maskApiKey({ ...config, registryUrl }), null, 2), + JSON.stringify( + maskApiKey({ + ...config, + registryUrl: metadata.registryUrl, + proxyUrl: metadata.proxyUrl, + }), + null, + 2, + ), ); }), ); diff --git a/apps/cli/src/commands/connector.test.ts b/apps/cli/src/commands/connector.test.ts index 43031e9..8f6d2cd 100644 --- a/apps/cli/src/commands/connector.test.ts +++ b/apps/cli/src/commands/connector.test.ts @@ -36,7 +36,10 @@ async function runConnectorCommand( resolveCurrentModulePathImpl?: () => string; resolveCurrentPlatformImpl?: () => NodeJS.Platform; resolveCurrentUidImpl?: () => number; - resolveConfigImpl?: () => Promise<{ registryUrl: string }>; + resolveConfigImpl?: () => Promise<{ + registryUrl: string; + proxyUrl?: string; + }>; resolveNodeExecPathImpl?: () => string; writeFileImpl?: ( filePath: string, @@ -257,7 +260,8 @@ describe("connector command", () => { }), readFileImpl, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", + proxyUrl: "https://proxy.clawdentity.com", }), }); @@ -275,7 +279,7 @@ describe("connector command", () => { }, outboundBaseUrl: "http://127.0.0.1:19400", outboundPath: "/v1/outbound", - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }), ); expect(result.stdout).toContain( @@ -291,6 +295,63 @@ describe("connector command", () => { expect(result.exitCode).toBeUndefined(); }); + it("uses hook token from openclaw-relay.json when connector option is omitted", async () => { + const 
startConnectorRuntime = vi.fn(async () => ({ + outboundUrl: "http://127.0.0.1:19400/v1/outbound", + waitUntilStopped: async () => {}, + })); + const readFileImpl = vi.fn(async (path: string): Promise => { + if (path.endsWith("/ait.jwt")) { + return "mock.ait.jwt\n"; + } + + if (path.endsWith("/secret.key")) { + return "mock.secret.key\n"; + } + + if (path.endsWith("/identity.json")) { + return JSON.stringify({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + }); + } + + if (path.endsWith("/registry-auth.json")) { + return JSON.stringify({ + accessToken: "clw_agt_access", + refreshToken: "clw_rft_refresh", + }); + } + + if (path.endsWith("/openclaw-relay.json")) { + return JSON.stringify({ + openclawBaseUrl: "http://127.0.0.1:18789", + openclawHookToken: "relay-runtime-token", + }); + } + + throw createErrnoError("ENOENT"); + }); + + const result = await runConnectorCommand(["start", "alpha-agent"], { + getConfigDirImpl: () => "/mock-home/.clawdentity", + loadConnectorModule: async () => ({ + startConnectorRuntime, + }), + readFileImpl, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com", + proxyUrl: "https://proxy.clawdentity.com", + }), + }); + + expect(startConnectorRuntime).toHaveBeenCalledWith( + expect.objectContaining({ + openclawHookToken: "relay-runtime-token", + }), + ); + expect(result.exitCode).toBeUndefined(); + }); + it("fails when required agent credentials are missing", async () => { const readFileImpl = vi.fn(async (_path: string): Promise => { throw createErrnoError("ENOENT"); @@ -304,7 +365,8 @@ describe("connector command", () => { }), readFileImpl, resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", + proxyUrl: "https://proxy.clawdentity.com", }), }); @@ -332,7 +394,8 @@ describe("connector command", () => { loadConnectorModule: async () => ({}), readFileImpl, resolveConfigImpl: async () => ({ - registryUrl: 
"https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", + proxyUrl: "https://proxy.clawdentity.com", }), }); diff --git a/apps/cli/src/commands/connector.ts b/apps/cli/src/commands/connector.ts index eeb4c22..2c34a29 100644 --- a/apps/cli/src/commands/connector.ts +++ b/apps/cli/src/commands/connector.ts @@ -8,6 +8,7 @@ import { startConnectorRuntime as bundledStartConnectorRuntime } from "@clawdent import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; +import { fetchRegistryMetadata } from "../config/registry-metadata.js"; import { writeStdoutLine } from "../io.js"; import { assertValidAgentName } from "./agent-name.js"; import { withErrorHandling } from "./helpers.js"; @@ -20,6 +21,8 @@ const IDENTITY_FILE_NAME = "identity.json"; const AIT_FILE_NAME = "ait.jwt"; const SECRET_KEY_FILE_NAME = "secret.key"; const REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; +const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; +const OPENCLAW_CONNECTORS_FILE_NAME = "openclaw-connectors.json"; const SERVICE_LOG_DIR_NAME = "logs"; const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; @@ -65,7 +68,10 @@ type ConnectorModule = { }; type ReadFileText = (path: string, encoding: "utf8") => Promise; -type ResolveConfigLike = () => Promise<{ registryUrl: string }>; +type ResolveConfigLike = () => Promise<{ + registryUrl: string; + proxyUrl?: string; +}>; type ExecFileLike = ( file: string, args?: readonly string[], @@ -91,6 +97,7 @@ type ResolveCurrentUidLike = () => number; type ConnectorCommandDependencies = { execFileImpl?: ExecFileLike; + fetchImpl?: typeof fetch; getConfigDirImpl?: typeof getConfigDir; getHomeDirImpl?: ResolveHomeDirLike; loadConnectorModule?: () => Promise; @@ -128,6 +135,10 @@ export type ConnectorStartResult = { runtime?: ConnectorRuntime | undefined; }; +type OpenclawRelayRuntimeConfig = { + openclawHookToken?: 
string; +}; + export type ConnectorServiceInstallResult = { serviceFilePath: string; serviceName: string; @@ -231,6 +242,92 @@ function parseConnectorBaseUrl(value: string): string { return parsed.toString(); } +function parseProxyWebsocketUrl(value: string): string { + let parsed: URL; + try { + parsed = new URL(value); + } catch { + throw createCliError( + "CLI_CONNECTOR_INVALID_PROXY_URL", + "Proxy websocket URL is invalid", + ); + } + + if ( + parsed.protocol !== "ws:" && + parsed.protocol !== "wss:" && + parsed.protocol !== "http:" && + parsed.protocol !== "https:" + ) { + throw createCliError( + "CLI_CONNECTOR_INVALID_PROXY_URL", + "Proxy websocket URL is invalid", + ); + } + + return parsed.toString(); +} + +function resolveProxyWebsocketUrlFromEnv(): string | undefined { + const explicitProxyWsUrl = process.env.CLAWDENTITY_PROXY_WS_URL; + if ( + typeof explicitProxyWsUrl === "string" && + explicitProxyWsUrl.trim().length > 0 + ) { + return parseProxyWebsocketUrl(explicitProxyWsUrl.trim()); + } + + const proxyUrl = process.env.CLAWDENTITY_PROXY_URL; + if (typeof proxyUrl === "string" && proxyUrl.trim().length > 0) { + return parseProxyWebsocketUrl(proxyUrl.trim()); + } + + return undefined; +} + +async function resolveProxyWebsocketUrl(input: { + explicitProxyWsUrl?: string; + configProxyUrl?: string; + registryUrl: string; + fetchImpl?: typeof fetch; +}): Promise { + if ( + typeof input.explicitProxyWsUrl === "string" && + input.explicitProxyWsUrl.trim().length > 0 + ) { + return parseProxyWebsocketUrl(input.explicitProxyWsUrl.trim()); + } + + const fromEnv = resolveProxyWebsocketUrlFromEnv(); + if (fromEnv !== undefined) { + return fromEnv; + } + + if ( + typeof input.configProxyUrl === "string" && + input.configProxyUrl.trim().length > 0 + ) { + return parseProxyWebsocketUrl(input.configProxyUrl.trim()); + } + + const fetchImpl = input.fetchImpl ?? 
globalThis.fetch; + if (typeof fetchImpl === "function") { + try { + const metadata = await fetchRegistryMetadata(input.registryUrl, { + fetchImpl, + }); + return parseProxyWebsocketUrl(metadata.proxyUrl); + } catch { + // Fall through to deterministic operator guidance below. + } + } + + throw createCliError( + "CLI_CONNECTOR_PROXY_URL_REQUIRED", + "Proxy URL is required for connector startup. Run `clawdentity invite redeem ` or set CLAWDENTITY_PROXY_URL / CLAWDENTITY_PROXY_WS_URL.", + ); +} + function normalizeOutboundPath(pathValue: string): string { const trimmed = pathValue.trim(); if (trimmed.length === 0) { @@ -243,15 +340,54 @@ function normalizeOutboundPath(pathValue: string): string { return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; } -function resolveConnectorBaseUrl(): string { +function resolveConnectorBaseUrlFromEnv(): string | undefined { const value = process.env.CLAWDENTITY_CONNECTOR_BASE_URL; if (typeof value !== "string" || value.trim().length === 0) { - return DEFAULT_CONNECTOR_BASE_URL; + return undefined; } return parseConnectorBaseUrl(value.trim()); } +async function readConnectorAssignedBaseUrl( + configDir: string, + agentName: string, + readFileImpl: ReadFileText, +): Promise { + const assignmentsPath = join(configDir, OPENCLAW_CONNECTORS_FILE_NAME); + let raw: string; + try { + raw = await readFileImpl(assignmentsPath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + throw createCliError( + "CLI_CONNECTOR_INVALID_ASSIGNMENTS", + "Connector assignments config is invalid JSON", + { assignmentsPath }, + ); + } + + if (!isRecord(parsed) || !isRecord(parsed.agents)) { + return undefined; + } + + const entry = parsed.agents[agentName]; + if (!isRecord(entry) || typeof entry.connectorBaseUrl !== "string") { + return undefined; + } + + return parseConnectorBaseUrl(entry.connectorBaseUrl); +} + 
function resolveConnectorOutboundPath(): string { const value = process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH; if (typeof value !== "string" || value.trim().length === 0) { @@ -297,6 +433,45 @@ async function readRequiredTrimmedFile( return trimmed; } +async function readRelayRuntimeConfig( + configDir: string, + readFileImpl: ReadFileText, +): Promise { + const filePath = join(configDir, OPENCLAW_RELAY_RUNTIME_FILE_NAME); + let raw: string; + try { + raw = await readFileImpl(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + return undefined; + } + if (!isRecord(parsed)) { + return undefined; + } + + const openclawHookToken = + typeof parsed.openclawHookToken === "string" && + parsed.openclawHookToken.trim().length > 0 + ? parsed.openclawHookToken.trim() + : undefined; + if (!openclawHookToken) { + return undefined; + } + + return { + openclawHookToken, + }; +} + function parseJsonRecord( value: string, code: string, @@ -819,6 +994,7 @@ export async function startConnectorForAgent( const getConfigDirImpl = dependencies.getConfigDirImpl ?? getConfigDir; const readFileImpl: ReadFileText = dependencies.readFileImpl ?? ((path, encoding) => readFile(path, encoding)); + const fetchImpl = dependencies.fetchImpl ?? globalThis.fetch; const loadConnectorModule = dependencies.loadConnectorModule ?? 
loadDefaultConnectorModule; const configDir = getConfigDirImpl(); @@ -829,6 +1005,8 @@ export async function startConnectorForAgent( rawSecretKey, rawIdentity, rawRegistryAuth, + assignedConnectorBaseUrl, + relayRuntimeConfig, config, connectorModule, ] = await Promise.all([ @@ -852,6 +1030,8 @@ export async function startConnectorForAgent( REGISTRY_AUTH_FILE_NAME, readFileImpl, ), + readConnectorAssignedBaseUrl(configDir, agentName, readFileImpl), + readRelayRuntimeConfig(configDir, readFileImpl), resolveConfigImpl(), loadConnectorModule(), ]); @@ -865,7 +1045,18 @@ export async function startConnectorForAgent( const identity = parseAgentIdentity(rawIdentity); const registryAuth = parseRegistryAuth(rawRegistryAuth); - const outboundBaseUrl = resolveConnectorBaseUrl(); + const resolvedProxyWebsocketUrl = await resolveProxyWebsocketUrl({ + explicitProxyWsUrl: commandOptions.proxyWsUrl, + configProxyUrl: config.proxyUrl, + registryUrl: config.registryUrl, + fetchImpl, + }); + const openclawHookToken = + commandOptions.openclawHookToken ?? relayRuntimeConfig?.openclawHookToken; + const outboundBaseUrl = + resolveConnectorBaseUrlFromEnv() ?? + assignedConnectorBaseUrl ?? + DEFAULT_CONNECTOR_BASE_URL; const outboundPath = resolveConnectorOutboundPath(); const runtime = await connectorModule.startConnectorRuntime({ agentName, @@ -873,10 +1064,10 @@ export async function startConnectorForAgent( registryUrl: config.registryUrl, outboundBaseUrl, outboundPath, - proxyWebsocketUrl: commandOptions.proxyWsUrl, + proxyWebsocketUrl: resolvedProxyWebsocketUrl, openclawBaseUrl: commandOptions.openclawBaseUrl, openclawHookPath: commandOptions.openclawHookPath, - openclawHookToken: commandOptions.openclawHookToken, + openclawHookToken, credentials: { agentDid: identity.did, ait: rawAit, @@ -898,7 +1089,7 @@ export async function startConnectorForAgent( ? runtime.websocketUrl : typeof runtime.proxyWebsocketUrl === "string" ? 
runtime.proxyWebsocketUrl - : undefined + : resolvedProxyWebsocketUrl : undefined; return { diff --git a/apps/cli/src/commands/invite.test.ts b/apps/cli/src/commands/invite.test.ts index 839b8a9..23502bc 100644 --- a/apps/cli/src/commands/invite.test.ts +++ b/apps/cli/src/commands/invite.test.ts @@ -23,7 +23,11 @@ async function runInviteCommand( args: string[], input: { fetchImpl?: typeof fetch; - resolveConfigImpl?: () => Promise<{ registryUrl: string; apiKey?: string }>; + resolveConfigImpl?: () => Promise<{ + registryUrl: string; + apiKey?: string; + humanName?: string; + }>; setConfigValueImpl?: typeof setConfigValue; } = {}, ) { @@ -159,12 +163,16 @@ describe("invite command helpers", () => { name: "invite-issued", token: "clw_pat_invite_token", }, + human: { + displayName: "Invitee Alpha", + }, + proxyUrl: "https://proxy.clawdentity.com", }), ); const result = await redeemInvite( "clw_invite_123", - {}, + { displayName: "Invitee Alpha" }, { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ @@ -175,6 +183,8 @@ describe("invite command helpers", () => { expect(result.apiKeyToken).toBe("clw_pat_invite_token"); expect(result.apiKeyName).toBe("invite-issued"); + expect(result.humanName).toBe("Invitee Alpha"); + expect(result.proxyUrl).toBe("https://proxy.clawdentity.com/"); const [calledUrl, calledInit] = mockFetch.mock.calls[0] as [ string, RequestInit, @@ -186,6 +196,8 @@ describe("invite command helpers", () => { ); expect(JSON.parse(String(calledInit.body))).toEqual({ code: "clw_invite_123", + displayName: "Invitee Alpha", + apiKeyName: undefined, }); }); @@ -195,7 +207,7 @@ describe("invite command helpers", () => { await expect( redeemInvite( "clw_invite_123", - {}, + { displayName: "Invitee Alpha" }, { fetchImpl: mockFetch as unknown as typeof fetch, resolveConfigImpl: async () => ({ @@ -208,15 +220,39 @@ describe("invite command helpers", () => { message: "Invite redeem response is invalid", }); }); + + it("requires 
display name for invite redeem", async () => { + await expect( + redeemInvite( + "clw_invite_123", + {}, + { + fetchImpl: mockFetch as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.clawdentity.com", + }), + }, + ), + ).rejects.toMatchObject({ + code: "CLI_INVITE_REDEEM_DISPLAY_NAME_REQUIRED", + }); + expect(mockFetch).not.toHaveBeenCalled(); + }); }); describe("persist redeem config", () => { - it("saves registry url and api key sequentially", async () => { + it("saves registry url, api key, and proxy url sequentially", async () => { const setConfigValueMock = vi.fn(async () => {}); - await persistRedeemConfig("https://api.clawdentity.com/", "token", { - setConfigValueImpl: setConfigValueMock, - }); + await persistRedeemConfig( + "https://api.clawdentity.com/", + "token", + "https://proxy.clawdentity.com/", + "Invitee Alpha", + { + setConfigValueImpl: setConfigValueMock, + }, + ); expect(setConfigValueMock).toHaveBeenNthCalledWith( 1, @@ -224,6 +260,16 @@ describe("persist redeem config", () => { "https://api.clawdentity.com/", ); expect(setConfigValueMock).toHaveBeenNthCalledWith(2, "apiKey", "token"); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 3, + "proxyUrl", + "https://proxy.clawdentity.com/", + ); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 4, + "humanName", + "Invitee Alpha", + ); }); it("throws CLI error when config persistence fails", async () => { @@ -232,9 +278,15 @@ describe("persist redeem config", () => { }); await expect( - persistRedeemConfig("https://api.clawdentity.com/", "token", { - setConfigValueImpl: setConfigValueMock, - }), + persistRedeemConfig( + "https://api.clawdentity.com/", + "token", + "https://proxy.clawdentity.com/", + "Invitee Alpha", + { + setConfigValueImpl: setConfigValueMock, + }, + ), ).rejects.toMatchObject({ code: "CLI_INVITE_REDEEM_CONFIG_PERSISTENCE_FAILED", message: "Failed to save redeemed API key locally", @@ -279,19 +331,24 @@ describe("invite command 
output", () => { name: "invite-issued", token: "clw_pat_invite_token", }, + human: { + displayName: "Invitee Alpha", + }, + proxyUrl: "https://proxy.clawdentity.com", }), ); const setConfigValueMock = vi.fn(async () => {}); - const result = await runInviteCommand(["redeem", "clw_invite_123"], { - setConfigValueImpl: setConfigValueMock, - resolveConfigImpl: async () => ({ - registryUrl: "https://api.clawdentity.com", - }), - }); + const result = await runInviteCommand( + ["redeem", "clw_invite_123", "--display-name", "Invitee Alpha"], + { + setConfigValueImpl: setConfigValueMock, + }, + ); expect(result.exitCode).toBeUndefined(); expect(result.stdout).toContain("Invite redeemed"); + expect(result.stdout).toContain("Human name: Invitee Alpha"); expect(result.stdout).toContain("API key token (shown once):"); expect(result.stdout).toContain("clw_pat_invite_token"); expect(result.stdout).toContain("API key saved to local config"); @@ -305,6 +362,16 @@ describe("invite command output", () => { "apiKey", "clw_pat_invite_token", ); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 3, + "proxyUrl", + "https://proxy.clawdentity.com/", + ); + expect(setConfigValueMock).toHaveBeenNthCalledWith( + 4, + "humanName", + "Invitee Alpha", + ); }); it("sets exit code and stderr on create failure", async () => { diff --git a/apps/cli/src/commands/invite.ts b/apps/cli/src/commands/invite.ts index 1320cc5..a8c4d5c 100644 --- a/apps/cli/src/commands/invite.ts +++ b/apps/cli/src/commands/invite.ts @@ -6,6 +6,11 @@ import { resolveConfig, setConfigValue, } from "../config/manager.js"; +import { + fetchRegistryMetadata, + normalizeRegistryUrl, + toRegistryRequestUrl, +} from "../config/registry-metadata.js"; import { writeStdoutLine } from "../io.js"; import { withErrorHandling } from "./helpers.js"; @@ -18,6 +23,8 @@ type InviteCreateOptions = { type InviteRedeemOptions = { registryUrl?: string; + displayName?: string; + apiKeyName?: string; }; type InviteRecord = { @@ -36,6 +43,8 @@ 
export type InviteRedeemResult = { apiKeyToken: string; apiKeyId?: string; apiKeyName?: string; + humanName: string; + proxyUrl: string; registryUrl: string; }; @@ -84,21 +93,29 @@ function createCliError(code: string, message: string): AppError { }); } +function normalizeProxyUrl(value: string): string { + try { + const parsed = new URL(value); + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + throw new Error("invalid protocol"); + } + + return parsed.toString(); + } catch { + throw createCliError( + "CLI_INVITE_REDEEM_INVALID_RESPONSE", + "Invite redeem response is invalid", + ); + } +} + function resolveRegistryUrl(input: { overrideRegistryUrl: string | undefined; configRegistryUrl: string; }): string { const candidate = parseNonEmptyString(input.overrideRegistryUrl) || input.configRegistryUrl; - - try { - return new URL(candidate).toString(); - } catch { - throw createCliError( - "CLI_INVITE_INVALID_REGISTRY_URL", - "Registry URL is invalid", - ); - } + return normalizeRegistryUrl(candidate); } function requireApiKey(config: CliConfig): string { @@ -112,14 +129,6 @@ function requireApiKey(config: CliConfig): string { ); } -function toRegistryRequestUrl(registryUrl: string, path: string): string { - const normalizedBaseUrl = registryUrl.endsWith("/") - ? registryUrl - : `${registryUrl}/`; - - return new URL(path.slice(1), normalizedBaseUrl).toString(); -} - function extractRegistryErrorCode(payload: unknown): string | undefined { if (!isRecord(payload)) { return undefined; @@ -308,11 +317,23 @@ function parseInviteRedeemResponse( const apiKeyId = parseNonEmptyString(apiKeySource.id); const apiKeyName = parseNonEmptyString(apiKeySource.name); + const humanSource = isRecord(payload.human) ? 
payload.human : undefined; + const humanName = parseNonEmptyString(humanSource?.displayName); + const proxyUrl = parseNonEmptyString(payload.proxyUrl); + + if (humanName.length === 0) { + throw createCliError( + "CLI_INVITE_REDEEM_INVALID_RESPONSE", + "Invite redeem response is invalid", + ); + } return { apiKeyToken, apiKeyId: apiKeyId.length > 0 ? apiKeyId : undefined, apiKeyName: apiKeyName.length > 0 ? apiKeyName : undefined, + humanName, + proxyUrl, }; } @@ -384,6 +405,15 @@ export async function redeemInvite( ); } + const displayName = parseNonEmptyString(options.displayName); + if (displayName.length === 0) { + throw createCliError( + "CLI_INVITE_REDEEM_DISPLAY_NAME_REQUIRED", + "Display name is required. Pass --display-name .", + ); + } + const apiKeyName = parseNonEmptyString(options.apiKeyName); + const runtime = await resolveInviteRuntime(options.registryUrl, dependencies); const response = await executeInviteRequest({ fetchImpl: runtime.fetchImpl, @@ -393,7 +423,11 @@ export async function redeemInvite( headers: { "content-type": "application/json", }, - body: JSON.stringify({ code: inviteCode }), + body: JSON.stringify({ + code: inviteCode, + displayName, + apiKeyName: apiKeyName.length > 0 ? apiKeyName : undefined, + }), }, }); @@ -405,8 +439,19 @@ export async function redeemInvite( ); } + const parsedRedeem = parseInviteRedeemResponse(responseBody); + const proxyUrl = + parsedRedeem.proxyUrl.length > 0 + ? 
parsedRedeem.proxyUrl + : ( + await fetchRegistryMetadata(runtime.registryUrl, { + fetchImpl: runtime.fetchImpl, + }) + ).proxyUrl; + return { - ...parseInviteRedeemResponse(responseBody), + ...parsedRedeem, + proxyUrl: normalizeProxyUrl(proxyUrl), registryUrl: runtime.registryUrl, }; } @@ -414,6 +459,8 @@ export async function redeemInvite( export async function persistRedeemConfig( registryUrl: string, apiKeyToken: string, + proxyUrl: string, + humanName: string, dependencies: InvitePersistenceDependencies = {}, ): Promise { const setConfigValueImpl = dependencies.setConfigValueImpl ?? setConfigValue; @@ -421,6 +468,8 @@ export async function persistRedeemConfig( try { await setConfigValueImpl("registryUrl", registryUrl); await setConfigValueImpl("apiKey", apiKeyToken); + await setConfigValueImpl("proxyUrl", proxyUrl); + await setConfigValueImpl("humanName", humanName); } catch (error) { logger.warn("cli.invite_redeem_config_persist_failed", { errorName: error instanceof Error ? error.name : "unknown", @@ -470,6 +519,11 @@ export const createInviteCommand = ( inviteCommand .command("redeem ") .description("Redeem a registry invite code and store PAT locally") + .requiredOption( + "--display-name ", + "Human display name used for onboarding", + ) + .option("--api-key-name ", "Optional API key label") .option("--registry-url ", "Override registry URL") .action( withErrorHandling( @@ -480,10 +534,12 @@ export const createInviteCommand = ( logger.info("cli.invite_redeemed", { apiKeyId: result.apiKeyId, apiKeyName: result.apiKeyName, + humanName: result.humanName, registryUrl: result.registryUrl, }); writeStdoutLine("Invite redeemed"); + writeStdoutLine(`Human name: ${result.humanName}`); if (result.apiKeyName) { writeStdoutLine(`API key name: ${result.apiKeyName}`); } @@ -494,6 +550,8 @@ export const createInviteCommand = ( await persistRedeemConfig( result.registryUrl, result.apiKeyToken, + result.proxyUrl, + result.humanName, dependencies, ); writeStdoutLine("API 
key saved to local config"); diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 77781ca..b9b7592 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -14,7 +14,9 @@ import { decodeOpenclawInviteCode, runOpenclawDoctor, runOpenclawRelayTest, + setupOpenclawRelay, setupOpenclawRelayFromInvite, + setupOpenclawSelfReady, } from "./openclaw.js"; type OpenclawSandbox = { @@ -71,13 +73,51 @@ function seedLocalAgentCredentials(homeDir: string, agentName: string): void { writeFileSync(join(agentDir, "ait.jwt"), "mock.ait.jwt", "utf8"); } +function seedPeersConfig( + homeDir: string, + peers: Record< + string, + { did: string; proxyUrl: string; agentName?: string; humanName?: string } + >, +): void { + const peersPath = join(homeDir, ".clawdentity", "peers.json"); + mkdirSync(dirname(peersPath), { recursive: true }); + writeFileSync(peersPath, `${JSON.stringify({ peers }, null, 2)}\n`, "utf8"); +} + +function connectorReadyFetch(): typeof fetch { + return async () => + new Response( + JSON.stringify({ + status: "ok", + websocketConnected: true, + inboundInbox: { + pendingCount: 0, + pendingBytes: 0, + replayerActive: false, + }, + openclawHook: { + url: "http://127.0.0.1:18789/hooks/agent", + lastAttemptStatus: "ok", + }, + }), + { + status: 200, + headers: { + "content-type": "application/json", + }, + }, + ); +} + describe("openclaw command helpers", () => { it("creates and decodes invite codes", () => { const invite = createOpenclawInviteCode({ did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", proxyUrl: "https://beta.example.com/hooks/agent", peerAlias: "beta", - name: "Beta Agent", + agentName: "beta", + humanName: "Ira", }); expect(invite.code.startsWith("clawd1_")).toBe(true); @@ -87,32 +127,22 @@ describe("openclaw command helpers", () => { expect(decoded.did).toBe("did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"); 
expect(decoded.proxyUrl).toBe("https://beta.example.com/hooks/agent"); expect(decoded.alias).toBe("beta"); - expect(decoded.name).toBe("Beta Agent"); + expect(decoded.agentName).toBe("beta"); + expect(decoded.humanName).toBe("Ira"); expect(decoded.issuedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); }); - it("applies relay setup from invite and patches OpenClaw config", async () => { + it("applies relay setup and patches OpenClaw config", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "http://beta-proxy.local:4000/hooks/agent", - peerAlias: "beta", - name: "Beta", - }); - - const result = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, + const result = await setupOpenclawRelay("alpha", { homeDir: sandbox.homeDir, openclawDir: sandbox.openclawDir, transformSource: sandbox.transformSourcePath, }); - expect(result.peerAlias).toBe("beta"); - expect(result.peerDid).toBe("did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7"); - const copiedTransform = readFileSync(result.transformTargetPath, "utf8"); expect(copiedTransform).toContain("relay(ctx)"); @@ -121,6 +151,8 @@ describe("openclaw command helpers", () => { ) as { hooks: { enabled?: boolean; + token?: string; + defaultSessionKey?: string; allowRequestSessionKey?: boolean; allowedSessionKeyPrefixes?: string[]; mappings?: Array>; @@ -128,8 +160,12 @@ describe("openclaw command helpers", () => { }; expect(openclawConfig.hooks.enabled).toBe(true); + expect(typeof openclawConfig.hooks.token).toBe("string"); + expect(openclawConfig.hooks.token?.length ?? 
0).toBeGreaterThan(0); + expect(openclawConfig.hooks.defaultSessionKey).toBe("main"); expect(openclawConfig.hooks.allowRequestSessionKey).toBe(false); expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("main"); expect( openclawConfig.hooks.mappings?.some( (mapping) => @@ -148,13 +184,17 @@ describe("openclaw command helpers", () => { "utf8", ), ) as { - peers: Record; + peers: Record< + string, + { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; + } + >; }; - expect(peers.peers.beta).toEqual({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "http://beta-proxy.local:4000/hooks/agent", - name: "Beta", - }); + expect(peers.peers).toEqual({}); const selectedAgent = readFileSync( join(sandbox.homeDir, ".clawdentity", "openclaw-agent-name"), @@ -163,6 +203,13 @@ describe("openclaw command helpers", () => { expect(selectedAgent).toBe("alpha"); expect(result.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + expect(result.connectorBaseUrl).toBe("http://127.0.0.1:19400"); + expect(readFileSync(result.relayTransformRuntimePath, "utf8")).toContain( + '"connectorBaseUrl": "http://host.docker.internal:19400"', + ); + expect(readFileSync(result.relayTransformPeersPath, "utf8")).toContain( + '"peers": {}', + ); const relayRuntimeConfig = JSON.parse( readFileSync( join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), @@ -170,10 +217,320 @@ describe("openclaw command helpers", () => { ), ) as { openclawBaseUrl: string; + openclawHookToken?: string; + relayTransformPeersPath?: string; updatedAt: string; }; expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + expect(relayRuntimeConfig.openclawHookToken).toBe( + openclawConfig.hooks.token, + ); + expect(relayRuntimeConfig.relayTransformPeersPath).toBe( + result.relayTransformPeersPath, + ); expect(relayRuntimeConfig.updatedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + + const 
connectorAssignments = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-connectors.json"), + "utf8", + ), + ) as { + agents: Record; + }; + expect(connectorAssignments.agents.alpha.connectorBaseUrl).toBe( + "http://127.0.0.1:19400", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("supports setup-only mode without runtime startup", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const result = await setupOpenclawSelfReady("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + noRuntimeStart: true, + }); + + expect(result.runtimeMode).toBe("none"); + expect(result.runtimeStatus).toBe("skipped"); + expect(result.websocketStatus).toBe("skipped"); + expect(readFileSync(result.transformTargetPath, "utf8")).toContain( + "relay(ctx)", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("auto-recovers setup checklist when OpenClaw has pending gateway device approvals", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const pendingPath = join(sandbox.openclawDir, "devices", "pending.json"); + mkdirSync(dirname(pendingPath), { recursive: true }); + writeFileSync( + pendingPath, + JSON.stringify( + { + "request-1": { + requestId: "request-1", + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const approvedRequestIds: string[] = []; + const result = await setupOpenclawSelfReady("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + noRuntimeStart: true, + gatewayDeviceApprovalRunner: async ({ requestId }) => { + approvedRequestIds.push(requestId); + writeFileSync(pendingPath, JSON.stringify({}, null, 2), "utf8"); + return { + ok: true, + }; + }, + }); + + expect(result.runtimeMode).toBe("none"); + expect(approvedRequestIds).toEqual(["request-1"]); + const 
pendingAfterRecovery = JSON.parse( + readFileSync(pendingPath, "utf8"), + ) as Record; + expect(Object.keys(pendingAfterRecovery)).toHaveLength(0); + } finally { + sandbox.cleanup(); + } + }); + + it("fails setup checklist when gateway approval runner is unavailable", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const pendingPath = join(sandbox.openclawDir, "devices", "pending.json"); + mkdirSync(dirname(pendingPath), { recursive: true }); + writeFileSync( + pendingPath, + JSON.stringify( + { + "request-1": { + requestId: "request-1", + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + await expect( + setupOpenclawSelfReady("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + noRuntimeStart: true, + gatewayDeviceApprovalRunner: async () => ({ + ok: false, + unavailable: true, + errorMessage: "spawn openclaw ENOENT", + }), + }), + ).rejects.toMatchObject({ + code: "CLI_OPENCLAW_SETUP_CHECKLIST_FAILED", + }); + } finally { + sandbox.cleanup(); + } + }); + + it("preserves explicit hook request session key (including subagent keys)", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + hooks: { + enabled: true, + token: "existing-token", + defaultSessionKey: "subagent:planner", + allowedSessionKeyPrefixes: ["hook:"], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + token?: string; + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.token).toBe("existing-token"); 
+ expect(openclawConfig.hooks.defaultSessionKey).toBe("subagent:planner"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( + "subagent:planner", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("normalizes legacy canonical hook default session key to request format", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + hooks: { + enabled: true, + token: "existing-token", + defaultSessionKey: "agent:ops:subagent:planner", + allowedSessionKeyPrefixes: ["hook:"], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + token?: string; + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.token).toBe("existing-token"); + expect(openclawConfig.hooks.defaultSessionKey).toBe("subagent:planner"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( + "subagent:planner", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("derives hook default session key from OpenClaw session scope and main key", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + session: { mainKey: "work" }, + agents: { + list: [{ id: "main" }, { id: "ops-team", default: true }], + }, + hooks: { + enabled: false, + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await 
setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.defaultSessionKey).toBe("work"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("work"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses global hook default session when OpenClaw session scope is global", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + session: { scope: "global" }, + hooks: { + enabled: false, + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.defaultSessionKey).toBe("global"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( + "global", + ); } finally { sandbox.cleanup(); } @@ -253,6 +610,238 @@ describe("openclaw command helpers", () => { } }); + it("resolves OpenClaw state/config paths from env variables", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousStateDir = process.env.OPENCLAW_STATE_DIR; + const previousConfigPath = process.env.OPENCLAW_CONFIG_PATH; + + try { + const customStateDir = join(sandbox.homeDir, ".openclaw-custom"); + const customConfigPath = join(customStateDir, "openclaw.custom.json"); + 
mkdirSync(customStateDir, { recursive: true }); + writeFileSync( + customConfigPath, + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + process.env.OPENCLAW_STATE_DIR = customStateDir; + process.env.OPENCLAW_CONFIG_PATH = customConfigPath; + + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigPath).toBe(customConfigPath); + expect(result.transformTargetPath).toBe( + join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), + ); + } finally { + if (previousStateDir === undefined) { + delete process.env.OPENCLAW_STATE_DIR; + } else { + process.env.OPENCLAW_STATE_DIR = previousStateDir; + } + if (previousConfigPath === undefined) { + delete process.env.OPENCLAW_CONFIG_PATH; + } else { + process.env.OPENCLAW_CONFIG_PATH = previousConfigPath; + } + sandbox.cleanup(); + } + }); + + it("resolves OpenClaw state/config paths from legacy CLAWDBOT_* env aliases", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousStateDir = process.env.CLAWDBOT_STATE_DIR; + const previousConfigPath = process.env.CLAWDBOT_CONFIG_PATH; + const previousOpenclawStateDir = process.env.OPENCLAW_STATE_DIR; + const previousOpenclawConfigPath = process.env.OPENCLAW_CONFIG_PATH; + + try { + const customStateDir = join(sandbox.homeDir, ".clawdbot-custom"); + const customConfigPath = join(customStateDir, "clawdbot.custom.json"); + mkdirSync(customStateDir, { recursive: true }); + writeFileSync( + customConfigPath, + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + delete process.env.OPENCLAW_STATE_DIR; + delete 
process.env.OPENCLAW_CONFIG_PATH; + process.env.CLAWDBOT_STATE_DIR = customStateDir; + process.env.CLAWDBOT_CONFIG_PATH = customConfigPath; + + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigPath).toBe(customConfigPath); + expect(result.transformTargetPath).toBe( + join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), + ); + } finally { + if (previousStateDir === undefined) { + delete process.env.CLAWDBOT_STATE_DIR; + } else { + process.env.CLAWDBOT_STATE_DIR = previousStateDir; + } + if (previousConfigPath === undefined) { + delete process.env.CLAWDBOT_CONFIG_PATH; + } else { + process.env.CLAWDBOT_CONFIG_PATH = previousConfigPath; + } + if (previousOpenclawStateDir === undefined) { + delete process.env.OPENCLAW_STATE_DIR; + } else { + process.env.OPENCLAW_STATE_DIR = previousOpenclawStateDir; + } + if (previousOpenclawConfigPath === undefined) { + delete process.env.OPENCLAW_CONFIG_PATH; + } else { + process.env.OPENCLAW_CONFIG_PATH = previousOpenclawConfigPath; + } + sandbox.cleanup(); + } + }); + + it("resolves default OpenClaw state from OPENCLAW_HOME", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousOpenclawHome = process.env.OPENCLAW_HOME; + const previousOpenclawStateDir = process.env.OPENCLAW_STATE_DIR; + const previousOpenclawConfigPath = process.env.OPENCLAW_CONFIG_PATH; + const previousClawdbotStateDir = process.env.CLAWDBOT_STATE_DIR; + const previousClawdbotConfigPath = process.env.CLAWDBOT_CONFIG_PATH; + + try { + const customHome = join(sandbox.homeDir, "openclaw-home"); + const customStateDir = join(customHome, ".openclaw"); + const 
customConfigPath = join(customStateDir, "openclaw.json"); + mkdirSync(customStateDir, { recursive: true }); + writeFileSync( + customConfigPath, + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + process.env.OPENCLAW_HOME = customHome; + delete process.env.OPENCLAW_STATE_DIR; + delete process.env.OPENCLAW_CONFIG_PATH; + delete process.env.CLAWDBOT_STATE_DIR; + delete process.env.CLAWDBOT_CONFIG_PATH; + + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigPath).toBe(customConfigPath); + expect(result.transformTargetPath).toBe( + join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), + ); + } finally { + if (previousOpenclawHome === undefined) { + delete process.env.OPENCLAW_HOME; + } else { + process.env.OPENCLAW_HOME = previousOpenclawHome; + } + if (previousOpenclawStateDir === undefined) { + delete process.env.OPENCLAW_STATE_DIR; + } else { + process.env.OPENCLAW_STATE_DIR = previousOpenclawStateDir; + } + if (previousOpenclawConfigPath === undefined) { + delete process.env.OPENCLAW_CONFIG_PATH; + } else { + process.env.OPENCLAW_CONFIG_PATH = previousOpenclawConfigPath; + } + if (previousClawdbotStateDir === undefined) { + delete process.env.CLAWDBOT_STATE_DIR; + } else { + process.env.CLAWDBOT_STATE_DIR = previousClawdbotStateDir; + } + if (previousClawdbotConfigPath === undefined) { + delete process.env.CLAWDBOT_CONFIG_PATH; + } else { + process.env.CLAWDBOT_CONFIG_PATH = previousClawdbotConfigPath; + } + sandbox.cleanup(); + } + }); + + it("allocates distinct connector base URLs per local agent", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, 
"alpha"); + seedLocalAgentCredentials(sandbox.homeDir, "beta"); + + try { + const alphaInvite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + const betaInvite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8", + proxyUrl: "https://alpha.example.com/hooks/agent", + peerAlias: "alpha", + }); + + const alphaResult = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: alphaInvite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const betaOpenclawDir = join(sandbox.homeDir, "openclaw-beta"); + mkdirSync(betaOpenclawDir, { recursive: true }); + writeFileSync( + join(betaOpenclawDir, "openclaw.json"), + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + const betaResult = await setupOpenclawRelayFromInvite("beta", { + inviteCode: betaInvite.code, + homeDir: sandbox.homeDir, + openclawDir: betaOpenclawDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(alphaResult.connectorBaseUrl).toBe("http://127.0.0.1:19400"); + expect(betaResult.connectorBaseUrl).toBe("http://127.0.0.1:19401"); + } finally { + sandbox.cleanup(); + } + }); + it("keeps send-to-peer mapping idempotent across repeated setup", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); @@ -295,7 +884,95 @@ describe("openclaw command helpers", () => { } }); - it("requires peer alias when invite code omits it", async () => { + it("preserves existing OpenClaw hooks token and mirrors it to relay runtime config", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + writeFileSync( + openclawConfigPath, + JSON.stringify( + { + hooks: { + enabled: true, + token: 
"existing-hook-token", + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { token?: string }; + }; + expect(openclawConfig.hooks.token).toBe("existing-hook-token"); + + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawHookToken?: string; + }; + expect(relayRuntimeConfig.openclawHookToken).toBe("existing-hook-token"); + } finally { + sandbox.cleanup(); + } + }); + + it("supports self setup without peer routing details", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const peers = JSON.parse( + readFileSync( + join(sandbox.homeDir, ".clawdentity", "peers.json"), + "utf8", + ), + ) as { + peers: Record< + string, + { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; + } + >; + }; + expect(peers.peers).toEqual({}); + } finally { + sandbox.cleanup(); + } + }); + + it("reports healthy doctor status when relay setup is complete", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); @@ -303,36 +980,42 @@ describe("openclaw command helpers", () => { const invite = createOpenclawInviteCode({ did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", proxyUrl: "https://beta.example.com/hooks/agent", + 
peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), }); - await expect( - setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }), - ).rejects.toThrow( - "Peer alias is required. Include alias in invite code or pass --peer-alias.", + expect(result.status).toBe("healthy"); + expect(result.checks.every((check) => check.status === "pass")).toBe( + true, ); } finally { sandbox.cleanup(); } }); - it("reports healthy doctor status when relay setup is complete", async () => { + it("reports healthy doctor status when setup is complete without peers", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, + await setupOpenclawRelay("alpha", { homeDir: sandbox.homeDir, openclawDir: sandbox.openclawDir, transformSource: sandbox.transformSourcePath, @@ -341,16 +1024,24 @@ describe("openclaw command helpers", () => { const result = await runOpenclawDoctor({ homeDir: sandbox.homeDir, openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }), }); 
expect(result.status).toBe("healthy"); - expect(result.checks.every((check) => check.status === "pass")).toBe( - true, - ); + expect( + result.checks.some( + (check) => + check.id === "state.peers" && + check.status === "pass" && + check.message === + "no peers are configured yet (optional until pairing)", + ), + ).toBe(true); } finally { sandbox.cleanup(); } @@ -380,6 +1071,7 @@ describe("openclaw command helpers", () => { peerAlias: "gamma", resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }), }); @@ -463,6 +1155,7 @@ describe("openclaw command helpers", () => { openclawDir: sandbox.openclawDir, resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }), }); @@ -479,6 +1172,234 @@ describe("openclaw command helpers", () => { } }); + it("fails doctor hook session routing check when hook session constraints drift", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { + allowRequestSessionKey?: boolean; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + openclawConfig.hooks.allowRequestSessionKey = true; + openclawConfig.hooks.allowedSessionKeyPrefixes = ["hook:"]; + writeFileSync( + openclawConfigPath, + `${JSON.stringify(openclawConfig, null, 2)}\n`, + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: 
"https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.hookSessionRouting" && + check.status === "fail" && + check.message.includes("hooks.allowRequestSessionKey is not false"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook session routing check when default session uses canonical agent format", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + openclawConfig.hooks.defaultSessionKey = "agent:main:main"; + openclawConfig.hooks.allowedSessionKeyPrefixes = [ + "hook:", + "agent:main:main", + ]; + writeFileSync( + openclawConfigPath, + `${JSON.stringify(openclawConfig, null, 2)}\n`, + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.hookSessionRouting" && + check.status === "fail" && + check.message.includes("canonical agent format"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor when OpenClaw has pending gateway device approvals", async () => { + const sandbox = createSandbox(); + 
seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const pendingPath = join(sandbox.openclawDir, "devices", "pending.json"); + mkdirSync(dirname(pendingPath), { recursive: true }); + writeFileSync( + pendingPath, + JSON.stringify( + { + "request-1": { + requestId: "request-1", + }, + }, + null, + 2, + ), + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.gatewayDevicePairing" && + check.status === "fail" && + check.message.includes("pending gateway device approvals: 1"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook health check when connector reports replay failures", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const unhealthyConnectorFetch: typeof fetch = async () => + new Response( + JSON.stringify({ + status: "ok", + websocketConnected: true, + inboundInbox: { + pendingCount: 2, + pendingBytes: 512, + oldestPendingAt: "2026-01-01T00:00:00.000Z", + lastReplayError: + "Local OpenClaw hook rejected payload with status 500", + replayerActive: false, + }, + openclawHook: { + url: "http://127.0.0.1:18789/hooks/agent", + lastAttemptStatus: "failed", + lastAttemptAt: "2026-01-01T00:00:00.000Z", + }, + }), + { + status: 200, + headers: { + "content-type": 
"application/json", + }, + }, + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: unhealthyConnectorFetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.openclawHookHealth" && + check.status === "fail" && + check.message.includes( + "connector replay to local OpenClaw hook is failing", + ), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + it("applies --peer filter for doctor command", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); @@ -506,6 +1427,7 @@ describe("openclaw command helpers", () => { JSON.stringify( { registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }, null, @@ -517,8 +1439,10 @@ describe("openclaw command helpers", () => { const baseline = await runOpenclawDoctor({ homeDir: sandbox.homeDir, openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }), }); @@ -558,6 +1482,12 @@ describe("openclaw command helpers", () => { openclawDir: sandbox.openclawDir, transformSource: sandbox.transformSourcePath, }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); const result = await runOpenclawRelayTest({ peer: "beta", @@ -566,6 +1496,7 @@ describe("openclaw command helpers", () => { fetchImpl: async () => new Response(null, { status: 204 }), resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }), }); 
@@ -578,6 +1509,96 @@ describe("openclaw command helpers", () => { } }); + it("auto-selects peer for relay test when exactly one peer is configured", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayTest({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async () => new Response(null, { status: 204 }), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("success"); + expect(result.peerAlias).toBe("beta"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses hook token from relay runtime config when relay test option/env is unset", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousHookToken = process.env.OPENCLAW_HOOK_TOKEN; + delete process.env.OPENCLAW_HOOK_TOKEN; + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + let sentHookToken: string | undefined; + await runOpenclawRelayTest({ + peer: "beta", + homeDir: sandbox.homeDir, + 
openclawDir: sandbox.openclawDir, + fetchImpl: async (_input, init) => { + const headers = new Headers(init?.headers); + sentHookToken = headers.get("x-openclaw-token") ?? undefined; + return new Response(null, { status: 204 }); + }, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(typeof sentHookToken).toBe("string"); + expect(sentHookToken?.length ?? 0).toBeGreaterThan(0); + } finally { + if (previousHookToken === undefined) { + delete process.env.OPENCLAW_HOOK_TOKEN; + } else { + process.env.OPENCLAW_HOOK_TOKEN = previousHookToken; + } + sandbox.cleanup(); + } + }); + it("returns relay test failure when probe is rejected", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); @@ -595,6 +1616,12 @@ describe("openclaw command helpers", () => { openclawDir: sandbox.openclawDir, transformSource: sandbox.transformSourcePath, }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); const result = await runOpenclawRelayTest({ peer: "beta", @@ -604,6 +1631,7 @@ describe("openclaw command helpers", () => { new Response("connector offline", { status: 500 }), resolveConfigImpl: async () => ({ registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", apiKey: "test-api-key", }), }); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index e09f53d..45cfd5d 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -1,6 +1,10 @@ +import { spawn } from "node:child_process"; +import { randomBytes } from "node:crypto"; +import { existsSync } from "node:fs"; import { chmod, copyFile, mkdir, readFile, writeFile } from "node:fs/promises"; import { homedir } from "node:os"; -import { dirname, join } from "node:path"; +import { 
dirname, join, resolve as resolvePath } from "node:path"; +import { fileURLToPath } from "node:url"; import { decodeBase64url, encodeBase64url, @@ -11,6 +15,7 @@ import { Command } from "commander"; import { resolveConfig } from "../config/manager.js"; import { writeStdoutLine } from "../io.js"; import { assertValidAgentName } from "./agent-name.js"; +import { installConnectorServiceForAgent } from "./connector.js"; import { withErrorHandling } from "./helpers.js"; const logger = createLogger({ service: "cli", module: "openclaw" }); @@ -22,17 +27,51 @@ const SECRET_KEY_FILE_NAME = "secret.key"; const PEERS_FILE_NAME = "peers.json"; const OPENCLAW_DIR_NAME = ".openclaw"; const OPENCLAW_CONFIG_FILE_NAME = "openclaw.json"; +const LEGACY_OPENCLAW_STATE_DIR_NAMES = [ + ".clawdbot", + ".moldbot", + ".moltbot", +] as const; +const LEGACY_OPENCLAW_CONFIG_FILE_NAMES = [ + "clawdbot.json", + "moldbot.json", + "moltbot.json", +] as const; const OPENCLAW_AGENT_FILE_NAME = "openclaw-agent-name"; const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; +const OPENCLAW_CONNECTORS_FILE_NAME = "openclaw-connectors.json"; const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; const RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; +const RELAY_RUNTIME_FILE_NAME = "clawdentity-relay.json"; +const RELAY_PEERS_FILE_NAME = "clawdentity-peers.json"; const HOOK_MAPPING_ID = "clawdentity-send-to-peer"; const HOOK_PATH_SEND_TO_PEER = "send-to-peer"; const OPENCLAW_SEND_TO_PEER_HOOK_PATH = "hooks/send-to-peer"; const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; +const DEFAULT_OPENCLAW_MAIN_SESSION_KEY = "main"; +const DEFAULT_CONNECTOR_PORT = 19400; +const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; +const DEFAULT_CONNECTOR_STATUS_PATH = "/v1/status"; +const DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS = 30; +const CONNECTOR_HOST_LOOPBACK = "127.0.0.1"; +const CONNECTOR_HOST_DOCKER = "host.docker.internal"; +const CONNECTOR_HOST_DOCKER_GATEWAY = "gateway.docker.internal"; +const 
CONNECTOR_HOST_LINUX_BRIDGE = "172.17.0.1"; +const CONNECTOR_RUN_DIR_NAME = "run"; const INVITE_CODE_PREFIX = "clawd1_"; const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; const FILE_MODE = 0o600; +const OPENCLAW_HOOK_TOKEN_BYTES = 32; +const OPENCLAW_SETUP_COMMAND_HINT = + "Run: clawdentity openclaw setup "; +const OPENCLAW_SETUP_RESTART_COMMAND_HINT = `${OPENCLAW_SETUP_COMMAND_HINT} and restart OpenClaw`; +const OPENCLAW_SETUP_WITH_BASE_URL_HINT = `${OPENCLAW_SETUP_COMMAND_HINT} --openclaw-base-url `; +const OPENCLAW_PAIRING_COMMAND_HINT = + "Run QR pairing first: clawdentity pair start --qr and clawdentity pair confirm --qr-file "; +const OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT = + "Run: clawdentity openclaw setup (auto-recovers pending OpenClaw gateway device approvals)"; +const OPENCLAW_GATEWAY_APPROVAL_COMMAND = "openclaw"; +const OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS = 10_000; const textEncoder = new TextEncoder(); const textDecoder = new TextDecoder(); @@ -42,23 +81,28 @@ type OpenclawInvitePayload = { did: string; proxyUrl: string; alias?: string; - name?: string; + agentName?: string; + humanName?: string; }; type OpenclawInviteOptions = { did: string; proxyUrl: string; peerAlias?: string; - name?: string; + agentName?: string; + humanName?: string; }; type OpenclawSetupOptions = { - inviteCode: string; - peerAlias?: string; + inviteCode?: string; openclawDir?: string; transformSource?: string; openclawBaseUrl?: string; + runtimeMode?: string; + waitTimeoutSeconds?: string; + noRuntimeStart?: boolean; homeDir?: string; + gatewayDeviceApprovalRunner?: OpenclawGatewayDeviceApprovalRunner; }; type OpenclawDoctorOptions = { @@ -66,6 +110,9 @@ type OpenclawDoctorOptions = { openclawDir?: string; peerAlias?: string; resolveConfigImpl?: typeof resolveConfig; + fetchImpl?: typeof fetch; + includeConfigCheck?: boolean; + includeConnectorRuntimeCheck?: boolean; json?: boolean; }; @@ -75,8 +122,17 @@ type OpenclawDoctorCommandOptions = { json?: boolean; }; +type 
OpenclawSetupCommandOptions = { + openclawDir?: string; + transformSource?: string; + openclawBaseUrl?: string; + runtimeMode?: string; + waitTimeoutSeconds?: string; + noRuntimeStart?: boolean; +}; + type OpenclawRelayTestOptions = { - peer: string; + peer?: string; homeDir?: string; openclawDir?: string; openclawBaseUrl?: string; @@ -88,10 +144,44 @@ type OpenclawRelayTestOptions = { json?: boolean; }; +type OpenclawGatewayDeviceApprovalInput = { + requestId: string; + openclawDir: string; + openclawConfigPath: string; +}; + +type OpenclawGatewayDeviceApprovalExecution = { + ok: boolean; + unavailable?: boolean; + exitCode?: number; + stdout?: string; + stderr?: string; + errorMessage?: string; +}; + +type OpenclawGatewayDeviceApprovalRunner = ( + input: OpenclawGatewayDeviceApprovalInput, +) => Promise; + +type OpenclawGatewayDeviceApprovalAttempt = { + requestId: string; + ok: boolean; + unavailable: boolean; + reason?: string; + exitCode?: number; +}; + +type OpenclawGatewayDeviceApprovalSummary = { + gatewayDevicePendingPath: string; + pendingRequestIds: string[]; + attempts: OpenclawGatewayDeviceApprovalAttempt[]; +}; + type PeerEntry = { did: string; proxyUrl: string; - name?: string; + agentName?: string; + humanName?: string; }; type PeersConfig = { @@ -103,24 +193,48 @@ export type OpenclawInviteResult = { did: string; proxyUrl: string; peerAlias?: string; - name?: string; + agentName?: string; + humanName?: string; }; export type OpenclawSetupResult = { - peerAlias: string; - peerDid: string; - peerProxyUrl: string; openclawConfigPath: string; transformTargetPath: string; + relayTransformRuntimePath: string; + relayTransformPeersPath: string; openclawBaseUrl: string; + connectorBaseUrl: string; relayRuntimeConfigPath: string; }; +type OpenclawRuntimeMode = "auto" | "service" | "detached"; + +type OpenclawRuntimeResult = { + runtimeMode: "none" | "service" | "detached" | "existing"; + runtimeStatus: "running" | "skipped"; + websocketStatus: "connected" | 
"skipped"; + connectorStatusUrl?: string; +}; + +export type OpenclawSelfSetupResult = OpenclawSetupResult & + OpenclawRuntimeResult; + type OpenclawRelayRuntimeConfig = { openclawBaseUrl: string; + openclawHookToken?: string; + relayTransformPeersPath?: string; updatedAt?: string; }; +type ConnectorAssignmentEntry = { + connectorBaseUrl: string; + updatedAt: string; +}; + +type ConnectorAssignmentsConfig = { + agents: Record; +}; + type OpenclawDoctorCheckId = | "config.registry" | "state.selectedAgent" @@ -128,7 +242,13 @@ type OpenclawDoctorCheckId = | "state.peers" | "state.transform" | "state.hookMapping" - | "state.openclawBaseUrl"; + | "state.hookToken" + | "state.hookSessionRouting" + | "state.gatewayDevicePairing" + | "state.openclawBaseUrl" + | "state.connectorRuntime" + | "state.connectorInboundInbox" + | "state.openclawHookHealth"; type OpenclawDoctorCheckStatus = "pass" | "fail"; @@ -207,12 +327,15 @@ function parseNonEmptyString(value: unknown, label: string): string { return trimmed; } -function parseOptionalName(value: unknown): string | undefined { +function parseOptionalProfileName( + value: unknown, + label: "agentName" | "humanName", +): string | undefined { if (value === undefined) { return undefined; } - return parseNonEmptyString(value, "name"); + return parseNonEmptyString(value, label); } function parsePeerAlias(value: unknown): string { @@ -321,9 +444,14 @@ function parseInvitePayload(value: unknown): OpenclawInvitePayload { const proxyUrl = parseProxyUrl(value.proxyUrl); const alias = value.alias === undefined ? 
undefined : parsePeerAlias(value.alias); - const name = parseOptionalName(value.name); + const agentName = parseOptionalProfileName(value.agentName, "agentName"); + const humanName = parseOptionalProfileName(value.humanName, "humanName"); - if (alias === undefined && name === undefined) { + if ( + alias === undefined && + agentName === undefined && + humanName === undefined + ) { return { v: 1, issuedAt, @@ -332,7 +460,7 @@ function parseInvitePayload(value: unknown): OpenclawInvitePayload { }; } - if (name === undefined) { + if (agentName === undefined && humanName === undefined) { return { v: 1, issuedAt, @@ -348,7 +476,8 @@ function parseInvitePayload(value: unknown): OpenclawInvitePayload { did, proxyUrl, alias, - name, + agentName, + humanName, }; } @@ -360,12 +489,71 @@ function resolveHomeDir(homeDir?: string): string { return homedir(); } +function resolveHomePrefixedPath(input: string, homeDir: string): string { + const trimmed = input.trim(); + if (trimmed.startsWith("~")) { + return resolvePath(trimmed.replace(/^~(?=$|[\\/])/, homeDir)); + } + return resolvePath(trimmed); +} + +function readNonEmptyEnvPath( + value: string | undefined, + homeDir: string, +): string | undefined { + if (typeof value !== "string" || value.trim().length === 0) { + return undefined; + } + return resolveHomePrefixedPath(value, homeDir); +} + +function resolveOpenclawHomeDir(homeDir: string): string { + const envOpenclawHome = readNonEmptyEnvPath( + process.env.OPENCLAW_HOME, + homeDir, + ); + return envOpenclawHome ?? 
homeDir; +} + +function resolveDefaultOpenclawStateDir(openclawHomeDir: string): string { + const newStateDir = join(openclawHomeDir, OPENCLAW_DIR_NAME); + if (existsSync(newStateDir)) { + return newStateDir; + } + + for (const legacyDirName of LEGACY_OPENCLAW_STATE_DIR_NAMES) { + const legacyStateDir = join(openclawHomeDir, legacyDirName); + if (existsSync(legacyStateDir)) { + return legacyStateDir; + } + } + + return newStateDir; +} + function resolveOpenclawDir(openclawDir: string | undefined, homeDir: string) { if (typeof openclawDir === "string" && openclawDir.trim().length > 0) { - return openclawDir.trim(); + return resolveHomePrefixedPath(openclawDir, homeDir); + } + + const envStateDir = readNonEmptyEnvPath( + process.env.OPENCLAW_STATE_DIR ?? process.env.CLAWDBOT_STATE_DIR, + homeDir, + ); + if (envStateDir !== undefined) { + return envStateDir; + } + + const envConfigPath = readNonEmptyEnvPath( + process.env.OPENCLAW_CONFIG_PATH ?? process.env.CLAWDBOT_CONFIG_PATH, + homeDir, + ); + if (envConfigPath !== undefined) { + return dirname(envConfigPath); } - return join(homeDir, OPENCLAW_DIR_NAME); + const openclawHomeDir = resolveOpenclawHomeDir(homeDir); + return resolveDefaultOpenclawStateDir(openclawHomeDir); } function resolveAgentDirectory(homeDir: string, agentName: string): string { @@ -376,18 +564,36 @@ function resolvePeersPath(homeDir: string): string { return join(homeDir, CLAWDENTITY_DIR_NAME, PEERS_FILE_NAME); } -function resolveOpenclawConfigPath(openclawDir: string): string { - return join(openclawDir, OPENCLAW_CONFIG_FILE_NAME); +function resolveOpenclawConfigPath( + openclawDir: string, + homeDir: string, +): string { + const envConfigPath = readNonEmptyEnvPath( + process.env.OPENCLAW_CONFIG_PATH ?? 
process.env.CLAWDBOT_CONFIG_PATH, + homeDir, + ); + if (envConfigPath !== undefined) { + return envConfigPath; + } + + const configCandidates = [ + join(openclawDir, OPENCLAW_CONFIG_FILE_NAME), + ...LEGACY_OPENCLAW_CONFIG_FILE_NAMES.map((fileName) => + join(openclawDir, fileName), + ), + ]; + + for (const candidate of configCandidates) { + if (existsSync(candidate)) { + return candidate; + } + } + + return configCandidates[0]; } function resolveDefaultTransformSource(openclawDir: string): string { - return join( - openclawDir, - "workspace", - "skills", - SKILL_DIR_NAME, - RELAY_MODULE_FILE_NAME, - ); + return join(openclawDir, "skills", SKILL_DIR_NAME, RELAY_MODULE_FILE_NAME); } function resolveTransformTargetPath(openclawDir: string): string { @@ -402,6 +608,215 @@ function resolveRelayRuntimeConfigPath(homeDir: string): string { return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_RELAY_RUNTIME_FILE_NAME); } +function resolveConnectorAssignmentsPath(homeDir: string): string { + return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_CONNECTORS_FILE_NAME); +} + +function resolveTransformRuntimePath(openclawDir: string): string { + return join(openclawDir, "hooks", "transforms", RELAY_RUNTIME_FILE_NAME); +} + +function resolveTransformPeersPath(openclawDir: string): string { + return join(openclawDir, "hooks", "transforms", RELAY_PEERS_FILE_NAME); +} + +type OpenclawGatewayPendingState = + | { + status: "missing"; + gatewayDevicePendingPath: string; + } + | { + status: "invalid"; + gatewayDevicePendingPath: string; + } + | { + status: "unreadable"; + gatewayDevicePendingPath: string; + } + | { + status: "ok"; + gatewayDevicePendingPath: string; + pendingRequestIds: string[]; + }; + +async function readOpenclawGatewayPendingState( + openclawDir: string, +): Promise { + const gatewayDevicePendingPath = join(openclawDir, "devices", "pending.json"); + try { + const pendingPayload = await readJsonFile(gatewayDevicePendingPath); + if (!isRecord(pendingPayload)) { + return { 
+ status: "invalid", + gatewayDevicePendingPath, + }; + } + return { + status: "ok", + gatewayDevicePendingPath, + pendingRequestIds: Object.keys(pendingPayload), + }; + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return { + status: "missing", + gatewayDevicePendingPath, + }; + } + return { + status: "unreadable", + gatewayDevicePendingPath, + }; + } +} + +function resolveOpenclawGatewayApprovalCommand(): string { + const envOverride = process.env.OPENCLAW_GATEWAY_APPROVAL_COMMAND?.trim(); + if (typeof envOverride === "string" && envOverride.length > 0) { + return envOverride; + } + return OPENCLAW_GATEWAY_APPROVAL_COMMAND; +} + +async function runOpenclawGatewayApprovalCommand(input: { + command: string; + args: string[]; + openclawDir: string; + openclawConfigPath: string; +}): Promise { + return await new Promise( + (resolve) => { + const child = spawn(input.command, input.args, { + env: { + ...process.env, + OPENCLAW_STATE_DIR: input.openclawDir, + OPENCLAW_CONFIG_PATH: input.openclawConfigPath, + }, + stdio: ["ignore", "pipe", "pipe"], + }); + + let settled = false; + let stdout = ""; + let stderr = ""; + + const finalize = (result: OpenclawGatewayDeviceApprovalExecution) => { + if (settled) { + return; + } + settled = true; + resolve({ + ...result, + stdout: stdout.trim(), + stderr: stderr.trim(), + }); + }; + + const timeout = setTimeout(() => { + try { + child.kill("SIGTERM"); + } catch { + // Best-effort timeout shutdown. 
+ } + finalize({ + ok: false, + errorMessage: `command timed out after ${OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS}ms`, + }); + }, OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS); + + child.stdout?.on("data", (chunk: Buffer | string) => { + stdout += String(chunk); + }); + child.stderr?.on("data", (chunk: Buffer | string) => { + stderr += String(chunk); + }); + + child.once("error", (error) => { + clearTimeout(timeout); + const errorCode = getErrorCode(error); + finalize({ + ok: false, + unavailable: errorCode === "ENOENT", + errorMessage: + error instanceof Error + ? error.message + : "failed to run openclaw command", + }); + }); + + child.once("close", (exitCode) => { + clearTimeout(timeout); + finalize({ + ok: exitCode === 0, + exitCode: typeof exitCode === "number" ? exitCode : undefined, + }); + }); + }, + ); +} + +async function runOpenclawGatewayDeviceApproval( + input: OpenclawGatewayDeviceApprovalInput, +): Promise { + const command = resolveOpenclawGatewayApprovalCommand(); + return await runOpenclawGatewayApprovalCommand({ + command, + args: ["devices", "approve", input.requestId, "--json"], + openclawDir: input.openclawDir, + openclawConfigPath: input.openclawConfigPath, + }); +} + +async function autoApproveOpenclawGatewayDevices(input: { + homeDir: string; + openclawDir: string; + runner?: OpenclawGatewayDeviceApprovalRunner; +}): Promise { + const pendingState = await readOpenclawGatewayPendingState(input.openclawDir); + if ( + pendingState.status !== "ok" || + pendingState.pendingRequestIds.length === 0 + ) { + return undefined; + } + + const openclawConfigPath = resolveOpenclawConfigPath( + input.openclawDir, + input.homeDir, + ); + const approvalRunner = input.runner ?? 
runOpenclawGatewayDeviceApproval; + const attempts: OpenclawGatewayDeviceApprovalAttempt[] = []; + + for (const requestId of pendingState.pendingRequestIds) { + const execution = await approvalRunner({ + requestId, + openclawDir: input.openclawDir, + openclawConfigPath, + }); + attempts.push({ + requestId, + ok: execution.ok, + unavailable: execution.unavailable === true, + reason: + execution.errorMessage ?? + (execution.stderr && execution.stderr.length > 0 + ? execution.stderr + : execution.stdout && execution.stdout.length > 0 + ? execution.stdout + : undefined), + exitCode: execution.exitCode, + }); + if (execution.unavailable === true) { + break; + } + } + + return { + gatewayDevicePendingPath: pendingState.gatewayDevicePendingPath, + pendingRequestIds: pendingState.pendingRequestIds, + attempts, + }; +} + async function readJsonFile(filePath: string): Promise { const raw = await readFile(filePath, "utf8"); @@ -551,14 +966,15 @@ async function loadPeersConfig(peersPath: string): Promise { const did = parseAgentDid(value.did, `Peer ${normalizedAlias} did`); const proxyUrl = parseProxyUrl(value.proxyUrl); - const name = parseOptionalName(value.name); + const agentName = parseOptionalProfileName(value.agentName, "agentName"); + const humanName = parseOptionalProfileName(value.humanName, "humanName"); - if (name === undefined) { + if (agentName === undefined && humanName === undefined) { peers[normalizedAlias] = { did, proxyUrl }; continue; } - peers[normalizedAlias] = { did, proxyUrl, name }; + peers[normalizedAlias] = { did, proxyUrl, agentName, humanName }; } return { peers }; @@ -571,131 +987,746 @@ async function savePeersConfig( await writeSecureFile(peersPath, `${JSON.stringify(config, null, 2)}\n`); } -function parseRelayRuntimeConfig( +function parseConnectorBaseUrlForAssignment( value: unknown, - relayRuntimeConfigPath: string, -): OpenclawRelayRuntimeConfig { + label: string, +): string { + return parseHttpUrl(value, { + label, + code: 
"CLI_OPENCLAW_INVALID_CONNECTOR_BASE_URL", + message: "Connector base URL must be a valid URL", + }); +} + +function parseConnectorAssignments( + value: unknown, + connectorAssignmentsPath: string, +): ConnectorAssignmentsConfig { if (!isRecord(value)) { throw createCliError( - "CLI_OPENCLAW_INVALID_RELAY_RUNTIME_CONFIG", - "Relay runtime config must be an object", - { relayRuntimeConfigPath }, + "CLI_OPENCLAW_INVALID_CONNECTOR_ASSIGNMENTS", + "Connector assignments config must be an object", + { connectorAssignmentsPath }, ); } - const updatedAt = - typeof value.updatedAt === "string" && value.updatedAt.trim().length > 0 - ? value.updatedAt.trim() - : undefined; + const agentsRaw = value.agents; + if (!isRecord(agentsRaw)) { + return { agents: {} }; + } - return { - openclawBaseUrl: parseOpenclawBaseUrl(value.openclawBaseUrl), - updatedAt, - }; + const agents: Record = {}; + for (const [agentName, entryValue] of Object.entries(agentsRaw)) { + if (!isRecord(entryValue)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_CONNECTOR_ASSIGNMENTS", + "Connector assignment entry must be an object", + { connectorAssignmentsPath, agentName }, + ); + } + + const connectorBaseUrl = parseConnectorBaseUrlForAssignment( + entryValue.connectorBaseUrl, + "connectorBaseUrl", + ); + const updatedAt = + typeof entryValue.updatedAt === "string" && + entryValue.updatedAt.trim().length > 0 + ? 
entryValue.updatedAt.trim() + : nowIso(); + + agents[assertValidAgentName(agentName)] = { + connectorBaseUrl, + updatedAt, + }; + } + + return { agents }; } -async function loadRelayRuntimeConfig( - relayRuntimeConfigPath: string, -): Promise { +async function loadConnectorAssignments( + connectorAssignmentsPath: string, +): Promise { let parsed: unknown; try { - parsed = await readJsonFile(relayRuntimeConfigPath); + parsed = await readJsonFile(connectorAssignmentsPath); } catch (error) { if (getErrorCode(error) === "ENOENT") { - return undefined; + return { agents: {} }; } - throw error; } - return parseRelayRuntimeConfig(parsed, relayRuntimeConfigPath); + return parseConnectorAssignments(parsed, connectorAssignmentsPath); } -async function saveRelayRuntimeConfig( - relayRuntimeConfigPath: string, - openclawBaseUrl: string, +async function saveConnectorAssignments( + connectorAssignmentsPath: string, + config: ConnectorAssignmentsConfig, ): Promise { - const config: OpenclawRelayRuntimeConfig = { - openclawBaseUrl, - updatedAt: nowIso(), - }; - await writeSecureFile( - relayRuntimeConfigPath, + connectorAssignmentsPath, `${JSON.stringify(config, null, 2)}\n`, ); } -async function resolveOpenclawBaseUrl(input: { - optionValue?: string; - relayRuntimeConfigPath: string; -}): Promise { - if ( - typeof input.optionValue === "string" && - input.optionValue.trim().length > 0 - ) { - return parseOpenclawBaseUrl(input.optionValue); +function parseConnectorPortFromBaseUrl(baseUrl: string): number { + const parsed = new URL(baseUrl); + if (parsed.port) { + return Number(parsed.port); } + return parsed.protocol === "https:" ? 
443 : 80; +} - const envOpenclawBaseUrl = process.env.OPENCLAW_BASE_URL; +function allocateConnectorPort( + assignments: ConnectorAssignmentsConfig, + agentName: string, +): number { + const existing = assignments.agents[agentName]; + if (existing) { + return parseConnectorPortFromBaseUrl(existing.connectorBaseUrl); + } + + const usedPorts = new Set(); + for (const entry of Object.values(assignments.agents)) { + usedPorts.add(parseConnectorPortFromBaseUrl(entry.connectorBaseUrl)); + } + + let nextPort = DEFAULT_CONNECTOR_PORT; + while (usedPorts.has(nextPort)) { + nextPort += 1; + } + + return nextPort; +} + +function buildConnectorBaseUrl(host: string, port: number): string { + return `http://${host}:${port}`; +} + +function buildRelayConnectorBaseUrls(port: number): string[] { + return [ + buildConnectorBaseUrl(CONNECTOR_HOST_DOCKER, port), + buildConnectorBaseUrl(CONNECTOR_HOST_DOCKER_GATEWAY, port), + buildConnectorBaseUrl(CONNECTOR_HOST_LINUX_BRIDGE, port), + buildConnectorBaseUrl(CONNECTOR_HOST_LOOPBACK, port), + ]; +} + +function parseOpenclawRuntimeMode(value: unknown): OpenclawRuntimeMode { + if (typeof value !== "string" || value.trim().length === 0) { + return "auto"; + } + + const normalized = value.trim().toLowerCase(); if ( - typeof envOpenclawBaseUrl === "string" && - envOpenclawBaseUrl.trim().length > 0 + normalized === "auto" || + normalized === "service" || + normalized === "detached" ) { - return parseOpenclawBaseUrl(envOpenclawBaseUrl); + return normalized; } - const existingConfig = await loadRelayRuntimeConfig( - input.relayRuntimeConfigPath, + throw createCliError( + "CLI_OPENCLAW_SETUP_RUNTIME_MODE_INVALID", + "runtimeMode must be one of: auto, service, detached", ); - if (existingConfig !== undefined) { - return existingConfig.openclawBaseUrl; +} + +function parseWaitTimeoutSeconds(value: unknown): number { + if (typeof value !== "string" || value.trim().length === 0) { + return DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS; } - return 
DEFAULT_OPENCLAW_BASE_URL; + const parsed = Number.parseInt(value, 10); + if (!Number.isInteger(parsed) || parsed < 1) { + throw createCliError( + "CLI_OPENCLAW_SETUP_TIMEOUT_INVALID", + "waitTimeoutSeconds must be a positive integer", + ); + } + + return parsed; } -function normalizeStringArrayWithValue( - value: unknown, - requiredValue: string, -): string[] { - const normalized = new Set(); +function resolveConnectorStatusUrl(connectorBaseUrl: string): string { + const normalizedBase = connectorBaseUrl.endsWith("/") + ? connectorBaseUrl + : `${connectorBaseUrl}/`; + return new URL( + DEFAULT_CONNECTOR_STATUS_PATH.slice(1), + normalizedBase, + ).toString(); +} - if (Array.isArray(value)) { - for (const item of value) { - if (typeof item !== "string") { - continue; - } +type ConnectorHealthStatus = { + connected: boolean; + inboundInbox?: { + lastReplayAt?: string; + lastReplayError?: string; + nextAttemptAt?: string; + oldestPendingAt?: string; + pendingBytes?: number; + pendingCount?: number; + replayerActive?: boolean; + }; + openclawHook?: { + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + url?: string; + }; + reachable: boolean; + statusUrl: string; + reason?: string; +}; - const trimmed = item.trim(); - if (trimmed.length > 0) { - normalized.add(trimmed); - } - } +function parseConnectorStatusPayload(payload: unknown): { + inboundInbox?: { + lastReplayAt?: string; + lastReplayError?: string; + nextAttemptAt?: string; + oldestPendingAt?: string; + pendingBytes?: number; + pendingCount?: number; + replayerActive?: boolean; + }; + openclawHook?: { + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + url?: string; + }; + websocketConnected: boolean; +} { + if (!isRecord(payload) || typeof payload.websocketConnected !== "boolean") { + throw createCliError( + "CLI_OPENCLAW_SETUP_CONNECTOR_STATUS_INVALID", + "Connector status response is invalid", + ); } - normalized.add(requiredValue); - - return Array.from(normalized); + return { + 
websocketConnected: payload.websocketConnected, + inboundInbox: isRecord(payload.inboundInbox) + ? { + pendingCount: + typeof payload.inboundInbox.pendingCount === "number" + ? payload.inboundInbox.pendingCount + : undefined, + pendingBytes: + typeof payload.inboundInbox.pendingBytes === "number" + ? payload.inboundInbox.pendingBytes + : undefined, + oldestPendingAt: + typeof payload.inboundInbox.oldestPendingAt === "string" + ? payload.inboundInbox.oldestPendingAt + : undefined, + nextAttemptAt: + typeof payload.inboundInbox.nextAttemptAt === "string" + ? payload.inboundInbox.nextAttemptAt + : undefined, + lastReplayAt: + typeof payload.inboundInbox.lastReplayAt === "string" + ? payload.inboundInbox.lastReplayAt + : undefined, + lastReplayError: + typeof payload.inboundInbox.lastReplayError === "string" + ? payload.inboundInbox.lastReplayError + : undefined, + replayerActive: + typeof payload.inboundInbox.replayerActive === "boolean" + ? payload.inboundInbox.replayerActive + : undefined, + } + : undefined, + openclawHook: isRecord(payload.openclawHook) + ? { + url: + typeof payload.openclawHook.url === "string" + ? payload.openclawHook.url + : undefined, + lastAttemptAt: + typeof payload.openclawHook.lastAttemptAt === "string" + ? payload.openclawHook.lastAttemptAt + : undefined, + lastAttemptStatus: + payload.openclawHook.lastAttemptStatus === "ok" || + payload.openclawHook.lastAttemptStatus === "failed" + ? payload.openclawHook.lastAttemptStatus + : undefined, + } + : undefined, + }; } -function upsertRelayHookMapping( - mappingsValue: unknown, -): Record[] { - const mappings = Array.isArray(mappingsValue) - ? 
mappingsValue.filter(isRecord).map((mapping) => ({ ...mapping })) - : []; - - const existingIndex = mappings.findIndex((mapping) => { - if (mapping.id === HOOK_MAPPING_ID) { - return true; +async function fetchConnectorHealthStatus(input: { + connectorBaseUrl: string; + fetchImpl: typeof fetch; +}): Promise { + const statusUrl = resolveConnectorStatusUrl(input.connectorBaseUrl); + try { + const response = await input.fetchImpl(statusUrl, { + method: "GET", + headers: { + accept: "application/json", + }, + }); + if (!response.ok) { + return { + connected: false, + reachable: false, + statusUrl, + reason: `HTTP ${response.status}`, + }; } - if (!isRecord(mapping.match)) { - return false; + let payload: unknown; + try { + payload = await response.json(); + } catch { + return { + connected: false, + reachable: false, + statusUrl, + reason: "invalid JSON payload", + }; } - return mapping.match.path === HOOK_PATH_SEND_TO_PEER; + const parsed = parseConnectorStatusPayload(payload); + return { + connected: parsed.websocketConnected, + inboundInbox: parsed.inboundInbox, + openclawHook: parsed.openclawHook, + reachable: true, + statusUrl, + reason: parsed.websocketConnected + ? 
undefined + : "connector websocket is disconnected", + }; + } catch { + return { + connected: false, + reachable: false, + statusUrl, + reason: "connector status endpoint is unreachable", + }; + } +} + +async function waitForConnectorConnected(input: { + connectorBaseUrl: string; + fetchImpl: typeof fetch; + waitTimeoutSeconds: number; +}): Promise { + const deadline = Date.now() + input.waitTimeoutSeconds * 1000; + let latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + + while (!latest.connected && Date.now() < deadline) { + await new Promise((resolve) => { + setTimeout(resolve, 1000); + }); + latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + } + + if (!latest.connected) { + throw createCliError( + "CLI_OPENCLAW_SETUP_CONNECTOR_NOT_READY", + `Connector runtime is not websocket-connected after ${input.waitTimeoutSeconds} seconds`, + { + connectorBaseUrl: input.connectorBaseUrl, + connectorStatusUrl: latest.statusUrl, + reason: latest.reason, + }, + ); + } + + return latest; +} + +function resolveConnectorRunDir(homeDir: string): string { + return join(homeDir, CLAWDENTITY_DIR_NAME, CONNECTOR_RUN_DIR_NAME); +} + +function resolveConnectorPidPath(homeDir: string, agentName: string): string { + return join(resolveConnectorRunDir(homeDir), `connector-${agentName}.pid`); +} + +async function readConnectorPidFile( + pidPath: string, +): Promise { + try { + const raw = (await readFile(pidPath, "utf8")).trim(); + if (raw.length === 0) { + return undefined; + } + + const parsed = Number.parseInt(raw, 10); + if (!Number.isInteger(parsed) || parsed <= 0) { + return undefined; + } + + return parsed; + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + throw error; + } +} + +function isPidRunning(pid: number): boolean { + try { + process.kill(pid, 0); + return true; + } catch { + return false; + 
} +} + +async function stopDetachedConnectorIfRunning(input: { + homeDir: string; + agentName: string; +}): Promise { + const pidPath = resolveConnectorPidPath(input.homeDir, input.agentName); + const pid = await readConnectorPidFile(pidPath); + if (pid === undefined || !isPidRunning(pid)) { + return; + } + + try { + process.kill(pid, "SIGTERM"); + } catch { + // Ignore stale pid races; setup health checks will verify readiness. + } +} + +function resolveCliEntryPathForDetachedStart(): string { + const argvEntry = typeof process.argv[1] === "string" ? process.argv[1] : ""; + if (argvEntry.length > 0 && existsSync(argvEntry)) { + return argvEntry; + } + + const modulePath = fileURLToPath(import.meta.url); + return join(dirname(modulePath), "..", "bin.js"); +} + +async function startDetachedConnectorRuntime(input: { + agentName: string; + homeDir: string; + openclawBaseUrl: string; +}): Promise { + await stopDetachedConnectorIfRunning({ + homeDir: input.homeDir, + agentName: input.agentName, + }); + const runDir = resolveConnectorRunDir(input.homeDir); + await mkdir(runDir, { recursive: true }); + + const cliEntryPath = resolveCliEntryPathForDetachedStart(); + const args = [ + cliEntryPath, + "connector", + "start", + input.agentName, + "--openclaw-base-url", + input.openclawBaseUrl, + ]; + const child = spawn(process.execPath, args, { + detached: true, + stdio: "ignore", + env: process.env, + }); + child.unref(); + await writeSecureFile( + resolveConnectorPidPath(input.homeDir, input.agentName), + `${child.pid}\n`, + ); +} + +async function startSetupConnectorRuntime(input: { + agentName: string; + homeDir: string; + openclawBaseUrl: string; + connectorBaseUrl: string; + mode: OpenclawRuntimeMode; + waitTimeoutSeconds: number; + fetchImpl: typeof fetch; +}): Promise { + if (input.mode !== "service") { + const existingStatus = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + if 
(existingStatus.connected) { + return { + runtimeMode: "existing", + runtimeStatus: "running", + websocketStatus: "connected", + connectorStatusUrl: existingStatus.statusUrl, + }; + } + } + + let runtimeMode: "service" | "detached" = "service"; + if (input.mode === "detached") { + runtimeMode = "detached"; + } else { + try { + await installConnectorServiceForAgent(input.agentName, { + platform: "auto", + openclawBaseUrl: input.openclawBaseUrl, + }); + runtimeMode = "service"; + } catch (error) { + if (input.mode === "service") { + throw error; + } + runtimeMode = "detached"; + logger.warn("cli.openclaw.setup.service_fallback_detached", { + agentName: input.agentName, + reason: error instanceof Error ? error.message : "unknown", + }); + } + } + + if (runtimeMode === "detached") { + await startDetachedConnectorRuntime({ + agentName: input.agentName, + homeDir: input.homeDir, + openclawBaseUrl: input.openclawBaseUrl, + }); + } + + const connectedStatus = await waitForConnectorConnected({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + waitTimeoutSeconds: input.waitTimeoutSeconds, + }); + + return { + runtimeMode, + runtimeStatus: "running", + websocketStatus: "connected", + connectorStatusUrl: connectedStatus.statusUrl, + }; +} + +function parseRelayRuntimeConfig( + value: unknown, + relayRuntimeConfigPath: string, +): OpenclawRelayRuntimeConfig { + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_RELAY_RUNTIME_CONFIG", + "Relay runtime config must be an object", + { relayRuntimeConfigPath }, + ); + } + + const updatedAt = + typeof value.updatedAt === "string" && value.updatedAt.trim().length > 0 + ? value.updatedAt.trim() + : undefined; + const openclawHookToken = + typeof value.openclawHookToken === "string" && + value.openclawHookToken.trim().length > 0 + ? 
value.openclawHookToken.trim() + : undefined; + const relayTransformPeersPath = + typeof value.relayTransformPeersPath === "string" && + value.relayTransformPeersPath.trim().length > 0 + ? value.relayTransformPeersPath.trim() + : undefined; + + return { + openclawBaseUrl: parseOpenclawBaseUrl(value.openclawBaseUrl), + openclawHookToken, + relayTransformPeersPath, + updatedAt, + }; +} + +async function loadRelayRuntimeConfig( + relayRuntimeConfigPath: string, +): Promise { + let parsed: unknown; + try { + parsed = await readJsonFile(relayRuntimeConfigPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + + throw error; + } + + return parseRelayRuntimeConfig(parsed, relayRuntimeConfigPath); +} + +async function saveRelayRuntimeConfig( + relayRuntimeConfigPath: string, + openclawBaseUrl: string, + openclawHookToken?: string, + relayTransformPeersPath?: string, +): Promise { + const config: OpenclawRelayRuntimeConfig = { + openclawBaseUrl, + ...(openclawHookToken ? { openclawHookToken } : {}), + ...(relayTransformPeersPath ? 
{ relayTransformPeersPath } : {}), + updatedAt: nowIso(), + }; + + await writeSecureFile( + relayRuntimeConfigPath, + `${JSON.stringify(config, null, 2)}\n`, + ); +} + +async function resolveOpenclawBaseUrl(input: { + optionValue?: string; + relayRuntimeConfigPath: string; +}): Promise { + if ( + typeof input.optionValue === "string" && + input.optionValue.trim().length > 0 + ) { + return parseOpenclawBaseUrl(input.optionValue); + } + + const envOpenclawBaseUrl = process.env.OPENCLAW_BASE_URL; + if ( + typeof envOpenclawBaseUrl === "string" && + envOpenclawBaseUrl.trim().length > 0 + ) { + return parseOpenclawBaseUrl(envOpenclawBaseUrl); + } + + const existingConfig = await loadRelayRuntimeConfig( + input.relayRuntimeConfigPath, + ); + if (existingConfig !== undefined) { + return existingConfig.openclawBaseUrl; + } + + return DEFAULT_OPENCLAW_BASE_URL; +} + +function normalizeStringArrayWithValues( + value: unknown, + requiredValues: readonly string[], +): string[] { + const normalized = new Set(); + + if (Array.isArray(value)) { + for (const item of value) { + if (typeof item !== "string") { + continue; + } + + const trimmed = item.trim(); + if (trimmed.length > 0) { + normalized.add(trimmed); + } + } + } + + for (const requiredValue of requiredValues) { + const trimmed = requiredValue.trim(); + if (trimmed.length > 0) { + normalized.add(trimmed); + } + } + + return Array.from(normalized); +} + +function resolveHookDefaultSessionKey( + config: Record, + hooks: Record, +): string { + const session = isRecord(config.session) ? config.session : {}; + const scope = + typeof session.scope === "string" ? 
session.scope.trim().toLowerCase() : ""; + const configuredMainSessionKey = + resolveConfiguredOpenclawMainSessionKey(session); + + if ( + typeof hooks.defaultSessionKey === "string" && + hooks.defaultSessionKey.trim().length > 0 + ) { + return normalizeLegacyHookDefaultSessionKey( + hooks.defaultSessionKey, + configuredMainSessionKey, + ); + } + + if (scope === "global") { + return "global"; + } + + return configuredMainSessionKey; +} + +function resolveConfiguredOpenclawMainSessionKey( + session: Record, +): string { + if ( + typeof session.mainKey === "string" && + session.mainKey.trim().length > 0 + ) { + return session.mainKey.trim(); + } + + return DEFAULT_OPENCLAW_MAIN_SESSION_KEY; +} + +function normalizeLegacyHookDefaultSessionKey( + value: string, + fallbackSessionKey: string, +): string { + const trimmed = value.trim(); + const legacyMatch = /^agent:[^:]+:(.+)$/i.exec(trimmed); + if (!legacyMatch) { + return trimmed; + } + const routedSessionKey = legacyMatch[1]?.trim(); + if (typeof routedSessionKey === "string" && routedSessionKey.length > 0) { + return routedSessionKey; + } + + return fallbackSessionKey; +} + +function isCanonicalAgentSessionKey(value: string): boolean { + return /^agent:[^:]+:.+/i.test(value.trim()); +} + +function generateOpenclawHookToken(): string { + return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); +} + +function upsertRelayHookMapping( + mappingsValue: unknown, +): Record[] { + const mappings = Array.isArray(mappingsValue) + ? 
mappingsValue.filter(isRecord).map((mapping) => ({ ...mapping })) + : []; + + const existingIndex = mappings.findIndex((mapping) => { + if (mapping.id === HOOK_MAPPING_ID) { + return true; + } + + if (!isRecord(mapping.match)) { + return false; + } + + return mapping.match.path === HOOK_PATH_SEND_TO_PEER; }); const baseMapping = @@ -729,7 +1760,10 @@ function upsertRelayHookMapping( return mappings; } -async function patchOpenclawConfig(openclawConfigPath: string): Promise { +async function patchOpenclawConfig( + openclawConfigPath: string, + hookToken?: string, +): Promise<{ hookToken: string }> { let config: unknown; try { config = await readJsonFile(openclawConfigPath); @@ -754,12 +1788,25 @@ async function patchOpenclawConfig(openclawConfigPath: string): Promise { } const hooks = isRecord(config.hooks) ? { ...config.hooks } : {}; + const existingHookToken = + typeof hooks.token === "string" && hooks.token.trim().length > 0 + ? hooks.token.trim() + : undefined; + const preferredHookToken = + typeof hookToken === "string" && hookToken.trim().length > 0 + ? hookToken.trim() + : undefined; + const resolvedHookToken = + existingHookToken ?? preferredHookToken ?? 
generateOpenclawHookToken(); + const defaultSessionKey = resolveHookDefaultSessionKey(config, hooks); hooks.enabled = true; + hooks.token = resolvedHookToken; + hooks.defaultSessionKey = defaultSessionKey; hooks.allowRequestSessionKey = false; - hooks.allowedSessionKeyPrefixes = normalizeStringArrayWithValue( + hooks.allowedSessionKeyPrefixes = normalizeStringArrayWithValues( hooks.allowedSessionKeyPrefixes, - "hook:", + ["hook:", defaultSessionKey], ); hooks.mappings = upsertRelayHookMapping(hooks.mappings); @@ -773,6 +1820,10 @@ async function patchOpenclawConfig(openclawConfigPath: string): Promise { `${JSON.stringify(nextConfig, null, 2)}\n`, "utf8", ); + + return { + hookToken: resolvedHookToken, + }; } function toDoctorCheck( @@ -825,8 +1876,11 @@ function parseDoctorPeerAlias(peerAlias?: string): string | undefined { return parsePeerAlias(peerAlias); } -function resolveHookToken(optionValue?: string): string | undefined { - const trimmedOption = optionValue?.trim(); +async function resolveHookToken(input: { + optionValue?: string; + relayRuntimeConfigPath: string; +}): Promise { + const trimmedOption = input.optionValue?.trim(); if (trimmedOption !== undefined && trimmedOption.length > 0) { return trimmedOption; } @@ -836,6 +1890,13 @@ function resolveHookToken(optionValue?: string): string | undefined { return envValue; } + const existingConfig = await loadRelayRuntimeConfig( + input.relayRuntimeConfigPath, + ); + if (existingConfig?.openclawHookToken) { + return existingConfig.openclawHookToken; + } + return undefined; } @@ -900,57 +1961,125 @@ export async function runOpenclawDoctor( const peerAlias = parseDoctorPeerAlias(options.peerAlias); const checks: OpenclawDoctorCheckResult[] = []; - const resolveConfigImpl = options.resolveConfigImpl ?? 
resolveConfig; - try { - const resolvedConfig = await resolveConfigImpl(); - if ( - typeof resolvedConfig.registryUrl !== "string" || - resolvedConfig.registryUrl.trim().length === 0 - ) { + if (options.includeConfigCheck !== false) { + const resolveConfigImpl = options.resolveConfigImpl ?? resolveConfig; + try { + const resolvedConfig = await resolveConfigImpl(); + const envProxyUrl = + typeof process.env.CLAWDENTITY_PROXY_URL === "string" + ? process.env.CLAWDENTITY_PROXY_URL.trim() + : ""; + if ( + typeof resolvedConfig.registryUrl !== "string" || + resolvedConfig.registryUrl.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "registryUrl is missing", + remediationHint: + "Run: clawdentity config set registryUrl ", + }), + ); + } else if ( + typeof resolvedConfig.apiKey !== "string" || + resolvedConfig.apiKey.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "apiKey is missing", + remediationHint: "Run: clawdentity config set apiKey ", + }), + ); + } else if (envProxyUrl.length > 0) { + let hasValidEnvProxyUrl = true; + try { + parseProxyUrl(envProxyUrl); + } catch { + hasValidEnvProxyUrl = false; + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "CLAWDENTITY_PROXY_URL is invalid", + remediationHint: + "Set CLAWDENTITY_PROXY_URL to a valid http(s) URL or unset it", + }), + ); + } + + if (hasValidEnvProxyUrl) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: + "registryUrl and apiKey are configured (proxy URL override is active via CLAWDENTITY_PROXY_URL)", + }), + ); + } + } else if ( + typeof resolvedConfig.proxyUrl !== "string" || + resolvedConfig.proxyUrl.trim().length === 0 + ) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: 
"fail", + message: "proxyUrl is missing", + remediationHint: + "Run: clawdentity invite redeem or clawdentity config init", + }), + ); + } else { + let hasValidConfigProxyUrl = true; + try { + parseProxyUrl(resolvedConfig.proxyUrl); + } catch { + hasValidConfigProxyUrl = false; + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "proxyUrl is invalid", + remediationHint: + "Run: clawdentity invite redeem or clawdentity config init", + }), + ); + } + + if (hasValidConfigProxyUrl) { + checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: "registryUrl, apiKey, and proxyUrl are configured", + }), + ); + } + } + } catch { checks.push( toDoctorCheck({ id: "config.registry", label: "CLI config", status: "fail", - message: "registryUrl is missing", + message: "unable to resolve CLI config", remediationHint: - "Run: clawdentity config set registryUrl ", - }), - ); - } else if ( - typeof resolvedConfig.apiKey !== "string" || - resolvedConfig.apiKey.trim().length === 0 - ) { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "apiKey is missing", - remediationHint: "Run: clawdentity config set apiKey ", - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "pass", - message: "registryUrl and apiKey are configured", + "Fix ~/.clawdentity/config.json or rerun: clawdentity config init", }), ); } - } catch { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "unable to resolve CLI config", - remediationHint: - "Fix ~/.clawdentity/config.json or rerun: clawdentity config init", - }), - ); } const selectedAgentPath = resolveOpenclawAgentNamePath(homeDir); @@ -976,8 +2105,7 @@ export async function runOpenclawDoctor( message: missing ? 
`missing ${selectedAgentPath}` : "selected agent marker is invalid", - remediationHint: - "Run: clawdentity openclaw setup --invite-code ", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, }), ); } @@ -989,8 +2117,7 @@ export async function runOpenclawDoctor( label: "Local agent credentials", status: "fail", message: "cannot validate credentials without selected agent marker", - remediationHint: - "Run: clawdentity openclaw setup --invite-code ", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, }), ); } else { @@ -1043,8 +2170,7 @@ export async function runOpenclawDoctor( label: "Peers map", status: "fail", message: `peer alias is missing: ${peerAlias}`, - remediationHint: - "Run: clawdentity openclaw setup --invite-code --peer-alias ", + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, details: { peersPath, peerAlias }, }), ); @@ -1064,10 +2190,8 @@ export async function runOpenclawDoctor( toDoctorCheck({ id: "state.peers", label: "Peers map", - status: "fail", - message: "no peers are configured", - remediationHint: - "Run: clawdentity openclaw setup --invite-code ", + status: "pass", + message: "no peers are configured yet (optional until pairing)", details: { peersPath }, }), ); @@ -1097,17 +2221,33 @@ export async function runOpenclawDoctor( } const transformTargetPath = resolveTransformTargetPath(openclawDir); + const relayTransformRuntimePath = resolveTransformRuntimePath(openclawDir); + const relayTransformPeersPath = resolveTransformPeersPath(openclawDir); try { const transformContents = await readFile(transformTargetPath, "utf8"); - if (transformContents.trim().length === 0) { + const runtimeContents = await readFile(relayTransformRuntimePath, "utf8"); + const peersSnapshotContents = await readFile( + relayTransformPeersPath, + "utf8", + ); + + if ( + transformContents.trim().length === 0 || + runtimeContents.trim().length === 0 || + peersSnapshotContents.trim().length === 0 + ) { checks.push( toDoctorCheck({ id: "state.transform", label: "Relay transform", 
status: "fail", - message: `transform file is empty: ${transformTargetPath}`, - remediationHint: "Run: npm install clawdentity --skill", - details: { transformTargetPath }, + message: "relay transform artifacts are missing or empty", + remediationHint: "Run: clawdentity skill install", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, }), ); } else { @@ -1116,56 +2256,163 @@ export async function runOpenclawDoctor( id: "state.transform", label: "Relay transform", status: "pass", - message: "relay transform file exists", - details: { transformTargetPath }, + message: "relay transform artifacts are present", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, }), ); } - } catch { - checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "fail", - message: `missing transform file: ${transformTargetPath}`, - remediationHint: "Run: npm install clawdentity --skill", - details: { transformTargetPath }, - }), - ); - } + } catch { + checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: "missing relay transform artifacts", + remediationHint: "Run: clawdentity skill install", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } + + const openclawConfigPath = resolveOpenclawConfigPath(openclawDir, homeDir); + try { + const openclawConfig = await readJsonFile(openclawConfigPath); + if (!isRecord(openclawConfig)) { + throw new Error("root"); + } + const hooks = isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; + const hooksEnabled = hooks.enabled === true; + const hookToken = + typeof hooks.token === "string" && hooks.token.trim().length > 0 + ? hooks.token.trim() + : undefined; + const defaultSessionKey = + typeof hooks.defaultSessionKey === "string" && + hooks.defaultSessionKey.trim().length > 0 + ? 
hooks.defaultSessionKey.trim() + : undefined; + const allowRequestSessionKey = hooks.allowRequestSessionKey === false; + const allowedSessionKeyPrefixes = normalizeStringArrayWithValues( + hooks.allowedSessionKeyPrefixes, + [], + ); + const missingRequiredSessionPrefixes = + defaultSessionKey === undefined + ? ["hook:"] + : ["hook:", defaultSessionKey].filter( + (prefix) => !allowedSessionKeyPrefixes.includes(prefix), + ); + const mappings = Array.isArray(hooks.mappings) + ? hooks.mappings.filter(isRecord) + : []; + const relayMapping = mappings.find((mapping) => + isRelayHookMapping(mapping), + ); + if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { + checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `missing send-to-peer mapping in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "pass", + message: "send-to-peer mapping is configured", + details: { openclawConfigPath }, + }), + ); + } + + if (!hooksEnabled) { + checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `hooks.enabled is not true in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else if (hookToken === undefined) { + checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `hooks.token is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "pass", + message: "hooks token is configured", + details: { openclawConfigPath }, + }), + ); + } + 
+ const sessionRoutingIssues: string[] = []; + if (defaultSessionKey === undefined) { + sessionRoutingIssues.push("hooks.defaultSessionKey is missing"); + } + if (!allowRequestSessionKey) { + sessionRoutingIssues.push("hooks.allowRequestSessionKey is not false"); + } + if (missingRequiredSessionPrefixes.length > 0) { + sessionRoutingIssues.push( + `hooks.allowedSessionKeyPrefixes is missing: ${missingRequiredSessionPrefixes.join(", ")}`, + ); + } + if ( + defaultSessionKey !== undefined && + isCanonicalAgentSessionKey(defaultSessionKey) + ) { + sessionRoutingIssues.push( + "hooks.defaultSessionKey uses canonical agent format (agent::...); use OpenClaw request session keys like main, global, or subagent:*", + ); + } - const openclawConfigPath = resolveOpenclawConfigPath(openclawDir); - try { - const openclawConfig = await readJsonFile(openclawConfigPath); - if (!isRecord(openclawConfig)) { - throw new Error("root"); - } - const hooks = isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; - const mappings = Array.isArray(hooks.mappings) - ? 
hooks.mappings.filter(isRecord) - : []; - const relayMapping = mappings.find((mapping) => - isRelayHookMapping(mapping), - ); - if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { + if (sessionRoutingIssues.length > 0) { checks.push( toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", status: "fail", - message: `missing send-to-peer mapping in ${openclawConfigPath}`, - remediationHint: - "Run: clawdentity openclaw setup --invite-code ", + message: sessionRoutingIssues.join("; "), + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, details: { openclawConfigPath }, }), ); } else { checks.push( toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", status: "pass", - message: "send-to-peer mapping is configured", + message: + "hooks default session and allowed session prefixes are configured", details: { openclawConfigPath }, }), ); @@ -1178,7 +2425,29 @@ export async function runOpenclawDoctor( status: "fail", message: `unable to read ${openclawConfigPath}`, remediationHint: - "Ensure ~/.openclaw/openclaw.json exists and rerun openclaw setup", + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + 
remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", details: { openclawConfigPath }, }), ); @@ -1204,12 +2473,332 @@ export async function runOpenclawDoctor( label: "OpenClaw base URL", status: "fail", message: `unable to resolve OpenClaw base URL from ${relayRuntimeConfigPath}`, - remediationHint: - "Run: clawdentity openclaw setup --invite-code --openclaw-base-url ", + remediationHint: OPENCLAW_SETUP_WITH_BASE_URL_HINT, + }), + ); + } + + const gatewayPendingState = + await readOpenclawGatewayPendingState(openclawDir); + if (gatewayPendingState.status === "missing") { + checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "pass", + message: "no pending gateway device approvals file was found", + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.status === "invalid") { + checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `invalid pending device approvals file: ${gatewayPendingState.gatewayDevicePendingPath}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.status === "unreadable") { + checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `unable to read pending device approvals at ${gatewayPendingState.gatewayDevicePendingPath}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.pendingRequestIds.length === 0) { + checks.push( + toDoctorCheck({ + id: 
"state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "pass", + message: "no pending gateway device approvals", + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `pending gateway device approvals: ${gatewayPendingState.pendingRequestIds.length}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + pendingRequestIds: gatewayPendingState.pendingRequestIds, + }, }), ); } + if (options.includeConnectorRuntimeCheck !== false) { + if (selectedAgentName === undefined) { + checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: + "cannot validate connector runtime without selected agent marker", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: + "cannot validate connector inbound inbox without selected agent marker", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: + "cannot validate OpenClaw hook health without selected agent marker", + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + }), + ); + } else { + const connectorAssignmentsPath = resolveConnectorAssignmentsPath(homeDir); + try { + const connectorAssignments = await loadConnectorAssignments( + connectorAssignmentsPath, + ); + const assignment = connectorAssignments.agents[selectedAgentName]; + if (assignment === undefined) { + checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: 
"fail", + message: `no connector assignment found for ${selectedAgentName}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { connectorAssignmentsPath, selectedAgentName }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: `no connector assignment found for ${selectedAgentName}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { connectorAssignmentsPath, selectedAgentName }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: `no connector assignment found for ${selectedAgentName}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { connectorAssignmentsPath, selectedAgentName }, + }), + ); + } else { + const fetchImpl = options.fetchImpl ?? globalThis.fetch; + if (typeof fetchImpl !== "function") { + checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: + "fetch implementation is unavailable for connector checks", + remediationHint: + "Run doctor in a Node runtime with fetch support, or rerun openclaw setup", + }), + ); + checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: + "fetch implementation is unavailable for connector inbox checks", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: + "fetch implementation is unavailable for OpenClaw hook health checks", + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + }), + ); + } else { + const connectorStatus = await fetchConnectorHealthStatus({ + connectorBaseUrl: assignment.connectorBaseUrl, + fetchImpl, + }); + if (connectorStatus.connected) { + checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + 
label: "Connector runtime", + status: "pass", + message: `connector websocket is connected (${assignment.connectorBaseUrl})`, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + const inboxPendingCount = + connectorStatus.inboundInbox?.pendingCount ?? 0; + const replayError = connectorStatus.inboundInbox?.lastReplayError; + checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "pass", + message: + inboxPendingCount === 0 + ? "connector inbound inbox is empty" + : `connector inbound inbox has ${inboxPendingCount} pending message(s)`, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + ...connectorStatus.inboundInbox, + }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: + connectorStatus.openclawHook?.lastAttemptStatus === + "failed" && inboxPendingCount > 0 + ? "fail" + : "pass", + message: + connectorStatus.openclawHook?.lastAttemptStatus === + "failed" && inboxPendingCount > 0 + ? `connector replay to local OpenClaw hook is failing: ${replayError ?? "unknown error"}` + : "connector replay to local OpenClaw hook is healthy", + remediationHint: + connectorStatus.openclawHook?.lastAttemptStatus === + "failed" && inboxPendingCount > 0 + ? OPENCLAW_SETUP_RESTART_COMMAND_HINT + : undefined, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + ...connectorStatus.openclawHook, + inboxPendingCount, + }, + }), + ); + } else { + const reason = + connectorStatus.reason ?? 
"connector runtime is unavailable"; + checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: `connector runtime is not ready: ${reason}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: `unable to read connector inbound inbox status: ${reason}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: `unable to verify OpenClaw hook health: ${reason}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + } + } + } + } catch { + checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: `unable to read connector assignments at ${connectorAssignmentsPath}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { connectorAssignmentsPath }, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: + "cannot validate connector inbound inbox without connector assignment", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: + "cannot validate OpenClaw hook health without connector assignment", + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + }), + ); + } + } + } + return 
toDoctorResult(checks); } @@ -1228,8 +2817,14 @@ function parseRelayProbeFailure(input: { if (input.status === 404) { return { message: "OpenClaw send-to-peer hook is unavailable", - remediationHint: - "Run: clawdentity openclaw setup --invite-code ", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }; + } + + if (input.status === 405) { + return { + message: "OpenClaw send-to-peer hook is not enabled for POST requests", + remediationHint: `${OPENCLAW_SETUP_COMMAND_HINT}, then restart OpenClaw`, }; } @@ -1237,7 +2832,7 @@ function parseRelayProbeFailure(input: { return { message: "Relay probe failed inside local relay pipeline", remediationHint: - "Check connector runtime and peer alias; rerun clawdentity openclaw doctor --peer ", + "Check peer pairing and rerun: clawdentity openclaw setup ", }; } @@ -1250,19 +2845,72 @@ function parseRelayProbeFailure(input: { }; } +async function resolveRelayProbePeerAlias(input: { + homeDir: string; + peerAliasOption?: string; +}): Promise { + if ( + typeof input.peerAliasOption === "string" && + input.peerAliasOption.trim().length > 0 + ) { + return parsePeerAlias(input.peerAliasOption); + } + + const peersPath = resolvePeersPath(input.homeDir); + const peersConfig = await loadPeersConfig(peersPath); + const peerAliases = Object.keys(peersConfig.peers); + + if (peerAliases.length === 1) { + return peerAliases[0]; + } + + if (peerAliases.length === 0) { + throw createCliError( + "CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED", + "No paired peer is configured yet. Complete QR pairing first.", + { peersPath }, + ); + } + + throw createCliError( + "CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED", + "Multiple peers are configured. 
Pass --peer to choose one.", + { peersPath, peerAliases }, + ); +} + export async function runOpenclawRelayTest( options: OpenclawRelayTestOptions, ): Promise { const homeDir = resolveHomeDir(options.homeDir); const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); - const peerAlias = parsePeerAlias(options.peer); + const checkedAt = nowIso(); + let peerAlias: string; + try { + peerAlias = await resolveRelayProbePeerAlias({ + homeDir, + peerAliasOption: options.peer, + }); + } catch (error) { + const appError = error instanceof AppError ? error : undefined; + return { + status: "failure", + checkedAt, + peerAlias: "unresolved", + endpoint: toSendToPeerEndpoint(DEFAULT_OPENCLAW_BASE_URL), + message: appError?.message ?? "Unable to resolve relay peer alias", + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, + details: appError?.details as Record | undefined, + }; + } + const preflight = await runOpenclawDoctor({ homeDir, openclawDir, peerAlias, resolveConfigImpl: options.resolveConfigImpl, + includeConnectorRuntimeCheck: false, }); - const checkedAt = nowIso(); const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); let openclawBaseUrl = DEFAULT_OPENCLAW_BASE_URL; @@ -1303,7 +2951,10 @@ export async function runOpenclawRelayTest( }; } - const hookToken = resolveHookToken(options.hookToken); + const hookToken = await resolveHookToken({ + optionValue: options.hookToken, + relayRuntimeConfigPath, + }); const fetchImpl = options.fetchImpl ?? globalThis.fetch; if (typeof fetchImpl !== "function") { return { @@ -1385,7 +3036,8 @@ export function createOpenclawInviteCode( options.peerAlias === undefined ? 
undefined : parsePeerAlias(options.peerAlias); - const name = parseOptionalName(options.name); + const agentName = parseOptionalProfileName(options.agentName, "agentName"); + const humanName = parseOptionalProfileName(options.humanName, "humanName"); const payload = parseInvitePayload({ v: 1, @@ -1393,7 +3045,8 @@ export function createOpenclawInviteCode( did, proxyUrl, alias: peerAlias, - name, + agentName, + humanName, }); const result: OpenclawInviteResult = { @@ -1401,7 +3054,8 @@ export function createOpenclawInviteCode( did: payload.did, proxyUrl: payload.proxyUrl, peerAlias: payload.alias, - name: payload.name, + agentName: payload.agentName, + humanName: payload.humanName, }; return result; @@ -1411,14 +3065,14 @@ export function decodeOpenclawInviteCode(code: string): OpenclawInvitePayload { return decodeInvitePayload(code); } -export async function setupOpenclawRelayFromInvite( +export async function setupOpenclawRelay( agentName: string, options: OpenclawSetupOptions, ): Promise { const normalizedAgentName = assertValidAgentName(agentName); const homeDir = resolveHomeDir(options.homeDir); const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); - const openclawConfigPath = resolveOpenclawConfigPath(openclawDir); + const openclawConfigPath = resolveOpenclawConfigPath(openclawDir, homeDir); const transformSource = typeof options.transformSource === "string" && options.transformSource.trim().length > 0 @@ -1426,21 +3080,13 @@ export async function setupOpenclawRelayFromInvite( : resolveDefaultTransformSource(openclawDir); const transformTargetPath = resolveTransformTargetPath(openclawDir); const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); + const existingRelayRuntimeConfig = await loadRelayRuntimeConfig( + relayRuntimeConfigPath, + ); const openclawBaseUrl = await resolveOpenclawBaseUrl({ optionValue: options.openclawBaseUrl, relayRuntimeConfigPath, }); - const invite = decodeInvitePayload(options.inviteCode); - const 
peerAliasCandidate = options.peerAlias ?? invite.alias; - - if (!peerAliasCandidate) { - throw createCliError( - "CLI_OPENCLAW_PEER_ALIAS_REQUIRED", - "Peer alias is required. Include alias in invite code or pass --peer-alias.", - ); - } - - const peerAlias = parsePeerAlias(peerAliasCandidate); await ensureLocalAgentCredentials(homeDir, normalizedAgentName); await mkdir(dirname(transformTargetPath), { recursive: true }); @@ -1458,108 +3104,301 @@ export async function setupOpenclawRelayFromInvite( throw error; } - await patchOpenclawConfig(openclawConfigPath); + const patchedOpenclawConfig = await patchOpenclawConfig( + openclawConfigPath, + existingRelayRuntimeConfig?.openclawHookToken, + ); const peersPath = resolvePeersPath(homeDir); const peers = await loadPeersConfig(peersPath); - peers.peers[peerAlias] = - invite.name === undefined - ? { did: invite.did, proxyUrl: invite.proxyUrl } - : { did: invite.did, proxyUrl: invite.proxyUrl, name: invite.name }; await savePeersConfig(peersPath, peers); + const relayTransformPeersPath = resolveTransformPeersPath(openclawDir); + await writeSecureFile( + relayTransformPeersPath, + `${JSON.stringify(peers, null, 2)}\n`, + ); + + const connectorAssignmentsPath = resolveConnectorAssignmentsPath(homeDir); + const connectorAssignments = await loadConnectorAssignments( + connectorAssignmentsPath, + ); + const connectorPort = allocateConnectorPort( + connectorAssignments, + normalizedAgentName, + ); + const connectorBaseUrl = buildConnectorBaseUrl( + CONNECTOR_HOST_LOOPBACK, + connectorPort, + ); + connectorAssignments.agents[normalizedAgentName] = { + connectorBaseUrl, + updatedAt: nowIso(), + }; + await saveConnectorAssignments( + connectorAssignmentsPath, + connectorAssignments, + ); + + const relayTransformRuntimePath = resolveTransformRuntimePath(openclawDir); + await writeSecureFile( + relayTransformRuntimePath, + `${JSON.stringify( + { + version: 1, + connectorBaseUrl: buildRelayConnectorBaseUrls(connectorPort)[0], + 
connectorBaseUrls: buildRelayConnectorBaseUrls(connectorPort), + connectorPath: DEFAULT_CONNECTOR_OUTBOUND_PATH, + peersConfigPath: RELAY_PEERS_FILE_NAME, + updatedAt: nowIso(), + }, + null, + 2, + )}\n`, + ); + const agentNamePath = resolveOpenclawAgentNamePath(homeDir); await writeSecureFile(agentNamePath, `${normalizedAgentName}\n`); - await saveRelayRuntimeConfig(relayRuntimeConfigPath, openclawBaseUrl); + await saveRelayRuntimeConfig( + relayRuntimeConfigPath, + openclawBaseUrl, + patchedOpenclawConfig.hookToken, + relayTransformPeersPath, + ); logger.info("cli.openclaw_setup_completed", { agentName: normalizedAgentName, - peerAlias, - peerDid: invite.did, openclawConfigPath, transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, openclawBaseUrl, + connectorBaseUrl, relayRuntimeConfigPath, }); return { - peerAlias, - peerDid: invite.did, - peerProxyUrl: invite.proxyUrl, openclawConfigPath, transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, openclawBaseUrl, + connectorBaseUrl, relayRuntimeConfigPath, }; } -export const createOpenclawCommand = (): Command => { - const openclawCommand = new Command("openclaw").description( - "Manage OpenClaw invite codes and relay setup", +async function assertSetupChecklistHealthy(input: { + homeDir: string; + openclawDir: string; + includeConnectorRuntimeCheck: boolean; + gatewayDeviceApprovalRunner?: OpenclawGatewayDeviceApprovalRunner; +}): Promise { + let checklist = await runOpenclawDoctor({ + homeDir: input.homeDir, + openclawDir: input.openclawDir, + includeConfigCheck: false, + includeConnectorRuntimeCheck: input.includeConnectorRuntimeCheck, + }); + + if (checklist.status === "healthy") { + return; + } + + let gatewayApprovalSummary: OpenclawGatewayDeviceApprovalSummary | undefined; + const gatewayPairingFailure = checklist.checks.find( + (check) => + check.id === "state.gatewayDevicePairing" && check.status === "fail", ); + if (gatewayPairingFailure !== undefined) { + 
gatewayApprovalSummary = await autoApproveOpenclawGatewayDevices({ + homeDir: input.homeDir, + openclawDir: input.openclawDir, + runner: input.gatewayDeviceApprovalRunner, + }); + if (gatewayApprovalSummary !== undefined) { + const successfulAttempts = gatewayApprovalSummary.attempts.filter( + (attempt) => attempt.ok, + ).length; + const failedAttempts = gatewayApprovalSummary.attempts.filter( + (attempt) => !attempt.ok, + ); + logger.info("cli.openclaw_setup_gateway_device_recovery_attempted", { + openclawDir: input.openclawDir, + pendingCount: gatewayApprovalSummary.pendingRequestIds.length, + successfulAttempts, + failedAttempts: failedAttempts.length, + commandUnavailable: failedAttempts.some( + (attempt) => attempt.unavailable, + ), + }); + checklist = await runOpenclawDoctor({ + homeDir: input.homeDir, + openclawDir: input.openclawDir, + includeConfigCheck: false, + includeConnectorRuntimeCheck: input.includeConnectorRuntimeCheck, + }); + if (checklist.status === "healthy") { + return; + } + } + } - openclawCommand - .command("invite") - .description("Create an invite code for peer relay onboarding") - .requiredOption("--did ", "Peer agent DID (did:claw:agent:...)") - .requiredOption( - "--proxy-url ", - "Public proxy URL ending in /hooks/agent", - ) - .option("--peer-alias ", "Suggested peer alias for the receiver") - .option("--name ", "Human-friendly peer display name") - .action( - withErrorHandling( - "openclaw invite", - async (options: OpenclawInviteOptions) => { - const invite = createOpenclawInviteCode(options); - - writeStdoutLine(`Invite code: ${invite.code}`); - writeStdoutLine(`Agent DID: ${invite.did}`); - writeStdoutLine(`Proxy URL: ${invite.proxyUrl}`); - if (invite.peerAlias) { - writeStdoutLine(`Suggested Alias: ${invite.peerAlias}`); - } - }, - ), + const firstFailure = checklist.checks.find( + (check) => check.status === "fail", + ); + const unavailableGatewayApprovalAttempt = + gatewayApprovalSummary?.attempts.find((attempt) => 
attempt.unavailable); + const remediationHint = + unavailableGatewayApprovalAttempt !== undefined && + firstFailure?.id === "state.gatewayDevicePairing" + ? `${OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT}. Ensure the \`${resolveOpenclawGatewayApprovalCommand()}\` command is available.` + : firstFailure?.remediationHint; + throw createCliError( + "CLI_OPENCLAW_SETUP_CHECKLIST_FAILED", + "OpenClaw setup checklist failed", + { + firstFailedCheckId: firstFailure?.id, + firstFailedCheckMessage: firstFailure?.message, + remediationHint, + gatewayDeviceApproval: gatewayApprovalSummary, + checks: checklist.checks, + }, + ); +} + +export async function setupOpenclawSelfReady( + agentName: string, + options: OpenclawSetupOptions, +): Promise { + const resolvedHomeDir = resolveHomeDir(options.homeDir); + const resolvedOpenclawDir = resolveOpenclawDir( + options.openclawDir, + resolvedHomeDir, + ); + const setup = await setupOpenclawRelay(agentName, { + ...options, + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + }); + if (options.noRuntimeStart === true) { + await assertSetupChecklistHealthy({ + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + includeConnectorRuntimeCheck: false, + gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, + }); + return { + ...setup, + runtimeMode: "none", + runtimeStatus: "skipped", + websocketStatus: "skipped", + }; + } + + const fetchImpl = globalThis.fetch; + if (typeof fetchImpl !== "function") { + throw createCliError( + "CLI_OPENCLAW_SETUP_FETCH_UNAVAILABLE", + "Runtime fetch is unavailable for connector readiness checks", ); + } + + const resolvedMode = parseOpenclawRuntimeMode(options.runtimeMode); + const waitTimeoutSeconds = parseWaitTimeoutSeconds( + options.waitTimeoutSeconds, + ); + const runtime = await startSetupConnectorRuntime({ + agentName: assertValidAgentName(agentName), + homeDir: resolvedHomeDir, + openclawBaseUrl: setup.openclawBaseUrl, + connectorBaseUrl: setup.connectorBaseUrl, + 
mode: resolvedMode, + waitTimeoutSeconds, + fetchImpl, + }); + + await assertSetupChecklistHealthy({ + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + includeConnectorRuntimeCheck: true, + gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, + }); + + return { + ...setup, + ...runtime, + }; +} + +export async function setupOpenclawRelayFromInvite( + agentName: string, + options: OpenclawSetupOptions, +): Promise { + return setupOpenclawRelay(agentName, options); +} + +export const createOpenclawCommand = (): Command => { + const openclawCommand = new Command("openclaw").description( + "Manage OpenClaw relay setup", + ); openclawCommand .command("setup ") - .description("Apply OpenClaw relay setup using an invite code") - .requiredOption( - "--invite-code ", - "Invite code shared by peer operator", - ) - .option("--peer-alias ", "Override peer alias for local routing") + .description("Apply OpenClaw relay setup") .option( "--openclaw-dir ", "OpenClaw state directory (default ~/.openclaw)", ) .option( "--transform-source ", - "Path to relay-to-peer.mjs (default /workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs)", + "Path to relay-to-peer.mjs (default /skills/clawdentity-openclaw-relay/relay-to-peer.mjs)", ) .option( "--openclaw-base-url ", "Base URL for local OpenClaw hook API (default http://127.0.0.1:18789)", ) + .option( + "--runtime-mode ", + "Connector runtime mode: auto | service | detached (default auto)", + ) + .option( + "--wait-timeout-seconds ", + "Seconds to wait for connector websocket readiness (default 30)", + ) + .option( + "--no-runtime-start", + "Skip connector runtime startup (advanced/manual mode)", + ) .action( withErrorHandling( "openclaw setup", - async (agentName: string, options: OpenclawSetupOptions) => { - const result = await setupOpenclawRelayFromInvite(agentName, options); - writeStdoutLine(`Peer alias configured: ${result.peerAlias}`); - writeStdoutLine(`Peer DID: ${result.peerDid}`); - 
writeStdoutLine(`Peer proxy URL: ${result.peerProxyUrl}`); + async (agentName: string, options: OpenclawSetupCommandOptions) => { + const result = await setupOpenclawSelfReady(agentName, options); + writeStdoutLine("Self setup complete"); writeStdoutLine( `Updated OpenClaw config: ${result.openclawConfigPath}`, ); writeStdoutLine(`Installed transform: ${result.transformTargetPath}`); + writeStdoutLine( + `Transform runtime config: ${result.relayTransformRuntimePath}`, + ); + writeStdoutLine( + `Transform peers snapshot: ${result.relayTransformPeersPath}`, + ); + writeStdoutLine(`Connector base URL: ${result.connectorBaseUrl}`); writeStdoutLine(`OpenClaw base URL: ${result.openclawBaseUrl}`); writeStdoutLine( `Relay runtime config: ${result.relayRuntimeConfigPath}`, ); + writeStdoutLine(`Runtime mode: ${result.runtimeMode}`); + writeStdoutLine(`Runtime status: ${result.runtimeStatus}`); + writeStdoutLine(`WebSocket status: ${result.websocketStatus}`); + if (result.connectorStatusUrl) { + writeStdoutLine( + `Connector status URL: ${result.connectorStatusUrl}`, + ); + } }, ), ); @@ -1601,8 +3440,10 @@ export const createOpenclawCommand = (): Command => { relayCommand .command("test") - .description("Send a relay probe to a configured peer alias") - .requiredOption("--peer ", "Peer alias in ~/.clawdentity/peers.json") + .description( + "Send a relay probe to a configured peer (auto-selects when one peer exists)", + ) + .option("--peer ", "Peer alias in ~/.clawdentity/peers.json") .option( "--openclaw-base-url ", "Base URL for local OpenClaw hook API (default OPENCLAW_BASE_URL or relay runtime config)", diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts index 811f59a..5166af3 100644 --- a/apps/cli/src/commands/pair.test.ts +++ b/apps/cli/src/commands/pair.test.ts @@ -4,7 +4,13 @@ import { } from "@clawdentity/sdk"; import { Command } from "commander"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import { 
confirmPairing, createPairCommand, startPairing } from "./pair.js"; +import { resetClawdentityEnv } from "../test-env.js"; +import { + confirmPairing, + createPairCommand, + getPairingStatus, + startPairing, +} from "./pair.js"; const buildErrnoError = (code: string): NodeJS.ErrnoException => { const error = new Error(code) as NodeJS.ErrnoException; @@ -17,12 +23,32 @@ type PairFixture = { secretKeyBase64url: string; }; +const INITIATOR_PROFILE = { + agentName: "alpha", + humanName: "Ravi", +}; + +const RESPONDER_PROFILE = { + agentName: "beta", + humanName: "Ira", +}; + const createPairFixture = async (): Promise => { const keypair = await generateEd25519Keypair(); const encoded = encodeEd25519KeypairBase64url(keypair); + const header = Buffer.from(JSON.stringify({ alg: "EdDSA", typ: "JWT" })) + .toString("base64url") + .trim(); + const payload = Buffer.from( + JSON.stringify({ + sub: "did:claw:agent:01HAAA11111111111111111111", + }), + ) + .toString("base64url") + .trim(); return { - ait: "ey.mock.ait", + ait: `${header}.${payload}.sig`, secretKeyBase64url: encoded.secretKey, }; }; @@ -53,7 +79,7 @@ const previousEnv = process.env; describe("pair command helpers", () => { beforeEach(() => { vi.clearAllMocks(); - process.env = { ...previousEnv }; + process.env = resetClawdentityEnv(previousEnv); }); afterEach(() => { @@ -69,12 +95,29 @@ describe("pair command helpers", () => { "notes.txt", ]); const unlinkImpl = vi.fn(async () => undefined); - const writeFileImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn( + async ( + _filePath: string, + _data: string | Uint8Array, + _encoding?: BufferEncoding, + ) => undefined, + ); const mkdirImpl = vi.fn(async () => undefined); - const fetchImpl = vi.fn(async (_url: string, _init?: RequestInit) => { + const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { 
status: 200 }, + ); + } + return Response.json( { initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, ticket: "clwpair1_eyJ2IjoxfQ", expiresAt: "2026-02-18T00:00:00.000Z", }, @@ -85,7 +128,6 @@ describe("pair command helpers", () => { const result = await startPairing( "alpha", { - proxyUrl: "https://alpha.proxy.example", ttlSeconds: "900", qr: true, }, @@ -105,8 +147,8 @@ describe("pair command helpers", () => { unlinkImpl as unknown as typeof import("node:fs/promises").unlink, qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), resolveConfigImpl: async () => ({ - registryUrl: "https://dev.api.clawdentity.com/", - apiKey: "clw_pat_configured", + registryUrl: "https://dev.registry.clawdentity.com/", + humanName: INITIATOR_PROFILE.humanName, }), getConfigDirImpl: () => "/tmp/.clawdentity", }, @@ -124,32 +166,31 @@ describe("pair command helpers", () => { ); expect(writeFileImpl).toHaveBeenCalledTimes(1); expect(mkdirImpl).toHaveBeenCalledTimes(1); - const [, init] = fetchImpl.mock.calls[0] as [string, RequestInit]; + const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; expect(init?.method).toBe("POST"); const headers = new Headers(init?.headers); expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); - expect(headers.get("x-claw-owner-pat")).toBe("clw_pat_configured"); expect(headers.get("x-claw-proof")).toBeTruthy(); expect(headers.get("x-claw-body-sha256")).toBeTruthy(); expect(headers.get("x-claw-timestamp")).toBe("1700000000"); expect(headers.get("x-claw-nonce")).toBe("nonce-start"); expect(String(init?.body ?? "")).toContain("ttlSeconds"); + expect(String(init?.body ?? 
"")).toContain("initiatorProfile"); }); - it("uses CLAWDENTITY_PROXY_URL when --proxy-url is omitted", async () => { + it("uses CLAWDENTITY_PROXY_URL when no proxy override options are present", async () => { process.env.CLAWDENTITY_PROXY_URL = "https://env.proxy.example"; const fixture = await createPairFixture(); const result = await startPairing( "alpha", - { - ownerPat: "clw_pat_explicit", - }, + {}, { fetchImpl: (async () => Response.json( { initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, ticket: "clwpair1_eyJ2IjoxfQ", expiresAt: "2026-02-18T00:00:00.000Z", }, @@ -161,7 +202,8 @@ describe("pair command helpers", () => { fixture, ) as unknown as typeof import("node:fs/promises").readFile, resolveConfigImpl: async () => ({ - registryUrl: "https://dev.api.clawdentity.com/", + registryUrl: "https://dev.registry.clawdentity.com/", + humanName: INITIATOR_PROFILE.humanName, }), getConfigDirImpl: () => "/tmp/.clawdentity", }, @@ -170,39 +212,125 @@ describe("pair command helpers", () => { expect(result.proxyUrl).toBe("https://env.proxy.example/"); }); - it("fails start when owner PAT is missing", async () => { + it("uses registry metadata proxyUrl when env override is omitted", async () => { + const fixture = await createPairFixture(); + + const result = await startPairing( + "alpha", + {}, + { + fetchImpl: (async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://saved.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof 
import("node:fs/promises").readFile, + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.registry.clawdentity.com/", + proxyUrl: "https://saved.proxy.example", + humanName: INITIATOR_PROFILE.humanName, + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.proxyUrl).toBe("https://saved.proxy.example/"); + }); + + it("fails when configured proxyUrl does not match registry metadata", async () => { const fixture = await createPairFixture(); await expect( startPairing( "alpha", + {}, { - proxyUrl: "https://alpha.proxy.example", - }, - { + fetchImpl: (async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://proxy.clawdentity.com", + }, + { status: 200 }, + ); + } + + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", readFileImpl: createReadFileMock( fixture, ) as unknown as typeof import("node:fs/promises").readFile, resolveConfigImpl: async () => ({ - registryUrl: "https://dev.api.clawdentity.com/", + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://stale.proxy.clawdentity.com", + humanName: INITIATOR_PROFILE.humanName, }), getConfigDirImpl: () => "/tmp/.clawdentity", }, ), ).rejects.toMatchObject({ - message: expect.stringContaining("Owner PAT is required"), + code: "CLI_PAIR_PROXY_URL_MISMATCH", }); }); it("confirms pairing with qr-file ticket decode", async () => { const fixture = await createPairFixture(); const unlinkImpl = vi.fn(async () => undefined); - const fetchImpl = vi.fn(async (_url: string, _init?: RequestInit) => { + const mkdirImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn(async () => undefined); + const chmodImpl 
= vi.fn(async () => undefined); + const qrTicket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://beta.proxy.example", + }, + { status: 200 }, + ); + } + return Response.json( { paired: true, initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, }, { status: 201 }, ); @@ -211,7 +339,6 @@ describe("pair command helpers", () => { const result = await confirmPairing( "beta", { - proxyUrl: "https://beta.proxy.example", qrFile: "/tmp/pair.png", }, { @@ -223,14 +350,25 @@ describe("pair command helpers", () => { ) as unknown as typeof import("node:fs/promises").readFile, unlinkImpl: unlinkImpl as unknown as typeof import("node:fs/promises").unlink, - qrDecodeImpl: () => "clwpair1_ticket", + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + qrDecodeImpl: () => qrTicket, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), getConfigDirImpl: () => "/tmp/.clawdentity", }, ); expect(result.paired).toBe(true); expect(result.proxyUrl).toBe("https://beta.proxy.example/"); - const [, init] = fetchImpl.mock.calls[0] as [string, RequestInit]; + expect(result.peerAlias).toBe("peer-11111111"); + const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; const headers = new Headers(init?.headers); expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); expect(headers.get("x-claw-proof")).toBeTruthy(); 
@@ -238,9 +376,257 @@ describe("pair command helpers", () => { expect(headers.get("x-claw-owner-pat")).toBeNull(); expect(headers.get("x-claw-timestamp")).toBe("1700000000"); expect(headers.get("x-claw-nonce")).toBe("nonce-confirm"); - expect(String(init?.body ?? "")).toContain("clwpair1_ticket"); + expect(String(init?.body ?? "")).toContain(qrTicket); + expect(String(init?.body ?? "")).toContain("responderProfile"); expect(unlinkImpl).toHaveBeenCalledTimes(1); expect(unlinkImpl).toHaveBeenCalledWith("/tmp/pair.png"); + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(chmodImpl).toHaveBeenCalledTimes(1); + }); + + it("syncs OpenClaw relay peers snapshot after pair confirm", async () => { + const fixture = await createPairFixture(); + const runtimeConfigPath = "/tmp/.clawdentity/openclaw-relay.json"; + const relayPeersPath = + "/tmp/.openclaw/hooks/transforms/clawdentity-peers.json"; + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + + const readFileImpl = vi.fn( + async (filePath: string, _encoding?: BufferEncoding) => { + if (filePath.endsWith("/ait.jwt")) { + return fixture.ait; + } + + if (filePath.endsWith("/secret.key")) { + return fixture.secretKeyBase64url; + } + + if (filePath === runtimeConfigPath) { + return JSON.stringify({ + openclawBaseUrl: "http://127.0.0.1:18789", + relayTransformPeersPath: relayPeersPath, + }); + } + + if (filePath === relayPeersPath) { + return JSON.stringify({ peers: {} }); + } + + throw buildErrnoError("ENOENT"); + }, + ); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://beta.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + paired: true, 
+ initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + }, + { status: 201 }, + ); + }); + + const result = await confirmPairing( + "beta", + { + ticket, + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: + readFileImpl as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.paired).toBe(true); + expect(result.peerAlias).toBe("peer-11111111"); + expect(writeFileImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/peers.json", + expect.any(String), + "utf8", + ); + expect(writeFileImpl).toHaveBeenCalledWith( + relayPeersPath, + expect.any(String), + "utf8", + ); + expect(mkdirImpl).toHaveBeenCalledTimes(2); + expect(chmodImpl).toHaveBeenCalledTimes(2); + }); + + it("checks pending pair status without persisting peers", async () => { + const fixture = await createPairFixture(); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); 
+ } + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + const result = await getPairingStatus( + "alpha", + { + ticket, + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.status).toBe("pending"); + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); + expect(result.peerAlias).toBeUndefined(); + expect(writeFileImpl).toHaveBeenCalledTimes(0); + expect(mkdirImpl).toHaveBeenCalledTimes(0); + expect(chmodImpl).toHaveBeenCalledTimes(0); + }); + + it("polls pair status until confirmed and persists peer for initiator", async () => { + const fixture = await createPairFixture(); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const sleepImpl = vi.fn(async () => undefined); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const statusResponses = [ + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { + status: "confirmed", + initiatorAgentDid: 
"did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + confirmedAt: "2026-02-18T00:00:05.000Z", + }, + ]; + let statusIndex = 0; + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + const payload = + statusResponses[Math.min(statusIndex, statusResponses.length - 1)]; + statusIndex += 1; + return Response.json(payload, { status: 200 }); + }); + + const nowSequence = [1_700_000_000, 1_700_000_001, 1_700_000_002]; + const result = await getPairingStatus( + "alpha", + { + ticket, + wait: true, + waitSeconds: "10", + pollIntervalSeconds: "1", + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => nowSequence.shift() ?? 1_700_000_003, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + sleepImpl, + }, + ); + + expect(result.status).toBe("confirmed"); + expect(result.peerAlias).toBe("peer-22222222"); + expect(sleepImpl).toHaveBeenCalledTimes(1); + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(mkdirImpl).toHaveBeenCalledTimes(1); + expect(chmodImpl).toHaveBeenCalledTimes(1); }); }); @@ -302,15 +688,27 @@ describe("pair command output", () => { it("prints pairing ticket from pair start", async () => { const fixture = 
await createPairFixture(); const command = createPairCommand({ - fetchImpl: (async () => - Response.json( + fetchImpl: (async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( { initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, ticket: "clwpair1_eyJ2IjoxfQ", expiresAt: "2026-02-18T00:00:00.000Z", }, { status: 200 }, - )) as unknown as typeof fetch, + ); + }) as unknown as typeof fetch, nowSecondsImpl: () => 1_700_000_000, nonceFactoryImpl: () => "nonce-start", qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), @@ -324,20 +722,131 @@ describe("pair command output", () => { async () => undefined, ) as unknown as typeof import("node:fs/promises").mkdir, resolveConfigImpl: async () => ({ - registryUrl: "https://dev.api.clawdentity.com/", + registryUrl: "https://dev.registry.clawdentity.com/", apiKey: "clw_pat_configured", + humanName: INITIATOR_PROFILE.humanName, }), getConfigDirImpl: () => "/tmp/.clawdentity", }); - const result = await runPairCommand( - ["start", "alpha", "--proxy-url", "https://alpha.proxy.example", "--qr"], - command, - ); + const result = await runPairCommand(["start", "alpha", "--qr"], command); expect(result.exitCode).toBeUndefined(); expect(result.stdout).toContain("Pairing ticket created"); expect(result.stdout).toContain("Ticket: clwpair1_eyJ2IjoxfQ"); expect(result.stdout).toContain("QR File: "); }); + + it("prints saved peer alias from pair confirm", async () => { + const fixture = await createPairFixture(); + const qrTicket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const command = createPairCommand({ + fetchImpl: (async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://beta.proxy.example", + 
}, + { status: 200 }, + ); + } + + return Response.json( + { + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + }, + { status: 201 }, + ); + }) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").chmod, + unlinkImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").unlink, + qrDecodeImpl: () => qrTicket, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }); + + const result = await runPairCommand( + ["confirm", "beta", "--qr-file", "/tmp/pair.png"], + command, + ); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Pairing confirmed"); + expect(result.stdout).toContain("Peer alias saved: peer-11111111"); + }); + + it("prints pairing status from pair status", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const command = createPairCommand({ + fetchImpl: (async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + status: "pending", + 
initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }); + + const result = await runPairCommand( + ["status", "alpha", "--ticket", ticket], + command, + ); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Status: pending"); + expect(result.stdout).toContain( + "Initiator Agent DID: did:claw:agent:01HAAA11111111111111111111", + ); + }); }); diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 1554bfe..7ac5dbb 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -1,7 +1,14 @@ import { randomBytes } from "node:crypto"; -import { mkdir, readdir, readFile, unlink, writeFile } from "node:fs/promises"; +import { + chmod, + mkdir, + readdir, + readFile, + unlink, + writeFile, +} from "node:fs/promises"; import { dirname, join, resolve } from "node:path"; -import { decodeBase64url } from "@clawdentity/protocol"; +import { decodeBase64url, parseDid } from "@clawdentity/protocol"; import { AppError, createLogger, signHttpRequest } from "@clawdentity/sdk"; import { Command } from "commander"; import jsQR from "jsqr"; @@ -12,6 +19,7 @@ import { getConfigDir, resolveConfig, } from "../config/manager.js"; +import { fetchRegistryMetadata } from "../config/registry-metadata.js"; import { writeStdoutLine } from "../io.js"; import { assertValidAgentName } from "./agent-name.js"; import { withErrorHandling } from "./helpers.js"; @@ -22,29 +30,43 @@ const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; const 
SECRET_KEY_FILE_NAME = "secret.key"; const PAIRING_QR_DIR_NAME = "pairing"; +const PEERS_FILE_NAME = "peers.json"; +const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; const PAIR_START_PATH = "/pair/start"; const PAIR_CONFIRM_PATH = "/pair/confirm"; -const OWNER_PAT_HEADER = "x-claw-owner-pat"; +const PAIR_STATUS_PATH = "/pair/status"; const NONCE_SIZE = 24; const PAIRING_TICKET_PREFIX = "clwpair1_"; const PAIRING_QR_MAX_AGE_SECONDS = 900; const PAIRING_QR_FILENAME_PATTERN = /-pair-(\d+)\.png$/; +const FILE_MODE = 0o600; +const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; +const DEFAULT_STATUS_WAIT_SECONDS = 300; +const DEFAULT_STATUS_POLL_INTERVAL_SECONDS = 3; +const MAX_PROFILE_NAME_LENGTH = 64; export type PairStartOptions = { - ownerPat?: string; - proxyUrl?: string; ttlSeconds?: string; qr?: boolean; qrOutput?: string; + wait?: boolean; + waitSeconds?: string; + pollIntervalSeconds?: string; }; export type PairConfirmOptions = { - proxyUrl?: string; qrFile?: string; ticket?: string; }; +export type PairStatusOptions = { + ticket?: string; + wait?: boolean; + waitSeconds?: string; + pollIntervalSeconds?: string; +}; + type PairRequestOptions = { fetchImpl?: typeof fetch; getConfigDirImpl?: typeof getConfigDir; @@ -52,9 +74,11 @@ type PairRequestOptions = { nonceFactoryImpl?: () => string; readFileImpl?: typeof readFile; writeFileImpl?: typeof writeFile; + chmodImpl?: typeof chmod; mkdirImpl?: typeof mkdir; readdirImpl?: typeof readdir; unlinkImpl?: typeof unlink; + sleepImpl?: (ms: number) => Promise; resolveConfigImpl?: () => Promise; qrEncodeImpl?: (ticket: string) => Promise; qrDecodeImpl?: (imageBytes: Uint8Array) => string; @@ -64,6 +88,7 @@ type PairCommandDependencies = PairRequestOptions; type PairStartResult = { initiatorAgentDid: string; + initiatorProfile: PeerProfile; ticket: string; expiresAt: string; proxyUrl: string; @@ -73,8 +98,23 @@ type PairStartResult = { type PairConfirmResult = { paired: boolean; initiatorAgentDid: string; + 
initiatorProfile: PeerProfile; responderAgentDid: string; + responderProfile: PeerProfile; + proxyUrl: string; + peerAlias?: string; +}; + +type PairStatusResult = { + status: "pending" | "confirmed"; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid?: string; + responderProfile?: PeerProfile; + expiresAt: string; + confirmedAt?: string; proxyUrl: string; + peerAlias?: string; }; type RegistryErrorEnvelope = { @@ -84,11 +124,27 @@ type RegistryErrorEnvelope = { }; }; +type PeerEntry = { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; +}; + +type PeersConfig = { + peers: Record; +}; + type LocalAgentProofMaterial = { ait: string; secretKey: Uint8Array; }; +type PeerProfile = { + agentName: string; + humanName: string; +}; + const isRecord = (value: unknown): value is Record => { return typeof value === "object" && value !== null; }; @@ -109,6 +165,60 @@ function parseNonEmptyString(value: unknown): string { return value.trim(); } +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function parseProfileName( + value: unknown, + label: "agentName" | "humanName", +): string { + const candidate = parseNonEmptyString(value); + if (candidate.length === 0) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + `${label} is required for pairing`, + ); + } + + if (candidate.length > MAX_PROFILE_NAME_LENGTH) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + `${label} must be at most ${MAX_PROFILE_NAME_LENGTH} characters`, + ); + } + + if (hasControlChars(candidate)) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + `${label} contains control characters`, + ); + } + + return candidate; +} + +function parsePeerProfile(payload: unknown): PeerProfile { + if (!isRecord(payload)) { + throw createCliError( + 
"CLI_PAIR_PROFILE_INVALID", + "Pair profile must be an object", + ); + } + + return { + agentName: parseProfileName(payload.agentName, "agentName"), + humanName: parseProfileName(payload.humanName, "humanName"), + }; +} + function parsePairingTicket(value: unknown): string { const ticket = parseNonEmptyString(value); if (!ticket.startsWith(PAIRING_TICKET_PREFIX)) { @@ -121,6 +231,386 @@ function parsePairingTicket(value: unknown): string { return ticket; } +function parsePairingTicketIssuerOrigin(ticket: string): string { + const encodedPayload = ticket.slice(PAIRING_TICKET_PREFIX.length); + if (encodedPayload.length === 0) { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + let payloadRaw: string; + try { + payloadRaw = new TextDecoder().decode(decodeBase64url(encodedPayload)); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + let payload: unknown; + try { + payload = JSON.parse(payloadRaw); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + if (!isRecord(payload) || typeof payload.iss !== "string") { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + let issuerUrl: URL; + try { + issuerUrl = new URL(payload.iss); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + if (issuerUrl.protocol !== "https:" && issuerUrl.protocol !== "http:") { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + return issuerUrl.origin; +} + +function parseAitAgentDid(ait: string): string { + const parts = ait.split("."); + if (parts.length < 2) { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. 
Recreate the agent before pairing.", + ); + } + + let payloadRaw: string; + try { + payloadRaw = new TextDecoder().decode(decodeBase64url(parts[1] ?? "")); + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + let payload: unknown; + try { + payload = JSON.parse(payloadRaw); + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + if (!isRecord(payload) || typeof payload.sub !== "string") { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + const candidate = payload.sub.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + throw new Error("invalid kind"); + } + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + return candidate; +} + +function parsePeerAlias(value: string): string { + if (value.length === 0 || value.length > 128) { + throw createCliError( + "CLI_PAIR_PEER_ALIAS_INVALID", + "Generated peer alias is invalid", + ); + } + + if (!PEER_ALIAS_PATTERN.test(value)) { + throw createCliError( + "CLI_PAIR_PEER_ALIAS_INVALID", + "Generated peer alias is invalid", + ); + } + + return value; +} + +function derivePeerAliasBase(peerDid: string): string { + try { + const parsed = parseDid(peerDid); + if (parsed.kind === "agent") { + return parsePeerAlias(`peer-${parsed.ulid.slice(-8).toLowerCase()}`); + } + } catch { + // Fall through to generic alias. 
+ } + + return "peer"; +} + +function resolvePeerAlias(input: { + peers: Record; + peerDid: string; +}): string { + for (const [alias, entry] of Object.entries(input.peers)) { + if (entry.did === input.peerDid) { + return alias; + } + } + + const baseAlias = derivePeerAliasBase(input.peerDid); + if (input.peers[baseAlias] === undefined) { + return baseAlias; + } + + let index = 2; + while (input.peers[`${baseAlias}-${index}`] !== undefined) { + index += 1; + } + + return `${baseAlias}-${index}`; +} + +function resolvePeersConfigPath(getConfigDirImpl: typeof getConfigDir): string { + return join(getConfigDirImpl(), PEERS_FILE_NAME); +} + +function parsePeerEntry(value: unknown): PeerEntry { + if (!isRecord(value)) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer entry must be an object", + ); + } + + const did = parseNonEmptyString(value.did); + const proxyUrl = parseNonEmptyString(value.proxyUrl); + if (did.length === 0 || proxyUrl.length === 0) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer entry is invalid", + ); + } + + const agentNameRaw = parseNonEmptyString(value.agentName); + const humanNameRaw = parseNonEmptyString(value.humanName); + + const entry: PeerEntry = { + did, + proxyUrl, + }; + if (agentNameRaw.length > 0) { + entry.agentName = parseProfileName(agentNameRaw, "agentName"); + } + if (humanNameRaw.length > 0) { + entry.humanName = parseProfileName(humanNameRaw, "humanName"); + } + return entry; +} + +async function loadPeersConfig(input: { + getConfigDirImpl: typeof getConfigDir; + readFileImpl: typeof readFile; +}): Promise { + const peersPath = resolvePeersConfigPath(input.getConfigDirImpl); + let raw: string; + try { + raw = await input.readFileImpl(peersPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return { peers: {} }; + } + + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + throw 
createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer config is not valid JSON", + ); + } + + if (!isRecord(parsed)) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer config must be a JSON object", + ); + } + + if (parsed.peers === undefined) { + return { peers: {} }; + } + + if (!isRecord(parsed.peers)) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer config peers field must be an object", + ); + } + + const peers: Record = {}; + for (const [alias, value] of Object.entries(parsed.peers)) { + peers[parsePeerAlias(alias)] = parsePeerEntry(value); + } + + return { peers }; +} + +async function savePeersConfig(input: { + config: PeersConfig; + getConfigDirImpl: typeof getConfigDir; + mkdirImpl: typeof mkdir; + writeFileImpl: typeof writeFile; + chmodImpl: typeof chmod; +}): Promise { + const peersPath = resolvePeersConfigPath(input.getConfigDirImpl); + await input.mkdirImpl(dirname(peersPath), { recursive: true }); + await input.writeFileImpl( + peersPath, + `${JSON.stringify(input.config, null, 2)}\n`, + "utf8", + ); + await input.chmodImpl(peersPath, FILE_MODE); +} + +function resolveRelayRuntimeConfigPath( + getConfigDirImpl: typeof getConfigDir, +): string { + return join(getConfigDirImpl(), OPENCLAW_RELAY_RUNTIME_FILE_NAME); +} + +async function loadRelayTransformPeersPath(input: { + getConfigDirImpl: typeof getConfigDir; + readFileImpl: typeof readFile; +}): Promise { + const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath( + input.getConfigDirImpl, + ); + let raw: string; + try { + raw = await input.readFileImpl(relayRuntimeConfigPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return undefined; + } + + logger.warn("cli.pair.relay_runtime_read_failed", { + relayRuntimeConfigPath, + reason: + error instanceof Error && error.message.length > 0 + ? 
error.message + : "unknown", + }); + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + logger.warn("cli.pair.relay_runtime_invalid_json", { + relayRuntimeConfigPath, + }); + return undefined; + } + + if (!isRecord(parsed)) { + return undefined; + } + + const relayTransformPeersPath = parseNonEmptyString( + parsed.relayTransformPeersPath, + ); + if (relayTransformPeersPath.length === 0) { + return undefined; + } + + return resolve(relayTransformPeersPath); +} + +async function syncOpenclawRelayPeersSnapshot(input: { + config: PeersConfig; + getConfigDirImpl: typeof getConfigDir; + readFileImpl: typeof readFile; + mkdirImpl: typeof mkdir; + writeFileImpl: typeof writeFile; + chmodImpl: typeof chmod; +}): Promise { + const relayTransformPeersPath = await loadRelayTransformPeersPath({ + getConfigDirImpl: input.getConfigDirImpl, + readFileImpl: input.readFileImpl, + }); + if (relayTransformPeersPath === undefined) { + return; + } + + try { + await input.readFileImpl(relayTransformPeersPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + logger.warn("cli.pair.relay_peers_snapshot_probe_failed", { + relayTransformPeersPath, + reason: + error instanceof Error && error.message.length > 0 + ? error.message + : "unknown", + }); + return; + } + + try { + await input.mkdirImpl(dirname(relayTransformPeersPath), { + recursive: true, + }); + await input.writeFileImpl( + relayTransformPeersPath, + `${JSON.stringify(input.config, null, 2)}\n`, + "utf8", + ); + await input.chmodImpl(relayTransformPeersPath, FILE_MODE); + } catch (error) { + logger.warn("cli.pair.relay_peers_snapshot_write_failed", { + relayTransformPeersPath, + reason: + error instanceof Error && error.message.length > 0 + ? 
error.message + : "unknown", + }); + } +} + function parseTtlSeconds(value: string | undefined): number | undefined { const raw = parseNonEmptyString(value); if (raw.length === 0) { @@ -138,18 +628,46 @@ function parseTtlSeconds(value: string | undefined): number | undefined { return parsed; } -function resolveProxyUrl(overrideProxyUrl: string | undefined): string { - const candidate = - parseNonEmptyString(overrideProxyUrl) || - parseNonEmptyString(process.env.CLAWDENTITY_PROXY_URL); +function parsePositiveIntegerOption(input: { + value: string | undefined; + optionName: string; + defaultValue: number; +}): number { + const raw = parseNonEmptyString(input.value); + if (raw.length === 0) { + return input.defaultValue; + } - if (candidate.length === 0) { + const parsed = Number.parseInt(raw, 10); + if (!Number.isInteger(parsed) || parsed < 1) { throw createCliError( - "CLI_PAIR_PROXY_URL_REQUIRED", - "Proxy URL is required. Pass --proxy-url or set CLAWDENTITY_PROXY_URL.", + "CLI_PAIR_STATUS_WAIT_INVALID", + `${input.optionName} must be a positive integer`, ); } + return parsed; +} + +function resolveLocalPairProfile(input: { + config: CliConfig; + agentName: string; +}): PeerProfile { + const humanName = parseNonEmptyString(input.config.humanName); + if (humanName.length === 0) { + throw createCliError( + "CLI_PAIR_HUMAN_NAME_MISSING", + "Human name is missing. 
Run `clawdentity invite redeem --display-name ` or `clawdentity config set humanName `.", + ); + } + + return { + agentName: parseProfileName(input.agentName, "agentName"), + humanName: parseProfileName(humanName, "humanName"), + }; +} + +function parseProxyUrl(candidate: string): string { try { const parsed = new URL(candidate); if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { @@ -162,6 +680,36 @@ function resolveProxyUrl(overrideProxyUrl: string | undefined): string { } } +async function resolveProxyUrl(input: { + config: CliConfig; + fetchImpl: typeof fetch; +}): Promise { + const fromEnv = parseNonEmptyString(process.env.CLAWDENTITY_PROXY_URL); + if (fromEnv.length > 0) { + return parseProxyUrl(fromEnv); + } + + const metadata = await fetchRegistryMetadata(input.config.registryUrl, { + fetchImpl: input.fetchImpl, + }); + const metadataProxyUrl = parseProxyUrl(metadata.proxyUrl); + + const configuredProxyUrl = parseNonEmptyString(input.config.proxyUrl); + if (configuredProxyUrl.length === 0) { + return metadataProxyUrl; + } + + const normalizedConfiguredProxyUrl = parseProxyUrl(configuredProxyUrl); + if (normalizedConfiguredProxyUrl === metadataProxyUrl) { + return metadataProxyUrl; + } + + throw createCliError( + "CLI_PAIR_PROXY_URL_MISMATCH", + `Configured proxy URL does not match registry metadata. config=${normalizedConfiguredProxyUrl} metadata=${metadataProxyUrl}. Rerun onboarding invite redeem to refresh config.`, + ); +} + function toProxyRequestUrl(proxyUrl: string, path: string): string { const normalizedBase = proxyUrl.endsWith("/") ? 
proxyUrl : `${proxyUrl}/`; return new URL(path.slice(1), normalizedBase).toString(); @@ -227,16 +775,10 @@ function mapStartPairError(status: number, payload: unknown): string { const code = extractErrorCode(payload); const message = extractErrorMessage(payload); - if (code === "PROXY_PAIR_OWNER_PAT_INVALID" || status === 401) { + if (code === "PROXY_PAIR_OWNERSHIP_FORBIDDEN" || status === 403) { return message - ? `Owner PAT is invalid (401): ${message}` - : "Owner PAT is invalid or expired (401)."; - } - - if (code === "PROXY_PAIR_OWNER_PAT_FORBIDDEN" || status === 403) { - return message - ? `Owner PAT does not control initiator agent DID (403): ${message}` - : "Owner PAT does not control initiator agent DID (403)."; + ? `Initiator agent ownership check failed (403): ${message}` + : "Initiator agent ownership check failed (403)."; } if (status === 400) { @@ -270,8 +812,43 @@ function mapConfirmPairError(status: number, payload: unknown): string { if (status === 400) { return message - ? `Pair confirm request is invalid (400): ${message}` - : "Pair confirm request is invalid (400)."; + ? `Pair confirm request is invalid (400): ${message}` + : "Pair confirm request is invalid (400)."; + } + + if (status >= 500) { + return `Proxy pairing service is unavailable (${status}).`; + } + + if (message) { + return `Pair confirm failed (${status}): ${message}`; + } + + return `Pair confirm failed (${status})`; +} + +function mapStatusPairError(status: number, payload: unknown): string { + const code = extractErrorCode(payload); + const message = extractErrorMessage(payload); + + if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { + return "Pairing ticket not found"; + } + + if (code === "PROXY_PAIR_TICKET_EXPIRED" || status === 410) { + return "Pairing ticket has expired"; + } + + if (code === "PROXY_PAIR_STATUS_FORBIDDEN" || status === 403) { + return message + ? 
`Pair status request is forbidden (403): ${message}` + : "Pair status request is forbidden (403)."; + } + + if (status === 400) { + return message + ? `Pair status request is invalid (400): ${message}` + : "Pair status request is invalid (400)."; } if (status >= 500) { @@ -279,10 +856,10 @@ function mapConfirmPairError(status: number, payload: unknown): string { } if (message) { - return `Pair confirm failed (${status}): ${message}`; + return `Pair status failed (${status}): ${message}`; } - return `Pair confirm failed (${status})`; + return `Pair status failed (${status})`; } function parsePairStartResponse( @@ -298,6 +875,7 @@ function parsePairStartResponse( const ticket = parsePairingTicket(payload.ticket); const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); const expiresAt = parseNonEmptyString(payload.expiresAt); + let initiatorProfile: PeerProfile; if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { throw createCliError( @@ -305,10 +883,19 @@ function parsePairStartResponse( "Pair start response is invalid", ); } + try { + initiatorProfile = parsePeerProfile(payload.initiatorProfile); + } catch { + throw createCliError( + "CLI_PAIR_START_INVALID_RESPONSE", + "Pair start response is invalid", + ); + } return { ticket, initiatorAgentDid, + initiatorProfile, expiresAt, }; } @@ -326,6 +913,8 @@ function parsePairConfirmResponse( const paired = payload.paired === true; const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); + let initiatorProfile: PeerProfile; + let responderProfile: PeerProfile; if ( !paired || @@ -337,11 +926,98 @@ function parsePairConfirmResponse( "Pair confirm response is invalid", ); } + try { + initiatorProfile = parsePeerProfile(payload.initiatorProfile); + responderProfile = parsePeerProfile(payload.responderProfile); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_INVALID_RESPONSE", + "Pair confirm 
response is invalid", + ); + } return { paired, initiatorAgentDid, responderAgentDid, + initiatorProfile, + responderProfile, + }; +} + +function parsePairStatusResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + const statusRaw = parseNonEmptyString(payload.status); + if (statusRaw !== "pending" && statusRaw !== "confirmed") { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); + const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); + const expiresAt = parseNonEmptyString(payload.expiresAt); + const confirmedAt = parseNonEmptyString(payload.confirmedAt); + let initiatorProfile: PeerProfile; + + if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + if (statusRaw === "confirmed" && responderAgentDid.length === 0) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + try { + initiatorProfile = parsePeerProfile(payload.initiatorProfile); + } catch { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + let responderProfile: PeerProfile | undefined; + if (payload.responderProfile !== undefined) { + try { + responderProfile = parsePeerProfile(payload.responderProfile); + } catch { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + } + if (statusRaw === "confirmed" && responderProfile === undefined) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + return { + status: statusRaw, + initiatorAgentDid, + initiatorProfile, + responderAgentDid: 
+ responderAgentDid.length > 0 ? responderAgentDid : undefined, + responderProfile, + expiresAt, + confirmedAt: confirmedAt.length > 0 ? confirmedAt : undefined, }; } @@ -421,24 +1097,6 @@ async function readAgentProofMaterial( }; } -function resolveOwnerPat(options: { - explicitOwnerPat: string | undefined; - config: CliConfig; -}): string { - const ownerPat = - parseNonEmptyString(options.explicitOwnerPat) || - parseNonEmptyString(options.config.apiKey); - - if (ownerPat.length > 0) { - return ownerPat; - } - - throw createCliError( - "CLI_PAIR_START_OWNER_PAT_REQUIRED", - "Owner PAT is required. Pass --owner-pat or configure API key with `clawdentity invite redeem` / `clawdentity config set apiKey `.", - ); -} - async function buildSignedHeaders(input: { bodyBytes?: Uint8Array; method: string; @@ -600,6 +1258,53 @@ function resolveConfirmTicketSource(options: PairConfirmOptions): { ); } +async function persistPairedPeer(input: { + ticket: string; + peerDid: string; + peerProfile: PeerProfile; + dependencies: PairRequestOptions; +}): Promise { + const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; + const readFileImpl = input.dependencies.readFileImpl ?? readFile; + const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; + const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; + const chmodImpl = input.dependencies.chmodImpl ?? 
chmod; + + const issuerOrigin = parsePairingTicketIssuerOrigin(input.ticket); + const peerProxyUrl = new URL("/hooks/agent", `${issuerOrigin}/`).toString(); + const peersConfig = await loadPeersConfig({ + getConfigDirImpl, + readFileImpl, + }); + const alias = resolvePeerAlias({ + peers: peersConfig.peers, + peerDid: input.peerDid, + }); + peersConfig.peers[alias] = { + did: input.peerDid, + proxyUrl: peerProxyUrl, + agentName: input.peerProfile.agentName, + humanName: input.peerProfile.humanName, + }; + await savePeersConfig({ + config: peersConfig, + getConfigDirImpl, + mkdirImpl, + writeFileImpl, + chmodImpl, + }); + await syncOpenclawRelayPeersSnapshot({ + config: peersConfig, + getConfigDirImpl, + readFileImpl, + mkdirImpl, + writeFileImpl, + chmodImpl, + }); + + return alias; +} + export async function startPairing( agentName: string, options: PairStartOptions, @@ -614,22 +1319,26 @@ export async function startPairing( (() => randomBytes(NONCE_SIZE).toString("base64url")); const ttlSeconds = parseTtlSeconds(options.ttlSeconds); - const proxyUrl = resolveProxyUrl(options.proxyUrl); - const config = await resolveConfigImpl(); - const ownerPat = resolveOwnerPat({ - explicitOwnerPat: options.ownerPat, + const proxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); + const normalizedAgentName = assertValidAgentName(agentName); + const initiatorProfile = resolveLocalPairProfile({ config, + agentName: normalizedAgentName, }); const { ait, secretKey } = await readAgentProofMaterial( - agentName, + normalizedAgentName, dependencies, ); const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_START_PATH); const requestBody = JSON.stringify({ ttlSeconds, + initiatorProfile, }); const bodyBytes = new TextEncoder().encode(requestBody); @@ -652,7 +1361,6 @@ export async function startPairing( headers: { authorization: `Claw ${ait}`, "content-type": "application/json", - [OWNER_PAT_HEADER]: ownerPat, ...signedHeaders, }, body: requestBody, @@ -693,6 +1401,7 @@ export 
async function confirmPairing( dependencies: PairRequestOptions = {}, ): Promise { const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; const nowSecondsImpl = dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); const nonceFactoryImpl = @@ -700,9 +1409,18 @@ export async function confirmPairing( (() => randomBytes(NONCE_SIZE).toString("base64url")); const readFileImpl = dependencies.readFileImpl ?? readFile; const qrDecodeImpl = dependencies.qrDecodeImpl ?? decodeTicketFromPng; + const config = await resolveConfigImpl(); + const normalizedAgentName = assertValidAgentName(agentName); + const responderProfile = resolveLocalPairProfile({ + config, + agentName: normalizedAgentName, + }); const ticketSource = resolveConfirmTicketSource(options); - const proxyUrl = resolveProxyUrl(options.proxyUrl); + const proxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); let ticket = ticketSource.ticket; if (ticketSource.source === "qr-file") { @@ -732,12 +1450,15 @@ export async function confirmPairing( } const { ait, secretKey } = await readAgentProofMaterial( - agentName, + normalizedAgentName, dependencies, ); const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_CONFIRM_PATH); - const requestBody = JSON.stringify({ ticket }); + const requestBody = JSON.stringify({ + ticket, + responderProfile, + }); const bodyBytes = new TextEncoder().encode(requestBody); const timestampSeconds = nowSecondsImpl(); @@ -775,6 +1496,13 @@ export async function confirmPairing( } const parsed = parsePairConfirmResponse(responseBody); + const peerAlias = await persistPairedPeer({ + ticket, + peerDid: parsed.initiatorAgentDid, + peerProfile: parsed.initiatorProfile, + dependencies, + }); + if (ticketSource.source === "qr-file" && ticketSource.qrFilePath) { const unlinkImpl = dependencies.unlinkImpl ?? 
unlink; await unlinkImpl(ticketSource.qrFilePath).catch((error) => { @@ -796,9 +1524,202 @@ export async function confirmPairing( return { ...parsed, proxyUrl, + peerAlias, + }; +} + +async function getPairingStatusOnce( + agentName: string, + options: { ticket: string }, + dependencies: PairRequestOptions = {}, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; + const nowSecondsImpl = + dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nonceFactoryImpl = + dependencies.nonceFactoryImpl ?? + (() => randomBytes(NONCE_SIZE).toString("base64url")); + const config = await resolveConfigImpl(); + const proxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); + + const ticket = parsePairingTicket(options.ticket); + const { ait, secretKey } = await readAgentProofMaterial( + agentName, + dependencies, + ); + const callerAgentDid = parseAitAgentDid(ait); + + const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_STATUS_PATH); + const requestBody = JSON.stringify({ ticket }); + const bodyBytes = new TextEncoder().encode(requestBody); + const timestampSeconds = nowSecondsImpl(); + const nonce = nonceFactoryImpl(); + const signedHeaders = await buildSignedHeaders({ + method: "POST", + requestUrl, + bodyBytes, + secretKey, + timestampSeconds, + nonce, + }); + + const response = await executePairRequest({ + fetchImpl, + url: requestUrl, + init: { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signedHeaders, + }, + body: requestBody, + }, + }); + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_PAIR_STATUS_FAILED", + mapStatusPairError(response.status, responseBody), + ); + } + + const parsed = parsePairStatusResponse(responseBody); + let peerAlias: string | undefined; + if (parsed.status === "confirmed") { + const responderAgentDid = 
parsed.responderAgentDid; + if (!responderAgentDid) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + const peerDid = + callerAgentDid === parsed.initiatorAgentDid + ? responderAgentDid + : callerAgentDid === responderAgentDid + ? parsed.initiatorAgentDid + : undefined; + const peerProfile = + callerAgentDid === parsed.initiatorAgentDid + ? parsed.responderProfile + : callerAgentDid === responderAgentDid + ? parsed.initiatorProfile + : undefined; + if (!peerDid) { + throw createCliError( + "CLI_PAIR_STATUS_FORBIDDEN", + "Local agent is not a participant in the pairing ticket", + ); + } + if (!peerProfile) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + peerAlias = await persistPairedPeer({ + ticket, + peerDid, + peerProfile, + dependencies, + }); + } + + return { + ...parsed, + proxyUrl, + peerAlias, }; } +async function waitForPairingStatus(input: { + agentName: string; + ticket: string; + waitSeconds: number; + pollIntervalSeconds: number; + dependencies: PairRequestOptions; +}): Promise { + const nowSecondsImpl = + input.dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const sleepImpl = + input.dependencies.sleepImpl ?? 
+ (async (ms: number) => { + await new Promise((resolve) => { + setTimeout(resolve, ms); + }); + }); + + const deadlineSeconds = nowSecondsImpl() + input.waitSeconds; + while (true) { + const status = await getPairingStatusOnce( + input.agentName, + { ticket: input.ticket }, + input.dependencies, + ); + + if (status.status === "confirmed") { + return status; + } + + const nowSeconds = nowSecondsImpl(); + if (nowSeconds >= deadlineSeconds) { + throw createCliError( + "CLI_PAIR_STATUS_WAIT_TIMEOUT", + `Pairing is still pending after ${input.waitSeconds} seconds`, + ); + } + + const remainingSeconds = Math.max(0, deadlineSeconds - nowSeconds); + const sleepSeconds = Math.min(input.pollIntervalSeconds, remainingSeconds); + await sleepImpl(sleepSeconds * 1000); + } +} + +export async function getPairingStatus( + agentName: string, + options: PairStatusOptions, + dependencies: PairRequestOptions = {}, +): Promise { + const ticketRaw = parseNonEmptyString(options.ticket); + if (ticketRaw.length === 0) { + throw createCliError( + "CLI_PAIR_STATUS_TICKET_REQUIRED", + "Pair status requires --ticket ", + ); + } + const ticket = parsePairingTicket(ticketRaw); + + if (options.wait !== true) { + return getPairingStatusOnce(agentName, { ticket }, dependencies); + } + + const waitSeconds = parsePositiveIntegerOption({ + value: options.waitSeconds, + optionName: "waitSeconds", + defaultValue: DEFAULT_STATUS_WAIT_SECONDS, + }); + const pollIntervalSeconds = parsePositiveIntegerOption({ + value: options.pollIntervalSeconds, + optionName: "pollIntervalSeconds", + defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + }); + + return waitForPairingStatus({ + agentName, + ticket, + waitSeconds, + pollIntervalSeconds, + dependencies, + }); +} + export const createPairCommand = ( dependencies: PairCommandDependencies = {}, ): Command => { @@ -809,17 +1730,21 @@ export const createPairCommand = ( pairCommand .command("start ") .description("Start pairing and issue one-time pairing ticket") + 
.option("--ttl-seconds ", "Pairing ticket expiry in seconds") + .option("--qr", "Generate a local QR file for sharing") + .option("--qr-output ", "Write QR PNG to a specific file path") .option( - "--proxy-url ", - "Initiator proxy base URL (or set CLAWDENTITY_PROXY_URL)", + "--wait", + "Wait for responder confirmation and auto-save peer on initiator", ) .option( - "--owner-pat ", - "Owner PAT override (defaults to configured API key)", + "--wait-seconds ", + "Max seconds to poll for confirmation (default: 300)", + ) + .option( + "--poll-interval-seconds ", + "Polling interval in seconds while waiting (default: 3)", ) - .option("--ttl-seconds ", "Pairing ticket expiry in seconds") - .option("--qr", "Generate a local QR file for sharing") - .option("--qr-output ", "Write QR PNG to a specific file path") .action( withErrorHandling( "pair start", @@ -836,10 +1761,71 @@ export const createPairCommand = ( writeStdoutLine("Pairing ticket created"); writeStdoutLine(`Ticket: ${result.ticket}`); writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); writeStdoutLine(`Expires At: ${result.expiresAt}`); if (result.qrPath) { writeStdoutLine(`QR File: ${result.qrPath}`); } + + if (options.wait === true) { + const waitSeconds = parsePositiveIntegerOption({ + value: options.waitSeconds, + optionName: "waitSeconds", + defaultValue: DEFAULT_STATUS_WAIT_SECONDS, + }); + const pollIntervalSeconds = parsePositiveIntegerOption({ + value: options.pollIntervalSeconds, + optionName: "pollIntervalSeconds", + defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + }); + + writeStdoutLine( + `Waiting for confirmation (timeout=${waitSeconds}s, interval=${pollIntervalSeconds}s) ...`, + ); + + const status = await waitForPairingStatus({ + agentName, + ticket: result.ticket, + waitSeconds, + pollIntervalSeconds, 
+ dependencies, + }); + + logger.info("cli.pair_status_confirmed_after_start", { + initiatorAgentDid: status.initiatorAgentDid, + responderAgentDid: status.responderAgentDid, + peerAlias: status.peerAlias, + }); + + writeStdoutLine("Pairing confirmed"); + writeStdoutLine(`Status: ${status.status}`); + if (status.initiatorAgentDid) { + writeStdoutLine( + `Initiator Agent DID: ${status.initiatorAgentDid}`, + ); + } + if (status.responderAgentDid) { + writeStdoutLine( + `Responder Agent DID: ${status.responderAgentDid}`, + ); + } + if (status.responderProfile) { + writeStdoutLine( + `Responder Agent Name: ${status.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${status.responderProfile.humanName}`, + ); + } + if (status.peerAlias) { + writeStdoutLine(`Peer alias saved: ${status.peerAlias}`); + } + } }, ), ); @@ -849,10 +1835,6 @@ export const createPairCommand = ( .description("Confirm pairing using one-time pairing ticket") .option("--ticket ", "One-time pairing ticket (clwpair1_...)") .option("--qr-file ", "Path to pairing QR PNG file") - .option( - "--proxy-url ", - "Responder proxy base URL (or set CLAWDENTITY_PROXY_URL)", - ) .action( withErrorHandling( "pair confirm", @@ -863,12 +1845,89 @@ export const createPairCommand = ( initiatorAgentDid: result.initiatorAgentDid, responderAgentDid: result.responderAgentDid, proxyUrl: result.proxyUrl, + peerAlias: result.peerAlias, }); writeStdoutLine("Pairing confirmed"); writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); + writeStdoutLine( + `Responder Agent Name: ${result.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${result.responderProfile.humanName}`, + ); writeStdoutLine(`Paired: 
${result.paired ? "true" : "false"}`); + if (result.peerAlias) { + writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); + } + }, + ), + ); + + pairCommand + .command("status ") + .description("Check pairing ticket status and sync local peer on confirm") + .option("--ticket ", "One-time pairing ticket (clwpair1_...)") + .option("--wait", "Poll until ticket is confirmed or timeout is reached") + .option( + "--wait-seconds ", + "Max seconds to poll for confirmation (default: 300)", + ) + .option( + "--poll-interval-seconds ", + "Polling interval in seconds while waiting (default: 3)", + ) + .action( + withErrorHandling( + "pair status", + async (agentName: string, options: PairStatusOptions) => { + const result = await getPairingStatus( + agentName, + options, + dependencies, + ); + + logger.info("cli.pair_status", { + initiatorAgentDid: result.initiatorAgentDid, + responderAgentDid: result.responderAgentDid, + status: result.status, + proxyUrl: result.proxyUrl, + peerAlias: result.peerAlias, + }); + + writeStdoutLine(`Status: ${result.status}`); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); + if (result.responderAgentDid) { + writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); + } + if (result.responderProfile) { + writeStdoutLine( + `Responder Agent Name: ${result.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${result.responderProfile.humanName}`, + ); + } + writeStdoutLine(`Expires At: ${result.expiresAt}`); + if (result.confirmedAt) { + writeStdoutLine(`Confirmed At: ${result.confirmedAt}`); + } + if (result.peerAlias) { + writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); + } }, ), ); diff --git a/apps/cli/src/commands/skill.test.ts b/apps/cli/src/commands/skill.test.ts new file mode 100644 index 
0000000..8faee8c --- /dev/null +++ b/apps/cli/src/commands/skill.test.ts @@ -0,0 +1,169 @@ +import { Command } from "commander"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +vi.mock("../install-skill-mode.js", () => ({ + formatSkillInstallError: vi.fn((error: unknown) => { + if (error instanceof Error) { + return `formatted: ${error.message}`; + } + + return `formatted: ${String(error)}`; + }), + installOpenclawSkillArtifacts: vi.fn(), +})); + +import { + formatSkillInstallError, + installOpenclawSkillArtifacts, + type SkillInstallResult, +} from "../install-skill-mode.js"; +import { createSkillCommand } from "./skill.js"; + +const mockedInstallOpenclawSkillArtifacts = vi.mocked( + installOpenclawSkillArtifacts, +); +const mockedFormatSkillInstallError = vi.mocked(formatSkillInstallError); + +type RunResult = { + exitCode: number | undefined; + stdout: string; + stderr: string; +}; + +const runSkillCommand = async (args: string[]): Promise => { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createSkillCommand(); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "skill", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stdout: 
stdout.join(""), + stderr: stderr.join(""), + }; +}; + +const toResult = (openclawDir: string): SkillInstallResult => ({ + homeDir: "/home/test", + openclawDir, + skillPackageRoot: "/pkg/openclaw-skill", + targetSkillDirectory: `${openclawDir}/skills/clawdentity-openclaw-relay`, + records: [ + { + action: "installed", + sourcePath: "/pkg/openclaw-skill/skill/SKILL.md", + targetPath: `${openclawDir}/skills/clawdentity-openclaw-relay/SKILL.md`, + }, + ], +}); + +describe("skill command", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + process.exitCode = undefined; + }); + + it("installs skill artifacts with default OpenClaw dir", async () => { + mockedInstallOpenclawSkillArtifacts.mockResolvedValueOnce( + toResult("/home/test/.openclaw"), + ); + + const result = await runSkillCommand(["install"]); + + expect(mockedInstallOpenclawSkillArtifacts).toHaveBeenCalledWith({ + openclawDir: undefined, + skillPackageRoot: undefined, + }); + expect(result.stdout).toContain("OpenClaw dir: /home/test/.openclaw"); + expect(result.stdout).toContain("installed=1 updated=0 unchanged=0"); + expect(result.exitCode).toBeUndefined(); + }); + + it("installs skill artifacts for each provided OpenClaw dir", async () => { + mockedInstallOpenclawSkillArtifacts + .mockResolvedValueOnce(toResult("/profiles/alpha")) + .mockResolvedValueOnce(toResult("/profiles/beta")); + + const result = await runSkillCommand([ + "install", + "--openclaw-dir", + "/profiles/alpha", + "--openclaw-dir", + "/profiles/beta", + ]); + + expect(mockedInstallOpenclawSkillArtifacts).toHaveBeenNthCalledWith(1, { + openclawDir: "/profiles/alpha", + skillPackageRoot: undefined, + }); + expect(mockedInstallOpenclawSkillArtifacts).toHaveBeenNthCalledWith(2, { + openclawDir: "/profiles/beta", + skillPackageRoot: undefined, + }); + expect(result.stdout).toContain("OpenClaw dir: /profiles/alpha"); + expect(result.stdout).toContain("OpenClaw dir: /profiles/beta"); + }); + + it("prints 
machine-readable output with --json", async () => { + mockedInstallOpenclawSkillArtifacts.mockResolvedValueOnce( + toResult("/home/test/.openclaw"), + ); + + const result = await runSkillCommand(["install", "--json"]); + + const parsed = JSON.parse(result.stdout) as { + installs: SkillInstallResult[]; + }; + + expect(parsed.installs).toHaveLength(1); + expect(parsed.installs[0]?.openclawDir).toBe("/home/test/.openclaw"); + }); + + it("formats install errors and exits non-zero", async () => { + mockedInstallOpenclawSkillArtifacts.mockRejectedValueOnce( + new Error("artifact missing"), + ); + + const result = await runSkillCommand(["install"]); + + expect(mockedFormatSkillInstallError).toHaveBeenCalled(); + expect(result.stderr).toContain("formatted: artifact missing"); + expect(result.exitCode).toBe(1); + }); +}); diff --git a/apps/cli/src/commands/skill.ts b/apps/cli/src/commands/skill.ts new file mode 100644 index 0000000..9c31105 --- /dev/null +++ b/apps/cli/src/commands/skill.ts @@ -0,0 +1,104 @@ +import { Command } from "commander"; +import { + formatSkillInstallError, + installOpenclawSkillArtifacts, + type SkillInstallResult, +} from "../install-skill-mode.js"; +import { writeStdoutLine } from "../io.js"; +import { withErrorHandling } from "./helpers.js"; + +type SkillInstallCommandOptions = { + openclawDir?: string[]; + skillPackageRoot?: string; + json?: boolean; +}; + +function collectStringOption(value: string, previous: string[]): string[] { + const trimmed = value.trim(); + if (trimmed.length === 0) { + return previous; + } + + return [...previous, trimmed]; +} + +function toInstallSummary(records: SkillInstallResult["records"]): string { + const installed = records.filter((record) => record.action === "installed"); + const updated = records.filter((record) => record.action === "updated"); + const unchanged = records.filter((record) => record.action === "unchanged"); + + return `installed=${installed.length} updated=${updated.length} 
unchanged=${unchanged.length}`; +} + +async function runSkillInstall( + options: SkillInstallCommandOptions, +): Promise { + const requestedDirs = (options.openclawDir ?? []).filter( + (dir) => dir.trim().length > 0, + ); + const dirs = requestedDirs.length > 0 ? requestedDirs : [undefined]; + const results: SkillInstallResult[] = []; + + for (const openclawDir of dirs) { + const result = await installOpenclawSkillArtifacts({ + openclawDir, + skillPackageRoot: options.skillPackageRoot, + }); + results.push(result); + } + + return results; +} + +export const createSkillCommand = (): Command => { + const skillCommand = new Command("skill").description( + "Install and manage Clawdentity skill artifacts", + ); + + skillCommand + .command("install") + .description("Install Clawdentity OpenClaw skill artifacts") + .option( + "--openclaw-dir ", + "OpenClaw state directory target (repeat for multiple profiles)", + collectStringOption, + [], + ) + .option( + "--skill-package-root ", + "Override skill package root (defaults to bundled assets)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "skill install", + async (options: SkillInstallCommandOptions) => { + let results: SkillInstallResult[]; + try { + results = await runSkillInstall(options); + } catch (error) { + throw new Error(formatSkillInstallError(error)); + } + + if (options.json) { + writeStdoutLine(JSON.stringify({ installs: results }, null, 2)); + return; + } + + for (const result of results) { + writeStdoutLine(`OpenClaw dir: ${result.openclawDir}`); + writeStdoutLine(`Skill source: ${result.skillPackageRoot}`); + writeStdoutLine(`Target skill dir: ${result.targetSkillDirectory}`); + for (const record of result.records) { + writeStdoutLine( + `${record.action}: ${record.targetPath} (source: ${record.sourcePath})`, + ); + } + writeStdoutLine(toInstallSummary(result.records)); + } + }, + ), + ); + + return skillCommand; +}; diff --git 
a/apps/cli/src/commands/verify.test.ts b/apps/cli/src/commands/verify.test.ts index 97a8f5c..f2fecce 100644 --- a/apps/cli/src/commands/verify.test.ts +++ b/apps/cli/src/commands/verify.test.ts @@ -114,7 +114,7 @@ const activeSigningKey = { } as const; const tokenClaims = { - iss: "https://api.clawdentity.com", + iss: "https://registry.clawdentity.com", sub: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", ownerDid: "did:claw:human:01HF7YAT00W6W7CM7N3W5FDXT5", name: "agent-01", @@ -133,7 +133,7 @@ const tokenClaims = { } as const; const crlClaims = { - iss: "https://api.clawdentity.com", + iss: "https://registry.clawdentity.com", jti: "01HF7YAT4TXP6AW5QNXA2Y9K43", iat: 1_700_000_000, exp: 1_900_000_000, @@ -153,7 +153,7 @@ describe("verify command", () => { mockedTokenReadFile.mockRejectedValue(buildErrnoError("ENOENT")); mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://api.clawdentity.com", + registryUrl: "https://registry.clawdentity.com", }); mockedReadCacheFile.mockResolvedValue(undefined); mockedWriteCacheFile.mockResolvedValue(undefined); @@ -278,7 +278,7 @@ describe("verify command", () => { mockedReadCacheFile.mockImplementation(async (fileName: string) => { if (fileName === "registry-keys.json") { return JSON.stringify({ - registryUrl: "https://api.clawdentity.com/", + registryUrl: "https://registry.clawdentity.com/", fetchedAtMs: Date.now() - 1_000, keys: [activeSigningKey], }); @@ -286,7 +286,7 @@ describe("verify command", () => { if (fileName === "crl-claims.json") { return JSON.stringify({ - registryUrl: "https://api.clawdentity.com/", + registryUrl: "https://registry.clawdentity.com/", fetchedAtMs: Date.now() - 1_000, claims: crlClaims, }); @@ -312,7 +312,7 @@ describe("verify command", () => { mockedReadCacheFile.mockImplementation(async (fileName: string) => { if (fileName === "registry-keys.json") { return JSON.stringify({ - registryUrl: "https://api.clawdentity.com/", + registryUrl: "https://registry.clawdentity.com/", fetchedAtMs: 
Date.now() - 60 * 60 * 1000 - 1, keys: [activeSigningKey], }); @@ -320,7 +320,7 @@ describe("verify command", () => { if (fileName === "crl-claims.json") { return JSON.stringify({ - registryUrl: "https://api.clawdentity.com/", + registryUrl: "https://registry.clawdentity.com/", fetchedAtMs: Date.now() - 15 * 60 * 1000 - 1, claims: crlClaims, }); diff --git a/apps/cli/src/commands/verify.ts b/apps/cli/src/commands/verify.ts index 348a037..c0b27da 100644 --- a/apps/cli/src/commands/verify.ts +++ b/apps/cli/src/commands/verify.ts @@ -82,12 +82,12 @@ const toRegistryUrl = (registryUrl: string, path: string): string => { const toExpectedIssuer = (registryUrl: string): string | undefined => { try { const hostname = new URL(registryUrl).hostname; - if (hostname === "api.clawdentity.com") { - return "https://api.clawdentity.com"; + if (hostname === "registry.clawdentity.com") { + return "https://registry.clawdentity.com"; } - if (hostname === "dev.api.clawdentity.com") { - return "https://dev.api.clawdentity.com"; + if (hostname === "dev.registry.clawdentity.com") { + return "https://dev.registry.clawdentity.com"; } return undefined; diff --git a/apps/cli/src/config/AGENTS.md b/apps/cli/src/config/AGENTS.md new file mode 100644 index 0000000..8f504be --- /dev/null +++ b/apps/cli/src/config/AGENTS.md @@ -0,0 +1,15 @@ +# AGENTS.md (apps/cli/src/config) + +## Purpose +- Keep CLI config resolution deterministic across local/dev/prod and hermetic under tests. + +## Config Rules +- `manager.ts` must keep precedence stable: file config defaults first, then explicit env overrides. +- Keep human profile config in `manager.ts` (`humanName`) with env override support (`CLAWDENTITY_HUMAN_NAME`) and deterministic precedence. +- `registry-metadata.ts` should be the only module that fetches registry metadata for config bootstrap flows. +- Avoid hidden host coupling in config tests; do not depend on shell-exported `CLAWDENTITY_*` values. 
+ +## Testing Rules +- Reset `CLAWDENTITY_*` env overrides in `beforeEach` and set only the variables needed by each test case. +- Assert both positive resolution (`registryUrl`/`proxyUrl`) and precedence behavior (`CLAWDENTITY_REGISTRY_URL` over `CLAWDENTITY_REGISTRY`). +- Include `humanName` precedence coverage (`CLAWDENTITY_HUMAN_NAME` over file config). diff --git a/apps/cli/src/config/manager.test.ts b/apps/cli/src/config/manager.test.ts index e977451..9287510 100644 --- a/apps/cli/src/config/manager.test.ts +++ b/apps/cli/src/config/manager.test.ts @@ -13,6 +13,7 @@ vi.mock("node:fs/promises", () => ({ writeFile: vi.fn(), })); +import { resetClawdentityEnv } from "../test-env.js"; import { DEFAULT_REGISTRY_URL, getCacheDir, @@ -46,7 +47,7 @@ describe("config manager", () => { beforeEach(() => { vi.clearAllMocks(); mockedHomedir.mockReturnValue("/mock-home"); - process.env = { ...previousEnv }; + process.env = resetClawdentityEnv(previousEnv); }); afterEach(() => { @@ -62,11 +63,14 @@ describe("config manager", () => { }); it("merges file contents with defaults", async () => { - mockedReadFile.mockResolvedValueOnce('{"apiKey":"secret"}'); + mockedReadFile.mockResolvedValueOnce( + '{"apiKey":"secret","humanName":"Ravi"}', + ); await expect(readConfig()).resolves.toEqual({ registryUrl: DEFAULT_REGISTRY_URL, apiKey: "secret", + humanName: "Ravi", }); }); @@ -107,6 +111,16 @@ describe("config manager", () => { }); }); + it("applies proxy env override over file config", async () => { + mockedReadFile.mockResolvedValueOnce('{"proxyUrl":"http://file:8787"}'); + process.env.CLAWDENTITY_PROXY_URL = "http://env:8787"; + + await expect(resolveConfig()).resolves.toEqual({ + registryUrl: DEFAULT_REGISTRY_URL, + proxyUrl: "http://env:8787", + }); + }); + it("applies CLAWDENTITY_REGISTRY when CLAWDENTITY_REGISTRY_URL is unset", async () => { mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); process.env.CLAWDENTITY_REGISTRY = 
"http://legacy-env:8787"; @@ -136,6 +150,16 @@ describe("config manager", () => { }); }); + it("prefers env humanName over config file", async () => { + mockedReadFile.mockResolvedValueOnce('{"humanName":"from-file"}'); + process.env.CLAWDENTITY_HUMAN_NAME = "from-env"; + + await expect(resolveConfig()).resolves.toEqual({ + registryUrl: DEFAULT_REGISTRY_URL, + humanName: "from-env", + }); + }); + it("returns a single resolved value", async () => { mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); process.env.CLAWDENTITY_REGISTRY_URL = "http://env:8787"; @@ -148,11 +172,11 @@ describe("config manager", () => { it("reads, merges, and writes when setting values", async () => { mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); - await setConfigValue("apiKey", "new-token"); + await setConfigValue("proxyUrl", "http://proxy:8787"); expect(mockedWriteFile).toHaveBeenCalledWith( "/mock-home/.clawdentity/config.json", - '{\n "registryUrl": "http://file:8787",\n "apiKey": "new-token"\n}\n', + '{\n "registryUrl": "http://file:8787",\n "proxyUrl": "http://proxy:8787"\n}\n', "utf-8", ); }); diff --git a/apps/cli/src/config/manager.ts b/apps/cli/src/config/manager.ts index c95e39f..9e45f6f 100644 --- a/apps/cli/src/config/manager.ts +++ b/apps/cli/src/config/manager.ts @@ -2,11 +2,13 @@ import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { homedir } from "node:os"; import { dirname, join } from "node:path"; -export const DEFAULT_REGISTRY_URL = "https://api.clawdentity.com"; +export const DEFAULT_REGISTRY_URL = "https://registry.clawdentity.com"; export interface CliConfig { registryUrl: string; + proxyUrl?: string; apiKey?: string; + humanName?: string; } export type CliConfigKey = keyof CliConfig; @@ -18,7 +20,9 @@ const FILE_MODE = 0o600; const ENV_KEY_MAP: Record = { registryUrl: "CLAWDENTITY_REGISTRY_URL", + proxyUrl: "CLAWDENTITY_PROXY_URL", apiKey: "CLAWDENTITY_API_KEY", + humanName: 
"CLAWDENTITY_HUMAN_NAME", }; const LEGACY_ENV_KEY_MAP: Partial> = { @@ -46,10 +50,18 @@ const normalizeConfig = (raw: unknown): CliConfig => { config.registryUrl = raw.registryUrl; } + if (typeof raw.proxyUrl === "string" && raw.proxyUrl.length > 0) { + config.proxyUrl = raw.proxyUrl; + } + if (typeof raw.apiKey === "string" && raw.apiKey.length > 0) { config.apiKey = raw.apiKey; } + if (typeof raw.humanName === "string" && raw.humanName.length > 0) { + config.humanName = raw.humanName; + } + return config; }; diff --git a/apps/cli/src/config/registry-metadata.test.ts b/apps/cli/src/config/registry-metadata.test.ts new file mode 100644 index 0000000..bab4d60 --- /dev/null +++ b/apps/cli/src/config/registry-metadata.test.ts @@ -0,0 +1,120 @@ +import { describe, expect, it, vi } from "vitest"; +import { + fetchRegistryMetadata, + normalizeRegistryUrl, + toRegistryRequestUrl, +} from "./registry-metadata.js"; + +const createJsonResponse = (status: number, body: unknown): Response => { + return { + ok: status >= 200 && status < 300, + status, + json: vi.fn(async () => body), + } as unknown as Response; +}; + +describe("registry metadata helpers", () => { + it("normalizes registry URLs", () => { + expect(normalizeRegistryUrl("https://registry.clawdentity.com")).toBe( + "https://registry.clawdentity.com/", + ); + }); + + it("builds request URL from registry base", () => { + expect( + toRegistryRequestUrl("https://registry.clawdentity.com", "/v1/metadata"), + ).toBe("https://registry.clawdentity.com/v1/metadata"); + }); + + it("fetches metadata with proxy URL", async () => { + const fetchImpl = vi.fn(async () => + createJsonResponse(200, { + status: "ok", + environment: "production", + version: "sha-123", + registryUrl: "https://registry.clawdentity.com", + proxyUrl: "https://proxy.clawdentity.com", + }), + ); + + const result = await fetchRegistryMetadata( + "https://registry.clawdentity.com", + { + fetchImpl: fetchImpl as unknown as typeof fetch, + }, + ); + + 
expect(result).toEqual({ + environment: "production", + version: "sha-123", + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://proxy.clawdentity.com/", + }); + expect(fetchImpl).toHaveBeenCalledWith( + "https://registry.clawdentity.com/v1/metadata", + expect.objectContaining({ method: "GET" }), + ); + }); + + it("falls back to input registry URL when metadata omits registryUrl", async () => { + const fetchImpl = vi.fn(async () => + createJsonResponse(200, { + status: "ok", + proxyUrl: "https://dev.proxy.clawdentity.com", + }), + ); + + const result = await fetchRegistryMetadata( + "https://dev.registry.clawdentity.com", + { + fetchImpl: fetchImpl as unknown as typeof fetch, + }, + ); + + expect(result.registryUrl).toBe("https://dev.registry.clawdentity.com/"); + expect(result.proxyUrl).toBe("https://dev.proxy.clawdentity.com/"); + }); + + it("fails when metadata endpoint is unavailable", async () => { + const fetchImpl = vi.fn(async () => + createJsonResponse(503, { + error: { code: "DEP_UNAVAILABLE", message: "down" }, + }), + ); + + await expect( + fetchRegistryMetadata("https://registry.clawdentity.com", { + fetchImpl: fetchImpl as unknown as typeof fetch, + }), + ).rejects.toMatchObject({ + code: "CLI_REGISTRY_METADATA_FETCH_FAILED", + message: "Registry metadata request failed (503). 
Try again later.", + }); + }); + + it("fails when metadata payload is invalid", async () => { + const fetchImpl = vi.fn(async () => + createJsonResponse(200, { + status: "ok", + }), + ); + + await expect( + fetchRegistryMetadata("https://registry.clawdentity.com", { + fetchImpl: fetchImpl as unknown as typeof fetch, + }), + ).rejects.toMatchObject({ + code: "CLI_REGISTRY_METADATA_INVALID_RESPONSE", + }); + }); + + it("fails when registry URL is invalid", async () => { + await expect( + fetchRegistryMetadata("not-a-url", { + fetchImpl: vi.fn() as unknown as typeof fetch, + }), + ).rejects.toMatchObject({ + code: "CLI_REGISTRY_URL_INVALID", + }); + }); +}); diff --git a/apps/cli/src/config/registry-metadata.ts b/apps/cli/src/config/registry-metadata.ts new file mode 100644 index 0000000..fe939d1 --- /dev/null +++ b/apps/cli/src/config/registry-metadata.ts @@ -0,0 +1,193 @@ +import { REGISTRY_METADATA_PATH } from "@clawdentity/protocol"; +import { AppError } from "@clawdentity/sdk"; + +type RegistryErrorEnvelope = { + error?: { + code?: string; + message?: string; + }; +}; + +export type RegistryMetadata = { + environment?: string; + proxyUrl: string; + registryUrl: string; + version?: string; +}; + +export type RegistryMetadataDependencies = { + fetchImpl?: typeof fetch; +}; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function parseNonEmptyString(value: unknown): string { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +} + +function createCliError(code: string, message: string): AppError { + return new AppError({ + code, + message, + status: 400, + }); +} + +function parseUrl(candidate: string, label: string): URL { + let parsed: URL; + try { + parsed = new URL(candidate); + } catch { + throw createCliError("CLI_REGISTRY_URL_INVALID", `${label} is invalid`); + } + + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + throw 
createCliError("CLI_REGISTRY_URL_INVALID", `${label} is invalid`); + } + + return parsed; +} + +export function normalizeRegistryUrl(registryUrl: string): string { + return parseUrl(registryUrl, "Registry URL").toString(); +} + +export function toRegistryRequestUrl( + registryUrl: string, + path: string, +): string { + const normalizedRegistryUrl = normalizeRegistryUrl(registryUrl); + const base = normalizedRegistryUrl.endsWith("/") + ? normalizedRegistryUrl + : `${normalizedRegistryUrl}/`; + + return new URL(path.slice(1), base).toString(); +} + +function extractRegistryErrorMessage(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const trimmed = envelope.error.message.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function parseMetadataPayload( + payload: unknown, + fallbackRegistryUrl: string, +): RegistryMetadata { + if (!isRecord(payload)) { + throw createCliError( + "CLI_REGISTRY_METADATA_INVALID_RESPONSE", + "Registry metadata response is invalid", + ); + } + + const proxyUrlRaw = parseNonEmptyString(payload.proxyUrl); + if (proxyUrlRaw.length === 0) { + throw createCliError( + "CLI_REGISTRY_METADATA_INVALID_RESPONSE", + "Registry metadata response is invalid", + ); + } + + const proxyUrl = parseUrl(proxyUrlRaw, "Proxy URL").toString(); + + const registryUrlRaw = parseNonEmptyString(payload.registryUrl); + const registryUrl = + registryUrlRaw.length > 0 + ? 
parseUrl(registryUrlRaw, "Registry URL").toString() + : normalizeRegistryUrl(fallbackRegistryUrl); + + const environment = parseNonEmptyString(payload.environment); + const version = parseNonEmptyString(payload.version); + + return { + proxyUrl, + registryUrl, + environment: environment.length > 0 ? environment : undefined, + version: version.length > 0 ? version : undefined, + }; +} + +function mapMetadataError(status: number, payload: unknown): string { + const registryMessage = extractRegistryErrorMessage(payload); + + if (status === 404) { + return "Registry metadata endpoint is unavailable (404)."; + } + + if (status >= 500) { + return `Registry metadata request failed (${status}). Try again later.`; + } + + if (registryMessage) { + return `Registry metadata request failed (${status}): ${registryMessage}`; + } + + return `Registry metadata request failed (${status}).`; +} + +export async function fetchRegistryMetadata( + registryUrl: string, + dependencies: RegistryMetadataDependencies = {}, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? globalThis.fetch; + if (typeof fetchImpl !== "function") { + throw createCliError( + "CLI_REGISTRY_METADATA_FETCH_UNAVAILABLE", + "Runtime fetch is unavailable for registry metadata lookup", + ); + } + + const normalizedRegistryUrl = normalizeRegistryUrl(registryUrl); + const requestUrl = toRegistryRequestUrl( + normalizedRegistryUrl, + REGISTRY_METADATA_PATH, + ); + + let response: Response; + try { + response = await fetchImpl(requestUrl, { + method: "GET", + headers: { + accept: "application/json", + }, + }); + } catch { + throw createCliError( + "CLI_REGISTRY_METADATA_REQUEST_FAILED", + "Unable to reach registry metadata endpoint. 
Check registryUrl and network access.", + ); + } + + const payload = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_REGISTRY_METADATA_FETCH_FAILED", + mapMetadataError(response.status, payload), + ); + } + + return parseMetadataPayload(payload, normalizedRegistryUrl); +} diff --git a/apps/cli/src/index.test.ts b/apps/cli/src/index.test.ts index 02eb11a..2c349ff 100644 --- a/apps/cli/src/index.test.ts +++ b/apps/cli/src/index.test.ts @@ -86,6 +86,14 @@ describe("cli", () => { expect(hasInviteCommand).toBe(true); }); + it("registers the skill command", () => { + const hasSkillCommand = createProgram() + .commands.map((command) => command.name()) + .includes("skill"); + + expect(hasSkillCommand).toBe(true); + }); + it("prints version output", async () => { const output: string[] = []; const program = createProgram(); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 0d28ae1..e6c1582 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -8,6 +8,7 @@ import { createConnectorCommand } from "./commands/connector.js"; import { createInviteCommand } from "./commands/invite.js"; import { createOpenclawCommand } from "./commands/openclaw.js"; import { createPairCommand } from "./commands/pair.js"; +import { createSkillCommand } from "./commands/skill.js"; import { createVerifyCommand } from "./commands/verify.js"; const require = createRequire(import.meta.url); @@ -39,5 +40,6 @@ export const createProgram = (): Command => { .addCommand(createInviteCommand()) .addCommand(createOpenclawCommand()) .addCommand(createPairCommand()) + .addCommand(createSkillCommand()) .addCommand(createVerifyCommand()); }; diff --git a/apps/cli/src/install-skill-mode.test.ts b/apps/cli/src/install-skill-mode.test.ts index c8e1d2b..2dd2086 100644 --- a/apps/cli/src/install-skill-mode.test.ts +++ b/apps/cli/src/install-skill-mode.test.ts @@ -9,11 +9,7 @@ import { tmpdir } from "node:os"; import { join } from "node:path"; import { 
describe, expect, it } from "vitest"; import type { SkillInstallError } from "./install-skill-mode.js"; -import { - installOpenclawSkillArtifacts, - isSkillInstallRequested, - runNpmSkillInstall, -} from "./install-skill-mode.js"; +import { installOpenclawSkillArtifacts } from "./install-skill-mode.js"; type SkillSandbox = { cleanup: () => void; @@ -60,32 +56,6 @@ function createSkillSandbox(): SkillSandbox { }; } -describe("install skill mode detection", () => { - it("detects --skill from npm_config_skill env", () => { - expect(isSkillInstallRequested({ npm_config_skill: "true" })).toBe(true); - expect(isSkillInstallRequested({ npm_config_skill: "1" })).toBe(true); - expect(isSkillInstallRequested({ npm_config_skill: "false" })).toBe(false); - }); - - it("detects --skill from npm_config_argv", () => { - expect( - isSkillInstallRequested({ - npm_config_argv: JSON.stringify({ - original: ["install", "clawdentity", "--skill"], - }), - }), - ).toBe(true); - - expect( - isSkillInstallRequested({ - npm_config_argv: JSON.stringify({ - original: ["install", "clawdentity"], - }), - }), - ).toBe(false); - }); -}); - describe("installOpenclawSkillArtifacts", () => { it("installs skill artifacts and remains idempotent on rerun", async () => { const sandbox = createSkillSandbox(); @@ -103,14 +73,12 @@ describe("installOpenclawSkillArtifacts", () => { const skillPath = join( sandbox.openclawDir, - "workspace", "skills", "clawdentity-openclaw-relay", "SKILL.md", ); - const workspaceRelayPath = join( + const skillRelayPath = join( sandbox.openclawDir, - "workspace", "skills", "clawdentity-openclaw-relay", "relay-to-peer.mjs", @@ -123,7 +91,6 @@ describe("installOpenclawSkillArtifacts", () => { ); const referencePath = join( sandbox.openclawDir, - "workspace", "skills", "clawdentity-openclaw-relay", "references", @@ -131,9 +98,12 @@ describe("installOpenclawSkillArtifacts", () => { ); expect(readFileSync(skillPath, "utf8")).toContain("Clawdentity"); - 
expect(readFileSync(workspaceRelayPath, "utf8")).toContain("relayToPeer"); + expect(readFileSync(skillRelayPath, "utf8")).toContain("relayToPeer"); expect(readFileSync(hooksRelayPath, "utf8")).toContain("relay-to-peer"); expect(readFileSync(referencePath, "utf8")).toContain("Protocol"); + expect(firstRun.targetSkillDirectory).toBe( + join(sandbox.openclawDir, "skills", "clawdentity-openclaw-relay"), + ); const secondRun = await installOpenclawSkillArtifacts({ homeDir: sandbox.homeDir, @@ -200,59 +170,3 @@ describe("installOpenclawSkillArtifacts", () => { } }); }); - -describe("runNpmSkillInstall", () => { - it("skips install when --skill is not set", async () => { - const result = await runNpmSkillInstall({ - env: {}, - writeStdout: () => undefined, - writeStderr: () => undefined, - }); - - expect(result.skipped).toBe(true); - }); - - it("installs skill artifacts when --skill is set", async () => { - const sandbox = createSkillSandbox(); - const stdout: string[] = []; - const stderr: string[] = []; - - try { - const result = await runNpmSkillInstall({ - env: { - npm_config_skill: "true", - CLAWDENTITY_SKILL_PACKAGE_ROOT: sandbox.skillPackageRoot, - }, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - writeStdout: (line) => stdout.push(line), - writeStderr: (line) => stderr.push(line), - }); - - expect(result.skipped).toBe(false); - expect(stderr).toHaveLength(0); - expect(stdout.some((line) => line.includes("skill install mode"))).toBe( - true, - ); - - const skillPath = join( - sandbox.openclawDir, - "workspace", - "skills", - "clawdentity-openclaw-relay", - "SKILL.md", - ); - const hooksRelayPath = join( - sandbox.openclawDir, - "hooks", - "transforms", - "relay-to-peer.mjs", - ); - - expect(readFileSync(skillPath, "utf8")).toContain("OpenClaw Relay"); - expect(readFileSync(hooksRelayPath, "utf8")).toContain("relayToPeer"); - } finally { - sandbox.cleanup(); - } - }); -}); diff --git a/apps/cli/src/install-skill-mode.ts 
b/apps/cli/src/install-skill-mode.ts index ea0bf42..51fcc83 100644 --- a/apps/cli/src/install-skill-mode.ts +++ b/apps/cli/src/install-skill-mode.ts @@ -4,7 +4,6 @@ import { createRequire } from "node:module"; import { homedir } from "node:os"; import { dirname, join, relative } from "node:path"; import { fileURLToPath } from "node:url"; -import { writeStderrLine, writeStdoutLine } from "./io.js"; const OPENCLAW_DIR_NAME = ".openclaw"; const SKILL_PACKAGE_NAME = "@clawdentity/openclaw-skill"; @@ -27,14 +26,6 @@ export type SkillInstallResult = { records: SkillInstallRecord[]; }; -export type RunNpmSkillInstallResult = - | { - skipped: true; - } - | ({ - skipped: false; - } & SkillInstallResult); - type SkillInstallOptions = { homeDir?: string; openclawDir?: string; @@ -42,12 +33,6 @@ type SkillInstallOptions = { env?: NodeJS.ProcessEnv; }; -type RunNpmSkillInstallOptions = SkillInstallOptions & { - env?: NodeJS.ProcessEnv; - writeStdout?: (line: string) => void; - writeStderr?: (line: string) => void; -}; - type SkillInstallArtifact = { sourcePath: string; targetPath: string; @@ -86,61 +71,6 @@ function getErrorCode(error: unknown): string | undefined { return typeof error.code === "string" ? 
error.code : undefined; } -function parseBooleanFlag(value: string | undefined): boolean | undefined { - if (value === undefined) { - return undefined; - } - - const normalized = value.trim().toLowerCase(); - if ( - normalized === "" || - normalized === "1" || - normalized === "true" || - normalized === "yes" - ) { - return true; - } - - if (normalized === "0" || normalized === "false" || normalized === "no") { - return false; - } - - return undefined; -} - -function hasSkillFlagInNpmArgv(rawArgv: string | undefined): boolean { - if (!rawArgv || rawArgv.trim().length === 0) { - return false; - } - - let parsed: unknown; - try { - parsed = JSON.parse(rawArgv); - } catch { - return false; - } - - if (!isRecord(parsed)) { - return false; - } - - const original = parsed.original; - if (!Array.isArray(original)) { - return false; - } - - return original.some((entry) => entry === "--skill"); -} - -export function isSkillInstallRequested(env: NodeJS.ProcessEnv = process.env) { - const envFlag = parseBooleanFlag(env.npm_config_skill); - if (envFlag !== undefined) { - return envFlag; - } - - return hasSkillFlagInNpmArgv(env.npm_config_argv); -} - function resolveHomeDir(inputHomeDir?: string): string { if (typeof inputHomeDir === "string" && inputHomeDir.trim().length > 0) { return inputHomeDir.trim(); @@ -209,7 +139,7 @@ function resolveSkillPackageRoot(input: { throw new SkillInstallError({ code: "CLI_SKILL_PACKAGE_NOT_FOUND", message: - "Skill artifacts are unavailable. Set CLAWDENTITY_SKILL_PACKAGE_ROOT or provide bundled skill assets before using --skill mode.", + "Skill artifacts are unavailable. 
Set CLAWDENTITY_SKILL_PACKAGE_ROOT or provide bundled skill assets before running skill install.", details: { packageName: SKILL_PACKAGE_NAME, bundledSkillRoot, @@ -310,12 +240,7 @@ async function resolveArtifacts(input: { }); } - const targetSkillRoot = join( - input.openclawDir, - "workspace", - "skills", - SKILL_DIR_NAME, - ); + const targetSkillRoot = join(input.openclawDir, "skills", SKILL_DIR_NAME); const artifacts: SkillInstallArtifact[] = [ { @@ -408,24 +333,11 @@ export async function installOpenclawSkillArtifacts( homeDir, openclawDir, skillPackageRoot, - targetSkillDirectory: join( - openclawDir, - "workspace", - "skills", - SKILL_DIR_NAME, - ), + targetSkillDirectory: join(openclawDir, "skills", SKILL_DIR_NAME), records, }; } -function toSummaryCounts(records: SkillInstallRecord[]): string { - const installed = records.filter((record) => record.action === "installed"); - const updated = records.filter((record) => record.action === "updated"); - const unchanged = records.filter((record) => record.action === "unchanged"); - - return `installed=${installed.length} updated=${updated.length} unchanged=${unchanged.length}`; -} - export function formatSkillInstallError(error: unknown): string { if (error instanceof SkillInstallError) { const details = Object.entries(error.details) @@ -445,44 +357,3 @@ export function formatSkillInstallError(error: unknown): string { return String(error); } - -export async function runNpmSkillInstall( - options: RunNpmSkillInstallOptions = {}, -): Promise { - const env = options.env ?? process.env; - const writeStdout = options.writeStdout ?? writeStdoutLine; - const writeStderr = options.writeStderr ?? 
writeStderrLine; - - if (!isSkillInstallRequested(env)) { - return { skipped: true }; - } - - writeStdout("[clawdentity] skill install mode detected (--skill)"); - - try { - const result = await installOpenclawSkillArtifacts({ - env, - homeDir: options.homeDir, - openclawDir: options.openclawDir, - skillPackageRoot: options.skillPackageRoot, - }); - - for (const record of result.records) { - writeStdout( - `[clawdentity] ${record.action}: ${record.targetPath} (source: ${record.sourcePath})`, - ); - } - - writeStdout(`[clawdentity] ${toSummaryCounts(result.records)}`); - - return { - skipped: false, - ...result, - }; - } catch (error) { - writeStderr( - `[clawdentity] skill install failed: ${formatSkillInstallError(error)}`, - ); - throw error; - } -} diff --git a/apps/cli/src/postinstall.ts b/apps/cli/src/postinstall.ts index 57c44c5..5d20c00 100644 --- a/apps/cli/src/postinstall.ts +++ b/apps/cli/src/postinstall.ts @@ -1,5 +1,4 @@ -import { runNpmSkillInstall } from "./install-skill-mode.js"; - -runNpmSkillInstall().catch(() => { - process.exitCode = 1; -}); +// Intentionally left as a no-op. 
+// Skill artifacts are installed explicitly through: +// clawdentity skill install +export {}; diff --git a/apps/cli/src/test-env.ts b/apps/cli/src/test-env.ts new file mode 100644 index 0000000..4420c06 --- /dev/null +++ b/apps/cli/src/test-env.ts @@ -0,0 +1,17 @@ +const CLAWDENTITY_ENV_OVERRIDE_KEYS = [ + "CLAWDENTITY_REGISTRY_URL", + "CLAWDENTITY_REGISTRY", + "CLAWDENTITY_PROXY_URL", + "CLAWDENTITY_API_KEY", +] as const; + +export function resetClawdentityEnv( + source: NodeJS.ProcessEnv, +): NodeJS.ProcessEnv { + const sanitized = { ...source }; + for (const key of CLAWDENTITY_ENV_OVERRIDE_KEYS) { + delete sanitized[key]; + } + + return sanitized; +} diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 3246d74..665e68a 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -3,11 +3,17 @@ ## Purpose - Define conventions for the OpenClaw skill package that relays selected payloads to remote Clawdentity peers. - Keep peer routing config and local connector handoff deterministic and testable. +- Keep peer profile metadata explicit and non-ambiguous (`agentName`, `humanName`). ## Filesystem Contracts - Peer routing map lives at `~/.clawdentity/peers.json` by default. -- Local relay handoff targets connector runtime endpoint `http://127.0.0.1:19400/v1/outbound` by default (override via connector env/options when needed). +- `openclaw setup` must project peer + relay runtime snapshots into OpenClaw-local transform directory so containerized gateways can read relay state without mounting `~/.clawdentity`: + - `/hooks/transforms/clawdentity-peers.json` + - `/hooks/transforms/clawdentity-relay.json` +- Local relay handoff uses connector endpoint candidates from `clawdentity-relay.json` and must work across macOS/Linux Docker hosts. - Relay setup should preserve local OpenClaw upstream URL in `~/.clawdentity/openclaw-relay.json` for proxy runtime fallback. 
+- Relay setup must also persist `openclawHookToken` in `~/.clawdentity/openclaw-relay.json` so connector runtime can authenticate OpenClaw `/hooks/*` delivery without manual token flags. +- Relay setup must persist per-agent connector bind assignment in `~/.clawdentity/openclaw-connectors.json`. - Never commit local runtime files (`peers.json`, `secret.key`, `ait.jwt`) to the repository. ## Transform Rules @@ -22,14 +28,27 @@ - return `null` after successful relay so local handling is skipped - If `payload.peer` is absent, return payload unchanged. - Keep setup flow CLI-driven via `clawdentity openclaw setup`; do not add `configure-hooks.sh`. +- Keep setup flow fully automated via CLI: `openclaw setup` provisions/retains `hooks.token`, starts connector runtime, auto-recovers pending gateway device approvals when possible, verifies websocket readiness, and fails fast only when unrecoverable drift remains. +- Keep setup/doctor expectations aligned with connector durable inbox semantics: connector can acknowledge persisted inbound relay messages before local OpenClaw hook delivery, with replay status exposed via `/v1/status` and doctor checks. +- Keep `connector start` documented as advanced/manual recovery only; never require it in the default onboarding flow. +- Keep setup/doctor path resolution compatible with OpenClaw runtime env overrides: + - `OPENCLAW_CONFIG_PATH` and legacy `CLAWDBOT_CONFIG_PATH` + - `OPENCLAW_STATE_DIR` and legacy `CLAWDBOT_STATE_DIR` + - `OPENCLAW_HOME` when explicit config/state overrides are unset ## Maintainability - Keep filesystem path logic centralized; avoid hardcoding `~/.clawdentity` paths across multiple files. - Keep relay behavior pure except for explicit dependencies (`fetch`, filesystem) so tests stay deterministic. - Prefer schema-first runtime validation over ad-hoc guards. - Keep skill docs aligned with connector architecture: do not document direct transform-to-peer-proxy signing. 
-- Keep `skill/SKILL.md` command utilization section explicit and executable with current CLI commands used by this skill (`config`, `invite redeem`, `agent`, `openclaw setup/doctor/relay test`, `pair`, `connector start`, optional `connector service install`). +- Keep `skill/SKILL.md` command utilization section explicit and executable with current CLI commands used by this skill (`config`, `invite redeem`, `agent`, `openclaw setup/doctor/relay test`, `pair`, advanced `connector start`/`connector service install`). - Keep pairing flow documented as CLI-based (`clawdentity pair start`, `clawdentity pair confirm`), not raw proxy HTTP calls. +- Keep pairing metadata documented and preserved end-to-end: pair APIs exchange `initiatorProfile`/`responderProfile` and peer map stores `agentName` + `humanName`. +- Keep pairing flow deterministic in docs: + - Initiator default is `clawdentity pair start --qr --wait`. + - Responder confirms with `pair confirm`. + - If initiator ran without `--wait`, recover with `pair status --ticket --wait`. +- Keep relay-result docs aligned with proxy behavior: `202 state=queued` is an expected async delivery state (not a pairing failure), with retry handled by proxy queue policy. - When `src/transforms/relay-to-peer.ts` relay envelope, endpoint defaults, or failure mapping changes, update: - `skill/SKILL.md` - `skill/references/clawdentity-protocol.md` @@ -41,14 +60,25 @@ - `pnpm -F @clawdentity/openclaw-skill build` ## Skill Runtime Behavior -- Keep onboarding prompts input-focused (invite/API key/URLs) and let the skill decide command execution. +- Keep onboarding prompts input-focused (registry invite/API key/agent name) and let the skill decide command execution. +- Enforce invite-first onboarding: ask for `clw_inv_...` by default and redeem invite before any API-key fallback. +- Enforce invite-first onboarding with human identity capture: use `clawdentity invite redeem --display-name `. 
+- Allow raw API-key path only when user explicitly says invite is unavailable. +- Never state that API key must be provided before onboarding; invite redeem is the default API-key issuance path. - For first-time onboarding, prefer registry invite redeem (`clw_inv_...`) before asking for API key. +- Require a CLI behavior guard before setup execution: + - `clawdentity openclaw setup --help` must not show peer-routing flags and must not show `--invite-code`. + - If `--invite-code` appears, upgrade CLI (`npm install -g clawdentity@latest`) before proceeding. - Disambiguate invite types in prompts: - `clw_inv_...` = registry onboarding invite (yields PAT via `invite redeem`) - - `clawd1_...` = peer relay invite (used by `openclaw setup`) - `clwpair1_...` = proxy trust pairing ticket (used by `pair start` / `pair confirm`) +- Avoid endpoint drift suggestions in onboarding prompts: do not suggest registry/proxy host changes unless user explicitly asks. +- Keep endpoint defaults production-first (`registry.clawdentity.com`, `proxy.clawdentity.com`); local Docker/development must be handled via env overrides (`CLAWDENTITY_REGISTRY_URL`, `CLAWDENTITY_PROXY_URL`, `CLAWDENTITY_PROXY_WS_URL`). +- If env overrides are present, do not treat config-file URL mismatch as a blocker. +- Relay setup must be self-setup only via `openclaw setup `; peer mappings are created automatically by QR pairing (`pair confirm`). +- Setup success is self-readiness only: do not require peer configuration before reporting onboarding complete. - The agent should run required npm/CLI/filesystem operations via tools and only ask the human for missing inputs. -- Report deterministic completion outputs: local DID, peer alias, and generated filesystem paths. +- Report deterministic completion outputs: local DID, pairing ticket/QR path, saved peer alias, and generated filesystem paths. 
## Dual Container Test State - For local user-flow validation against two OpenClaw gateways, use: @@ -75,10 +105,7 @@ - Use this snapshot as the starting point for every skill install regression run. - Pairing troubleshooting: - If UI shows `Disconnected (1008): pairing required`, OpenClaw device approval is pending. + - `openclaw doctor` surfaces this as `state.gatewayDevicePairing`. + - First-line recovery is always `clawdentity openclaw setup ` (auto-approval path). - This is not Clawdentity proxy trust pairing (`/pair/start` + `/pair/confirm`); it is only OpenClaw UI/device approval. - - Run: - - `docker exec clawdbot-agent-alpha-1 sh -lc 'node openclaw.mjs devices list --json'` - - `docker exec clawdbot-agent-beta-1 sh -lc 'node openclaw.mjs devices list --json'` - - Approve any pending request IDs: - - `docker exec clawdbot-agent-alpha-1 sh -lc 'node openclaw.mjs devices approve '` - - `docker exec clawdbot-agent-beta-1 sh -lc 'node openclaw.mjs devices approve '` + - Manual device approval commands are operator fallback only when setup reports the local `openclaw` command is unavailable. diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index 2bf3783..bce7f3e 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -1,194 +1,343 @@ --- name: clawdentity_openclaw_relay -description: This skill should be used when the user asks to "install clawdentity relay skill", "set up agent-to-agent relay from invite code", "connect OpenClaw agents with invite code", or needs OpenClaw peer communication through the local Clawdentity connector runtime. -version: 0.1.0 +description: This skill should be used when the user asks to "set up Clawdentity relay", "pair two agents", "verify an agent token", "rotate API key", "refresh agent auth", "revoke an agent", "troubleshoot relay", "uninstall connector service", or needs OpenClaw relay onboarding, lifecycle management, or pairing workflows. 
+version: 0.3.0 --- # Clawdentity OpenClaw Relay Skill -This skill configures an OpenClaw agent to relay selected webhook payloads to a peer through the local Clawdentity connector runtime using a single invite code. +This skill prepares a local OpenClaw agent in a strict sequence: +1. finish registry onboarding by redeeming an invite (`clw_inv_...`) and store API key +2. create local agent identity +3. run `clawdentity openclaw setup ` (config + runtime + readiness) +4. become ready to start or accept QR pairing -## Trigger Conditions +After setup, this skill also covers lifecycle operations: token refresh, API key rotation, agent revocation, service teardown, and token verification. -Use this skill when any of the following are requested: -- Install relay support for OpenClaw peer communication. -- Complete first-time setup from an invite code. -- Repair broken relay setup after config drift. -- Verify invite-code onboarding and peer mapping. +Relay invite codes are not part of this flow. 
 ## Filesystem Truth (must be used exactly)
 
 ### OpenClaw state files
-- OpenClaw state root (default): `~/.openclaw`
-- OpenClaw config: `~/.openclaw/openclaw.json`
+- OpenClaw state root (default): `~/.openclaw` (legacy fallback dirs may exist: `~/.clawdbot`, `~/.moldbot`, `~/.moltbot`)
+- OpenClaw config: `<state-dir>/openclaw.json` (legacy names may exist: `clawdbot.json`, `moldbot.json`, `moltbot.json`)
+- OpenClaw config env overrides: `OPENCLAW_CONFIG_PATH`, legacy `CLAWDBOT_CONFIG_PATH`
+- OpenClaw state env overrides: `OPENCLAW_STATE_DIR`, legacy `CLAWDBOT_STATE_DIR`
+- OpenClaw home override: `OPENCLAW_HOME`
 - Transform target path: `~/.openclaw/hooks/transforms/relay-to-peer.mjs`
-- Workspace skill location: `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md`
+- Transform runtime snapshot: `~/.openclaw/hooks/transforms/clawdentity-relay.json`
+- Transform peers snapshot: `~/.openclaw/hooks/transforms/clawdentity-peers.json`
+- Managed skill location: `~/.openclaw/skills/clawdentity-openclaw-relay/SKILL.md`
 - Default transform source expected by CLI setup:
-  `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/relay-to-peer.mjs`
+  `~/.openclaw/skills/clawdentity-openclaw-relay/relay-to-peer.mjs`
 
 ### Clawdentity identity files
 - Clawdentity root: `~/.clawdentity`
 - Agent config: `~/.clawdentity/config.json`
 - Agent identity directory: `~/.clawdentity/agents/<agent-name>/`
 - Agent private key: `~/.clawdentity/agents/<agent-name>/secret.key`
+- Agent public key: `~/.clawdentity/agents/<agent-name>/public.key`
+- Agent identity metadata: `~/.clawdentity/agents/<agent-name>/identity.json`
+- Agent registry auth: `~/.clawdentity/agents/<agent-name>/registry-auth.json`
 - Agent AIT token: `~/.clawdentity/agents/<agent-name>/ait.jwt`
 - Peer map: `~/.clawdentity/peers.json`
 - Local selected agent marker: `~/.clawdentity/openclaw-agent-name`
 - Relay runtime config: `~/.clawdentity/openclaw-relay.json`
+- Connector assignment map: `~/.clawdentity/openclaw-connectors.json`
 
-## Invite Input Assumption
+### Pairing ephemeral files
+- QR PNG storage: `~/.clawdentity/pairing/` (auto-cleaned after 900s)
 
-Invite creation is outside this skill scope. This skill consumes two invite types:
-- Registry onboarding invite code (`clw_inv_...`) to obtain PAT when local API key is missing.
-- OpenClaw peer relay invite code (`clawd1_...`) for `openclaw setup`.
+### Cache files
+- Registry signing keys cache: `~/.clawdentity/cache/registry-keys.json` (1-hour TTL)
+- Certificate revocation list cache: `~/.clawdentity/cache/crl-claims.json` (15-minute TTL)
 
-## Human + Agent Flow (strict user-style)
+## Inputs
 
-Follow this order. Do not skip steps.
+Required for onboarding:
+- Registry onboarding invite code: `clw_inv_...` (default onboarding path)
+- Local agent name
+- Human display name (used by invite redeem and pairing profile metadata)
+
+Optional only for recovery/advanced operator flows:
+- Existing API key (only when user explicitly says no invite is available)
+
+Required for pairing phase (after setup):
+- Pairing QR from the other side (`clwpair1_...` inside QR image) or inline ticket string
+
+Note: Registry operators must run `admin bootstrap` before creating invites. See `references/clawdentity-registry.md` for details.
 
 ## Tool Execution Contract (Agent)
 
-This skill is operational. The agent must execute side effects via tools.
-- Use the `exec` tool for npm/CLI/filesystem actions needed by onboarding.
-- Do not ask the human to run shell commands that the agent can run itself.
-- Ask the human only for missing secrets/inputs (for example API key or invite code).
-- Report final status with concrete outputs (local DID, peer alias, written paths).
- -## CLI Command Utilization (required) - -Use these commands as the default execution path for skill utilization: - -- Initialize local CLI config: - - `clawdentity config init` - - `clawdentity config init --registry-url ` (supports first-run registry URL override) - - or set env before init: `CLAWDENTITY_REGISTRY_URL=` (primary global override) - - compatible alias: `CLAWDENTITY_REGISTRY=` -- Configure registry URL and API key when missing: - - `clawdentity config set registryUrl ` -- Complete registry onboarding when API key is missing: - - `clawdentity invite redeem ` - - `clawdentity invite redeem --registry-url ` -- Configure API key only for non-invite fallback: - - `clawdentity config set apiKey ` -- Create and inspect local OpenClaw agent identity: - - `clawdentity agent create --framework openclaw` - - `clawdentity agent inspect ` -- Apply OpenClaw invite setup: - - `clawdentity openclaw setup --invite-code ` -- Start connector runtime for relay handoff: - - `clawdentity connector start ` -- Optional persistent connector autostart: - - `clawdentity connector service install ` -- Validate health and delivery: - - `clawdentity openclaw doctor` - - `clawdentity openclaw relay test --peer ` - -Pairing bootstrap uses CLI commands in the current release: - -- Owner/initiator starts pairing on initiator proxy: - - `clawdentity pair start --proxy-url --qr` - - Optionally pass explicit owner PAT: `--owner-pat ` -- Responder confirms on responder proxy: - - `clawdentity pair confirm --qr-file --proxy-url ` - - optional global proxy URL env fallback: `CLAWDENTITY_PROXY_URL=` - -Successful confirm establishes mutual trust for the two agent DIDs. After confirm, both directions are allowed for trusted delivery. - -1. Confirm prerequisites with the human. -- Confirm `clawdentity` CLI is installed and runnable. +- Execute side effects directly with tools/CLI. +- Do not ask humans to run commands that the agent can run. 
+- Ask humans only for missing secrets or missing required command inputs.
+- Keep status output concrete: created agent DID, generated API key status, written files.
+
+## Command Utilization (required)
+
+### Config management
+- `clawdentity config init`
+- `clawdentity config init --registry-url <registry-url>`
+- `clawdentity config set registryUrl <registry-url>`
+- `clawdentity config set apiKey <api-key>` (manual recovery only)
+- `clawdentity config get <key>`
+- `clawdentity config show`
+
+### Invite management
+- `clawdentity invite redeem <invite-code> --display-name <human-name>`
+- `clawdentity invite redeem <invite-code> --display-name <human-name> --registry-url <registry-url>`
+- `clawdentity invite create` (admin only, see registry reference)
+- `clawdentity invite create --expires-at <timestamp>` (admin only)
+
+### Agent identity
+- `clawdentity agent create <agent-name> --framework openclaw`
+- `clawdentity agent create <agent-name> --framework openclaw --ttl-days <days>`
+- `clawdentity agent inspect <agent-name>`
+- `clawdentity agent auth refresh <agent-name>`
+- `clawdentity agent revoke <agent-name>`
+
+### API key lifecycle
+- `clawdentity api-key create`
+- `clawdentity api-key list`
+- `clawdentity api-key revoke <key-id>`
+
+### OpenClaw relay setup
+- `clawdentity skill install`
+- `clawdentity openclaw setup <agent-name>`
+- `clawdentity openclaw setup <agent-name> --transform-source <path>`
+- `clawdentity openclaw setup <agent-name> --openclaw-dir <dir> --openclaw-base-url <url>`
+
+### OpenClaw diagnostics
+- `clawdentity openclaw doctor`
+- `clawdentity openclaw doctor --peer <peer-alias>`
+- `clawdentity openclaw doctor --json`
+- `clawdentity openclaw relay test`
+- `clawdentity openclaw relay test --peer <peer-alias> --hook-token <token> --json`
+- `clawdentity openclaw relay test --session-id <session-id> --message <message>`
+
+### Connector runtime (advanced/manual only)
+- `clawdentity connector start <agent-name>`
+- `clawdentity connector start <agent-name> --proxy-ws-url <ws-url>`
+- `clawdentity connector start <agent-name> --openclaw-hook-token <token>`
+- `clawdentity connector service install <agent-name>`
+- `clawdentity connector service install <agent-name> --platform <platform>`
+- `clawdentity connector service uninstall <agent-name>`
+- `clawdentity connector service uninstall <agent-name> --platform <platform>`
+
+### Pairing
+- `clawdentity pair start --qr`
+- `clawdentity pair start --qr --qr-output <path>`
+- `clawdentity pair start --qr --ttl-seconds <seconds>`
+- `clawdentity pair start --qr --wait`
+- `clawdentity pair start --qr --wait --wait-seconds <seconds> --poll-interval-seconds <seconds>`
+- `clawdentity pair confirm --qr-file <path>`
+- `clawdentity pair confirm --ticket <ticket>`
+- `clawdentity pair status --ticket <ticket>`
+- `clawdentity pair status --ticket <ticket> --wait`
+
+### Token verification
+- `clawdentity verify <token>`
+
+### Admin (registry operators only)
+- `clawdentity admin bootstrap --bootstrap-secret <secret>`
+- `clawdentity admin bootstrap --bootstrap-secret <secret> --display-name <display-name> --api-key-name <key-name> --registry-url <registry-url>`
+
+## Journey (strict order)
+
+1. Validate prerequisites.
+- Confirm CLI is installed.
+- Confirm CLI setup command is self-setup only:
+  - `clawdentity openclaw setup --help` must not include peer routing flags.
+  - it must **not** include `--invite-code`.
+- If `--invite-code` appears, treat CLI as outdated and upgrade before continuing:
+  - `npm install -g clawdentity@latest`
 - Confirm local agent name.
+- Confirm local human display name for onboarding. +- Check local API key status with `clawdentity config get apiKey`. +- If API key is missing, ask for onboarding invite `clw_inv_...` and continue with invite redeem. +- Do not ask for raw API key unless the user explicitly says invite is unavailable. +- Confirm OpenClaw path/base URL only if non-default. +- Do not ask for pairing inputs before onboarding is complete. + +2. Ensure skill artifact exists. +- Ensure `relay-to-peer.mjs` exists in managed skill path. +- If missing, run `clawdentity skill install` and continue. + +3. Initialize local CLI config. - Run `clawdentity config init`. -- Use `clawdentity config init --registry-url ` when registry URL override is required. - -4. Complete registry onboarding auth before agent creation. -- If API key already exists, continue. -- Else redeem registry onboarding invite: - - `clawdentity invite redeem ` - - optional: `--registry-url ` -- If registry invite code is unavailable, fallback to API key path: - - ask human for API key - - run `clawdentity config set apiKey ` - -5. Configure local Clawdentity identity for this OpenClaw agent. -- Create identity: `clawdentity agent create --framework openclaw`. -- Verify identity: `clawdentity agent inspect `. - -6. Run automated setup from peer relay invite code. -- Execute: - `clawdentity openclaw setup --invite-code ` -- Use `--openclaw-dir ` when state directory is non-default. -- Use `--openclaw-base-url ` when local OpenClaw HTTP endpoint is non-default. -- Use `--peer-alias ` only when alias override is required. - -7. Verify setup outputs. -- Confirm setup reports: - - peer alias - - peer DID - - updated OpenClaw config path - - installed transform path - - OpenClaw base URL - - relay runtime config path -- Confirm `~/.clawdentity/openclaw-agent-name` is set to the local agent name. - -8. Start connector runtime for local relay handoff. -- Run `clawdentity connector start `. 
-- Optional: run `clawdentity connector service install ` for persistent autostart. - -9. Complete trust pairing bootstrap. -- Run pairing start from owner/initiator side: - - `clawdentity pair start --proxy-url --qr` -- Share the one-time QR image with responder side. -- Run pairing confirm from responder side: - - `clawdentity pair confirm --qr-file --proxy-url ` -- Confirm pairing success before relay test. - -10. Validate with user-style relay test. -- Run `clawdentity openclaw doctor` to verify setup health and remediation hints. -- Run `clawdentity openclaw relay test --peer ` to execute a probe. -- Confirm probe success and connector-mediated delivery logs. -- Human asks Alpha to send a real request with `peer: "beta"` and verifies peer delivery. - -## Required question policy - -Ask the human only when required inputs are missing: -- Missing local agent name. -- Missing peer relay invite code (`clawd1_...`). -- Missing registry onboarding invite code (`clw_inv_...`) when API key is absent. -- Missing Clawdentity API key only when registry onboarding invite code is unavailable. -- Missing initiator/responder proxy URLs for pairing commands. -- Unclear OpenClaw state directory. -- Non-default OpenClaw base URL. -- Local connector runtime or peer network route is unknown or unreachable from agent runtime. +- If needed, run with `--registry-url`. + +4. Finish onboarding and generate API key. +- Preferred path: run `clawdentity invite redeem --display-name `. +- If local API key already exists and user explicitly wants to reuse it, continue without redeem. +- Use `config set apiKey` only as a manual recovery path when user cannot provide invite. +- Confirm output shows: + - `Invite redeemed` + - API key token printed once + - `API key saved to local config` + - `Human name: ` +- Stop and fix if this step fails. Do not proceed to pairing. + +5. Create local OpenClaw agent identity. +- Run `clawdentity agent create --framework openclaw`. 
+- Optionally add `--ttl-days ` to control token lifetime. +- Run `clawdentity agent inspect `. + +6. Configure relay setup. +- Run: + `clawdentity openclaw setup ` +- Add optional: + - `--openclaw-dir ` + - `--openclaw-base-url ` + - `--transform-source ` (custom relay transform location) +- Verify output contains: + - self-setup completion + - OpenClaw config path and relay runtime path + - runtime mode/status + - websocket status `connected` + - setup checklist is healthy (fails fast when hook/device/runtime prerequisites drift) + +7. Validate readiness. +- `clawdentity openclaw setup` already runs an internal checklist and auto-recovers pending OpenClaw gateway device approvals when possible. +- Run `clawdentity openclaw doctor` only for diagnostics or CI reporting. +- Use `--json` for machine-readable output. +- Use `--peer ` to validate a specific peer exists after pairing. +- Doctor check IDs and remediation: + +| Check ID | Validates | Remediation on Failure | +|----------|-----------|----------------------| +| `config.registry` | `registryUrl`, `apiKey`, and `proxyUrl` in config (or proxy env override) | `clawdentity config init` or `invite redeem` | +| `state.selectedAgent` | Agent marker at `~/.clawdentity/openclaw-agent-name` | `clawdentity openclaw setup ` | +| `state.credentials` | `ait.jwt` and `secret.key` exist and non-empty | `clawdentity agent create ` or `agent auth refresh ` | +| `state.peers` | Peers config valid; requested `--peer` alias exists | `clawdentity pair start` / `pair confirm` (optional until pairing) | +| `state.transform` | Relay transform artifacts in OpenClaw hooks dir | Reinstall skill package or `openclaw setup ` | +| `state.hookMapping` | `send-to-peer` hook mapping in OpenClaw config | `clawdentity openclaw setup ` | +| `state.hookToken` | Hooks enabled with token in OpenClaw config | `clawdentity openclaw setup ` then restart OpenClaw | +| `state.hookSessionRouting` | `hooks.defaultSessionKey`, 
`hooks.allowRequestSessionKey=false`, and required prefixes (`hook:`, default session key) | `clawdentity openclaw setup ` then restart OpenClaw | +| `state.gatewayDevicePairing` | Pending OpenClaw device approvals (prevents `pairing required` websocket errors) | Re-run `clawdentity openclaw setup ` so setup auto-recovers approvals | +| `state.openclawBaseUrl` | OpenClaw base URL resolvable | `clawdentity openclaw setup --openclaw-base-url ` | +| `state.connectorRuntime` | Local connector runtime reachable and websocket-connected | `clawdentity openclaw setup ` | +| `state.connectorInboundInbox` | Connector local inbound inbox backlog and replay queue state (`/v1/status`) | Re-run `clawdentity openclaw setup ` and verify connector runtime health | +| `state.openclawHookHealth` | Connector replay status for local OpenClaw hook delivery (`/v1/status`) | Re-run `clawdentity openclaw setup ` and restart OpenClaw if hook replay stays failed | + +- At this point the agent is ready to start pairing or accept pairing. + +8. Pairing phase (separate from onboarding). +- Required default initiator flow: + - `clawdentity pair start --qr --wait` + - Optional overrides: `--ttl-seconds `, `--qr-output `, `--wait-seconds `, `--poll-interval-seconds ` +- Why `--wait` is required by default: + - responder saves peer during `pair confirm` + - initiator saves peer only after confirmed status is observed (`pair start --wait` or `pair status`) +- Responder (two mutually exclusive paths): + - QR path: `clawdentity pair confirm --qr-file ` + - Inline ticket path: `clawdentity pair confirm --ticket ` + - Cannot provide both `--qr-file` and `--ticket` simultaneously. +- Pair confirm auto-saves peer DID/proxy mapping locally from QR ticket metadata. 
+- Pair start/confirm/status exchange profile metadata: + - `initiatorProfile = { agentName, humanName }` + - `responderProfile = { agentName, humanName }` +- Local peer entries in `~/.clawdentity/peers.json` should include: + - `did` + - `proxyUrl` + - `agentName` + - `humanName` +- If initiator started without `--wait`, initiator must run: + - `clawdentity pair status --ticket --wait` + - This persists the peer on initiator after responder confirmation. +- Confirm pairing success, then run `clawdentity openclaw relay test`. + +9. Post-pairing verification. +- Run `clawdentity verify ` to confirm the local agent token is valid. +- Verify output shows token status, expiry, and no revocation. +- Run `clawdentity openclaw doctor --peer ` to confirm the new peer is visible. +- Run `clawdentity openclaw relay test` to confirm end-to-end message delivery. +- Relay delivery is asynchronous: proxy accepts deliveries with `202`, and `state=queued` is expected when the recipient connector is temporarily offline. +- `state=queued` is not a pairing failure. The proxy retries delivery automatically while the message is within queue TTL/retry limits. +- Note: `relay test` runs preflight doctor checks before sending the probe. + +## Lifecycle Management + +### Token expiry recovery +1. Run `clawdentity agent auth refresh `. +2. Reconcile runtime with `clawdentity openclaw setup `. +3. If manual runtime mode is required, run `clawdentity connector start `. +4. Verify with `clawdentity agent inspect ` to confirm new expiry. + +### API key rotation +1. Create new key: `clawdentity api-key create`. +2. Save new key: `clawdentity config set apiKey `. +3. Revoke old key: `clawdentity api-key revoke `. +4. Verify with `clawdentity config get apiKey`. + +### Agent decommission +1. Revoke agent: `clawdentity agent revoke `. +2. Revocation is idempotent; repeat calls are safe. +3. CRL propagation may lag up to 15 minutes for verifiers using cached CRL. + +### Service teardown +1. 
Uninstall service: `clawdentity connector service uninstall `. +2. Idempotent; safe to run even if service was already removed. +3. Use `--platform ` to target a specific platform. + +### Token verification +- Verify any AIT: `clawdentity verify `. +- Accepts raw JWT string or file path containing the token. +- Uses cached registry keys (1h TTL) and CRL (15min TTL). +- Exit code 1 on verification failure or revocation. + +## Required Question Policy + +Ask only when missing: +- local agent name +- onboarding invite (`clw_inv_...`) unless user explicitly requests API-key recovery path +- non-default OpenClaw path/base URL +- pairing QR image path or ticket string for confirm + +Do not ask for relay invite codes. +Do not ask for `clawd1_...` values. +Do not state that API key is required before invite redeem. +Do not suggest switching endpoints unless user explicitly asks for endpoint changes. ## Failure Handling -If setup or relay fails: -- Report precise missing file/path/value. -- Fix only the failing config/input. -- Ensure connector runtime is active (`clawdentity connector start `). -- Re-run `clawdentity openclaw doctor`. -- Re-run `clawdentity openclaw relay test --peer `. -- Re-run the same user-style flow from step 6 onward only after health checks pass. +### Connector errors +- `404` on outbound endpoint: connector runtime is not available. Rerun `clawdentity openclaw setup `. +- `409` on outbound: peer snapshot stale. Rerun `clawdentity openclaw setup `. +- `CLI_CONNECTOR_MISSING_AGENT_MATERIAL`: agent credentials missing. Rerun `clawdentity agent create ` or `clawdentity agent auth refresh `. + +### Pairing errors +- `pair start` 403 (`PROXY_PAIR_OWNERSHIP_FORBIDDEN`): initiator ownership check failed. Recreate/refresh the local agent identity. +- `pair start` 503 (`PROXY_PAIR_OWNERSHIP_UNAVAILABLE`): registry ownership validation is unavailable. Check proxy/registry service auth configuration. 
+- `pair confirm` 404 (`PROXY_PAIR_TICKET_NOT_FOUND`): ticket is invalid or expired. Request a new ticket from initiator.
+- `pair confirm` 410 (`PROXY_PAIR_TICKET_EXPIRED`): ticket has expired. Request a new ticket.
+- `CLI_PAIR_CONFIRM_INPUT_CONFLICT`: cannot provide both `--ticket` and `--qr-file`. Use one path only.
+- `CLI_PAIR_PROXY_URL_MISMATCH`: local `proxyUrl` does not match registry metadata. Rerun `clawdentity invite redeem <invite-code>`.
+- Responder shows peer but initiator does not:
+  - Cause: initiator started pairing without `--wait`.
+  - Fix: run `clawdentity pair status --ticket <ticket> --wait` on initiator.
+
+### Setup errors
+- `405 Method Not Allowed` on hook path: rerun `clawdentity openclaw setup <agent-name>` and restart OpenClaw.
+- `CLI_OPENCLAW_MISSING_AGENT_CREDENTIALS` or `CLI_OPENCLAW_EMPTY_AGENT_CREDENTIALS`: agent credentials missing or empty. Rerun `agent create` or `agent auth refresh`.
+
+### Credential expiry
+- Agent AIT expired: run `clawdentity agent auth refresh <agent-name>`, then rerun `clawdentity openclaw setup <agent-name>`.
+- API key invalid (401 on registry calls): rotate with `api-key create` then `config set apiKey`.
+
+### General recovery
+- Report exact missing file/value.
+- Fix only failing input/config.
+- Prefer `openclaw setup` as the single runtime reconciliation command.
+- Then run `openclaw relay test`.
+- Use `openclaw doctor` only when you need a detailed diagnostic report.
## Bundled Resources -### References | File | Purpose | |------|---------| -| `references/clawdentity-protocol.md` | Invite format, peer map schema, connector handoff envelope, and runtime failure mapping | +| `references/clawdentity-protocol.md` | Peer-map schema, pairing contract, connector handoff envelope, proxy URL resolution, pairing error codes, cache files, peer alias derivation | +| `references/clawdentity-registry.md` | Admin bootstrap, API key lifecycle, agent revocation, auth refresh | -Directive: read the reference file before troubleshooting relay contract or connector handoff failures. +Directive: read the reference files before troubleshooting relay contract, connector handoff failures, or registry/admin operations. diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index ab0a9d9..36713ee 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -7,39 +7,39 @@ Define the exact runtime contract used by `relay-to-peer.mjs`. 
## Filesystem Paths ### OpenClaw files -- `~/.openclaw/openclaw.json` -- `~/.openclaw/hooks/transforms/relay-to-peer.mjs` -- `~/.openclaw/workspace/skills/clawdentity-openclaw-relay/SKILL.md` +- `/openclaw.json` (legacy filenames may exist: `clawdbot.json`, `moldbot.json`, `moltbot.json`) +- `/hooks/transforms/relay-to-peer.mjs` +- `/hooks/transforms/clawdentity-relay.json` +- `/hooks/transforms/clawdentity-peers.json` +- `/skills/clawdentity-openclaw-relay/SKILL.md` +- env overrides: + - `OPENCLAW_CONFIG_PATH`, `CLAWDBOT_CONFIG_PATH` + - `OPENCLAW_STATE_DIR`, `CLAWDBOT_STATE_DIR` + - `OPENCLAW_HOME` (used when explicit config/state overrides are unset) ### Clawdentity files - `~/.clawdentity/config.json` - `~/.clawdentity/agents//secret.key` +- `~/.clawdentity/agents//public.key` +- `~/.clawdentity/agents//identity.json` +- `~/.clawdentity/agents//registry-auth.json` - `~/.clawdentity/agents//ait.jwt` - `~/.clawdentity/peers.json` - `~/.clawdentity/openclaw-agent-name` - `~/.clawdentity/openclaw-relay.json` +- `~/.clawdentity/openclaw-connectors.json` +- `~/.clawdentity/pairing/` (ephemeral QR PNG storage, auto-cleaned after 900s) +- `~/.clawdentity/cache/registry-keys.json` (1-hour TTL, used by `verify`) +- `~/.clawdentity/cache/crl-claims.json` (15-minute TTL, used by `verify`) -## Invite Code Contract +## Setup Input Contract -Invite codes are prefixed with `clawd1_` and contain base64url JSON: - -```json -{ - "v": 1, - "issuedAt": "2026-02-15T20:00:00.000Z", - "did": "did:claw:agent:01H...", - "proxyUrl": "https://beta-proxy.example.com/hooks/agent", - "alias": "beta", - "name": "Beta Agent" -} -``` +`clawdentity openclaw setup` is self-setup only. It does not accept peer routing fields. Rules: -- `v` must be `1`. -- `issuedAt` is ISO-8601 UTC timestamp. -- `did` must be an agent DID. -- `proxyUrl` must be absolute `http` or `https`. -- `alias` is optional but preferred for zero-question setup. 
+- setup must succeed without any peer metadata +- peers config snapshot still exists and may be empty until pairing is completed +- setup is expected to bring connector runtime to a websocket-connected state (unless explicitly disabled by advanced flags) ## Peer Map Schema @@ -51,7 +51,8 @@ Rules: "beta": { "did": "did:claw:agent:01H...", "proxyUrl": "https://beta-proxy.example.com/hooks/agent", - "name": "Beta Agent" + "agentName": "beta", + "humanName": "Ira" } } } @@ -61,7 +62,8 @@ Rules: - peer alias key uses `[a-zA-Z0-9._-]` - `did` required and must begin with `did:` - `proxyUrl` required and must be a valid absolute URL -- `name` optional +- `agentName` optional +- `humanName` optional ## Proxy Pairing Prerequisite @@ -70,21 +72,25 @@ Relay delivery policy is trust-pair based on proxy side. Pairing must be complet Current pairing contract is ticket-based with CLI support: 1. Initiator owner starts pairing: - - CLI: `clawdentity pair start --proxy-url --qr` + - CLI: `clawdentity pair start --qr` - proxy route: `POST /pair/start` - headers: - `Authorization: Claw ` - - `x-claw-owner-pat: ` - - body (optional): + - ownership validation is handled internally by proxy-to-registry service auth + - body: ```json { - "ttlSeconds": 300 + "ttlSeconds": 300, + "initiatorProfile": { + "agentName": "alpha", + "humanName": "Ravi" + } } ``` 2. Responder confirms pairing: - - CLI: `clawdentity pair confirm --qr-file --proxy-url ` + - CLI: `clawdentity pair confirm --qr-file ` - proxy route: `POST /pair/confirm` - headers: - `Authorization: Claw ` @@ -92,13 +98,18 @@ Current pairing contract is ticket-based with CLI support: ```json { - "ticket": "clwpair1_..." + "ticket": "clwpair1_...", + "responderProfile": { + "agentName": "beta", + "humanName": "Ira" + } } ``` Rules: - `ticket` is one-time and expires (default 5 minutes, max 15 minutes). - Confirm establishes mutual trust for the initiator/responder pair. 
+- Confirm auto-persists peer DID/proxy mapping locally in `~/.clawdentity/peers.json` using ticket issuer metadata. - Same-agent sender/recipient is allowed by policy without explicit pair entry. ## Relay Input Contract @@ -129,22 +140,27 @@ Relay resolves local agent name in this order: ```json { "openclawBaseUrl": "http://127.0.0.1:18789", + "openclawHookToken": "", "updatedAt": "2026-02-15T20:00:00.000Z" } ``` Rules: - `openclawBaseUrl` must be absolute `http` or `https`. +- `openclawHookToken` is optional in schema but should be present after `clawdentity openclaw setup`; connector runtime uses it for `/hooks/*` auth when no explicit hook token option/env is provided. - `updatedAt` is ISO-8601 UTC timestamp. - Proxy runtime precedence is: `OPENCLAW_BASE_URL` env first, then `openclaw-relay.json`, then built-in default. ## Connector Handoff Contract The transform does not send directly to the peer proxy. It posts to the local connector runtime: -- Default endpoint: `http://127.0.0.1:19400/v1/outbound` -- Optional overrides: +- Endpoint candidates are loaded from OpenClaw-local `hooks/transforms/clawdentity-relay.json` (generated by `openclaw setup`) and attempted in order. +- Default fallback endpoint remains `http://127.0.0.1:19400/v1/outbound`. +- Runtime may also use: - `CLAWDENTITY_CONNECTOR_BASE_URL` - `CLAWDENTITY_CONNECTOR_OUTBOUND_PATH` +- `openclaw setup ` is the primary self-setup path and should leave runtime healthy. +- `connector start ` is advanced/manual recovery; it resolves bind URL from `~/.clawdentity/openclaw-connectors.json` when explicit env override is absent. Outbound JSON body sent by transform: @@ -175,3 +191,73 @@ Relay fails when: - local connector outbound request fails (network/other non-2xx) Error messages should include file/path context but never print secret content. + +## Proxy URL Resolution + +CLI resolves proxy URL in this order (first non-empty wins): + +1. `CLAWDENTITY_PROXY_URL` environment variable +2. 
`proxyUrl` from `~/.clawdentity/config.json` +3. Registry metadata from `GET /v1/metadata` +4. Error when configured proxy does not match metadata (`CLI_PAIR_PROXY_URL_MISMATCH`) or metadata lookup fails + +### Metadata expectation + +Registry metadata (`/v1/metadata`) should return a valid `proxyUrl`. + +Known defaults: + +| Registry URL | Metadata proxy URL | +|-------------|--------------------| +| `https://registry.clawdentity.com` | `https://proxy.clawdentity.com` | +| `https://dev.registry.clawdentity.com` | `https://dev.proxy.clawdentity.com` | + +Recovery: rerun onboarding (`clawdentity invite redeem --display-name `) so local config aligns to registry metadata. + +## Pairing Error Codes + +### `pair start` errors + +| HTTP Status | Error Code | Meaning | +|-------------|-----------|---------| +| 403 | `PROXY_PAIR_OWNERSHIP_FORBIDDEN` | Initiator ownership check failed | +| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | +| — | `CLI_PAIR_AGENT_NOT_FOUND` | Agent ait.jwt or secret.key missing/empty | +| — | `CLI_PAIR_HUMAN_NAME_MISSING` | Local config is missing `humanName`; set via invite redeem or config | +| — | `CLI_PAIR_PROXY_URL_REQUIRED` | Proxy URL could not be resolved | +| — | `CLI_PAIR_START_INVALID_TTL` | ttlSeconds must be a positive integer | +| — | `CLI_PAIR_INVALID_PROXY_URL` | Proxy URL is invalid | +| — | `CLI_PAIR_REQUEST_FAILED` | Unable to connect to proxy URL | + +### `pair confirm` errors + +| HTTP Status | Error Code | Meaning | +|-------------|-----------|---------| +| 404 | `PROXY_PAIR_TICKET_NOT_FOUND` | Pairing ticket is invalid or expired | +| 410 | `PROXY_PAIR_TICKET_EXPIRED` | Pairing ticket has expired | +| — | `CLI_PAIR_CONFIRM_TICKET_REQUIRED` | Either --ticket or --qr-file is required | +| — | `CLI_PAIR_CONFIRM_INPUT_CONFLICT` | Cannot provide both --ticket and --qr-file | +| — | `CLI_PAIR_CONFIRM_TICKET_INVALID` | Pairing ticket is invalid | +| — | `CLI_PAIR_CONFIRM_QR_FILE_NOT_FOUND` | 
QR file not found |
+| — | `CLI_PAIR_CONFIRM_QR_NOT_FOUND` | No pairing QR code found in image |
+
+## Cache Files
+
+| Path | TTL | Used By |
+|------|-----|---------|
+| `~/.clawdentity/cache/registry-keys.json` | 1 hour | `verify` command — cached registry signing public keys |
+| `~/.clawdentity/cache/crl-claims.json` | 15 minutes | `verify` command — cached certificate revocation list |
+
+Cache is populated on first `verify` call and refreshed when TTL expires. Stale cache is used as fallback when registry is unreachable.
+
+## Peer Alias Derivation
+
+When `pair confirm` saves a new peer, alias is derived automatically:
+
+1. Parse peer DID to extract ULID component.
+2. Take last 8 characters of ULID, lowercase: `peer-<suffix>`.
+3. If alias already exists in `peers.json` for a different DID, append numeric suffix: `peer-<suffix>-2`, `peer-<suffix>-3`, etc.
+4. If peer DID already exists in `peers.json`, reuse existing alias (no duplicate entry).
+5. Fallback alias is `peer` if DID is not a valid agent DID.
+
+Alias validation: `[a-zA-Z0-9._-]`, max 128 characters.
diff --git a/apps/openclaw-skill/skill/references/clawdentity-registry.md b/apps/openclaw-skill/skill/references/clawdentity-registry.md
new file mode 100644
index 0000000..8a45180
--- /dev/null
+++ b/apps/openclaw-skill/skill/references/clawdentity-registry.md
@@ -0,0 +1,175 @@
+# Clawdentity Registry Operations Reference
+
+## Purpose
+
+Document registry-side CLI commands that are outside the core relay setup journey: admin bootstrap, API key lifecycle, agent revocation, and auth refresh.
+
+## Admin Bootstrap
+
+Bootstrap creates the first admin human and API key on a fresh registry. This is a prerequisite before any invites can be created.
+ +### Command + +``` +clawdentity admin bootstrap --bootstrap-secret +``` + +### Flags + +| Flag | Required | Description | +|------|----------|-------------| +| `--bootstrap-secret ` | Yes | One-time bootstrap secret configured on registry server | +| `--display-name ` | No | Admin display name | +| `--api-key-name ` | No | Admin API key label | +| `--registry-url ` | No | Override registry URL | + +### Expected Output + +``` +Admin bootstrap completed +Human DID: did:claw:human:01H... +API key name: +API key token (shown once): + +API key saved to local config +``` + +### Error Codes + +| Error Code | Meaning | +|------------|---------| +| `ADMIN_BOOTSTRAP_DISABLED` | Bootstrap is disabled on the registry | +| `ADMIN_BOOTSTRAP_UNAUTHORIZED` | Bootstrap secret is invalid | +| `ADMIN_BOOTSTRAP_ALREADY_COMPLETED` | Admin already exists; bootstrap is one-time | +| `ADMIN_BOOTSTRAP_INVALID` | Request payload is invalid | +| `CLI_ADMIN_BOOTSTRAP_SECRET_REQUIRED` | Bootstrap secret was not provided | +| `CLI_ADMIN_BOOTSTRAP_INVALID_REGISTRY_URL` | Registry URL is invalid | +| `CLI_ADMIN_BOOTSTRAP_REQUEST_FAILED` | Unable to connect to registry | +| `CLI_ADMIN_BOOTSTRAP_CONFIG_PERSISTENCE_FAILED` | Failed to save admin credentials locally | + +### Behavioral Notes + +- One-time operation: succeeds only on first call per registry. +- Automatically persists `registryUrl` and `apiKey` to local config. +- Registry must have `BOOTSTRAP_SECRET` environment variable set. +- After bootstrap, admin can create invites with `clawdentity invite create`. + +## API Key Lifecycle + +### Create API key + +``` +clawdentity api-key create +``` + +Creates a new API key under the current authenticated human. Token is displayed once. + +### List API keys + +``` +clawdentity api-key list +``` + +Lists all API keys for the current human with ID, name, and status. + +### Revoke API key + +``` +clawdentity api-key revoke +``` + +Revokes an API key by ID. The key becomes immediately unusable. 
+ +### Rotation workflow + +1. `clawdentity api-key create` — note the new token. +2. `clawdentity config set apiKey ` — switch local config. +3. `clawdentity api-key revoke ` — deactivate old key. +4. `clawdentity config get apiKey` — verify new key is active. + +### Error Codes + +| HTTP Status | Meaning | +|-------------|---------| +| 401 | API key invalid or expired; re-authenticate | +| 403 | Insufficient permissions (admin required for some operations) | + +## Agent Revocation + +### Command + +``` +clawdentity agent revoke +``` + +Revokes a local agent identity via the registry. The agent's AIT will appear on the certificate revocation list (CRL). + +### Behavioral Notes + +- Reads agent DID from `~/.clawdentity/agents//identity.json`. +- Requires `apiKey` configured in `~/.clawdentity/config.json`. +- Idempotent: repeat revocation calls succeed without error. +- CRL propagation lag: verifiers using cached `crl-claims.json` (15-minute TTL) may not see revocation immediately. +- Local credential files are not deleted; only registry-side revocation is performed. + +### Error Codes + +| HTTP Status | Meaning | +|-------------|---------| +| 401 | Authentication failed — API key invalid | +| 404 | Agent not found in registry | +| 409 | Agent cannot be revoked (already revoked or conflict) | + +## Agent Auth Refresh + +### Command + +``` +clawdentity agent auth refresh +``` + +Refreshes the agent's registry auth credentials using Claw proof (Ed25519 signature). + +### What It Reads + +- `~/.clawdentity/agents//secret.key` — for signing the proof +- `~/.clawdentity/agents//registry-auth.json` — current refresh token + +### What It Writes + +- `~/.clawdentity/agents//registry-auth.json` — new access token and refresh token + +### Behavioral Notes + +- Uses atomic write (temp file + chmod 0600 + rename) to prevent corruption. +- Requires `registryUrl` configured in `~/.clawdentity/config.json`. +- After refresh, restart connector to pick up new credentials. 
+- If `registry-auth.json` is missing or empty, the agent must be re-created with `agent create`. + +### Error Codes + +| Error Code | Meaning | +|------------|---------| +| `CLI_OPENCLAW_EMPTY_AGENT_CREDENTIALS` | Registry auth file is empty or missing | +| 401 | Refresh token expired or invalid — re-create agent | + +## Invite Management (Admin) + +### Create invite + +``` +clawdentity invite create +clawdentity invite create --expires-at --registry-url +``` + +Admin-only. Creates a registry invite code (`clw_inv_...`) for onboarding new users. + +### Error Codes + +| Error Code | Meaning | +|------------|---------| +| `CLI_INVITE_MISSING_LOCAL_CREDENTIALS` | API key not configured | +| `CLI_INVITE_CREATE_FAILED` | Invite creation failed | +| 401 | Authentication failed | +| 403 | Requires admin access | +| 400 | Invalid request | diff --git a/apps/openclaw-skill/src/AGENTS.md b/apps/openclaw-skill/src/AGENTS.md index 9a1eabd..50f3957 100644 --- a/apps/openclaw-skill/src/AGENTS.md +++ b/apps/openclaw-skill/src/AGENTS.md @@ -9,10 +9,16 @@ ## Safety Rules - Validate external input (`payload`, peer config JSON) before use. - Do not log relay payload contents or local connector credential material. -- Keep transform relay path as local connector handoff only (`http://127.0.0.1:19400/v1/outbound` by default), not direct peer HTTP calls. +- Keep transform relay path as local connector handoff only, not direct peer HTTP calls. +- Relay transform must prefer OpenClaw-local runtime artifacts in `hooks/transforms/`: + - `clawdentity-relay.json` for connector endpoint candidates/path + - `clawdentity-peers.json` for peer alias map snapshot visible inside containerized OpenClaw runtimes +- Assume default onboarding runs `openclaw setup` end-to-end (including runtime startup); direct `connector start` is manual recovery only. 
+- Connector endpoint fallback order must remain container-safe for macOS/Linux hosts (for example `host.docker.internal`, `gateway.docker.internal`, linux bridge/default gateway, then loopback). - Keep peer alias semantics deterministic: validate `payload.peer` against peers config before connector handoff. - Keep connector failure mapping deterministic (`404` endpoint unavailable, `409` peer alias conflict, network failure generic outage). -- Keep peer schema strict (`did`, `proxyUrl`, optional `name`) and reject malformed values early. +- Assume connector runtime OpenClaw auth is sourced from `~/.clawdentity/openclaw-relay.json` (`openclawHookToken`) when explicit token flags/env are absent. +- Keep peer schema strict (`did`, `proxyUrl`, optional `agentName`, optional `humanName`) and reject malformed values early. ## Testing Rules - Use temp directories for filesystem tests; no dependency on real user home state. diff --git a/apps/openclaw-skill/src/transforms/peers-config.test.ts b/apps/openclaw-skill/src/transforms/peers-config.test.ts index 709e832..4dd02ff 100644 --- a/apps/openclaw-skill/src/transforms/peers-config.test.ts +++ b/apps/openclaw-skill/src/transforms/peers-config.test.ts @@ -46,7 +46,8 @@ describe("peers config", () => { beta: { did: "did:claw:agent:01TEST", proxyUrl: "https://beta.example.com/hooks/agent", - name: "Beta Agent", + agentName: "beta", + humanName: "Ira", }, }, }, @@ -59,7 +60,8 @@ describe("peers config", () => { beta: { did: "did:claw:agent:01TEST", proxyUrl: "https://beta.example.com/hooks/agent", - name: "Beta Agent", + agentName: "beta", + humanName: "Ira", }, }, }); diff --git a/apps/openclaw-skill/src/transforms/peers-config.ts b/apps/openclaw-skill/src/transforms/peers-config.ts index 1f9c10a..90ad2c3 100644 --- a/apps/openclaw-skill/src/transforms/peers-config.ts +++ b/apps/openclaw-skill/src/transforms/peers-config.ts @@ -10,7 +10,8 @@ const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; export type PeerEntry = { did: 
string; proxyUrl: string; - name?: string; + agentName?: string; + humanName?: string; }; export type PeersConfig = { @@ -83,12 +84,15 @@ function parseProxyUrl(value: unknown): string { } } -function parsePeerName(value: unknown): string | undefined { +function parseProfileName( + value: unknown, + label: "agentName" | "humanName", +): string | undefined { if (value === undefined) { return undefined; } - return parseNonEmptyString(value, "name"); + return parseNonEmptyString(value, label); } function parsePeerEntry(value: unknown): PeerEntry { @@ -98,13 +102,14 @@ function parsePeerEntry(value: unknown): PeerEntry { const did = parseDid(value.did); const proxyUrl = parseProxyUrl(value.proxyUrl); - const name = parsePeerName(value.name); + const agentName = parseProfileName(value.agentName, "agentName"); + const humanName = parseProfileName(value.humanName, "humanName"); - if (name === undefined) { + if (agentName === undefined && humanName === undefined) { return { did, proxyUrl }; } - return { did, proxyUrl, name }; + return { did, proxyUrl, agentName, humanName }; } function parsePeersConfig(value: unknown, source: string): PeersConfig { diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts index 28ae131..f10d1d3 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.test.ts @@ -25,7 +25,8 @@ function createRelaySandbox(): RelaySandbox { beta: { did: "did:claw:agent:01BETA", proxyUrl: "https://peer.example.com/hooks/agent?source=skill", - name: "Beta", + agentName: "beta", + humanName: "Ira", }, }, }, diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.ts index 3b0f5e5..537196f 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.ts @@ -1,3 +1,6 @@ +import { readFile } from 
"node:fs/promises"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; import { loadPeersConfig, type PeersConfigPathOptions, @@ -5,6 +8,15 @@ import { const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; +const RELAY_RUNTIME_FILE_NAME = "clawdentity-relay.json"; +const RELAY_PEERS_FILE_NAME = "clawdentity-peers.json"; + +type RelayRuntimeConfig = { + connectorBaseUrl?: string; + connectorBaseUrls?: string[]; + connectorPath?: string; + peersConfigPath?: string; +}; export type RelayToPeerOptions = PeersConfigPathOptions & { connectorBaseUrl?: string; @@ -27,6 +39,14 @@ function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } +function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? error.code : undefined; +} + function parseRequiredString(value: unknown): string { if (typeof value !== "string") { throw new Error("Input value must be a string"); @@ -95,20 +115,168 @@ function normalizeConnectorPath(value: string): string { return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; } -function resolveConnectorEndpoint(options: RelayToPeerOptions): string { - const baseUrlInput = - options.connectorBaseUrl ?? - process.env.CLAWDENTITY_CONNECTOR_BASE_URL ?? 
- DEFAULT_CONNECTOR_BASE_URL; +function resolveTransformsDir(): string { + return dirname(fileURLToPath(import.meta.url)); +} + +async function readJson(filePath: string): Promise { + let raw: string; + try { + raw = await readFile(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + + throw error; + } + + try { + return JSON.parse(raw); + } catch { + throw new Error(`Relay runtime config at ${filePath} is not valid JSON`); + } +} + +function parseRelayRuntimeConfig(value: unknown): RelayRuntimeConfig { + if (!isRecord(value)) { + throw new Error("Relay runtime config must be an object"); + } + + const connectorBaseUrl = + typeof value.connectorBaseUrl === "string" && + value.connectorBaseUrl.trim().length > 0 + ? parseConnectorBaseUrl(value.connectorBaseUrl.trim()) + : undefined; + const connectorPath = + typeof value.connectorPath === "string" && + value.connectorPath.trim().length > 0 + ? normalizeConnectorPath(value.connectorPath) + : undefined; + const peersConfigPath = + typeof value.peersConfigPath === "string" && + value.peersConfigPath.trim().length > 0 + ? value.peersConfigPath.trim() + : undefined; + + const connectorBaseUrls = Array.isArray(value.connectorBaseUrls) + ? 
value.connectorBaseUrls + .filter((item): item is string => typeof item === "string") + .map((item) => item.trim()) + .filter((item) => item.length > 0) + .map(parseConnectorBaseUrl) + : undefined; + + return { + connectorBaseUrl, + connectorBaseUrls, + connectorPath, + peersConfigPath, + }; +} + +async function loadRelayRuntimeConfig(): Promise { + const runtimePath = join(resolveTransformsDir(), RELAY_RUNTIME_FILE_NAME); + const parsed = await readJson(runtimePath); + if (parsed === undefined) { + return {}; + } + + return parseRelayRuntimeConfig(parsed); +} + +function parseGatewayHexToIpv4(value: string): string | undefined { + if (!/^[0-9A-Fa-f]{8}$/.test(value)) { + return undefined; + } + + const octets = [0, 2, 4, 6].map((index) => + Number.parseInt(value.slice(index, index + 2), 16), + ); + return `${octets[3]}.${octets[2]}.${octets[1]}.${octets[0]}`; +} + +async function resolveLinuxDockerGatewayHost(): Promise { + let raw: string; + try { + raw = await readFile("/proc/net/route", "utf8"); + } catch { + return undefined; + } + + const lines = raw.split("\n"); + for (const line of lines.slice(1)) { + const parts = line.trim().split(/\s+/); + if (parts.length < 4) { + continue; + } + const destination = parts[1]; + const gateway = parts[2]; + const flags = Number.parseInt(parts[3], 16); + if ( + destination === "00000000" && + Number.isFinite(flags) && + (flags & 0x2) === 0x2 + ) { + return parseGatewayHexToIpv4(gateway); + } + } + + return undefined; +} + +async function resolveConnectorEndpoints( + options: RelayToPeerOptions, +): Promise { + const runtimeConfig = await loadRelayRuntimeConfig(); const pathInput = options.connectorPath ?? + runtimeConfig.connectorPath ?? process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH ?? 
DEFAULT_CONNECTOR_OUTBOUND_PATH; - - const baseUrl = parseConnectorBaseUrl(baseUrlInput.trim()); const path = normalizeConnectorPath(pathInput.trim()); - return new URL(path, baseUrl).toString(); + const candidates: string[] = []; + if (options.connectorBaseUrl) { + candidates.push(parseConnectorBaseUrl(options.connectorBaseUrl.trim())); + } + if (runtimeConfig.connectorBaseUrls) { + candidates.push(...runtimeConfig.connectorBaseUrls); + } + if (runtimeConfig.connectorBaseUrl) { + candidates.push(runtimeConfig.connectorBaseUrl); + } + if ( + typeof process.env.CLAWDENTITY_CONNECTOR_BASE_URL === "string" && + process.env.CLAWDENTITY_CONNECTOR_BASE_URL.trim().length > 0 + ) { + candidates.push( + parseConnectorBaseUrl(process.env.CLAWDENTITY_CONNECTOR_BASE_URL.trim()), + ); + } + candidates.push(DEFAULT_CONNECTOR_BASE_URL); + + const linuxGatewayHost = await resolveLinuxDockerGatewayHost(); + if (linuxGatewayHost) { + for (const candidate of [...candidates]) { + try { + const parsed = new URL(candidate); + if ( + parsed.hostname === "host.docker.internal" || + parsed.hostname === "gateway.docker.internal" || + parsed.hostname === "172.17.0.1" + ) { + parsed.hostname = linuxGatewayHost; + candidates.push(parsed.toString()); + } + } catch { + // Ignore malformed candidate; parseConnectorBaseUrl already guards known values. 
+ } + } + } + + const deduped = Array.from(new Set(candidates.map((candidate) => candidate))); + return deduped.map((baseUrl) => new URL(path, baseUrl).toString()); } function mapConnectorFailure(status: number): Error { @@ -150,6 +318,40 @@ async function postToConnector( } } +function shouldTryNextConnectorEndpoint(error: unknown): boolean { + if (!(error instanceof Error)) { + return false; + } + + return ( + error.message === "Local connector outbound relay request failed" || + error.message === "Local connector outbound endpoint is unavailable" + ); +} + +async function resolvePeersConfigPathOptions( + options: RelayToPeerOptions, +): Promise { + if ( + options.configPath !== undefined || + options.configDir !== undefined || + options.homeDir !== undefined + ) { + return options; + } + + const runtimeConfig = await loadRelayRuntimeConfig(); + if (runtimeConfig.peersConfigPath) { + return { + configPath: join(resolveTransformsDir(), runtimeConfig.peersConfigPath), + }; + } + + return { + configPath: join(resolveTransformsDir(), RELAY_PEERS_FILE_NAME), + }; +} + export async function relayPayloadToPeer( payload: unknown, options: RelayToPeerOptions = {}, @@ -164,28 +366,42 @@ export async function relayPayloadToPeer( } const peerAlias = parseRequiredString(peerAliasValue); - const peersConfig = await loadPeersConfig(options); + const peersConfigPathOptions = await resolvePeersConfigPathOptions(options); + const peersConfig = await loadPeersConfig(peersConfigPathOptions); const peerEntry = peersConfig.peers[peerAlias]; if (!peerEntry) { throw new Error("Peer alias is not configured"); } - const connectorEndpoint = resolveConnectorEndpoint(options); + const connectorEndpoints = await resolveConnectorEndpoints(options); const fetchImpl = resolveRelayFetch(options.fetchImpl); const outboundPayload = removePeerField(payload); - await postToConnector( - connectorEndpoint, - { - peer: peerAlias, - peerDid: peerEntry.did, - peerProxyUrl: peerEntry.proxyUrl, - payload: 
outboundPayload, - }, - fetchImpl, - ); + const relayPayload: ConnectorRelayRequest = { + peer: peerAlias, + peerDid: peerEntry.did, + peerProxyUrl: peerEntry.proxyUrl, + payload: outboundPayload, + }; + + let lastError: unknown; + for (const endpoint of connectorEndpoints) { + try { + await postToConnector(endpoint, relayPayload, fetchImpl); + return null; + } catch (error) { + lastError = error; + if (!shouldTryNextConnectorEndpoint(error)) { + throw error; + } + } + } + + if (lastError instanceof Error) { + throw lastError; + } - return null; + throw new Error("Local connector outbound relay request failed"); } export default async function relayToPeer( diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 0ad3051..2c6d2a0 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -1,11 +1,14 @@ # Proxy local/development template -# For local Wrangler development, copy values into .dev.vars. +# For local Wrangler development, copy values into apps/proxy/.env. # OPENCLAW_BASE_URL is optional for relay-mode proxy operation. # OPENCLAW_BASE_URL=http://127.0.0.1:18789 # Runtime vars -ENVIRONMENT=local -REGISTRY_URL=https://dev.api.clawdentity.com +ENVIRONMENT=development +APP_VERSION=local-dev +REGISTRY_URL=https://dev.registry.clawdentity.com +REGISTRY_INTERNAL_SERVICE_ID=replace-with-internal-service-id +REGISTRY_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret INJECT_IDENTITY_INTO_MESSAGE=true # Trust backend policy: # - local: in-memory trust fallback is allowed when PROXY_TRUST_STATE is unavailable. @@ -13,7 +16,6 @@ INJECT_IDENTITY_INTO_MESSAGE=true # Pairing/trust state is managed dynamically via /pair/start + /pair/confirm. # No static allowlist environment variables are supported. -# /pair/start requires request header: x-claw-owner-pat: clw_pat_... 
# Optional runtime overrides # CRL_REFRESH_INTERVAL_MS=300000 @@ -21,3 +23,9 @@ INJECT_IDENTITY_INTO_MESSAGE=true # CRL_STALE_BEHAVIOR=fail-open # AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60 # AGENT_RATE_LIMIT_WINDOW_MS=60000 +# RELAY_QUEUE_MAX_MESSAGES_PER_AGENT=500 +# RELAY_QUEUE_TTL_SECONDS=3600 +# RELAY_RETRY_INITIAL_MS=1000 +# RELAY_RETRY_MAX_MS=30000 +# RELAY_RETRY_MAX_ATTEMPTS=25 +# RELAY_RETRY_JITTER_RATIO=0.2 diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index fd83afb..0f72af2 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -6,14 +6,15 @@ ## Runtime Configuration - Keep runtime config centralized in `src/config.ts`. -- Keep Cloudflare Worker deployment config in `wrangler.jsonc` with explicit `local`, `development`, and `production` environments. +- Keep Cloudflare Worker deployment config in `wrangler.jsonc` with explicit `local`, `dev`, and `production` environments. - Duplicate Durable Object `bindings` and `migrations` inside each Wrangler env block; env sections do not inherit top-level DO config. - Keep deploy traceability explicit by passing `APP_VERSION` (or fallback `PROXY_VERSION`) via Worker bindings; `/health` must surface the resolved version. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. - Keep runtime `ENVIRONMENT` explicit and validated to supported values: `local`, `development`, `production`, `test` (default `development`). -- Keep deployment intent explicit: `local` is for local Wrangler dev runs only; `development` and `production` are remote cloud environments. 
+- Keep deployment intent explicit: Wrangler `dev` maps to runtime `ENVIRONMENT=development`; `local` is for local Wrangler dev runs only, and `production` is the live cloud environment. +- Keep script intent explicit: `pnpm -F @clawdentity/proxy run dev` must run Wrangler with `--env dev --port 8787`, and `dev:local` is the only script that should run `--env local --port 8787`. - Keep trust-store backend policy environment-scoped: - `local`: allow in-memory trust-store fallback when `PROXY_TRUST_STATE` binding is unavailable. - `development` and `production`: require `PROXY_TRUST_STATE`; fail startup when missing. @@ -37,19 +38,23 @@ - `LISTEN_PORT` or `PORT` - `OPENCLAW_BASE_URL` - `REGISTRY_URL` or `CLAWDENTITY_REGISTRY_URL` - - `PAIRING_ISSUER_URL` (optional stable issuer origin used in pairing tickets) + - `REGISTRY_INTERNAL_SERVICE_ID` + `REGISTRY_INTERNAL_SERVICE_SECRET` (required together for proxy-to-registry identity ownership checks) - `OPENCLAW_STATE_DIR` + - `RELAY_QUEUE_MAX_MESSAGES_PER_AGENT`, `RELAY_QUEUE_TTL_SECONDS`, `RELAY_RETRY_INITIAL_MS`, `RELAY_RETRY_MAX_MS`, `RELAY_RETRY_MAX_ATTEMPTS`, `RELAY_RETRY_JITTER_RATIO` ## Trust and Pairing - Keep trust state in Durable Objects (`ProxyTrustState`), not in static environment variables. - Do not add support for `ALLOW_LIST`, `ALLOWLIST_OWNERS`, or `ALLOWLIST_AGENTS`; trust is API-managed only. - Pairing is managed by API: - - `POST /pair/start` (verified Claw auth + `x-claw-owner-pat` ownership check against registry `GET /v1/agents/:id/ownership`) + - `POST /pair/start` (verified Claw auth + internal ownership check via registry `/internal/v1/identity/agent-ownership`) - `POST /pair/confirm` (verified Claw auth + one-time pairing ticket consume) -- Cross-proxy `/pair/confirm` forwarding must enforce built-in SSRF protections (block localhost/private/reserved destinations for non-local proxy origins). 
+- Pairing flow is single-proxy only: `POST /pair/confirm` must consume local tickets from trust state and never forward confirm requests.
 - Keep `/pair/confirm` as a single trust-store operation that establishes trust and consumes the ticket in one step (`confirmPairingTicket`), never two separate calls.
 - Confirming a valid pairing ticket must establish mutual trust for the initiator/responder agent pair.
 - Keep pairing tickets one-time and expiring; reject missing/expired/malformed tickets with explicit client errors.
+- Normalize pairing ticket expiry to whole seconds when persisting trust state (`exp` is second-granularity in ticket payload); do not reject valid tickets due to millisecond offsets.
+- Keep pairing fail-closed: do not bypass registry ownership dependency.
+- Keep strict dependency enforcement as the default for `development` and `production`; do not infer bypass from hostnames.
 - Reject deprecated `ALLOW_ALL_VERIFIED` at startup; never provide a global allow-all bypass for verified callers.
 
 ## Auth Verification
@@ -69,6 +74,8 @@
 - Return `503` when registry keyset dependency is unavailable, and when CRL dependency is unavailable under `fail-closed` stale policy.
 - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`.
 - Keep `/hooks/agent` authorization strict: after auth succeeds, require trusted initiator/responder pair before relay delivery.
+- Keep `/hooks/agent` delivery contract async-first: accepted deliveries return `202` with delivery state (`delivered` or `queued`), not `502` for transient recipient offline cases.
+- Keep queue overflow behavior explicit and stable: return `507 PROXY_RELAY_QUEUE_FULL` and preserve existing queued deliveries.
 - Keep `/v1/relay/connect` auth strict with verified Claw auth + PoP headers, but do not require `x-claw-agent-access`.
## CRL Policy diff --git a/apps/proxy/package.json b/apps/proxy/package.json index 11ee81e..4f7b6a4 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -25,10 +25,11 @@ }, "scripts": { "build": "tsup", - "deploy:dev": "wrangler deploy --env development", + "deploy:dev": "wrangler deploy --env dev", "deploy:production": "wrangler deploy --env production", - "dev": "wrangler dev --env local", - "dev:development": "wrangler dev --env development", + "dev": "wrangler dev --env dev --port 8787", + "dev:dev": "wrangler dev --env dev --port 8787", + "dev:local": "wrangler dev --env local --port 8787", "dev:fresh": "wrangler dev --env local --name clawdentity-proxy-local-fresh --port 8789 --persist-to .wrangler/state-fresh", "format": "biome format .", "lint": "biome lint .", diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index ee56711..dd48ebb 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -12,6 +12,7 @@ - Keep per-agent DID throttling in `agent-rate-limit-middleware.ts`; do not blend rate-limit state or counters into `auth-middleware.ts`. - Keep pre-auth public-route IP throttling in `public-rate-limit-middleware.ts`; do not blend unauthenticated probe controls into `auth-middleware.ts`. - Keep `.env` fallback loading inside `config.ts` so runtime behavior is deterministic. +- Keep runtime bootstrap tests hermetic: when asserting default fallback behavior, use temp `HOME`/`OPENCLAW_STATE_DIR` to avoid host-local config leakage. - Keep OpenClaw base URL fallback logic in `config.ts`: `OPENCLAW_BASE_URL` env -> `~/.clawdentity/openclaw-relay.json` -> default. - Keep OpenClaw compatibility vars optional for relay-mode runtime; never require `OPENCLAW_BASE_URL` for cloud relay startup. - Do not add `OPENCLAW_HOOK_TOKEN` handling to proxy runtime; hook token auth belongs to connector -> OpenClaw delivery path. 
@@ -20,6 +21,7 @@
 - Keep trust-store backend policy explicit: only `local` may fallback to in-memory trust when `PROXY_TRUST_STATE` binding is absent; `development` and `production` must fail startup without durable trust binding.
 - Keep static allowlist env vars removed (`ALLOW_LIST`, `ALLOWLIST_OWNERS`, `ALLOWLIST_AGENTS`); trust must come from pairing state, not env.
 - Keep `/pair/confirm` write path atomic at the trust-store API level: trust persistence and one-time ticket consumption must happen in one operation (`confirmPairingTicket`).
+- Keep pairing ticket status durable until expiry: `/pair/status` must return `pending` before confirm and `confirmed` after confirm so initiators can sync peers without reverse pairing.
 
 ## Config Error Handling
 - Convert parse failures to `ProxyConfigError` with code `CONFIG_VALIDATION_FAILED`.
@@ -29,6 +31,7 @@
 - Prefer schema-driven parsing with small pure helpers for coercion/overrides.
 - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules.
 - Keep trust/pairing state centralized in `proxy-trust-store.ts` and `proxy-trust-state.ts` (Durable Object backed).
+- Keep shared trust key/expiry helpers in `proxy-trust-keys.ts`; do not duplicate pair-key or expiry-normalization logic across store/state runtimes.
 - Keep pairing route logic isolated in `pairing-route.ts`; `server.ts` should compose it, not implement policy details.
 - Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided.
 - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work.
@@ -38,15 +42,14 @@
 - Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns.
- Keep worker runtime cache keys sensitive to deploy-time version bindings so `/health` reflects fresh `APP_VERSION` after deploy. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-trusted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. -- Keep pairing bootstrap explicit: `/pair/start` and `/pair/confirm` must bypass known-agent gate in auth middleware. -- Keep `/pair/start` ownership validation against registry `GET /v1/agents/:id/ownership` using `x-claw-owner-pat`, and map dependency failures to `503`. -- Allow optional `PAIRING_ISSUER_URL` override for `/pair/start` ticket issuer origin so cross-proxy forwarding can work when inbound hostnames differ from proxy-to-proxy reachability hostnames. -- Keep pairing tickets issuer-authenticated: `/pair/start` must sign each ticket and register the signing public key in registry (`/v1/proxy-pairing-keys`) before returning ticket data. -- Keep cross-proxy `/pair/confirm` forwarding SSRF-safe by default: reject localhost/private/reserved issuer origins when the current proxy origin is non-local. -- Enforce that forwarded `/pair/confirm` issuer origins use HTTPS once the proxy origin is non-local, while continuing to allow HTTP when both the proxy and issuer are on local/dev hosts. -- Before cross-proxy forwarding, resolve issuer signing key from registry (`/v1/proxy-pairing-keys/resolve`) and reject unverified tickets with `403` fail-closed behavior. -- Preserve the original request JSON bytes when forwarding `/pair/confirm`; issuer-side confirmation must validate the ticket payload, not responder PoP headers. -- Forward only minimal `/pair/confirm` headers (`content-type`); never forward responder `Authorization`/PoP headers or arbitrary inbound headers to issuer proxy. 
+- Keep onboarding bootstrap explicit: `/pair/start`, `/pair/confirm`, `/pair/status`, and `/v1/relay/connect` must bypass known-agent gate in auth middleware so freshly onboarded agents can bring connectors online before trust pairing. +- Keep `/pair/start` ownership validation against registry `/internal/v1/identity/agent-ownership` using internal service credentials (`x-claw-service-id` + `x-claw-service-secret`), and map dependency failures to `503`. +- Keep `/pair/start` fail-closed: do not bypass registry ownership dependencies. +- Keep pairing profile contract strict: + - `/pair/start` requires `initiatorProfile.{agentName,humanName}` + - `/pair/confirm` requires `responderProfile.{agentName,humanName}` + - `/pair/status` returns stored profile fields for initiator and responder +- Keep pairing tickets issuer-authenticated via local signature in `/pair/start`; `/pair/confirm` must consume only locally stored tickets in single-proxy mode. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. - Keep `/hooks/agent` trust check explicit: sender/recipient pair must be authorized by trust state before relay delivery. @@ -60,6 +63,9 @@ - Keep `/hooks/agent` input contract strict: require `Content-Type: application/json` and reject malformed JSON with explicit client errors. - Keep agent-access validation centralized in `auth-middleware.ts` and call registry `POST /v1/agents/auth/validate`; treat non-`204` non-`401` responses as dependency failures (`503`). - Keep relay delivery failure mapping explicit for `/hooks/agent`: DO delivery/RPC failures -> `502`, unavailable DO namespace -> `503`. 
+- Keep relay delivery semantics asynchronous and durable: `/hooks/agent` accepts queued deliveries with `202` (`state=queued`) when recipient connector is offline. +- Keep relay queue saturation explicit: reject new deliveries with `507 PROXY_RELAY_QUEUE_FULL`; do not evict queued messages implicitly. +- Keep relay retries inside `agent-relay-session.ts` with bounded backoff (`RELAY_RETRY_*`) and per-agent queue caps/TTL (`RELAY_QUEUE_*`); do not add ad-hoc retry loops in route handlers. - Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. - Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. - Index pairing tickets by ticket `kid` in both in-memory and Durable Object stores; persist the original full ticket string alongside each entry and require exact ticket match on confirm. diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index 556361a..da7718c 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -56,9 +56,16 @@ function hasDisallowedControlCharacter(value: string): boolean { function createRelayHarness(input?: { deliverResult?: RelayDeliveryResult; throwOnDeliver?: boolean; + throwStatus?: number; + throwCode?: string; + throwMessage?: string; }) { const deliverResult = input?.deliverResult ?? { + deliveryId: "dlv_1", + state: "delivered", delivered: true, + queued: false, + queueDepth: 0, connectedSockets: 1, }; const receivedInputs: RelayDeliveryInput[] = []; @@ -72,7 +79,15 @@ function createRelayHarness(input?: { receivedInputs.push(relayInput); if (input?.throwOnDeliver) { - return new Response("delivery failed", { status: 502 }); + return Response.json( + { + error: { + code: input.throwCode ?? 
"PROXY_RELAY_DELIVERY_FAILED", + message: input.throwMessage ?? "delivery failed", + }, + }, + { status: input.throwStatus ?? 502 }, + ); } return Response.json(deliverResult, { status: 202 }); @@ -109,6 +124,7 @@ function createHookRouteApp(input: { const trustStore: ProxyTrustStore = { createPairingTicket: vi.fn(), confirmPairingTicket: vi.fn(), + getPairingTicketStatus: vi.fn(), isAgentKnown: vi.fn(async () => true), isPairAllowed: vi.fn( async (pair) => @@ -167,12 +183,20 @@ describe("POST /hooks/agent", () => { const body = (await response.json()) as { accepted: boolean; + deliveryId: string; + state: string; delivered: boolean; + queued: boolean; + queueDepth: number; connectedSockets: number; }; expect(body).toEqual({ accepted: true, + deliveryId: "dlv_1", + state: "delivered", delivered: true, + queued: false, + queueDepth: 0, connectedSockets: 1, }); }); @@ -489,10 +513,14 @@ describe("POST /hooks/agent", () => { expect(body.error.code).toBe("PROXY_RELAY_DELIVERY_FAILED"); }); - it("returns 502 when target connector is offline", async () => { + it("returns queued state when target connector is offline", async () => { const relayHarness = createRelayHarness({ deliverResult: { + deliveryId: "dlv_queued", + state: "queued", delivered: false, + queued: true, + queueDepth: 1, connectedSockets: 0, }, }); @@ -510,8 +538,44 @@ describe("POST /hooks/agent", () => { body: JSON.stringify({ event: "agent.started" }), }); - expect(response.status).toBe(502); + expect(response.status).toBe(202); + const body = (await response.json()) as { + accepted: boolean; + deliveryId: string; + state: string; + queued: boolean; + queueDepth: number; + }; + expect(body.accepted).toBe(true); + expect(body.deliveryId).toBe("dlv_queued"); + expect(body.state).toBe("queued"); + expect(body.queued).toBe(true); + expect(body.queueDepth).toBe(1); + }); + + it("returns 507 when relay queue is full", async () => { + const relayHarness = createRelayHarness({ + throwOnDeliver: true, + 
throwStatus: 507, + throwCode: "PROXY_RELAY_QUEUE_FULL", + throwMessage: "Target relay queue is full", + }); + const app = createHookRouteApp({ + relayNamespace: relayHarness.namespace, + }); + + const response = await app.request("/hooks/agent", { + method: "POST", + headers: { + "content-type": "application/json", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: + "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + }, + body: JSON.stringify({ event: "agent.started" }), + }); + + expect(response.status).toBe(507); const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_RELAY_CONNECTOR_OFFLINE"); + expect(body.error.code).toBe("PROXY_RELAY_QUEUE_FULL"); }); }); diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index 02d41b5..d578653 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -8,6 +8,7 @@ import { type AgentRelaySessionNamespace, deliverToRelaySession, type RelayDeliveryInput, + RelaySessionDeliveryError, } from "./agent-relay-session.js"; import type { ProxyRequestVariables } from "./auth-middleware.js"; import type { ProxyTrustStore } from "./proxy-trust-store.js"; @@ -227,6 +228,24 @@ export function createAgentHookHandler( try { deliveryResult = await deliverToRelaySession(relaySession, relayInput); } catch (error) { + if ( + error instanceof RelaySessionDeliveryError && + error.code === "PROXY_RELAY_QUEUE_FULL" + ) { + options.logger.warn("proxy.hooks.agent.relay_queue_full", { + requestId, + senderAgentDid: auth.agentDid, + recipientAgentDid, + }); + + throw new AppError({ + code: "PROXY_RELAY_QUEUE_FULL", + message: "Target relay queue is full", + status: 507, + expose: true, + }); + } + options.logger.warn("proxy.hooks.agent.relay_delivery_failed", { requestId, senderAgentDid: auth.agentDid, @@ -241,33 +260,35 @@ export function createAgentHookHandler( }); } - if (!deliveryResult.delivered) { - 
options.logger.warn("proxy.hooks.agent.connector_offline", { - requestId, - recipientAgentDid, - }); - - throw new AppError({ - code: "PROXY_RELAY_CONNECTOR_OFFLINE", - message: "Target connector is offline", - status: 502, - }); - } + const delivered = deliveryResult.delivered; + const queued = deliveryResult.queued ?? !delivered; + const state = deliveryResult.state ?? (delivered ? "delivered" : "queued"); + const queueDepth = deliveryResult.queueDepth ?? (queued ? 1 : 0); + const deliveryId = deliveryResult.deliveryId ?? requestId; + const connectedSockets = deliveryResult.connectedSockets; options.logger.info("proxy.hooks.agent.delivered_to_relay", { requestId, senderAgentDid: auth.agentDid, recipientAgentDid, - delivered: deliveryResult.delivered, - connectedSockets: deliveryResult.connectedSockets, + deliveryId, + state, + delivered, + queued, + queueDepth, + connectedSockets, sentAt: now().toISOString(), }); return c.json( { accepted: true, - delivered: deliveryResult.delivered, - connectedSockets: deliveryResult.connectedSockets, + deliveryId, + state, + delivered, + queued, + queueDepth, + connectedSockets, }, 202, ); diff --git a/apps/proxy/src/agent-rate-limit-middleware.test.ts b/apps/proxy/src/agent-rate-limit-middleware.test.ts index 03b7a17..18bd9b0 100644 --- a/apps/proxy/src/agent-rate-limit-middleware.test.ts +++ b/apps/proxy/src/agent-rate-limit-middleware.test.ts @@ -40,7 +40,7 @@ function createRateLimitTestApp(input: { agentDid: testAgentDid, ownerDid: "did:claw:human:test-owner", aitJti: "test-jti", - issuer: "https://api.clawdentity.com", + issuer: "https://registry.clawdentity.com", cnfPublicKey: "test-key", }); } diff --git a/apps/proxy/src/agent-relay-session.test.ts b/apps/proxy/src/agent-relay-session.test.ts index 6ae1835..48ff43d 100644 --- a/apps/proxy/src/agent-relay-session.test.ts +++ b/apps/proxy/src/agent-relay-session.test.ts @@ -10,6 +10,7 @@ type MockWebSocket = { const SENDER_AGENT_DID = 
"did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7"; const RECIPIENT_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8"; +const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; function createMockSocket(): MockWebSocket { return { @@ -20,7 +21,12 @@ function createMockSocket(): MockWebSocket { function createStateHarness() { const connectedSockets: WebSocket[] = []; + const storageMap = new Map(); const storage = { + get: vi.fn(async (key: string) => storageMap.get(key) as T | undefined), + put: vi.fn(async (key: string, value: T) => { + storageMap.set(key, value); + }), setAlarm: vi.fn(async (_scheduled: number | Date) => {}), deleteAlarm: vi.fn(async () => {}), }; @@ -36,6 +42,7 @@ function createStateHarness() { return { state, storage, + storageMap, connectedSockets, }; } @@ -114,7 +121,9 @@ describe("AgentRelaySession", () => { it("delivers relay frames to active websocket connectors", async () => { const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); const connectorSocket = createMockSocket(); const ws = connectorSocket as unknown as WebSocket; harness.connectedSockets.push(ws); @@ -145,10 +154,13 @@ describe("AgentRelaySession", () => { payload: { event: "agent.started" }, }); - expect(result).toEqual({ - delivered: true, - connectedSockets: 1, - }); + expect(result.delivered).toBe(true); + expect(result.queued).toBe(false); + expect(result.state).toBe("delivered"); + expect(result.queueDepth).toBe(0); + expect(result.connectedSockets).toBe(1); + expect(result.deliveryId).toBeTruthy(); + expect(connectorSocket.send).toHaveBeenCalledTimes(1); const relayPayload = parseFrame(connectorSocket.send.mock.calls[0]?.[0]); expect(relayPayload.type).toBe("deliver"); @@ -156,12 +168,13 @@ describe("AgentRelaySession", () => { expect(relayPayload.fromAgentDid).toBe(SENDER_AGENT_DID); 
expect(relayPayload.toAgentDid).toBe(RECIPIENT_AGENT_DID); } - expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); }); - it("returns not-delivered when no connector socket is active", async () => { + it("queues relay frames when no connector socket is active", async () => { const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); const result = await relaySession.deliverToConnector({ requestId: "req-2", @@ -170,60 +183,79 @@ describe("AgentRelaySession", () => { payload: { event: "agent.started" }, }); - expect(result).toEqual({ - delivered: false, - connectedSockets: 0, - }); - expect(harness.storage.setAlarm).not.toHaveBeenCalled(); + expect(result.delivered).toBe(false); + expect(result.queued).toBe(true); + expect(result.state).toBe("queued"); + expect(result.queueDepth).toBe(1); + expect(result.connectedSockets).toBe(0); + + const persisted = harness.storageMap.get(RELAY_QUEUE_STORAGE_KEY) as { + deliveries: Array<{ requestId: string }>; + }; + expect(persisted.deliveries).toHaveLength(1); + expect(persisted.deliveries[0]?.requestId).toBe("req-2"); }); - it("sends heartbeat frames on alarm when connectors are active", async () => { + it("drains queued messages after connector reconnects", async () => { const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + RELAY_RETRY_INITIAL_MS: "1", + }); + + await relaySession.deliverToConnector({ + requestId: "req-3", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + const connectorSocket = createMockSocket(); - harness.connectedSockets.push(connectorSocket as unknown as WebSocket); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); - const relaySession = new 
AgentRelaySession(harness.state); - await relaySession.alarm(); + connectorSocket.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } - expect(connectorSocket.send).toHaveBeenCalledTimes(1); - expect(String(connectorSocket.send.mock.calls[0]?.[0])).toContain( - '"type":"heartbeat"', - ); - expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); - }); + void relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 2), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); - it("handles heartbeat websocket frames by replying with heartbeat_ack and refreshing heartbeat", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); - const connectorSocket = createMockSocket() as unknown as WebSocket; - const heartbeatId = generateUlid(Date.now() + 2); - - await relaySession.webSocketMessage( - connectorSocket, - JSON.stringify({ - v: 1, - type: "heartbeat", - id: heartbeatId, - ts: new Date().toISOString(), - }), - ); + await new Promise((resolve) => setTimeout(resolve, 5)); + await relaySession.alarm(); - expect( - (connectorSocket as unknown as MockWebSocket).send, - ).toHaveBeenCalledTimes(1); - const ackFrame = parseFrame( - (connectorSocket as unknown as MockWebSocket).send.mock.calls[0]?.[0], - ); - expect(ackFrame.type).toBe("heartbeat_ack"); - if (ackFrame.type === "heartbeat_ack") { - expect(ackFrame.ackId).toBe(heartbeatId); - } - expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); + const sendFrames = connectorSocket.send.mock.calls + .map((call) => parseFrame(call[0])) + .filter((frame) => frame.type === "deliver"); + expect(sendFrames.length).toBe(1); + + const dedupedResult = await relaySession.deliverToConnector({ + requestId: "req-3", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + 
payload: { event: "agent.started" }, + }); + expect(dedupedResult.state).toBe("delivered"); + expect(dedupedResult.queueDepth).toBe(0); }); it("supports fetch RPC delivery endpoint for compatibility", async () => { const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); const connectorSocket = createMockSocket(); const ws = connectorSocket as unknown as WebSocket; harness.connectedSockets.push(ws); @@ -254,7 +286,7 @@ describe("AgentRelaySession", () => { "content-type": "application/json", }, body: JSON.stringify({ - requestId: "req-3", + requestId: "req-4", senderAgentDid: SENDER_AGENT_DID, recipientAgentDid: RECIPIENT_AGENT_DID, payload: { event: "agent.started" }, @@ -263,6 +295,58 @@ describe("AgentRelaySession", () => { ); expect(response.status).toBe(202); - expect(connectorSocket.send).toHaveBeenCalledTimes(1); + const body = (await response.json()) as { + deliveryId: string; + state: string; + delivered: boolean; + }; + expect(body.deliveryId).toBeTruthy(); + expect(body.state).toBe("delivered"); + expect(body.delivered).toBe(true); + }); + + it("returns queue-full error from RPC when buffer is full", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: "1", + RELAY_RETRY_JITTER_RATIO: "0", + }); + + const firstResponse = await relaySession.fetch( + new Request("https://relay.example.test/rpc/deliver-to-connector", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + requestId: "req-5", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }), + }), + ); + expect(firstResponse.status).toBe(202); + + const secondResponse = await relaySession.fetch( + new 
Request("https://relay.example.test/rpc/deliver-to-connector", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + requestId: "req-6", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }), + }), + ); + + expect(secondResponse.status).toBe(507); + const body = (await secondResponse.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("PROXY_RELAY_QUEUE_FULL"); }); }); diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts index 6897c17..6b1def8 100644 --- a/apps/proxy/src/agent-relay-session.ts +++ b/apps/proxy/src/agent-relay-session.ts @@ -7,13 +7,17 @@ import { serializeFrame, } from "@clawdentity/connector"; import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { parseProxyConfig } from "./config.js"; const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; const RELAY_RPC_DELIVER_PATH = "/rpc/deliver-to-connector"; const RELAY_HEARTBEAT_INTERVAL_MS = 30_000; +const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; type DurableObjectStorageLike = { deleteAlarm?: () => Promise | void; + get?: (key: string) => Promise | unknown; + put?: (key: string, value: unknown) => Promise | void; setAlarm: (scheduledTime: number | Date) => Promise | void; }; @@ -30,11 +34,39 @@ export type RelayDeliveryInput = { senderAgentDid: string; }; +export type RelayDeliveryState = "delivered" | "queued"; + export type RelayDeliveryResult = { connectedSockets: number; delivered: boolean; + deliveryId: string; + queueDepth: number; + queued: boolean; + state: RelayDeliveryState; }; +export class RelaySessionDeliveryError extends Error { + readonly code: string; + readonly status: number; + + constructor(input: { code: string; message: string; status: number }) { + super(input.message); + this.name = "RelaySessionDeliveryError"; + this.code = input.code; + this.status = 
input.status; + } +} + +class RelayQueueFullError extends Error { + readonly code = "PROXY_RELAY_QUEUE_FULL"; + readonly status = 507; + + constructor() { + super("Target relay queue is full"); + this.name = "RelayQueueFullError"; + } +} + export type AgentRelaySessionStub = { deliverToConnector?: ( input: RelayDeliveryInput, @@ -53,6 +85,39 @@ type PendingDelivery = { timeoutHandle: ReturnType; }; +type QueuedRelayDelivery = { + attemptCount: number; + createdAtMs: number; + deliveryId: string; + expiresAtMs: number; + nextAttemptAtMs: number; + payload: unknown; + recipientAgentDid: string; + requestId: string; + senderAgentDid: string; +}; + +type RelayDeliveryReceipt = { + deliveryId: string; + expiresAtMs: number; + requestId: string; + state: RelayDeliveryState; +}; + +type RelayQueueState = { + deliveries: QueuedRelayDelivery[]; + receipts: Record; +}; + +type RelayDeliveryPolicy = { + queueMaxMessagesPerAgent: number; + queueTtlMs: number; + retryInitialMs: number; + retryJitterRatio: number; + retryMaxAttempts: number; + retryMaxMs: number; +}; + function toHeartbeatFrame(): string { return serializeFrame({ v: CONNECTOR_FRAME_VERSION, @@ -108,6 +173,38 @@ function parseDeliveryInput(value: unknown): RelayDeliveryInput { }; } +function toRelayDeliveryResult(input: { + connectedSockets: number; + deliveryId: string; + queueDepth: number; + state: RelayDeliveryState; +}): RelayDeliveryResult { + return { + deliveryId: input.deliveryId, + state: input.state, + delivered: input.state === "delivered", + queued: input.state === "queued", + connectedSockets: input.connectedSockets, + queueDepth: input.queueDepth, + }; +} + +function toErrorResponse(input: { + code: string; + message: string; + status: number; +}): Response { + return Response.json( + { + error: { + code: input.code, + message: input.message, + }, + }, + { status: input.status }, + ); +} + export async function deliverToRelaySession( relaySession: AgentRelaySessionStub, input: RelayDeliveryInput, 
@@ -123,18 +220,52 @@ export async function deliverToRelaySession( ); if (!response.ok) { - throw new Error("Relay session delivery RPC failed"); + let code = "PROXY_RELAY_DELIVERY_FAILED"; + let message = "Relay session delivery RPC failed"; + try { + const body = (await response.json()) as { + error?: { code?: unknown; message?: unknown }; + }; + if (typeof body.error?.code === "string") { + code = body.error.code; + } + if (typeof body.error?.message === "string") { + message = body.error.message; + } + } catch { + // Ignore parse failures and keep defaults. + } + + throw new RelaySessionDeliveryError({ + code, + message, + status: response.status, + }); } return (await response.json()) as RelayDeliveryResult; } export class AgentRelaySession { + private readonly deliveryPolicy: RelayDeliveryPolicy; private readonly pendingDeliveries = new Map(); private readonly state: DurableObjectStateLike; + private inMemoryQueueState: RelayQueueState = { + deliveries: [], + receipts: {}, + }; - constructor(state: DurableObjectStateLike) { + constructor(state: DurableObjectStateLike, env?: unknown) { this.state = state; + const config = parseProxyConfig(env ?? 
{}); + this.deliveryPolicy = { + queueMaxMessagesPerAgent: config.relayQueueMaxMessagesPerAgent, + queueTtlMs: config.relayQueueTtlSeconds * 1000, + retryInitialMs: config.relayRetryInitialMs, + retryJitterRatio: config.relayRetryJitterRatio, + retryMaxAttempts: config.relayRetryMaxAttempts, + retryMaxMs: config.relayRetryMaxMs, + }; } async fetch(request: Request): Promise { @@ -155,7 +286,15 @@ export class AgentRelaySession { try { const result = await this.deliverToConnector(input); return Response.json(result, { status: 202 }); - } catch { + } catch (error) { + if (error instanceof RelayQueueFullError) { + return toErrorResponse({ + code: error.code, + message: error.message, + status: error.status, + }); + } + return new Response("Relay delivery failed", { status: 502 }); } } @@ -164,67 +303,120 @@ export class AgentRelaySession { } async alarm(): Promise { + const nowMs = Date.now(); const sockets = this.state.getWebSockets(); - if (sockets.length === 0) { - return; - } - const heartbeatFrame = toHeartbeatFrame(); - for (const socket of sockets) { - try { - socket.send(heartbeatFrame); - } catch { + if (sockets.length > 0) { + const heartbeatFrame = toHeartbeatFrame(); + for (const socket of sockets) { try { - socket.close(1011, "heartbeat_send_failed"); + socket.send(heartbeatFrame); } catch { - // Ignore close errors for already-closed sockets. + try { + socket.close(1011, "heartbeat_send_failed"); + } catch { + // Ignore close errors for already-closed sockets. 
+ } } } } - await this.scheduleHeartbeat(); + const queueState = await this.loadQueueState(nowMs); + const queueMutated = await this.processQueueDeliveries(queueState, nowMs); + if (queueMutated) { + await this.saveQueueState(queueState); + } + + await this.scheduleNextAlarm(queueState, nowMs); } async deliverToConnector( input: RelayDeliveryInput, ): Promise { + const nowMs = Date.now(); + const queueState = await this.loadQueueState(nowMs); + const existingReceipt = queueState.receipts[input.requestId]; + + if (existingReceipt !== undefined && existingReceipt.expiresAtMs > nowMs) { + return toRelayDeliveryResult({ + deliveryId: existingReceipt.deliveryId, + state: existingReceipt.state, + connectedSockets: this.state.getWebSockets().length, + queueDepth: queueState.deliveries.length, + }); + } + const sockets = this.state.getWebSockets(); - if (sockets.length === 0) { - return { - delivered: false, - connectedSockets: 0, - }; + const deliveryId = generateUlid(nowMs); + const deliveryTtlExpiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; + let priorAttempts = 0; + + if (sockets.length > 0) { + priorAttempts = 1; + try { + const accepted = await this.sendDeliverFrame(sockets[0], input); + if (accepted) { + this.upsertReceipt(queueState, { + requestId: input.requestId, + deliveryId, + state: "delivered", + expiresAtMs: deliveryTtlExpiresAtMs, + }); + await this.saveQueueState(queueState); + await this.scheduleNextAlarm(queueState, nowMs); + + return toRelayDeliveryResult({ + deliveryId, + state: "delivered", + connectedSockets: sockets.length, + queueDepth: queueState.deliveries.length, + }); + } + } catch { + // Fall through to durable queueing below. 
+ } } - const socket = sockets[0]; - const frame = toDeliverFrame(input); - const framePayload = serializeFrame(frame); + if (priorAttempts >= this.deliveryPolicy.retryMaxAttempts) { + throw new Error("Relay delivery exhausted retry budget"); + } - const accepted = await new Promise((resolve, reject) => { - const timeoutHandle = setTimeout(() => { - this.pendingDeliveries.delete(frame.id); - reject(new Error("Relay connector acknowledgement timed out")); - }, DEFAULT_RELAY_DELIVER_TIMEOUT_MS); + if ( + queueState.deliveries.length >= + this.deliveryPolicy.queueMaxMessagesPerAgent + ) { + throw new RelayQueueFullError(); + } - this.pendingDeliveries.set(frame.id, { - resolve, - reject, - timeoutHandle, - }); + const queuedDelivery: QueuedRelayDelivery = { + deliveryId, + requestId: input.requestId, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + payload: input.payload, + createdAtMs: nowMs, + attemptCount: priorAttempts, + expiresAtMs: deliveryTtlExpiresAtMs, + nextAttemptAtMs: nowMs + this.computeRetryDelayMs(priorAttempts), + }; - try { - socket.send(framePayload); - } catch (error) { - clearTimeout(timeoutHandle); - this.pendingDeliveries.delete(frame.id); - reject(error); - } + queueState.deliveries.push(queuedDelivery); + this.upsertReceipt(queueState, { + requestId: queuedDelivery.requestId, + deliveryId: queuedDelivery.deliveryId, + state: "queued", + expiresAtMs: queuedDelivery.expiresAtMs, }); - return { - delivered: accepted, + await this.saveQueueState(queueState); + await this.scheduleNextAlarm(queueState, nowMs); + + return toRelayDeliveryResult({ + deliveryId, + state: "queued", connectedSockets: sockets.length, - }; + queueDepth: queueState.deliveries.length, + }); } async webSocketMessage( @@ -240,7 +432,7 @@ export class AgentRelaySession { })(); if (frameResult === null) { - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); return; } @@ -248,7 +440,7 @@ export class AgentRelaySession { if 
(frame.type === "heartbeat") { ws.send(toHeartbeatAckFrame(frame.id)); - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); return; } @@ -259,26 +451,24 @@ export class AgentRelaySession { this.pendingDeliveries.delete(frame.ackId); pending.resolve(frame.accepted); } - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); return; } if (frame.type === "heartbeat_ack") { - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); return; } - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); } async webSocketClose(): Promise { if (this.state.getWebSockets().length === 0) { - await this.state.storage.deleteAlarm?.(); this.rejectPendingDeliveries(new Error("Connector socket closed")); - return; } - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); } async webSocketError(): Promise { @@ -303,7 +493,7 @@ export class AgentRelaySession { const server = pair[1]; this.state.acceptWebSocket(server, [connectorAgentDid]); - await this.scheduleHeartbeat(); + await this.scheduleFromStorage(); return new Response(null, { status: 101, @@ -311,6 +501,339 @@ export class AgentRelaySession { }); } + private async loadQueueState(nowMs: number): Promise { + const fromStorage = this.state.storage.get + ? await this.state.storage.get(RELAY_QUEUE_STORAGE_KEY) + : this.inMemoryQueueState; + const rawState = + typeof fromStorage === "object" && fromStorage !== null + ? (fromStorage as Partial) + : undefined; + + const queueState: RelayQueueState = { + deliveries: Array.isArray(rawState?.deliveries) + ? 
rawState.deliveries.filter((entry) => this.isQueuedDelivery(entry)) + : [], + receipts: this.normalizeReceipts(rawState?.receipts), + }; + + const pruned = this.pruneExpiredQueueState(queueState, nowMs); + if (pruned) { + await this.saveQueueState(queueState); + } + + return queueState; + } + + private async saveQueueState(queueState: RelayQueueState): Promise { + const serialized: RelayQueueState = { + deliveries: [...queueState.deliveries], + receipts: { ...queueState.receipts }, + }; + + if (this.state.storage.put) { + await this.state.storage.put(RELAY_QUEUE_STORAGE_KEY, serialized); + return; + } + + this.inMemoryQueueState = serialized; + } + + private isQueuedDelivery(value: unknown): value is QueuedRelayDelivery { + if (typeof value !== "object" || value === null) { + return false; + } + + const candidate = value as Partial; + return ( + typeof candidate.deliveryId === "string" && + typeof candidate.requestId === "string" && + typeof candidate.senderAgentDid === "string" && + typeof candidate.recipientAgentDid === "string" && + typeof candidate.createdAtMs === "number" && + Number.isFinite(candidate.createdAtMs) && + typeof candidate.attemptCount === "number" && + Number.isInteger(candidate.attemptCount) && + candidate.attemptCount >= 0 && + typeof candidate.expiresAtMs === "number" && + Number.isFinite(candidate.expiresAtMs) && + typeof candidate.nextAttemptAtMs === "number" && + Number.isFinite(candidate.nextAttemptAtMs) + ); + } + + private normalizeReceipts( + input: unknown, + ): Record { + if (typeof input !== "object" || input === null) { + return {}; + } + + const normalized: Record = {}; + for (const [key, value] of Object.entries( + input as Record, + )) { + if (typeof value !== "object" || value === null) { + continue; + } + + const receipt = value as Partial; + if ( + typeof receipt.requestId !== "string" || + receipt.requestId !== key || + typeof receipt.deliveryId !== "string" || + typeof receipt.expiresAtMs !== "number" || + 
!Number.isFinite(receipt.expiresAtMs) || + (receipt.state !== "queued" && receipt.state !== "delivered") + ) { + continue; + } + + normalized[key] = { + requestId: receipt.requestId, + deliveryId: receipt.deliveryId, + expiresAtMs: receipt.expiresAtMs, + state: receipt.state, + }; + } + + return normalized; + } + + private pruneExpiredQueueState( + queueState: RelayQueueState, + nowMs: number, + ): boolean { + let mutated = false; + + const retainedDeliveries: QueuedRelayDelivery[] = []; + for (const delivery of queueState.deliveries) { + if (delivery.expiresAtMs <= nowMs) { + this.deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + retainedDeliveries.push(delivery); + } + + if (retainedDeliveries.length !== queueState.deliveries.length) { + queueState.deliveries = retainedDeliveries; + mutated = true; + } + + for (const [requestId, receipt] of Object.entries(queueState.receipts)) { + if (receipt.expiresAtMs <= nowMs) { + delete queueState.receipts[requestId]; + mutated = true; + } + } + + return mutated; + } + + private deleteQueuedReceipt( + queueState: RelayQueueState, + requestId: string, + deliveryId: string, + ): void { + const receipt = queueState.receipts[requestId]; + if (receipt === undefined) { + return; + } + + if (receipt.deliveryId !== deliveryId || receipt.state !== "queued") { + return; + } + + delete queueState.receipts[requestId]; + } + + private upsertReceipt( + queueState: RelayQueueState, + receipt: RelayDeliveryReceipt, + ): void { + queueState.receipts[receipt.requestId] = receipt; + } + + private async processQueueDeliveries( + queueState: RelayQueueState, + nowMs: number, + ): Promise { + if (queueState.deliveries.length === 0) { + return false; + } + + const sockets = this.state.getWebSockets(); + if (sockets.length === 0) { + let mutated = false; + for (const delivery of queueState.deliveries) { + if (delivery.nextAttemptAtMs <= nowMs) { + delivery.nextAttemptAtMs = + 
nowMs + this.computeRetryDelayMs(delivery.attemptCount); + mutated = true; + } + } + + return mutated; + } + + queueState.deliveries.sort((left, right) => { + if (left.nextAttemptAtMs !== right.nextAttemptAtMs) { + return left.nextAttemptAtMs - right.nextAttemptAtMs; + } + + return left.createdAtMs - right.createdAtMs; + }); + + let mutated = false; + const socket = sockets[0]; + + for (let index = 0; index < queueState.deliveries.length; ) { + const delivery = queueState.deliveries[index]; + + if (delivery.expiresAtMs <= nowMs) { + queueState.deliveries.splice(index, 1); + this.deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + if (delivery.attemptCount >= this.deliveryPolicy.retryMaxAttempts) { + queueState.deliveries.splice(index, 1); + this.deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + if (delivery.nextAttemptAtMs > nowMs) { + index += 1; + continue; + } + + let accepted = false; + let deliveryError = false; + try { + accepted = await this.sendDeliverFrame(socket, { + requestId: delivery.requestId, + senderAgentDid: delivery.senderAgentDid, + recipientAgentDid: delivery.recipientAgentDid, + payload: delivery.payload, + }); + } catch { + deliveryError = true; + } + + if (accepted) { + queueState.deliveries.splice(index, 1); + this.upsertReceipt(queueState, { + requestId: delivery.requestId, + deliveryId: delivery.deliveryId, + state: "delivered", + expiresAtMs: nowMs + this.deliveryPolicy.queueTtlMs, + }); + mutated = true; + continue; + } + + const nextAttemptCount = delivery.attemptCount + 1; + if (nextAttemptCount >= this.deliveryPolicy.retryMaxAttempts) { + queueState.deliveries.splice(index, 1); + this.deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + delivery.attemptCount = nextAttemptCount; + delivery.nextAttemptAtMs = + nowMs + 
this.computeRetryDelayMs(delivery.attemptCount); + mutated = true; + index += 1; + + if (deliveryError) { + for ( + let remaining = index; + remaining < queueState.deliveries.length; + remaining += 1 + ) { + if (queueState.deliveries[remaining].nextAttemptAtMs <= nowMs) { + queueState.deliveries[remaining].nextAttemptAtMs = + nowMs + + this.computeRetryDelayMs( + queueState.deliveries[remaining].attemptCount, + ); + } + } + break; + } + } + + return mutated; + } + + private computeRetryDelayMs(priorAttempts: number): number { + const exponent = Math.max(0, priorAttempts - 1); + const baseDelay = Math.min( + this.deliveryPolicy.retryMaxMs, + this.deliveryPolicy.retryInitialMs * 2 ** exponent, + ); + + if (this.deliveryPolicy.retryJitterRatio <= 0) { + return baseDelay; + } + + const jitterSpan = baseDelay * this.deliveryPolicy.retryJitterRatio; + const lowerBound = Math.max(1, Math.floor(baseDelay - jitterSpan)); + const upperBound = Math.ceil(baseDelay + jitterSpan); + const sample = lowerBound + Math.random() * (upperBound - lowerBound); + return Math.min(this.deliveryPolicy.retryMaxMs, Math.floor(sample)); + } + + private async sendDeliverFrame( + socket: WebSocket, + input: RelayDeliveryInput, + ): Promise { + const frame = toDeliverFrame(input); + const framePayload = serializeFrame(frame); + + return new Promise((resolve, reject) => { + const timeoutHandle = setTimeout(() => { + this.pendingDeliveries.delete(frame.id); + reject(new Error("Relay connector acknowledgement timed out")); + }, DEFAULT_RELAY_DELIVER_TIMEOUT_MS); + + this.pendingDeliveries.set(frame.id, { + resolve, + reject, + timeoutHandle, + }); + + try { + socket.send(framePayload); + } catch (error) { + clearTimeout(timeoutHandle); + this.pendingDeliveries.delete(frame.id); + reject(error); + } + }); + } + private rejectPendingDeliveries(error: Error): void { for (const [deliveryId, pending] of this.pendingDeliveries) { clearTimeout(pending.timeoutHandle); @@ -319,7 +842,48 @@ export class 
AgentRelaySession { } } - private async scheduleHeartbeat(): Promise { - await this.state.storage.setAlarm(Date.now() + RELAY_HEARTBEAT_INTERVAL_MS); + private async scheduleFromStorage(): Promise { + const nowMs = Date.now(); + const queueState = await this.loadQueueState(nowMs); + await this.scheduleNextAlarm(queueState, nowMs); + } + + private async scheduleNextAlarm( + queueState: RelayQueueState, + nowMs: number, + ): Promise { + const candidates: number[] = []; + + const queueWakeAtMs = this.findNextQueueWakeMs(queueState, nowMs); + if (queueWakeAtMs !== undefined) { + candidates.push(queueWakeAtMs); + } + + if (this.state.getWebSockets().length > 0) { + candidates.push(nowMs + RELAY_HEARTBEAT_INTERVAL_MS); + } + + if (candidates.length === 0) { + await this.state.storage.deleteAlarm?.(); + return; + } + + await this.state.storage.setAlarm(Math.min(...candidates)); + } + + private findNextQueueWakeMs( + queueState: RelayQueueState, + nowMs: number, + ): number | undefined { + let earliest: number | undefined; + + for (const delivery of queueState.deliveries) { + const candidate = Math.max(nowMs + 1, delivery.nextAttemptAtMs); + if (earliest === undefined || candidate < earliest) { + earliest = candidate; + } + } + + return earliest; } } diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index b633968..55a78ac 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -11,7 +11,7 @@ import { describe, expect, it, vi } from "vitest"; import { RELAY_RECIPIENT_AGENT_DID_HEADER } from "./agent-hook-route.js"; import type { AgentRelaySessionNamespace } from "./agent-relay-session.js"; import { parseProxyConfig } from "./config.js"; -import { PAIR_CONFIRM_PATH } from "./pairing-constants.js"; +import { PAIR_CONFIRM_PATH, PAIR_STATUS_PATH } from "./pairing-constants.js"; import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; import { RELAY_CONNECT_PATH } from 
"./relay-connect-route.js"; import { createProxyApp } from "./server.js"; @@ -19,7 +19,7 @@ import { createProxyApp } from "./server.js"; const REGISTRY_KID = "registry-active-kid"; const NOW_MS = Date.now(); const NOW_SECONDS = Math.floor(NOW_MS / 1000); -const ISSUER = "https://api.clawdentity.com"; +const ISSUER = "https://registry.clawdentity.com"; const BODY_JSON = JSON.stringify({ message: "hello" }); const KNOWN_PEER_DID = "did:claw:agent:known-peer"; @@ -313,7 +313,13 @@ describe("proxy auth middleware", () => { const harness = await createAuthHarness({ allowCurrentAgent: false, }); - const requestBody = JSON.stringify({ ticket: "clwpair1_missing-ticket" }); + const requestBody = JSON.stringify({ + ticket: "clwpair1_missing-ticket", + responderProfile: { + agentName: "beta", + humanName: "Ira", + }, + }); const headers = await harness.createSignedHeaders({ body: requestBody, nonce: "nonce-pair-confirm-bootstrap", @@ -331,30 +337,29 @@ describe("proxy auth middleware", () => { expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); }); - it("allows forwarded /pair/confirm without Authorization when responder DID query is present", async () => { + it("allows unknown agents to reach /pair/status for initiator polling bootstrap", async () => { const harness = await createAuthHarness({ allowCurrentAgent: false, }); + const requestBody = JSON.stringify({ ticket: "clwpair1_missing-ticket" }); + const headers = await harness.createSignedHeaders({ + body: requestBody, + nonce: "nonce-pair-status-bootstrap", + pathWithQuery: PAIR_STATUS_PATH, + }); - const response = await harness.app.request( - `${PAIR_CONFIRM_PATH}?responderAgentDid=${encodeURIComponent(KNOWN_PEER_DID)}`, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - ticket: "clwpair1_missing-ticket", - }), - }, - ); + const response = await harness.app.request(PAIR_STATUS_PATH, { + method: "POST", + headers, + body: requestBody, + }); 
expect(response.status).toBe(400); const body = (await response.json()) as { error: { code: string } }; expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); }); - it("rejects /pair/confirm without Authorization when responder DID query is missing", async () => { + it("rejects /pair/confirm without Authorization", async () => { const harness = await createAuthHarness({ allowCurrentAgent: false, }); @@ -731,6 +736,28 @@ describe("proxy auth middleware", () => { expect(response.status).toBe(204); }); + it("allows unknown agents to connect relay websocket when auth validates", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + method: "GET", + pathWithQuery: RELAY_CONNECT_PATH, + nonce: "nonce-relay-connect-unknown-agent", + }); + const response = await harness.app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + ...headers, + upgrade: "websocket", + "x-claw-agent-access": "clw_agt_validtoken", + }, + }); + + expect(response.status).toBe(204); + }); + it("rejects non-health route when Authorization scheme is not Claw", async () => { const harness = await createAuthHarness(); const response = await harness.app.request("/protected", { diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index 6627d9c..9e51f1a 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -21,7 +21,11 @@ import { } from "@clawdentity/sdk"; import { createMiddleware } from "hono/factory"; import type { ProxyConfig } from "./config.js"; -import { PAIR_CONFIRM_PATH, PAIR_START_PATH } from "./pairing-constants.js"; +import { + PAIR_CONFIRM_PATH, + PAIR_START_PATH, + PAIR_STATUS_PATH, +} from "./pairing-constants.js"; import type { ProxyTrustStore } from "./proxy-trust-store.js"; import { assertKnownTrustedAgent } from "./trust-policy.js"; @@ -142,7 +146,12 @@ function 
dependencyUnavailableError(options: { } function shouldSkipKnownAgentCheck(path: string): boolean { - return path === PAIR_START_PATH || path === PAIR_CONFIRM_PATH; + return ( + path === PAIR_START_PATH || + path === PAIR_CONFIRM_PATH || + path === PAIR_STATUS_PATH || + path === RELAY_CONNECT_PATH + ); } export function parseClawAuthorizationHeader(authorization?: string): string { @@ -167,12 +176,12 @@ export function parseClawAuthorizationHeader(authorization?: string): string { export function resolveExpectedIssuer(registryUrl: string): string | undefined { try { const hostname = new URL(registryUrl).hostname; - if (hostname === "api.clawdentity.com") { - return "https://api.clawdentity.com"; + if (hostname === "registry.clawdentity.com") { + return "https://registry.clawdentity.com"; } - if (hostname === "dev.api.clawdentity.com") { - return "https://dev.api.clawdentity.com"; + if (hostname === "dev.registry.clawdentity.com") { + return "https://dev.registry.clawdentity.com"; } return undefined; @@ -480,20 +489,7 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { await next(); return; } - const authorizationHeader = c.req.header("authorization"); - const forwardedResponderDid = c.req.query("responderAgentDid"); - const isAnonymousForwardedPairConfirm = - c.req.path === PAIR_CONFIRM_PATH && - (typeof authorizationHeader !== "string" || - authorizationHeader.trim().length === 0) && - typeof forwardedResponderDid === "string" && - forwardedResponderDid.trim().length > 0; - if (isAnonymousForwardedPairConfirm) { - await next(); - return; - } - const token = parseClawAuthorizationHeader(authorizationHeader); const claims = await verifyAitClaims(token); diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index bd39126..3a0d991 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -12,6 +12,12 @@ import { DEFAULT_PROXY_ENVIRONMENT, DEFAULT_PROXY_LISTEN_PORT, DEFAULT_REGISTRY_URL, + 
DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + DEFAULT_RELAY_QUEUE_TTL_SECONDS, + DEFAULT_RELAY_RETRY_INITIAL_MS, + DEFAULT_RELAY_RETRY_JITTER_RATIO, + DEFAULT_RELAY_RETRY_MAX_ATTEMPTS, + DEFAULT_RELAY_RETRY_MAX_MS, loadProxyConfig, ProxyConfigError, parseProxyConfig, @@ -33,6 +39,12 @@ describe("proxy config", () => { DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, agentRateLimitWindowMs: DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, injectIdentityIntoMessage: DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, + relayQueueMaxMessagesPerAgent: DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + relayQueueTtlSeconds: DEFAULT_RELAY_QUEUE_TTL_SECONDS, + relayRetryInitialMs: DEFAULT_RELAY_RETRY_INITIAL_MS, + relayRetryMaxMs: DEFAULT_RELAY_RETRY_MAX_MS, + relayRetryMaxAttempts: DEFAULT_RELAY_RETRY_MAX_ATTEMPTS, + relayRetryJitterRatio: DEFAULT_RELAY_RETRY_JITTER_RATIO, }); }); @@ -40,22 +52,39 @@ describe("proxy config", () => { const config = parseProxyConfig({ PORT: "4100", CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", - PAIRING_ISSUER_URL: "https://proxy.example.com", + REGISTRY_INTERNAL_SERVICE_ID: "01KHSVCABCDEFGHJKMNOPQRST", + REGISTRY_INTERNAL_SERVICE_SECRET: + "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: "75", AGENT_RATE_LIMIT_WINDOW_MS: "90000", INJECT_IDENTITY_INTO_MESSAGE: "true", + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: "700", + RELAY_QUEUE_TTL_SECONDS: "1800", + RELAY_RETRY_INITIAL_MS: "2000", + RELAY_RETRY_MAX_MS: "15000", + RELAY_RETRY_MAX_ATTEMPTS: "7", + RELAY_RETRY_JITTER_RATIO: "0.4", }); expect(config.listenPort).toBe(4100); expect(config.registryUrl).toBe("https://registry.example.com"); - expect(config.pairingIssuerUrl).toBe("https://proxy.example.com"); + expect(config.registryInternalServiceId).toBe("01KHSVCABCDEFGHJKMNOPQRST"); + expect(config.registryInternalServiceSecret).toBe( + "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", + ); 
expect(config.environment).toBe("local"); expect(config.crlStaleBehavior).toBe("fail-closed"); expect(config.agentRateLimitRequestsPerMinute).toBe(75); expect(config.agentRateLimitWindowMs).toBe(90000); expect(config.injectIdentityIntoMessage).toBe(true); + expect(config.relayQueueMaxMessagesPerAgent).toBe(700); + expect(config.relayQueueTtlSeconds).toBe(1800); + expect(config.relayRetryInitialMs).toBe(2000); + expect(config.relayRetryMaxMs).toBe(15000); + expect(config.relayRetryMaxAttempts).toBe(7); + expect(config.relayRetryJitterRatio).toBe(0.4); }); it("allows disabling identity injection via env override", () => { @@ -103,10 +132,34 @@ describe("proxy config", () => { ).toThrow(ProxyConfigError); }); - it("throws on invalid pairing issuer URL", () => { + it("throws on invalid relay queue/retry values", () => { expect(() => parseProxyConfig({ - PAIRING_ISSUER_URL: "not-a-url", + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: "0", + }), + ).toThrow(ProxyConfigError); + expect(() => + parseProxyConfig({ + RELAY_RETRY_INITIAL_MS: "2000", + RELAY_RETRY_MAX_MS: "1000", + }), + ).toThrow(ProxyConfigError); + expect(() => + parseProxyConfig({ + RELAY_RETRY_JITTER_RATIO: "1.1", + }), + ).toThrow(ProxyConfigError); + }); + + it("throws when only one internal service credential is provided", () => { + expect(() => + parseProxyConfig({ + REGISTRY_INTERNAL_SERVICE_ID: "svc-id-only", + }), + ).toThrow(ProxyConfigError); + expect(() => + parseProxyConfig({ + REGISTRY_INTERNAL_SERVICE_SECRET: "clw_srv_secret-only", }), ).toThrow(ProxyConfigError); }); diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index af678fe..1948947 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -20,7 +20,7 @@ export type ProxyConfigLoadOptions = { export const DEFAULT_PROXY_LISTEN_PORT = 4000; export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; -export const DEFAULT_REGISTRY_URL = "https://api.clawdentity.com"; +export const DEFAULT_REGISTRY_URL = 
"https://registry.clawdentity.com"; export const DEFAULT_PROXY_ENVIRONMENT: ProxyEnvironment = "development"; export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; @@ -28,6 +28,12 @@ export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; export const DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE = 60; export const DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS = 60 * 1000; export const DEFAULT_INJECT_IDENTITY_INTO_MESSAGE = true; +export const DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT = 500; +export const DEFAULT_RELAY_QUEUE_TTL_SECONDS = 3600; +export const DEFAULT_RELAY_RETRY_INITIAL_MS = 1000; +export const DEFAULT_RELAY_RETRY_MAX_MS = 30_000; +export const DEFAULT_RELAY_RETRY_MAX_ATTEMPTS = 25; +export const DEFAULT_RELAY_RETRY_JITTER_RATIO = 0.2; export class ProxyConfigError extends Error { readonly code = "CONFIG_VALIDATION_FAILED"; @@ -79,7 +85,8 @@ const proxyRuntimeEnvSchema = z.object({ .default(DEFAULT_PROXY_LISTEN_PORT), OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), - PAIRING_ISSUER_URL: z.string().trim().url().optional(), + REGISTRY_INTERNAL_SERVICE_ID: z.string().trim().min(1).optional(), + REGISTRY_INTERNAL_SERVICE_SECRET: z.string().trim().min(1).optional(), ENVIRONMENT: z .enum(proxyEnvironmentValues) .default(DEFAULT_PROXY_ENVIRONMENT), @@ -109,13 +116,44 @@ const proxyRuntimeEnvSchema = z.object({ INJECT_IDENTITY_INTO_MESSAGE: envBooleanSchema.default( DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, ), + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT), + RELAY_QUEUE_TTL_SECONDS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_QUEUE_TTL_SECONDS), + RELAY_RETRY_INITIAL_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_RETRY_INITIAL_MS), + 
RELAY_RETRY_MAX_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_RETRY_MAX_MS), + RELAY_RETRY_MAX_ATTEMPTS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_RETRY_MAX_ATTEMPTS), + RELAY_RETRY_JITTER_RATIO: z.coerce + .number() + .min(0) + .max(1) + .default(DEFAULT_RELAY_RETRY_JITTER_RATIO), }); export const proxyConfigSchema = z.object({ listenPort: z.number().int().min(1).max(65535), openclawBaseUrl: z.string().url(), registryUrl: z.string().url(), - pairingIssuerUrl: z.string().url().optional(), + registryInternalServiceId: z.string().min(1).optional(), + registryInternalServiceSecret: z.string().min(1).optional(), environment: z.enum(proxyEnvironmentValues), crlRefreshIntervalMs: z.number().int().positive(), crlMaxAgeMs: z.number().int().positive(), @@ -123,6 +161,12 @@ export const proxyConfigSchema = z.object({ agentRateLimitRequestsPerMinute: z.number().int().positive(), agentRateLimitWindowMs: z.number().int().positive(), injectIdentityIntoMessage: z.boolean(), + relayQueueMaxMessagesPerAgent: z.number().int().positive(), + relayQueueTtlSeconds: z.number().int().positive(), + relayRetryInitialMs: z.number().int().positive(), + relayRetryMaxMs: z.number().int().positive(), + relayRetryMaxAttempts: z.number().int().positive(), + relayRetryJitterRatio: z.number().min(0).max(1), }); export type ProxyConfig = z.infer; @@ -133,7 +177,8 @@ type RuntimeEnvInput = { OPENCLAW_BASE_URL?: unknown; REGISTRY_URL?: unknown; CLAWDENTITY_REGISTRY_URL?: unknown; - PAIRING_ISSUER_URL?: unknown; + REGISTRY_INTERNAL_SERVICE_ID?: unknown; + REGISTRY_INTERNAL_SERVICE_SECRET?: unknown; ENVIRONMENT?: unknown; ALLOW_ALL_VERIFIED?: unknown; CRL_REFRESH_INTERVAL_MS?: unknown; @@ -142,6 +187,12 @@ type RuntimeEnvInput = { AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: unknown; AGENT_RATE_LIMIT_WINDOW_MS?: unknown; INJECT_IDENTITY_INTO_MESSAGE?: unknown; + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT?: unknown; + RELAY_QUEUE_TTL_SECONDS?: unknown; + 
RELAY_RETRY_INITIAL_MS?: unknown; + RELAY_RETRY_MAX_MS?: unknown; + RELAY_RETRY_MAX_ATTEMPTS?: unknown; + RELAY_RETRY_JITTER_RATIO?: unknown; OPENCLAW_STATE_DIR?: unknown; HOME?: unknown; USERPROFILE?: unknown; @@ -415,7 +466,12 @@ function normalizeRuntimeEnv(input: unknown): Record { "REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL", ]), - PAIRING_ISSUER_URL: firstNonEmpty(env, ["PAIRING_ISSUER_URL"]), + REGISTRY_INTERNAL_SERVICE_ID: firstNonEmpty(env, [ + "REGISTRY_INTERNAL_SERVICE_ID", + ]), + REGISTRY_INTERNAL_SERVICE_SECRET: firstNonEmpty(env, [ + "REGISTRY_INTERNAL_SERVICE_SECRET", + ]), ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), @@ -429,6 +485,14 @@ function normalizeRuntimeEnv(input: unknown): Record { INJECT_IDENTITY_INTO_MESSAGE: firstNonEmpty(env, [ "INJECT_IDENTITY_INTO_MESSAGE", ]), + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: firstNonEmpty(env, [ + "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT", + ]), + RELAY_QUEUE_TTL_SECONDS: firstNonEmpty(env, ["RELAY_QUEUE_TTL_SECONDS"]), + RELAY_RETRY_INITIAL_MS: firstNonEmpty(env, ["RELAY_RETRY_INITIAL_MS"]), + RELAY_RETRY_MAX_MS: firstNonEmpty(env, ["RELAY_RETRY_MAX_MS"]), + RELAY_RETRY_MAX_ATTEMPTS: firstNonEmpty(env, ["RELAY_RETRY_MAX_ATTEMPTS"]), + RELAY_RETRY_JITTER_RATIO: firstNonEmpty(env, ["RELAY_RETRY_JITTER_RATIO"]), }; } @@ -496,13 +560,57 @@ export function parseProxyConfig(env: unknown): ProxyConfig { agentRateLimitWindowMs: parsedRuntimeEnv.data.AGENT_RATE_LIMIT_WINDOW_MS, injectIdentityIntoMessage: parsedRuntimeEnv.data.INJECT_IDENTITY_INTO_MESSAGE, + relayQueueMaxMessagesPerAgent: + parsedRuntimeEnv.data.RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + relayQueueTtlSeconds: parsedRuntimeEnv.data.RELAY_QUEUE_TTL_SECONDS, + relayRetryInitialMs: parsedRuntimeEnv.data.RELAY_RETRY_INITIAL_MS, + relayRetryMaxMs: parsedRuntimeEnv.data.RELAY_RETRY_MAX_MS, + relayRetryMaxAttempts: 
parsedRuntimeEnv.data.RELAY_RETRY_MAX_ATTEMPTS, + relayRetryJitterRatio: parsedRuntimeEnv.data.RELAY_RETRY_JITTER_RATIO, }; - if (parsedRuntimeEnv.data.PAIRING_ISSUER_URL !== undefined) { - candidateConfig.pairingIssuerUrl = parsedRuntimeEnv.data.PAIRING_ISSUER_URL; + if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID !== undefined) { + candidateConfig.registryInternalServiceId = + parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID; + } + if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET !== undefined) { + candidateConfig.registryInternalServiceSecret = + parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET; } const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); if (parsedConfig.success) { + const hasServiceId = + typeof parsedConfig.data.registryInternalServiceId === "string"; + const hasServiceSecret = + typeof parsedConfig.data.registryInternalServiceSecret === "string"; + if (hasServiceId !== hasServiceSecret) { + throw toConfigValidationError({ + fieldErrors: { + REGISTRY_INTERNAL_SERVICE_ID: [ + "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", + ], + REGISTRY_INTERNAL_SERVICE_SECRET: [ + "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", + ], + }, + formErrors: [], + }); + } + if ( + parsedConfig.data.relayRetryMaxMs < parsedConfig.data.relayRetryInitialMs + ) { + throw toConfigValidationError({ + fieldErrors: { + RELAY_RETRY_MAX_MS: [ + "RELAY_RETRY_MAX_MS must be greater than or equal to RELAY_RETRY_INITIAL_MS.", + ], + RELAY_RETRY_INITIAL_MS: [ + "RELAY_RETRY_MAX_MS must be greater than or equal to RELAY_RETRY_INITIAL_MS.", + ], + }, + formErrors: [], + }); + } return parsedConfig.data; } diff --git a/apps/proxy/src/index.test.ts b/apps/proxy/src/index.test.ts index a07dd45..4b3faab 100644 --- a/apps/proxy/src/index.test.ts +++ b/apps/proxy/src/index.test.ts @@ -1,3 +1,6 @@ +import { mkdtempSync, rmSync } from "node:fs"; +import { tmpdir } from 
"node:os"; +import { join } from "node:path"; import { describe, expect, it } from "vitest"; import { ProxyConfigError } from "./config.js"; import { @@ -25,10 +28,18 @@ describe("proxy", () => { }); it("supports relay runtime startup without OpenClaw vars", () => { - const runtime = initializeProxyRuntime({}); - - expect(runtime.version).toBe(PROXY_VERSION); - expect(runtime.config.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + const tempHomeDir = mkdtempSync(join(tmpdir(), "clawdentity-proxy-")); + try { + const runtime = initializeProxyRuntime({ + HOME: tempHomeDir, + OPENCLAW_STATE_DIR: `${tempHomeDir}/.openclaw`, + }); + + expect(runtime.version).toBe(PROXY_VERSION); + expect(runtime.config.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + } finally { + rmSync(tempHomeDir, { recursive: true, force: true }); + } }); it("prefers APP_VERSION for runtime version", () => { diff --git a/apps/proxy/src/pairing-constants.ts b/apps/proxy/src/pairing-constants.ts index 2075716..df186d9 100644 --- a/apps/proxy/src/pairing-constants.ts +++ b/apps/proxy/src/pairing-constants.ts @@ -1,6 +1,6 @@ export const PAIR_START_PATH = "/pair/start"; export const PAIR_CONFIRM_PATH = "/pair/confirm"; -export const OWNER_PAT_HEADER = "x-claw-owner-pat"; +export const PAIR_STATUS_PATH = "/pair/status"; export const DEFAULT_PAIRING_TICKET_TTL_SECONDS = 300; export const MAX_PAIRING_TICKET_TTL_SECONDS = 900; diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index 6aab68e..0daf2a6 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -8,6 +8,15 @@ import { const INITIATOR_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_000)); const RESPONDER_AGENT_DID = makeAgentDid(generateUlid(1_700_000_000_100)); +const OWNER_DID = "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7"; +const INITIATOR_PROFILE = { + agentName: "alpha", + humanName: "Ravi", +}; +const RESPONDER_PROFILE = { + agentName: "beta", + humanName: 
"Ira", +}; vi.mock("./auth-middleware.js", async () => { const { createMiddleware } = await import("hono/factory"); @@ -17,8 +26,8 @@ vi.mock("./auth-middleware.js", async () => { createMiddleware(async (c, next) => { c.set("auth", { agentDid: c.req.header("x-test-agent-did") ?? INITIATOR_AGENT_DID, - ownerDid: c.req.header("x-test-owner-did") ?? "did:claw:human:owner", - issuer: "https://api.clawdentity.com", + ownerDid: c.req.header("x-test-owner-did") ?? OWNER_DID, + issuer: "https://registry.clawdentity.com", aitJti: "test-ait-jti", cnfPublicKey: "test-key", }); @@ -29,9 +38,9 @@ vi.mock("./auth-middleware.js", async () => { import { parseProxyConfig } from "./config.js"; import { - OWNER_PAT_HEADER, PAIR_CONFIRM_PATH, PAIR_START_PATH, + PAIR_STATUS_PATH, } from "./pairing-constants.js"; import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; import { createProxyApp } from "./server.js"; @@ -62,15 +71,18 @@ async function createSignedTicketFixture(input: { } function createPairingApp(input?: { + environment?: "local" | "development" | "production" | "test"; fetchImpl?: typeof fetch; nowMs?: () => number; - pairingIssuerUrl?: string; }) { const trustStore = createInMemoryProxyTrustStore(); const app = createProxyApp({ config: parseProxyConfig({ REGISTRY_URL: "https://registry.example.com", - PAIRING_ISSUER_URL: input?.pairingIssuerUrl, + REGISTRY_INTERNAL_SERVICE_ID: "01KHSVCABCDEFGHJKMNOPQRST", + REGISTRY_INTERNAL_SERVICE_SECRET: + "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", + ENVIRONMENT: input?.environment, }), pairing: { start: { @@ -78,7 +90,9 @@ function createPairingApp(input?: { nowMs: input?.nowMs, }, confirm: { - fetchImpl: input?.fetchImpl, + nowMs: input?.nowMs, + }, + status: { nowMs: input?.nowMs, }, }, @@ -92,24 +106,23 @@ function createPairingApp(input?: { } describe(`POST ${PAIR_START_PATH}`, () => { - it("creates a pairing ticket when owner PAT controls caller agent DID", async () => { - const fetchMock = vi.fn(async 
(requestInput: unknown) => { - const url = String(requestInput); - if (url.includes("/ownership")) { - return Response.json( - { - ownsAgent: true, - }, - { status: 200 }, - ); - } - - if (url.includes("/v1/proxy-pairing-keys")) { - return Response.json({ ok: true }, { status: 201 }); - } - - throw new Error(`Unexpected URL: ${url}`); - }); + it("creates a pairing ticket when caller owns initiator agent DID", async () => { + const fetchMock = vi.fn( + async (requestInput: unknown, _requestInit?: RequestInit) => { + const url = String(requestInput); + if (url.includes("/internal/v1/identity/agent-ownership")) { + return Response.json( + { + ownsAgent: true, + agentStatus: "active", + }, + { status: 200 }, + ); + } + + throw new Error(`Unexpected URL: ${url}`); + }, + ); const fetchImpl = fetchMock as unknown as typeof fetch; const { app } = createPairingApp({ @@ -121,39 +134,51 @@ describe(`POST ${PAIR_START_PATH}`, () => { method: "POST", headers: { "content-type": "application/json", - [OWNER_PAT_HEADER]: "clw_pat_owner_token", }, - body: JSON.stringify({}), + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + }), }); expect(response.status).toBe(200); const body = (await response.json()) as { expiresAt: string; initiatorAgentDid: string; + initiatorProfile: { + agentName: string; + humanName: string; + }; ticket: string; }; expect(body.ticket.startsWith("clwpair1_")).toBe(true); expect(body.initiatorAgentDid).toBe(INITIATOR_AGENT_DID); + expect(body.initiatorProfile).toEqual(INITIATOR_PROFILE); expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); - expect(fetchImpl).toHaveBeenCalledTimes(2); + expect(fetchImpl).toHaveBeenCalledTimes(1); const ownershipCallUrl = String(fetchMock.mock.calls[0]?.[0] ?? ""); - expect(ownershipCallUrl).toContain("/v1/agents/"); - expect(ownershipCallUrl).toContain("/ownership"); - const keyRegisterCallUrl = String(fetchMock.mock.calls[1]?.[0] ?? 
""); - expect(keyRegisterCallUrl).toContain("/v1/proxy-pairing-keys"); + expect(ownershipCallUrl).toContain("/internal/v1/identity/agent-ownership"); + const ownershipCallInit = fetchMock.mock.calls[0]?.[1] as + | RequestInit + | undefined; + const ownershipHeaders = new Headers(ownershipCallInit?.headers); + expect(ownershipHeaders.get("x-claw-service-id")).toBe( + "01KHSVCABCDEFGHJKMNOPQRST", + ); + expect(ownershipHeaders.get("x-claw-service-secret")).toBe( + "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", + ); }); it("normalizes pairing ticket expiry to whole seconds", async () => { const fetchMock = vi.fn( async (requestInput: unknown, _requestInit?: RequestInit) => { const url = String(requestInput); - if (url.includes("/ownership")) { - return Response.json({ ownsAgent: true }, { status: 200 }); - } - - if (url.includes("/v1/proxy-pairing-keys")) { - return Response.json({ ok: true }, { status: 201 }); + if (url.includes("/internal/v1/identity/agent-ownership")) { + return Response.json( + { ownsAgent: true, agentStatus: "active" }, + { status: 200 }, + ); } throw new Error(`Unexpected URL: ${url}`); @@ -170,53 +195,27 @@ describe(`POST ${PAIR_START_PATH}`, () => { method: "POST", headers: { "content-type": "application/json", - [OWNER_PAT_HEADER]: "clw_pat_owner_token", }, - body: JSON.stringify({}), + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + }), }); expect(response.status).toBe(200); const body = (await response.json()) as { expiresAt: string; + ticket: string; }; expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); - - const keyRegisterInit = fetchMock.mock.calls[1]?.[1] as - | RequestInit - | undefined; - const keyRegisterBody = JSON.parse( - String(keyRegisterInit?.body ?? 
"{}"), - ) as { - expiresAt?: string; - }; - expect(keyRegisterBody.expiresAt).toBe("2023-11-14T22:18:20.000Z"); + expect(parsePairingTicket(body.ticket).exp * 1000).toBe(1_700_000_300_000); }); - it("returns 401 when owner PAT is invalid", async () => { - const fetchImpl = vi.fn( - async (_requestInput: unknown) => new Response(null, { status: 401 }), - ) as unknown as typeof fetch; - const { app } = createPairingApp({ fetchImpl }); - - const response = await app.request(PAIR_START_PATH, { - method: "POST", - headers: { - "content-type": "application/json", - [OWNER_PAT_HEADER]: "clw_pat_invalid", - }, - body: JSON.stringify({}), - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_OWNER_PAT_INVALID"); - }); - - it("returns 403 when owner PAT does not control caller DID", async () => { + it("returns 403 when ownership check reports caller is not owner", async () => { const fetchImpl = vi.fn(async (_requestInput: unknown) => Response.json( { ownsAgent: false, + agentStatus: "active", }, { status: 200 }, ), @@ -227,50 +226,40 @@ describe(`POST ${PAIR_START_PATH}`, () => { method: "POST", headers: { "content-type": "application/json", - [OWNER_PAT_HEADER]: "clw_pat_owner", }, - body: JSON.stringify({}), + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + }), }); expect(response.status).toBe(403); const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_OWNER_PAT_FORBIDDEN"); + expect(body.error.code).toBe("PROXY_PAIR_OWNERSHIP_FORBIDDEN"); }); - it("uses configured pairing issuer URL when creating ticket", async () => { - const fetchImpl = vi.fn(async (requestInput: unknown) => { - const url = String(requestInput); - if (url.includes("/ownership")) { - return Response.json({ ownsAgent: true }, { status: 200 }); - } - - if (url.includes("/v1/proxy-pairing-keys")) { - return Response.json({ ok: 
true }, { status: 201 }); - } - - throw new Error(`Unexpected URL: ${url}`); + it("keeps strict dependency failures when ownership lookup is unavailable", async () => { + const fetchImpl = vi.fn(async () => { + throw new Error("registry unavailable"); }) as unknown as typeof fetch; const { app } = createPairingApp({ + environment: "development", fetchImpl, - nowMs: () => 1_700_000_000_000, - pairingIssuerUrl: "http://127.0.0.1:8788", + nowMs: () => 1_700_000_000_123, }); const response = await app.request(PAIR_START_PATH, { method: "POST", headers: { "content-type": "application/json", - [OWNER_PAT_HEADER]: "clw_pat_owner_token", }, - body: JSON.stringify({}), + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + }), }); - expect(response.status).toBe(200); - const body = (await response.json()) as { - ticket: string; - }; - const parsedTicket = parsePairingTicket(body.ticket); - expect(parsedTicket.iss).toBe("http://127.0.0.1:8788"); + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_OWNERSHIP_UNAVAILABLE"); }); }); @@ -287,6 +276,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }); const ticket = await trustStore.createPairingTicket({ initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "http://localhost", ticket: createdTicket.ticket, expiresAtMs: 1_700_000_900_000, @@ -301,20 +291,31 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }, body: JSON.stringify({ ticket: ticket.ticket, + responderProfile: RESPONDER_PROFILE, }), }); expect(response.status).toBe(201); const body = (await response.json()) as { initiatorAgentDid: string; + initiatorProfile: { + agentName: string; + humanName: string; + }; paired: boolean; responderAgentDid: string; + responderProfile: { + agentName: string; + humanName: string; + }; }; expect(body).toEqual({ paired: true, initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: 
INITIATOR_PROFILE, responderAgentDid: RESPONDER_AGENT_DID, + responderProfile: RESPONDER_PROFILE, }); expect( @@ -330,381 +331,154 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }), ).toBe(true); }); +}); - it("forwards confirm to issuer proxy when ticket issuer differs", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "https://issuer.proxy.example", - nowMs: 1_700_000_000_000, - expiresAtMs: 1_700_000_900_000, - }); - - const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return Response.json( - { - key: { - publicKeyX: created.publicKeyX, - }, - }, - { status: 200 }, - ); - } - - expect(urlString).toBe( - `https://issuer.proxy.example/pair/confirm?responderAgentDid=${encodeURIComponent(RESPONDER_AGENT_DID)}`, - ); - const forwardedBody = JSON.parse(String(init?.body ?? "{}")) as { - ticket: string; - }; - expect(forwardedBody.ticket.startsWith("clwpair1_")).toBe(true); - - return Response.json( - { - paired: true, - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - }, - { status: 201 }, - ); - }); - +describe(`POST ${PAIR_STATUS_PATH}`, () => { + it("returns pending status to initiator before ticket is confirmed", async () => { const { app, trustStore } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, nowMs: () => 1_700_000_000_000, }); - - const response = await app.request(PAIR_CONFIRM_PATH, { - method: "POST", - headers: { - "content-type": "application/json", - "x-test-agent-did": RESPONDER_AGENT_DID, - }, - body: JSON.stringify({ - ticket: created.ticket, - }), - }); - - expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(2); - expect( - await trustStore.isPairAllowed({ - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - }), - ).toBe(true); - }); - - it("rejects forwarded confirm when 
issuer key cannot be resolved", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "https://issuer.proxy.example", + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", nowMs: 1_700_000_000_000, expiresAtMs: 1_700_000_900_000, }); - - const fetchImpl = vi.fn(async (url: unknown) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return new Response(null, { status: 404 }); - } - - throw new Error(`Unexpected URL: ${urlString}`); - }) as unknown as typeof fetch; - - const { app } = createPairingApp({ - fetchImpl, - nowMs: () => 1_700_000_000_000, + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, }); - const response = await app.request(PAIR_CONFIRM_PATH, { + const response = await app.request(PAIR_STATUS_PATH, { method: "POST", headers: { "content-type": "application/json", - "x-test-agent-did": RESPONDER_AGENT_DID, + "x-test-agent-did": INITIATOR_AGENT_DID, }, body: JSON.stringify({ - ticket: created.ticket, + ticket: ticket.ticket, }), }); - expect(response.status).toBe(403); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_TICKET_UNTRUSTED_ISSUER"); - }); - - it("rejects forwarding to blocked issuer origin for non-local proxy origins", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "http://127.0.0.1:8787", - nowMs: 1_700_000_000_000, - expiresAtMs: 1_700_000_900_000, - }); - - const forwardFetch = vi.fn(async (url: unknown) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return Response.json( - { - key: { - publicKeyX: created.publicKeyX, - }, - }, - { status: 200 }, - ); 
- } - - throw new Error("forward fetch should not be called"); - }); - - const { app } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, - nowMs: () => 1_700_000_000_000, - }); - - const response = await app.request( - "https://proxy.public.example/pair/confirm", - { - method: "POST", - headers: { - "content-type": "application/json", - "x-test-agent-did": RESPONDER_AGENT_DID, - }, - body: JSON.stringify({ - ticket: created.ticket, - }), + expect(response.status).toBe(200); + expect( + (await response.json()) as { + status: string; + initiatorAgentDid: string; + initiatorProfile: { + agentName: string; + humanName: string; + }; }, - ); - - expect(response.status).toBe(403); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_TICKET_ISSUER_BLOCKED"); - expect(forwardFetch).toHaveBeenCalledTimes(1); - }); - - it("rejects HTTP issuer origin when proxy is non-local", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "http://issuer.proxy.example", - nowMs: 1_700_000_000_000, - expiresAtMs: 1_700_000_900_000, - }); - - const forwardFetch = vi.fn(async (url: unknown) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return Response.json( - { - key: { - publicKeyX: created.publicKeyX, - }, - }, - { status: 200 }, - ); - } - - throw new Error("forward fetch should not be called"); + ).toMatchObject({ + status: "pending", + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2023-11-14T22:28:20.000Z", }); + }); - const { app } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, + it("returns confirmed status to initiator after responder confirms ticket", async () => { + const { app, trustStore } = createPairingApp({ nowMs: () => 1_700_000_000_000, }); - - const response = await app.request( - "https://proxy.public.example/pair/confirm", - { - 
method: "POST", - headers: { - "content-type": "application/json", - "x-test-agent-did": RESPONDER_AGENT_DID, - }, - body: JSON.stringify({ - ticket: created.ticket, - }), - }, - ); - - expect(response.status).toBe(403); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_CONFIRM_ISSUER_INSECURE"); - expect(forwardFetch).toHaveBeenCalledTimes(1); - }); - - it("allows HTTP issuer origin when both proxy and issuer are local", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "http://127.0.0.1:8787", + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", nowMs: 1_700_000_000_000, expiresAtMs: 1_700_000_900_000, }); - - const forwardFetch = vi.fn(async (url: unknown) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return Response.json( - { - key: { - publicKeyX: created.publicKeyX, - }, - }, - { status: 200 }, - ); - } - - expect(urlString).toBe( - `http://127.0.0.1:8787/pair/confirm?responderAgentDid=${encodeURIComponent(RESPONDER_AGENT_DID)}`, - ); - - return Response.json( - { - paired: true, - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - }, - { status: 201 }, - ); + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, }); - - const { app, trustStore } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, - nowMs: () => 1_700_000_000_000, + await trustStore.confirmPairingTicket({ + ticket: ticket.ticket, + responderAgentDid: RESPONDER_AGENT_DID, + responderProfile: RESPONDER_PROFILE, + nowMs: 1_700_000_000_200, }); - const response = await app.request("http://localhost/pair/confirm", { + const response = await 
app.request(PAIR_STATUS_PATH, { method: "POST", headers: { "content-type": "application/json", - "x-test-agent-did": RESPONDER_AGENT_DID, + "x-test-agent-did": INITIATOR_AGENT_DID, }, body: JSON.stringify({ - ticket: created.ticket, + ticket: ticket.ticket, }), }); - expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(2); + expect(response.status).toBe(200); expect( - await trustStore.isPairAllowed({ - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - }), - ).toBe(true); - }); - - it("preserves original signed JSON body when forwarding to issuer proxy", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "https://issuer.proxy.example", - nowMs: 1_700_000_000_000, - expiresAtMs: 1_700_000_900_000, - }); - - let expectedBody = ""; - const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return Response.json( - { - key: { - publicKeyX: created.publicKeyX, - }, - }, - { status: 200 }, - ); - } - - expect(String(init?.body ?? 
"")).toBe(expectedBody); - return Response.json( - { - paired: true, - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - }, - { status: 201 }, - ); - }); - - const { app } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, - nowMs: () => 1_700_000_000_000, - }); - - const bodyRaw = `{ "ticket":"${created.ticket}", "extra":"value" }`; - expectedBody = bodyRaw; - - const response = await app.request(PAIR_CONFIRM_PATH, { - method: "POST", - headers: { - "content-type": "application/json", - "x-test-agent-did": RESPONDER_AGENT_DID, + (await response.json()) as { + status: string; + initiatorAgentDid: string; + initiatorProfile: { + agentName: string; + humanName: string; + }; + responderAgentDid: string; + responderProfile: { + agentName: string; + humanName: string; + }; }, - body: bodyRaw, + ).toMatchObject({ + status: "confirmed", + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: RESPONDER_AGENT_DID, + responderProfile: RESPONDER_PROFILE, + expiresAt: "2023-11-14T22:28:20.000Z", + confirmedAt: "2023-11-14T22:13:20.000Z", }); - - expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(2); - const forwardedBody = String( - (forwardFetch.mock.calls[1]?.[1] as RequestInit | undefined)?.body ?? 
"", - ); - expect(forwardedBody).toBe(bodyRaw); }); - it("forwards only required confirmation headers", async () => { - const created = await createSignedTicketFixture({ - issuerProxyUrl: "https://issuer.proxy.example", + it("rejects status lookups from non-participant agents", async () => { + const { app, trustStore } = createPairingApp({ + nowMs: () => 1_700_000_000_000, + }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", nowMs: 1_700_000_000_000, expiresAtMs: 1_700_000_900_000, }); - - const forwardFetch = vi.fn(async (url: unknown, init?: RequestInit) => { - const urlString = String(url); - if (urlString.includes("/v1/proxy-pairing-keys/resolve")) { - return Response.json( - { - key: { - publicKeyX: created.publicKeyX, - }, - }, - { status: 200 }, - ); - } - - const headers = new Headers(init?.headers); - expect(headers.get("authorization")).toBeNull(); - expect(headers.get("x-claw-proof")).toBeNull(); - expect(headers.get("x-claw-body-sha256")).toBeNull(); - expect(headers.get("x-claw-timestamp")).toBeNull(); - expect(headers.get("x-claw-nonce")).toBeNull(); - expect(headers.get("content-type")).toBe("application/json"); - expect(headers.get("x-forwarded-for")).toBeNull(); - - return Response.json( - { - paired: true, - initiatorAgentDid: INITIATOR_AGENT_DID, - responderAgentDid: RESPONDER_AGENT_DID, - }, - { status: 201 }, - ); - }); - - const { app } = createPairingApp({ - fetchImpl: forwardFetch as unknown as typeof fetch, - nowMs: () => 1_700_000_000_000, + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, }); - const response = await app.request(PAIR_CONFIRM_PATH, { + const response = await app.request(PAIR_STATUS_PATH, { method: "POST", headers: { - authorization: "Claw test-token", 
"content-type": "application/json", - "x-claw-proof": "proof", - "x-claw-body-sha256": "sha", - "x-claw-timestamp": "1700000000", - "x-claw-nonce": "nonce", - "x-forwarded-for": "10.0.0.1", - "x-test-agent-did": RESPONDER_AGENT_DID, + "x-test-agent-did": makeAgentDid(generateUlid(1_700_000_000_300)), }, - body: JSON.stringify({ ticket: created.ticket }), + body: JSON.stringify({ + ticket: ticket.ticket, + }), }); - expect(response.status).toBe(201); - expect(forwardFetch).toHaveBeenCalledTimes(2); + expect(response.status).toBe(403); + expect( + (await response.json()) as { error: { code: string } }, + ).toMatchObject({ + error: { + code: "PROXY_PAIR_STATUS_FORBIDDEN", + message: "Caller is not a participant for this pairing ticket", + }, + }); }); }); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 284bd98..758bceb 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -1,33 +1,31 @@ import { - PROXY_PAIRING_KEYS_PATH, - PROXY_PAIRING_KEYS_RESOLVE_PATH, - parseDid, -} from "@clawdentity/protocol"; -import { AppError, type Logger } from "@clawdentity/sdk"; + AppError, + createRegistryIdentityClient, + type Logger, +} from "@clawdentity/sdk"; import type { Context } from "hono"; import type { ProxyRequestVariables } from "./auth-middleware.js"; import { DEFAULT_PAIRING_TICKET_TTL_SECONDS, MAX_PAIRING_TICKET_TTL_SECONDS, - OWNER_PAT_HEADER, PAIR_CONFIRM_PATH, PAIR_START_PATH, + PAIR_STATUS_PATH, } from "./pairing-constants.js"; import { createPairingTicket, createPairingTicketSigningKey, PairingTicketParseError, parsePairingTicket, - verifyPairingTicketSignature, } from "./pairing-ticket.js"; import { + type PeerProfile, type ProxyTrustStore, ProxyTrustStoreError, } from "./proxy-trust-store.js"; -const REGISTRY_AGENT_OWNERSHIP_PATH_PREFIX = "/v1/agents"; - -export { OWNER_PAT_HEADER, PAIR_CONFIRM_PATH, PAIR_START_PATH }; +export { PAIR_CONFIRM_PATH, PAIR_START_PATH }; +export { PAIR_STATUS_PATH }; 
type PairingRouteContext = Context<{ Variables: ProxyRequestVariables; @@ -42,31 +40,50 @@ type CreatePairStartHandlerOptions = PairStartRuntimeOptions & { logger: Logger; registryUrl: string; trustStore: ProxyTrustStore; - issuerProxyUrl?: string; + registryInternalServiceId?: string; + registryInternalServiceSecret?: string; }; export type PairConfirmRuntimeOptions = { - fetchImpl?: typeof fetch; nowMs?: () => number; }; type CreatePairConfirmHandlerOptions = PairConfirmRuntimeOptions & { logger: Logger; trustStore: ProxyTrustStore; - registryUrl: string; }; -function parseOwnerPatHeader(headerValue: string | undefined): string { - if (typeof headerValue !== "string" || headerValue.trim().length === 0) { +export type PairStatusRuntimeOptions = { + nowMs?: () => number; +}; + +type CreatePairStatusHandlerOptions = PairStatusRuntimeOptions & { + logger: Logger; + trustStore: ProxyTrustStore; +}; + +const MAX_PROFILE_NAME_LENGTH = 64; + +function parseInternalServiceCredentials(input: { + serviceId?: string; + serviceSecret?: string; +}): { serviceId: string; serviceSecret: string } { + const serviceId = + typeof input.serviceId === "string" ? input.serviceId.trim() : ""; + const serviceSecret = + typeof input.serviceSecret === "string" ? 
input.serviceSecret.trim() : ""; + if (serviceId.length === 0 || serviceSecret.length === 0) { throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_REQUIRED", - message: "X-Claw-Owner-Pat header is required", - status: 401, - expose: true, + code: "PROXY_INTERNAL_AUTH_CONFIG_INVALID", + message: "Proxy internal service auth is not configured", + status: 500, }); } - return headerValue.trim(); + return { + serviceId, + serviceSecret, + }; } function normalizeRegistryUrl(registryUrl: string): string { @@ -74,6 +91,10 @@ function normalizeRegistryUrl(registryUrl: string): string { return new URL(baseUrl).toString(); } +function normalizeProxyOrigin(value: string): string { + return new URL(value).origin; +} + function parseTtlSeconds(value: unknown): number { if (value === undefined) { return DEFAULT_PAIRING_TICKET_TTL_SECONDS; @@ -100,648 +121,159 @@ function parseTtlSeconds(value: unknown): number { return value; } -async function parseJsonBody(c: PairingRouteContext): Promise { - try { - return await c.req.json(); - } catch { - throw new AppError({ - code: "PROXY_PAIR_INVALID_BODY", - message: "Request body must be valid JSON", - status: 400, - expose: true, - }); +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } } -} -async function parseRawJsonBody(c: PairingRouteContext): Promise<{ - rawBody: string; - json: unknown; -}> { - const rawBody = await c.req.raw.clone().text(); + return false; +} - try { - return { - rawBody, - json: JSON.parse(rawBody) as unknown, - }; - } catch { +function parseProfileName(value: unknown, label: string): string { + if (typeof value !== "string") { throw new AppError({ code: "PROXY_PAIR_INVALID_BODY", - message: "Request body must be valid JSON", + message: `${label} is required`, status: 400, expose: true, }); } -} -async function parseRegistryOwnershipResponse(response: Response): 
Promise<{ - ownsAgent: boolean; -}> { - const payload = (await response.json()) as { - ownsAgent?: unknown; - }; - if (typeof payload.ownsAgent !== "boolean") { + const normalized = value.trim(); + if (normalized.length === 0) { throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", - message: "Registry owner lookup payload is invalid", - status: 503, - expose: true, - }); - } - - return { - ownsAgent: payload.ownsAgent, - }; -} - -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - -function extractErrorCode(payload: unknown): string | undefined { - if (typeof payload !== "object" || payload === null) { - return undefined; - } - - const error = (payload as { error?: unknown }).error; - if (typeof error !== "object" || error === null) { - return undefined; - } - - return typeof (error as { code?: unknown }).code === "string" - ? (error as { code: string }).code - : undefined; -} - -function extractErrorMessage(payload: unknown): string | undefined { - if (typeof payload !== "object" || payload === null) { - return undefined; - } - - const error = (payload as { error?: unknown }).error; - if (typeof error !== "object" || error === null) { - return undefined; - } - - return typeof (error as { message?: unknown }).message === "string" - ? 
(error as { message: string }).message - : undefined; -} - -async function assertPatOwnsInitiatorAgent(input: { - fetchImpl: typeof fetch; - initiatorAgentDid: string; - ownerPat: string; - registryUrl: string; -}): Promise { - const parsedDid = parseDid(input.initiatorAgentDid); - const ownershipUrl = new URL( - `${REGISTRY_AGENT_OWNERSHIP_PATH_PREFIX}/${parsedDid.ulid}/ownership`, - input.registryUrl, - ); - - let response: Response; - try { - response = await input.fetchImpl(ownershipUrl, { - method: "GET", - headers: { - authorization: `Bearer ${input.ownerPat}`, - }, - }); - } catch { - throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", - message: "Registry owner lookup is unavailable", - status: 503, - expose: true, - }); - } - - if (response.status === 401) { - throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_INVALID", - message: "Owner PAT is invalid or expired", - status: 401, + code: "PROXY_PAIR_INVALID_BODY", + message: `${label} is required`, + status: 400, expose: true, }); } - if (!response.ok) { + if (normalized.length > MAX_PROFILE_NAME_LENGTH) { throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", - message: "Registry owner lookup is unavailable", - status: 503, + code: "PROXY_PAIR_INVALID_BODY", + message: `${label} must be at most ${MAX_PROFILE_NAME_LENGTH} characters`, + status: 400, expose: true, }); } - let parsed: Awaited>; - try { - parsed = await parseRegistryOwnershipResponse(response); - } catch (error) { - if (error instanceof AppError) { - throw error; - } - + if (hasControlChars(normalized)) { throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_UNAVAILABLE", - message: "Registry owner lookup payload is invalid", - status: 503, - expose: true, - }); - } - - if (parsed.ownsAgent) { - return; - } - - throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_FORBIDDEN", - message: "Owner PAT does not control caller agent DID", - status: 403, - expose: true, - }); -} - -function toPairingStoreAppError(error: unknown): 
AppError { - if (error instanceof ProxyTrustStoreError) { - return new AppError({ - code: error.code, - message: error.message, - status: error.status, + code: "PROXY_PAIR_INVALID_BODY", + message: `${label} contains control characters`, + status: 400, expose: true, }); } - return new AppError({ - code: "PROXY_PAIR_STATE_UNAVAILABLE", - message: "Pairing state is unavailable", - status: 503, - expose: true, - }); -} - -function normalizeProxyOrigin(value: string): string { - const parsed = new URL(value); - return parsed.origin; -} - -function normalizeHostName(value: string): string { - const lowered = value.trim().toLowerCase(); - return lowered.endsWith(".") ? lowered.slice(0, -1) : lowered; -} - -function parseIpv4Literal( - hostname: string, -): [number, number, number, number] | null { - const parts = hostname.split("."); - if (parts.length !== 4) { - return null; - } - - const bytes: number[] = []; - for (const part of parts) { - if (!/^\d+$/.test(part)) { - return null; - } - - const value = Number(part); - if (!Number.isInteger(value) || value < 0 || value > 255) { - return null; - } - - bytes.push(value); - } - - return bytes as [number, number, number, number]; -} - -function isBlockedIpv4Literal(hostname: string): boolean { - const ipv4 = parseIpv4Literal(hostname); - if (ipv4 === null) { - return false; - } - - const [a, b, c, d] = ipv4; - - if (a === 10) return true; - if (a === 127) return true; - if (a === 0) return true; - if (a === 169 && b === 254) return true; - if (a === 172 && b >= 16 && b <= 31) return true; - if (a === 192 && b === 168) return true; - if (a === 100 && b >= 64 && b <= 127) return true; - if (a === 192 && b === 0 && c === 0) return true; - if (a === 192 && b === 0 && c === 2) return true; - if (a === 198 && b === 18) return true; - if (a === 198 && b === 19) return true; - if (a === 198 && b === 51 && c === 100) return true; - if (a === 203 && b === 0 && c === 113) return true; - if (a >= 224) return true; - if (a === 255 && b 
=== 255 && c === 255 && d === 255) return true; - - return false; -} - -function parseIpv6Literal(hostname: string): number[] | null { - const raw = - hostname.startsWith("[") && hostname.endsWith("]") - ? hostname.slice(1, -1) - : hostname; - const noZoneId = raw.split("%")[0] ?? raw; - if (!noZoneId.includes(":")) { - return null; - } - - const parts = noZoneId.split("::"); - if (parts.length > 2) { - return null; - } - - const parseGroupList = (value: string): number[] | null => { - if (value.length === 0) { - return []; - } - - const groups = value.split(":"); - const words: number[] = []; - for (const group of groups) { - if ( - group.length === 0 || - group.length > 4 || - !/^[0-9a-f]+$/i.test(group) - ) { - return null; - } - - words.push(Number.parseInt(group, 16)); - } - - return words; - }; - - const left = parseGroupList(parts[0] ?? ""); - const right = parseGroupList(parts[1] ?? ""); - if (left === null || right === null) { - return null; - } - - if (parts.length === 1) { - if (left.length !== 8) { - return null; - } - - return left; - } - - const missing = 8 - (left.length + right.length); - if (missing < 1) { - return null; - } - - return [...left, ...new Array(missing).fill(0), ...right]; -} - -function isBlockedIpv6Literal(hostname: string): boolean { - const ipv6 = parseIpv6Literal(hostname); - if (ipv6 === null) { - return false; - } - - const [a, b, c, d, e, f, g, h] = ipv6; - - const isUnspecified = - a === 0 && - b === 0 && - c === 0 && - d === 0 && - e === 0 && - f === 0 && - g === 0 && - h === 0; - if (isUnspecified) { - return true; - } - - const isLoopback = - a === 0 && - b === 0 && - c === 0 && - d === 0 && - e === 0 && - f === 0 && - g === 0 && - h === 1; - if (isLoopback) { - return true; - } - - if ((a & 0xfe00) === 0xfc00) { - return true; - } - - if ((a & 0xffc0) === 0xfe80) { - return true; - } - - if ((a & 0xff00) === 0xff00) { - return true; - } - - if (a === 0x2001 && b === 0x0db8) { - return true; - } - - const isIpv4Mapped = - 
a === 0 && - b === 0 && - c === 0 && - d === 0 && - e === 0 && - (f === 0xffff || f === 0); - - if (isIpv4Mapped) { - const mappedA = g >> 8; - const mappedB = g & 0xff; - const mappedC = h >> 8; - const mappedD = h & 0xff; - return isBlockedIpv4Literal(`${mappedA}.${mappedB}.${mappedC}.${mappedD}`); - } - - return false; + return normalized; } -function isLocalLikeHostname(hostname: string): boolean { - if (hostname === "localhost" || hostname.endsWith(".localhost")) { - return true; - } - - if (hostname.endsWith(".local") || hostname.endsWith(".internal")) { - return true; - } - - if (!hostname.includes(".") && parseIpv4Literal(hostname) === null) { - return true; - } - - return false; -} - -function isBlockedForwardOrigin(origin: string): boolean { - const parsed = new URL(origin); - const hostname = normalizeHostName(parsed.hostname); - - if (isLocalLikeHostname(hostname)) { - return true; - } - - if (isBlockedIpv4Literal(hostname)) { - return true; - } - - if (isBlockedIpv6Literal(hostname)) { - return true; - } - - return false; -} - -function mapForwardedPairConfirmError( - status: number, - payload: unknown, -): AppError { - const code = extractErrorCode(payload) ?? "PROXY_PAIR_CONFIRM_FAILED"; - const message = - extractErrorMessage(payload) ?? - (status >= 500 - ? 
"Issuer proxy pairing service is unavailable" - : "Issuer proxy rejected pairing confirm"); - - return new AppError({ - code, - message, - status, - expose: true, - }); -} - -function parsePairConfirmResponse(payload: unknown): { - paired: true; - initiatorAgentDid: string; - responderAgentDid: string; -} { - if (typeof payload !== "object" || payload === null) { +function parsePeerProfile(value: unknown, label: string): PeerProfile { + if (typeof value !== "object" || value === null) { throw new AppError({ - code: "PROXY_PAIR_CONFIRM_INVALID_RESPONSE", - message: "Issuer proxy response is invalid", - status: 502, - expose: true, - }); - } - - const paired = (payload as { paired?: unknown }).paired === true; - const initiatorRaw = (payload as { initiatorAgentDid?: unknown }) - .initiatorAgentDid; - const responderRaw = (payload as { responderAgentDid?: unknown }) - .responderAgentDid; - const initiatorAgentDid = - typeof initiatorRaw === "string" ? initiatorRaw : ""; - const responderAgentDid = - typeof responderRaw === "string" ? 
responderRaw : ""; - - if (!paired) { - throw new AppError({ - code: "PROXY_PAIR_CONFIRM_INVALID_RESPONSE", - message: "Issuer proxy response is invalid", - status: 502, - expose: true, - }); - } - - try { - if (parseDid(initiatorAgentDid).kind !== "agent") { - throw new Error("invalid"); - } - if (parseDid(responderAgentDid).kind !== "agent") { - throw new Error("invalid"); - } - } catch { - throw new AppError({ - code: "PROXY_PAIR_CONFIRM_INVALID_RESPONSE", - message: "Issuer proxy response is invalid", - status: 502, + code: "PROXY_PAIR_INVALID_BODY", + message: `${label} is required`, + status: 400, expose: true, }); } + const payload = value as { agentName?: unknown; humanName?: unknown }; return { - paired: true, - initiatorAgentDid, - responderAgentDid, + agentName: parseProfileName(payload.agentName, `${label}.agentName`), + humanName: parseProfileName(payload.humanName, `${label}.humanName`), }; } -function buildForwardedConfirmHeaders(source: Headers): Headers { - const headers = new Headers(); - const contentType = source.get("content-type"); - if (contentType !== null) { - headers.set("content-type", contentType); - } - - return headers; -} - -function parseResponderDidFromQuery( - responderDidQuery: string | undefined, -): string { - if ( - typeof responderDidQuery !== "string" || - responderDidQuery.trim().length === 0 - ) { - throw new AppError({ - code: "PROXY_PAIR_INVALID_BODY", - message: "responderAgentDid query parameter is required", - status: 400, - expose: true, - }); - } - - const responderAgentDid = responderDidQuery.trim(); +async function parseJsonBody(c: PairingRouteContext): Promise { try { - const parsedResponderDid = parseDid(responderAgentDid); - if (parsedResponderDid.kind !== "agent") { - throw new Error("invalid responder did kind"); - } + return await c.req.json(); } catch { throw new AppError({ code: "PROXY_PAIR_INVALID_BODY", - message: "responderAgentDid must be a valid agent DID", + message: "Request body must be valid JSON", 
status: 400, expose: true, }); } - - return responderAgentDid; } -async function registerPairingKey(input: { - fetchImpl: typeof fetch; - ownerPat: string; +async function assertCallerOwnsInitiatorAgent(input: { + initiatorAgentDid: string; + ownerDid: string; registryUrl: string; - issuerOrigin: string; - pkid: string; - publicKeyX: string; - expiresAtMs: number; -}): Promise { - let response: Response; - try { - response = await input.fetchImpl( - new URL(PROXY_PAIRING_KEYS_PATH, input.registryUrl), - { - method: "POST", - headers: { - authorization: `Bearer ${input.ownerPat}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - issuerOrigin: input.issuerOrigin, - pkid: input.pkid, - publicKeyX: input.publicKeyX, - expiresAt: new Date(input.expiresAtMs).toISOString(), - }), - }, - ); - } catch { - throw new AppError({ - code: "PROXY_PAIR_TICKET_SIGNING_UNAVAILABLE", - message: "Registry pairing-key registration is unavailable", - status: 503, - expose: true, - }); - } - - if (response.status === 401) { - throw new AppError({ - code: "PROXY_PAIR_OWNER_PAT_INVALID", - message: "Owner PAT is invalid or expired", - status: 401, - expose: true, - }); - } - - if (!response.ok) { - throw new AppError({ - code: "PROXY_PAIR_TICKET_SIGNING_UNAVAILABLE", - message: "Registry pairing-key registration is unavailable", - status: 503, - expose: true, - }); - } -} - -async function resolvePairingKey(input: { + registryInternalServiceId: string; + registryInternalServiceSecret: string; fetchImpl: typeof fetch; - registryUrl: string; - issuerOrigin: string; - pkid: string; -}): Promise<{ publicKeyX: string }> { - const resolveUrl = new URL( - PROXY_PAIRING_KEYS_RESOLVE_PATH, - input.registryUrl, - ); - resolveUrl.searchParams.set("issuerOrigin", input.issuerOrigin); - resolveUrl.searchParams.set("pkid", input.pkid); +}): Promise { + const identityClient = createRegistryIdentityClient({ + registryUrl: input.registryUrl, + serviceId: 
input.registryInternalServiceId, + serviceSecret: input.registryInternalServiceSecret, + fetchImpl: input.fetchImpl, + }); - let response: Response; + let result: { + ownsAgent: boolean; + agentStatus: "active" | "revoked" | null; + }; try { - response = await input.fetchImpl(resolveUrl, { - method: "GET", + result = await identityClient.checkAgentOwnership({ + ownerDid: input.ownerDid, + agentDid: input.initiatorAgentDid, }); - } catch { + } catch (error) { + if ( + error instanceof AppError && + error.code === "IDENTITY_SERVICE_UNAUTHORIZED" + ) { + throw new AppError({ + code: "PROXY_INTERNAL_AUTH_UNAUTHORIZED", + message: "Proxy internal service authorization failed", + status: 503, + expose: true, + }); + } + throw new AppError({ - code: "PROXY_PAIR_TICKET_VERIFY_UNAVAILABLE", - message: "Registry pairing-key lookup is unavailable", + code: "PROXY_PAIR_OWNERSHIP_UNAVAILABLE", + message: "Registry owner lookup is unavailable", status: 503, expose: true, }); } - if (response.status === 404) { + if (!result.ownsAgent || result.agentStatus !== "active") { throw new AppError({ - code: "PROXY_PAIR_TICKET_UNTRUSTED_ISSUER", - message: "Pairing ticket issuer could not be verified", + code: "PROXY_PAIR_OWNERSHIP_FORBIDDEN", + message: "Caller does not control initiator agent DID", status: 403, expose: true, }); } +} - if (!response.ok) { - throw new AppError({ - code: "PROXY_PAIR_TICKET_VERIFY_UNAVAILABLE", - message: "Registry pairing-key lookup is unavailable", - status: 503, - expose: true, - }); - } - - const payload = (await parseJsonResponse(response)) as { - key?: { publicKeyX?: unknown }; - }; - const publicKeyX = payload?.key?.publicKeyX; - if (typeof publicKeyX !== "string" || publicKeyX.trim().length === 0) { - throw new AppError({ - code: "PROXY_PAIR_TICKET_VERIFY_UNAVAILABLE", - message: "Registry pairing-key lookup payload is invalid", - status: 503, +function toPairingStoreAppError(error: unknown): AppError { + if (error instanceof ProxyTrustStoreError) 
{ + return new AppError({ + code: error.code, + message: error.message, + status: error.status, expose: true, }); } - return { publicKeyX: publicKeyX.trim() }; + return new AppError({ + code: "PROXY_PAIR_STATE_UNAVAILABLE", + message: "Pairing state is unavailable", + status: 503, + expose: true, + }); } export function createPairStartHandler( @@ -750,10 +282,6 @@ export function createPairStartHandler( const fetchImpl = options.fetchImpl ?? fetch; const nowMs = options.nowMs ?? Date.now; const registryUrl = normalizeRegistryUrl(options.registryUrl); - const configuredIssuerProxyUrl = - typeof options.issuerProxyUrl === "string" - ? normalizeProxyOrigin(options.issuerProxyUrl) - : undefined; return async (c) => { const auth = c.get("auth"); @@ -767,22 +295,29 @@ export function createPairStartHandler( const body = (await parseJsonBody(c)) as { ttlSeconds?: unknown; + initiatorProfile?: unknown; }; - const ttlSeconds = parseTtlSeconds(body.ttlSeconds); - const ownerPat = parseOwnerPatHeader(c.req.header(OWNER_PAT_HEADER)); - - await assertPatOwnsInitiatorAgent({ + const initiatorProfile = parsePeerProfile( + body.initiatorProfile, + "initiatorProfile", + ); + const internalServiceCredentials = parseInternalServiceCredentials({ + serviceId: options.registryInternalServiceId, + serviceSecret: options.registryInternalServiceSecret, + }); + await assertCallerOwnsInitiatorAgent({ fetchImpl, initiatorAgentDid: auth.agentDid, - ownerPat, + ownerDid: auth.ownerDid, registryUrl, + registryInternalServiceId: internalServiceCredentials.serviceId, + registryInternalServiceSecret: internalServiceCredentials.serviceSecret, }); const issuedAtMs = nowMs(); const requestedExpiresAtMs = issuedAtMs + ttlSeconds * 1000; - const issuerProxyUrl = - configuredIssuerProxyUrl ?? 
normalizeProxyOrigin(c.req.url); + const issuerProxyUrl = normalizeProxyOrigin(c.req.url); const signingKey = await createPairingTicketSigningKey({ nowMs: issuedAtMs, @@ -813,19 +348,10 @@ export function createPairStartHandler( }); const expiresAtMs = createdTicket.payload.exp * 1000; - await registerPairingKey({ - fetchImpl, - ownerPat, - registryUrl, - issuerOrigin: issuerProxyUrl, - pkid: signingKey.pkid, - publicKeyX: signingKey.publicKeyX, - expiresAtMs, - }); - const pairingTicketResult = await options.trustStore .createPairingTicket({ initiatorAgentDid: auth.agentDid, + initiatorProfile, issuerProxyUrl, ticket: createdTicket.ticket, expiresAtMs, @@ -845,6 +371,7 @@ export function createPairStartHandler( return c.json({ initiatorAgentDid: pairingTicketResult.initiatorAgentDid, + initiatorProfile: pairingTicketResult.initiatorProfile, ticket: pairingTicketResult.ticket, expiresAt: new Date(pairingTicketResult.expiresAtMs).toISOString(), }); @@ -855,17 +382,21 @@ export function createPairConfirmHandler( options: CreatePairConfirmHandlerOptions, ): (c: PairingRouteContext) => Promise { const nowMs = options.nowMs ?? Date.now; - const fetchImpl = options.fetchImpl ?? 
fetch; - const registryUrl = normalizeRegistryUrl(options.registryUrl); return async (c) => { const auth = c.get("auth"); + if (auth === undefined) { + throw new AppError({ + code: "PROXY_PAIR_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } - const parsedBody = await parseRawJsonBody(c); - const body = parsedBody.json as { + const body = (await parseJsonBody(c)) as { ticket?: unknown; + responderProfile?: unknown; }; - if (typeof body.ticket !== "string" || body.ticket.trim() === "") { throw new AppError({ code: "PROXY_PAIR_INVALID_BODY", @@ -874,12 +405,14 @@ export function createPairConfirmHandler( expose: true, }); } + const responderProfile = parsePeerProfile( + body.responderProfile, + "responderProfile", + ); const ticket = body.ticket.trim(); - - let parsedTicket: ReturnType; try { - parsedTicket = parsePairingTicket(ticket); + parsePairingTicket(ticket); } catch (error) { if (error instanceof PairingTicketParseError) { throw new AppError({ @@ -898,141 +431,11 @@ export function createPairConfirmHandler( }); } - const localProxyOrigin = normalizeProxyOrigin(c.req.url); - const ticketIssuerOrigin = normalizeProxyOrigin(parsedTicket.iss); - const isIssuerLocal = ticketIssuerOrigin === localProxyOrigin; - - if (!isIssuerLocal) { - if (auth === undefined) { - throw new AppError({ - code: "PROXY_PAIR_AUTH_REQUIRED", - message: "Authorization is required for cross-proxy confirm", - status: 401, - expose: true, - }); - } - - const resolvedKey = await resolvePairingKey({ - fetchImpl, - registryUrl, - issuerOrigin: ticketIssuerOrigin, - pkid: parsedTicket.pkid, - }); - - const verified = await verifyPairingTicketSignature({ - payload: parsedTicket, - publicKeyX: resolvedKey.publicKeyX, - }).catch(() => false); - - if (!verified) { - throw new AppError({ - code: "PROXY_PAIR_TICKET_UNTRUSTED_ISSUER", - message: "Pairing ticket issuer could not be verified", - status: 403, - expose: true, - }); - } - - const 
localProxyAllowsPrivateForwarding = - isBlockedForwardOrigin(localProxyOrigin); - const issuerOriginUrl = new URL(ticketIssuerOrigin); - const issuerOriginIsBlocked = isBlockedForwardOrigin(ticketIssuerOrigin); - - if (!issuerOriginIsBlocked && issuerOriginUrl.protocol !== "https:") { - throw new AppError({ - code: "PROXY_PAIR_CONFIRM_ISSUER_INSECURE", - message: - "Forwarded issuer proxy pairing origin must use HTTPS outside of local hosts", - status: 403, - expose: true, - }); - } - - if (!localProxyAllowsPrivateForwarding && issuerOriginIsBlocked) { - throw new AppError({ - code: "PROXY_PAIR_TICKET_ISSUER_BLOCKED", - message: "Pairing ticket issuer origin is blocked", - status: 403, - expose: true, - }); - } - - const issuerConfirmUrl = new URL( - PAIR_CONFIRM_PATH, - ticketIssuerOrigin.endsWith("/") - ? ticketIssuerOrigin - : `${ticketIssuerOrigin}/`, - ); - issuerConfirmUrl.searchParams.set("responderAgentDid", auth.agentDid); - - const forwardedResponse = await fetchImpl(issuerConfirmUrl, { - method: "POST", - headers: buildForwardedConfirmHeaders(c.req.raw.headers), - body: parsedBody.rawBody, - }).catch((error: unknown) => { - throw new AppError({ - code: "PROXY_PAIR_STATE_UNAVAILABLE", - message: "Issuer proxy pairing service is unavailable", - status: 503, - details: { - reason: error instanceof Error ? 
error.message : "unknown", - }, - expose: true, - }); - }); - - const forwardedBody = await parseJsonResponse(forwardedResponse); - if (!forwardedResponse.ok) { - throw mapForwardedPairConfirmError( - forwardedResponse.status, - forwardedBody, - ); - } - - const confirmed = parsePairConfirmResponse(forwardedBody); - if (confirmed.responderAgentDid !== auth.agentDid) { - throw new AppError({ - code: "PROXY_PAIR_CONFIRM_RESPONDER_MISMATCH", - message: "Issuer proxy response did not match caller responder DID", - status: 502, - expose: true, - }); - } - - await options.trustStore - .upsertPair({ - initiatorAgentDid: confirmed.initiatorAgentDid, - responderAgentDid: confirmed.responderAgentDid, - }) - .catch((error: unknown) => { - throw toPairingStoreAppError(error); - }); - - options.logger.info("proxy.pair.confirm.forwarded", { - requestId: c.get("requestId"), - initiatorAgentDid: confirmed.initiatorAgentDid, - responderAgentDid: confirmed.responderAgentDid, - issuerProxyUrl: ticketIssuerOrigin, - }); - - return c.json( - { - paired: true, - initiatorAgentDid: confirmed.initiatorAgentDid, - responderAgentDid: confirmed.responderAgentDid, - }, - 201, - ); - } - - const responderAgentDid = - auth?.agentDid ?? 
- parseResponderDidFromQuery(c.req.query("responderAgentDid")); - const confirmedPairingTicket = await options.trustStore .confirmPairingTicket({ ticket, - responderAgentDid, + responderAgentDid: auth.agentDid, + responderProfile, nowMs: nowMs(), }) .catch((error: unknown) => { @@ -1050,9 +453,99 @@ export function createPairConfirmHandler( { paired: true, initiatorAgentDid: confirmedPairingTicket.initiatorAgentDid, + initiatorProfile: confirmedPairingTicket.initiatorProfile, responderAgentDid: confirmedPairingTicket.responderAgentDid, + responderProfile: confirmedPairingTicket.responderProfile, }, 201, ); }; } + +export function createPairStatusHandler( + options: CreatePairStatusHandlerOptions, +): (c: PairingRouteContext) => Promise { + const nowMs = options.nowMs ?? Date.now; + + return async (c) => { + const auth = c.get("auth"); + if (auth === undefined) { + throw new AppError({ + code: "PROXY_PAIR_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const body = (await parseJsonBody(c)) as { ticket?: unknown }; + if (typeof body.ticket !== "string" || body.ticket.trim() === "") { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "ticket is required", + status: 400, + expose: true, + }); + } + + const ticket = body.ticket.trim(); + const status = await options.trustStore + .getPairingTicketStatus({ + ticket, + nowMs: nowMs(), + }) + .catch((error: unknown) => { + throw toPairingStoreAppError(error); + }); + + const isParticipant = + auth.agentDid === status.initiatorAgentDid || + (status.status === "confirmed" && + auth.agentDid === status.responderAgentDid); + if (!isParticipant) { + throw new AppError({ + code: "PROXY_PAIR_STATUS_FORBIDDEN", + message: "Caller is not a participant for this pairing ticket", + status: 403, + expose: true, + }); + } + + options.logger.info("proxy.pair.status", { + requestId: c.get("requestId"), + status: status.status, + initiatorAgentDid: 
status.initiatorAgentDid, + initiatorAgentName: status.initiatorProfile.agentName, + initiatorHumanName: status.initiatorProfile.humanName, + responderAgentDid: + status.status === "confirmed" ? status.responderAgentDid : undefined, + responderAgentName: + status.status === "confirmed" + ? status.responderProfile.agentName + : undefined, + responderHumanName: + status.status === "confirmed" + ? status.responderProfile.humanName + : undefined, + expiresAt: new Date(status.expiresAtMs).toISOString(), + confirmedAt: + status.status === "confirmed" + ? new Date(status.confirmedAtMs).toISOString() + : undefined, + }); + + return c.json({ + status: status.status, + initiatorAgentDid: status.initiatorAgentDid, + initiatorProfile: status.initiatorProfile, + responderAgentDid: + status.status === "confirmed" ? status.responderAgentDid : undefined, + responderProfile: + status.status === "confirmed" ? status.responderProfile : undefined, + expiresAt: new Date(status.expiresAtMs).toISOString(), + confirmedAt: + status.status === "confirmed" + ? 
new Date(status.confirmedAtMs).toISOString() + : undefined, + }); + }; +} diff --git a/apps/proxy/src/proxy-trust-keys.ts b/apps/proxy/src/proxy-trust-keys.ts new file mode 100644 index 0000000..00eb619 --- /dev/null +++ b/apps/proxy/src/proxy-trust-keys.ts @@ -0,0 +1,10 @@ +export function toPairKey( + initiatorAgentDid: string, + responderAgentDid: string, +): string { + return [initiatorAgentDid, responderAgentDid].sort().join("|"); +} + +export function normalizeExpiryToWholeSecond(expiresAtMs: number): number { + return Math.floor(expiresAtMs / 1000) * 1000; +} diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts index f0f447d..0a29845 100644 --- a/apps/proxy/src/proxy-trust-state.test.ts +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -7,6 +7,16 @@ import { import { ProxyTrustState } from "./proxy-trust-state.js"; import { TRUST_STORE_ROUTES } from "./proxy-trust-store.js"; +const INITIATOR_PROFILE = { + agentName: "alpha", + humanName: "Ravi", +}; + +const RESPONDER_PROFILE = { + agentName: "beta", + humanName: "Ira", +}; + function tamperTicketNonce(ticket: string): string { const prefix = "clwpair1_"; if (!ticket.startsWith(prefix)) { @@ -120,6 +130,7 @@ describe("ProxyTrustState", () => { const ticketResponse = await proxyTrustState.fetch( makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: createdTicket.ticket, expiresAtMs: 1_700_000_060_000, @@ -132,6 +143,7 @@ describe("ProxyTrustState", () => { makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { ticket: ticketBody.ticket, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_000_100, }), ); @@ -145,7 +157,9 @@ describe("ProxyTrustState", () => { }, ).toEqual({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, responderAgentDid: 
"did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", }); @@ -158,6 +172,100 @@ describe("ProxyTrustState", () => { expect((await pairCheckResponse.json()) as { allowed: boolean }).toEqual({ allowed: true, }); + + const statusResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.getPairingTicketStatus, { + ticket: ticketBody.ticket, + nowMs: 1_700_000_000_150, + }), + ); + + expect(statusResponse.status).toBe(200); + expect( + (await statusResponse.json()) as { + status: string; + initiatorAgentDid: string; + responderAgentDid: string; + }, + ).toMatchObject({ + status: "confirmed", + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, + expiresAtMs: 1_700_000_060_000, + confirmedAtMs: 1_700_000_000_000, + }); + }); + + it("returns pending status before a ticket is confirmed", async () => { + const { proxyTrustState } = createProxyTrustState(); + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + + const ticketResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_060_000, + nowMs: 1_700_000_000_000, + }), + ); + const ticketBody = (await ticketResponse.json()) as { ticket: string }; + + const statusResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.getPairingTicketStatus, { + ticket: ticketBody.ticket, + nowMs: 1_700_000_000_100, + }), + ); + + expect(statusResponse.status).toBe(200); + expect( + (await statusResponse.json()) as { + status: string; + }, + ).toMatchObject({ + status: "pending", + initiatorAgentDid: 
"did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + expiresAtMs: 1_700_000_060_000, + }); + }); + + it("normalizes pairing ticket expiry to whole seconds", async () => { + const { proxyTrustState } = createProxyTrustState(); + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_123, + expiresAtMs: 1_700_000_060_123, + }); + + const ticketResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_060_123, + nowMs: 1_700_000_000_123, + }), + ); + + expect(ticketResponse.status).toBe(200); + expect( + (await ticketResponse.json()) as { + expiresAtMs: number; + }, + ).toMatchObject({ + expiresAtMs: 1_700_000_060_000, + }); }); it("rejects tampered ticket text when kid matches stored entry", async () => { @@ -171,6 +279,7 @@ describe("ProxyTrustState", () => { const ticketResponse = await proxyTrustState.fetch( makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: createdTicket.ticket, expiresAtMs: 1_700_000_060_000, @@ -183,6 +292,7 @@ describe("ProxyTrustState", () => { makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { ticket: tamperTicketNonce(ticketBody.ticket), responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_000_100, }), ); diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index 78a46d3..348fdcb 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -2,9 +2,12 @@ import { PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; 
+import { normalizeExpiryToWholeSecond, toPairKey } from "./proxy-trust-keys.js"; import { type PairingTicketConfirmInput, type PairingTicketInput, + type PairingTicketStatusInput, + type PeerProfile, TRUST_STORE_ROUTES, } from "./proxy-trust-store.js"; @@ -12,27 +15,57 @@ type StoredPairingTicket = { ticket: string; expiresAtMs: number; initiatorAgentDid: string; + initiatorProfile: PeerProfile; issuerProxyUrl: string; }; +type StoredConfirmedPairingTicket = { + ticket: string; + expiresAtMs: number; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid: string; + responderProfile: PeerProfile; + issuerProxyUrl: string; + confirmedAtMs: number; +}; + type PairingTicketMap = Record; +type ConfirmedPairingTicketMap = Record; type AgentPeersIndex = Record; +type ExpirableTrustState = { + pairingTickets: PairingTicketMap; + confirmedPairingTickets: ConfirmedPairingTicketMap; +}; const PAIRS_STORAGE_KEY = "trust:pairs"; const AGENT_PEERS_STORAGE_KEY = "trust:agent-peers"; const PAIRING_TICKETS_STORAGE_KEY = "trust:pairing-tickets"; - -function toPairKey( - initiatorAgentDid: string, - responderAgentDid: string, -): string { - return [initiatorAgentDid, responderAgentDid].sort().join("|"); -} +const CONFIRMED_PAIRING_TICKETS_STORAGE_KEY = "trust:pairing-tickets-confirmed"; function isNonEmptyString(value: unknown): value is string { return typeof value === "string" && value.trim().length > 0; } +function parsePeerProfile(value: unknown): PeerProfile | undefined { + if (typeof value !== "object" || value === null) { + return undefined; + } + + const entry = value as { agentName?: unknown; humanName?: unknown }; + if ( + !isNonEmptyString(entry.agentName) || + !isNonEmptyString(entry.humanName) + ) { + return undefined; + } + + return { + agentName: entry.agentName.trim(), + humanName: entry.humanName.trim(), + }; +} + function addPeer( index: AgentPeersIndex, leftAgentDid: string, @@ -89,6 +122,10 @@ export class ProxyTrustState { return 
this.handleConfirmPairingTicket(request); } + if (url.pathname === TRUST_STORE_ROUTES.getPairingTicketStatus) { + return this.handleGetPairingTicketStatus(request); + } + if (url.pathname === TRUST_STORE_ROUTES.upsertPair) { return this.handleUpsertPair(request); } @@ -106,30 +143,29 @@ export class ProxyTrustState { async alarm(): Promise { const nowMs = Date.now(); - const pairingTickets = await this.loadPairingTickets(); - - let mutated = false; - for (const [ticketKid, details] of Object.entries(pairingTickets)) { - if (details.expiresAtMs <= nowMs) { - delete pairingTickets[ticketKid]; - mutated = true; - } - } - + const expirableState = await this.loadExpirableState(); + const mutated = this.removeExpiredEntries(expirableState, nowMs); if (mutated) { - await this.savePairingTickets(pairingTickets); + await this.saveExpirableState(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); } - - await this.scheduleNextCodeCleanup(pairingTickets); + await this.scheduleNextCodeCleanup( + expirableState.pairingTickets, + expirableState.confirmedPairingTickets, + ); } private async handleCreatePairingTicket(request: Request): Promise { const body = (await parseBody(request)) as | Partial | undefined; + const initiatorProfile = parsePeerProfile(body?.initiatorProfile); if ( !body || !isNonEmptyString(body.initiatorAgentDid) || + !initiatorProfile || !isNonEmptyString(body.issuerProxyUrl) || !isNonEmptyString(body.ticket) || typeof body.expiresAtMs !== "number" || @@ -144,6 +180,9 @@ export class ProxyTrustState { } const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : Date.now(); + const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( + body.expiresAtMs, + ); let parsedTicket: ReturnType; try { parsedTicket = parsePairingTicket(body.ticket); @@ -167,7 +206,7 @@ export class ProxyTrustState { }); } - if (parsedTicket.exp * 1000 !== body.expiresAtMs) { + if (parsedTicket.exp * 1000 !== normalizedExpiresAtMs) { return toErrorResponse({ code: "PROXY_PAIR_START_INVALID_BODY", message: "Pairing ticket expiry is invalid", @@ -175,7 +214,7 @@ export class ProxyTrustState { }); } - if (body.expiresAtMs <= nowMs) { + if (normalizedExpiresAtMs <= nowMs) { return toErrorResponse({ code: "PROXY_PAIR_TICKET_EXPIRED", message: "Pairing ticket has expired", @@ -183,21 +222,26 @@ export class ProxyTrustState { }); } - const pairingTickets = await this.loadPairingTickets(); - pairingTickets[parsedTicket.kid] = { + const expirableState = await this.loadExpirableState(); + expirableState.pairingTickets[parsedTicket.kid] = { ticket: body.ticket, initiatorAgentDid: body.initiatorAgentDid, + initiatorProfile, issuerProxyUrl: parsedTicket.iss, - expiresAtMs: body.expiresAtMs, + expiresAtMs: normalizedExpiresAtMs, }; + delete expirableState.confirmedPairingTickets[parsedTicket.kid]; - await this.savePairingTickets(pairingTickets); - await this.scheduleNextCodeCleanup(pairingTickets); + await this.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); return Response.json({ ticket: body.ticket, - expiresAtMs: body.expiresAtMs, + expiresAtMs: normalizedExpiresAtMs, initiatorAgentDid: body.initiatorAgentDid, + initiatorProfile, issuerProxyUrl: parsedTicket.iss, }); } @@ -208,10 +252,12 @@ export class ProxyTrustState { const body = (await parseBody(request)) as | Partial | undefined; + const responderProfile = parsePeerProfile(body?.responderProfile); if ( !body || !isNonEmptyString(body.ticket) || - !isNonEmptyString(body.responderAgentDid) + 
!isNonEmptyString(body.responderAgentDid) || + !responderProfile ) { return toErrorResponse({ code: "PROXY_PAIR_CONFIRM_INVALID_BODY", @@ -236,8 +282,8 @@ export class ProxyTrustState { } const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); - const pairingTickets = await this.loadPairingTickets(); - const stored = pairingTickets[parsedTicket.kid]; + const expirableState = await this.loadExpirableState(); + const stored = expirableState.pairingTickets[parsedTicket.kid]; if (!stored || stored.ticket !== body.ticket) { return toErrorResponse({ @@ -248,9 +294,12 @@ export class ProxyTrustState { } if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { - delete pairingTickets[parsedTicket.kid]; - await this.savePairingTickets(pairingTickets); - await this.scheduleNextCodeCleanup(pairingTickets); + delete expirableState.pairingTickets[parsedTicket.kid]; + delete expirableState.confirmedPairingTickets[parsedTicket.kid]; + await this.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); return toErrorResponse({ code: "PROXY_PAIR_TICKET_EXPIRED", message: "Pairing ticket has expired", @@ -276,17 +325,129 @@ export class ProxyTrustState { await this.savePairs(pairs); await this.saveAgentPeers(agentPeers); - delete pairingTickets[parsedTicket.kid]; - await this.savePairingTickets(pairingTickets); - await this.scheduleNextCodeCleanup(pairingTickets); + delete expirableState.pairingTickets[parsedTicket.kid]; + expirableState.confirmedPairingTickets[parsedTicket.kid] = { + ticket: body.ticket, + expiresAtMs: stored.expiresAtMs, + initiatorAgentDid: stored.initiatorAgentDid, + initiatorProfile: stored.initiatorProfile, + responderAgentDid: body.responderAgentDid, + responderProfile, + issuerProxyUrl: stored.issuerProxyUrl, + confirmedAtMs: normalizeExpiryToWholeSecond(nowMs), + }; + await this.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + 
confirmedPairingTickets: true, + }); return Response.json({ initiatorAgentDid: stored.initiatorAgentDid, + initiatorProfile: stored.initiatorProfile, responderAgentDid: body.responderAgentDid, + responderProfile, issuerProxyUrl: stored.issuerProxyUrl, }); } + private async handleGetPairingTicketStatus( + request: Request, + ): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + if (!body || !isNonEmptyString(body.ticket)) { + return toErrorResponse({ + code: "PROXY_PAIR_STATUS_INVALID_BODY", + message: "Pairing ticket status input is invalid", + status: 400, + }); + } + + const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(body.ticket); + } catch (error) { + if (error instanceof PairingTicketParseError) { + return toErrorResponse({ + code: error.code, + message: error.message, + status: 400, + }); + } + + throw error; + } + + const expirableState = await this.loadExpirableState(); + + const pending = expirableState.pairingTickets[parsedTicket.kid]; + if (pending && pending.ticket === body.ticket) { + if (pending.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + delete expirableState.pairingTickets[parsedTicket.kid]; + await this.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + }); + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return Response.json({ + status: "pending", + ticket: pending.ticket, + initiatorAgentDid: pending.initiatorAgentDid, + initiatorProfile: pending.initiatorProfile, + issuerProxyUrl: pending.issuerProxyUrl, + expiresAtMs: pending.expiresAtMs, + }); + } + + const confirmed = expirableState.confirmedPairingTickets[parsedTicket.kid]; + if (confirmed && confirmed.ticket === body.ticket) { + if (confirmed.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + delete 
expirableState.confirmedPairingTickets[parsedTicket.kid]; + await this.saveExpirableStateAndSchedule(expirableState, { + confirmedPairingTickets: true, + }); + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return Response.json({ + status: "confirmed", + ticket: confirmed.ticket, + initiatorAgentDid: confirmed.initiatorAgentDid, + initiatorProfile: confirmed.initiatorProfile, + responderAgentDid: confirmed.responderAgentDid, + responderProfile: confirmed.responderProfile, + issuerProxyUrl: confirmed.issuerProxyUrl, + expiresAtMs: confirmed.expiresAtMs, + confirmedAtMs: confirmed.confirmedAtMs, + }); + } + + if (parsedTicket.exp * 1000 <= nowMs) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", + status: 404, + }); + } + private async handleUpsertPair(request: Request): Promise { const body = (await parseBody(request)) as | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } @@ -363,6 +524,75 @@ export class ProxyTrustState { return Response.json({ known: false }); } + private async loadExpirableState(): Promise { + const [pairingTickets, confirmedPairingTickets] = await Promise.all([ + this.loadPairingTickets(), + this.loadConfirmedPairingTickets(), + ]); + + return { pairingTickets, confirmedPairingTickets }; + } + + private removeExpiredEntries( + state: ExpirableTrustState, + nowMs: number, + ): boolean { + let mutated = false; + + for (const [ticketKid, details] of Object.entries(state.pairingTickets)) { + if (details.expiresAtMs <= nowMs) { + delete state.pairingTickets[ticketKid]; + mutated = true; + } + } + + for (const [ticketKid, details] of Object.entries( + state.confirmedPairingTickets, + )) { + if (details.expiresAtMs <= nowMs) { + delete 
state.confirmedPairingTickets[ticketKid]; + mutated = true; + } + } + + return mutated; + } + + private async saveExpirableState( + state: ExpirableTrustState, + options: { + pairingTickets?: boolean; + confirmedPairingTickets?: boolean; + }, + ): Promise { + const saves: Promise[] = []; + if (options.pairingTickets) { + saves.push(this.savePairingTickets(state.pairingTickets)); + } + if (options.confirmedPairingTickets) { + saves.push( + this.saveConfirmedPairingTickets(state.confirmedPairingTickets), + ); + } + if (saves.length > 0) { + await Promise.all(saves); + } + } + + private async saveExpirableStateAndSchedule( + state: ExpirableTrustState, + options: { + pairingTickets?: boolean; + confirmedPairingTickets?: boolean; + }, + ): Promise { + await this.saveExpirableState(state, options); + await this.scheduleNextCodeCleanup( + state.pairingTickets, + state.confirmedPairingTickets, + ); + } + private async loadPairs(): Promise> { const raw = await this.state.storage.get(PAIRS_STORAGE_KEY); if (!Array.isArray(raw)) { @@ -422,10 +652,13 @@ export class ProxyTrustState { ticket?: unknown; expiresAtMs?: unknown; initiatorAgentDid?: unknown; + initiatorProfile?: unknown; issuerProxyUrl?: unknown; }; + const initiatorProfile = parsePeerProfile(entry.initiatorProfile); if ( !isNonEmptyString(entry.initiatorAgentDid) || + !initiatorProfile || !isNonEmptyString(entry.issuerProxyUrl) || typeof entry.expiresAtMs !== "number" || !Number.isInteger(entry.expiresAtMs) @@ -447,6 +680,7 @@ export class ProxyTrustState { ticket: ticketCandidate, expiresAtMs: entry.expiresAtMs, initiatorAgentDid: entry.initiatorAgentDid, + initiatorProfile, issuerProxyUrl: parsedTicket.iss, }; } @@ -460,12 +694,90 @@ export class ProxyTrustState { await this.state.storage.put(PAIRING_TICKETS_STORAGE_KEY, pairingTickets); } + private async loadConfirmedPairingTickets(): Promise { + const raw = await this.state.storage.get( + CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, + ); + + if (typeof raw !== 
"object" || raw === null) { + return {}; + } + + const normalized: ConfirmedPairingTicketMap = {}; + for (const [entryKey, value] of Object.entries(raw)) { + if (typeof value !== "object" || value === null) { + continue; + } + + const entry = value as { + ticket?: unknown; + expiresAtMs?: unknown; + initiatorAgentDid?: unknown; + initiatorProfile?: unknown; + responderAgentDid?: unknown; + responderProfile?: unknown; + issuerProxyUrl?: unknown; + confirmedAtMs?: unknown; + }; + const initiatorProfile = parsePeerProfile(entry.initiatorProfile); + const responderProfile = parsePeerProfile(entry.responderProfile); + + if ( + !isNonEmptyString(entry.initiatorAgentDid) || + !initiatorProfile || + !isNonEmptyString(entry.responderAgentDid) || + !responderProfile || + !isNonEmptyString(entry.issuerProxyUrl) || + typeof entry.expiresAtMs !== "number" || + !Number.isInteger(entry.expiresAtMs) || + typeof entry.confirmedAtMs !== "number" || + !Number.isInteger(entry.confirmedAtMs) + ) { + continue; + } + + const ticketCandidate = isNonEmptyString(entry.ticket) + ? 
entry.ticket + : entryKey; + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(ticketCandidate); + } catch { + continue; + } + + normalized[parsedTicket.kid] = { + ticket: ticketCandidate, + expiresAtMs: entry.expiresAtMs, + initiatorAgentDid: entry.initiatorAgentDid, + initiatorProfile, + responderAgentDid: entry.responderAgentDid, + responderProfile, + issuerProxyUrl: parsedTicket.iss, + confirmedAtMs: entry.confirmedAtMs, + }; + } + + return normalized; + } + + private async saveConfirmedPairingTickets( + pairingTickets: ConfirmedPairingTicketMap, + ): Promise { + await this.state.storage.put( + CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, + pairingTickets, + ); + } + private async scheduleNextCodeCleanup( pairingTickets: PairingTicketMap, + confirmedPairingTickets: ConfirmedPairingTicketMap, ): Promise { - const expiryValues = Object.values(pairingTickets).map( - (details) => details.expiresAtMs, - ); + const expiryValues = [ + ...Object.values(pairingTickets), + ...Object.values(confirmedPairingTickets), + ].map((details) => details.expiresAtMs); if (expiryValues.length === 0) { await this.state.storage.deleteAlarm(); diff --git a/apps/proxy/src/proxy-trust-store.test.ts b/apps/proxy/src/proxy-trust-store.test.ts index 78a4e4b..90fc80d 100644 --- a/apps/proxy/src/proxy-trust-store.test.ts +++ b/apps/proxy/src/proxy-trust-store.test.ts @@ -6,6 +6,16 @@ import { } from "./pairing-ticket.js"; import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; +const INITIATOR_PROFILE = { + agentName: "alpha", + humanName: "Ravi", +}; + +const RESPONDER_PROFILE = { + agentName: "beta", + humanName: "Ira", +}; + function tamperTicketNonce(ticket: string): string { const prefix = "clwpair1_"; if (!ticket.startsWith(prefix)) { @@ -99,6 +109,7 @@ describe("in-memory proxy trust store", () => { }); const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: 
"https://proxy-a.example.com", ticket: created.ticket, expiresAtMs: 1_700_000_060_000, @@ -108,12 +119,15 @@ describe("in-memory proxy trust store", () => { const confirmed = await store.confirmPairingTicket({ ticket: ticket.ticket, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_000_100, }); expect(confirmed).toEqual({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", }); @@ -121,6 +135,7 @@ describe("in-memory proxy trust store", () => { store.confirmPairingTicket({ ticket: ticket.ticket, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_000_200, }), ).rejects.toMatchObject({ @@ -132,6 +147,81 @@ describe("in-memory proxy trust store", () => { expect(await store.isAgentKnown("did:claw:agent:bob")).toBe(true); }); + it("returns pending and confirmed pairing ticket status for initiator polling", async () => { + const store = createInMemoryProxyTrustStore(); + const created = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + const ticket = await store.createPairingTicket({ + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + ticket: created.ticket, + expiresAtMs: 1_700_000_060_000, + nowMs: 1_700_000_000_000, + }); + + await expect( + store.getPairingTicketStatus({ + ticket: ticket.ticket, + nowMs: 1_700_000_000_100, + }), + ).resolves.toEqual({ + status: "pending", + ticket: ticket.ticket, + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + expiresAtMs: 1_700_000_060_000, + }); + + await store.confirmPairingTicket({ + ticket: ticket.ticket, + 
responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, + nowMs: 1_700_000_000_300, + }); + + await expect( + store.getPairingTicketStatus({ + ticket: ticket.ticket, + nowMs: 1_700_000_000_400, + }), + ).resolves.toEqual({ + status: "confirmed", + ticket: ticket.ticket, + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + expiresAtMs: 1_700_000_060_000, + confirmedAtMs: 1_700_000_000_000, + }); + }); + + it("normalizes pairing ticket expiry to whole seconds", async () => { + const store = createInMemoryProxyTrustStore(); + const created = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_123, + expiresAtMs: 1_700_000_060_123, + }); + + const ticket = await store.createPairingTicket({ + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + ticket: created.ticket, + expiresAtMs: 1_700_000_060_123, + nowMs: 1_700_000_000_123, + }); + + expect(ticket.expiresAtMs).toBe(1_700_000_060_000); + }); + it("rejects tampered ticket text when kid matches stored entry", async () => { const store = createInMemoryProxyTrustStore(); const created = await createSignedTicket({ @@ -141,6 +231,7 @@ describe("in-memory proxy trust store", () => { }); const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, expiresAtMs: 1_700_000_060_000, @@ -151,6 +242,7 @@ describe("in-memory proxy trust store", () => { store.confirmPairingTicket({ ticket: tamperTicketNonce(ticket.ticket), responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_000_100, }), ).rejects.toMatchObject({ @@ -168,6 +260,7 @@ 
describe("in-memory proxy trust store", () => { }); const ticket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, expiresAtMs: 1_700_000_001_000, @@ -178,6 +271,17 @@ describe("in-memory proxy trust store", () => { store.confirmPairingTicket({ ticket: ticket.ticket, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, + nowMs: 1_700_000_002_000, + }), + ).rejects.toMatchObject({ + code: "PROXY_PAIR_TICKET_EXPIRED", + status: 410, + }); + + await expect( + store.getPairingTicketStatus({ + ticket: ticket.ticket, nowMs: 1_700_000_002_000, }), ).rejects.toMatchObject({ @@ -196,6 +300,7 @@ describe("in-memory proxy trust store", () => { }); const expiredTicket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: expired.ticket, expiresAtMs: 1_700_000_001_000, @@ -209,6 +314,7 @@ describe("in-memory proxy trust store", () => { }); const validTicket = await store.createPairingTicket({ initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: valid.ticket, expiresAtMs: 1_700_000_060_000, @@ -218,6 +324,7 @@ describe("in-memory proxy trust store", () => { await store.confirmPairingTicket({ ticket: validTicket.ticket, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_002_000, }); @@ -225,6 +332,7 @@ describe("in-memory proxy trust store", () => { store.confirmPairingTicket({ ticket: expiredTicket.ticket, responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, nowMs: 1_700_000_002_100, }), ).rejects.toMatchObject({ diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 42d5811..411d120 100644 --- 
a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -3,9 +3,11 @@ import { PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; +import { normalizeExpiryToWholeSecond, toPairKey } from "./proxy-trust-keys.js"; export type PairingTicketInput = { initiatorAgentDid: string; + initiatorProfile: PeerProfile; issuerProxyUrl: string; ticket: string; expiresAtMs: number; @@ -16,21 +18,56 @@ export type PairingTicketResult = { ticket: string; expiresAtMs: number; initiatorAgentDid: string; + initiatorProfile: PeerProfile; issuerProxyUrl: string; }; export type PairingTicketConfirmInput = { ticket: string; responderAgentDid: string; + responderProfile: PeerProfile; nowMs?: number; }; export type PairingTicketConfirmResult = { initiatorAgentDid: string; + initiatorProfile: PeerProfile; responderAgentDid: string; + responderProfile: PeerProfile; issuerProxyUrl: string; }; +export type PairingTicketStatusInput = { + ticket: string; + nowMs?: number; +}; + +export type PairingTicketStatusResult = + | { + status: "pending"; + ticket: string; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + issuerProxyUrl: string; + expiresAtMs: number; + } + | { + status: "confirmed"; + ticket: string; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid: string; + responderProfile: PeerProfile; + issuerProxyUrl: string; + expiresAtMs: number; + confirmedAtMs: number; + }; + +export type PeerProfile = { + agentName: string; + humanName: string; +}; + export type PairingInput = { initiatorAgentDid: string; responderAgentDid: string; @@ -41,6 +78,9 @@ export interface ProxyTrustStore { confirmPairingTicket( input: PairingTicketConfirmInput, ): Promise; + getPairingTicketStatus( + input: PairingTicketStatusInput, + ): Promise; isAgentKnown(agentDid: string): Promise; isPairAllowed(input: PairingInput): Promise; upsertPair(input: PairingInput): Promise; @@ -70,18 +110,12 @@ export class 
ProxyTrustStoreError extends Error { export const TRUST_STORE_ROUTES = { createPairingTicket: "/pairing-tickets/create", confirmPairingTicket: "/pairing-tickets/confirm", + getPairingTicketStatus: "/pairing-tickets/status", isAgentKnown: "/agents/known", isPairAllowed: "/pairs/check", upsertPair: "/pairs/upsert", } as const; -function toPairKey( - initiatorAgentDid: string, - responderAgentDid: string, -): string { - return [initiatorAgentDid, responderAgentDid].sort().join("|"); -} - function parseErrorPayload(payload: unknown): { code: string; message: string; @@ -174,6 +208,13 @@ export function createDurableProxyTrustStore( input, ); }, + async getPairingTicketStatus(input) { + return callDurableState( + namespace, + TRUST_STORE_ROUTES.getPairingTicketStatus, + input, + ); + }, async isAgentKnown(agentDid) { const result = await callDurableState<{ known: boolean }>( namespace, @@ -203,12 +244,26 @@ export function createDurableProxyTrustStore( export function createInMemoryProxyTrustStore(): ProxyTrustStore { const pairKeys = new Set(); const agentPeers = new Map>(); + const confirmedPairingTickets = new Map< + string, + { + ticket: string; + expiresAtMs: number; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid: string; + responderProfile: PeerProfile; + issuerProxyUrl: string; + confirmedAtMs: number; + } + >(); const pairingTickets = new Map< string, { ticket: string; expiresAtMs: number; initiatorAgentDid: string; + initiatorProfile: PeerProfile; issuerProxyUrl: string; } >(); @@ -223,6 +278,16 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { pairingTickets.delete(ticketKid); } } + + for (const [ticketKid, details] of confirmedPairingTickets.entries()) { + if (skipTicketKid === ticketKid) { + continue; + } + + if (details.expiresAtMs <= nowMs) { + confirmedPairingTickets.delete(ticketKid); + } + } } function upsertPeer(leftAgentDid: string, rightAgentDid: string): void { @@ -255,6 +320,7 @@ export 
function createInMemoryProxyTrustStore(): ProxyTrustStore { function resolveConfirmablePairingTicket(input: PairingTicketConfirmInput): { pair: PairingTicketConfirmResult; ticketKid: string; + expiresAtMs: number; } { const nowMs = input.nowMs ?? Date.now(); const parsedTicket = parseStoredTicket(input.ticket); @@ -289,19 +355,92 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { return { pair: { initiatorAgentDid: stored.initiatorAgentDid, + initiatorProfile: stored.initiatorProfile, responderAgentDid: input.responderAgentDid, + responderProfile: input.responderProfile, issuerProxyUrl: stored.issuerProxyUrl, }, ticketKid: parsedTicket.kid, + expiresAtMs: stored.expiresAtMs, }; } + function resolveTicketStatus( + input: PairingTicketStatusInput, + ): PairingTicketStatusResult { + const nowMs = input.nowMs ?? Date.now(); + const parsedTicket = parseStoredTicket(input.ticket); + cleanup(nowMs, parsedTicket.kid); + + const pending = pairingTickets.get(parsedTicket.kid); + if (pending && pending.ticket === input.ticket) { + if (pending.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + pairingTickets.delete(parsedTicket.kid); + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return { + status: "pending", + ticket: pending.ticket, + initiatorAgentDid: pending.initiatorAgentDid, + initiatorProfile: pending.initiatorProfile, + issuerProxyUrl: pending.issuerProxyUrl, + expiresAtMs: pending.expiresAtMs, + }; + } + + const confirmed = confirmedPairingTickets.get(parsedTicket.kid); + if (confirmed && confirmed.ticket === input.ticket) { + if (confirmed.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + confirmedPairingTickets.delete(parsedTicket.kid); + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return { + status: "confirmed", + ticket: 
confirmed.ticket, + initiatorAgentDid: confirmed.initiatorAgentDid, + initiatorProfile: confirmed.initiatorProfile, + responderAgentDid: confirmed.responderAgentDid, + responderProfile: confirmed.responderProfile, + issuerProxyUrl: confirmed.issuerProxyUrl, + expiresAtMs: confirmed.expiresAtMs, + confirmedAtMs: confirmed.confirmedAtMs, + }; + } + + if (parsedTicket.exp * 1000 <= nowMs) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", + status: 404, + }); + } + return { async createPairingTicket(input) { const nowMs = input.nowMs ?? Date.now(); cleanup(nowMs); const parsedTicket = parseStoredTicket(input.ticket); + const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( + input.expiresAtMs, + ); if (parsedTicket.iss !== input.issuerProxyUrl) { throw new ProxyTrustStoreError({ @@ -311,7 +450,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { }); } - if (parsedTicket.exp * 1000 !== input.expiresAtMs) { + if (parsedTicket.exp * 1000 !== normalizedExpiresAtMs) { throw new ProxyTrustStoreError({ code: "PROXY_PAIR_START_INVALID_BODY", message: "Pairing ticket expiry is invalid", @@ -322,20 +461,29 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { pairingTickets.set(parsedTicket.kid, { ticket: input.ticket, initiatorAgentDid: input.initiatorAgentDid, + initiatorProfile: input.initiatorProfile, issuerProxyUrl: parsedTicket.iss, - expiresAtMs: input.expiresAtMs, + expiresAtMs: normalizedExpiresAtMs, }); + confirmedPairingTickets.delete(parsedTicket.kid); return { ticket: input.ticket, - expiresAtMs: input.expiresAtMs, + expiresAtMs: normalizedExpiresAtMs, initiatorAgentDid: input.initiatorAgentDid, + initiatorProfile: input.initiatorProfile, issuerProxyUrl: parsedTicket.iss, }; }, async confirmPairingTicket(input) { - const { 
pair: confirmedPair, ticketKid } = - resolveConfirmablePairingTicket(input); + const { + pair: confirmedPair, + ticketKid, + expiresAtMs, + } = resolveConfirmablePairingTicket(input); + const confirmedAtMs = normalizeExpiryToWholeSecond( + input.nowMs ?? Date.now(), + ); pairKeys.add( toPairKey( confirmedPair.initiatorAgentDid, @@ -351,8 +499,21 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { confirmedPair.initiatorAgentDid, ); pairingTickets.delete(ticketKid); + confirmedPairingTickets.set(ticketKid, { + ticket: input.ticket, + initiatorAgentDid: confirmedPair.initiatorAgentDid, + initiatorProfile: confirmedPair.initiatorProfile, + responderAgentDid: confirmedPair.responderAgentDid, + responderProfile: confirmedPair.responderProfile, + issuerProxyUrl: confirmedPair.issuerProxyUrl, + expiresAtMs, + confirmedAtMs, + }); return confirmedPair; }, + async getPairingTicketStatus(input) { + return resolveTicketStatus(input); + }, async isAgentKnown(agentDid) { return (agentPeers.get(agentDid)?.size ?? 
0) > 0; }, diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index 94c5979..421ce0c 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -21,12 +21,18 @@ import { } from "./auth-middleware.js"; import type { ProxyConfig } from "./config.js"; import { PROXY_VERSION } from "./index.js"; -import { PAIR_CONFIRM_PATH, PAIR_START_PATH } from "./pairing-constants.js"; +import { + PAIR_CONFIRM_PATH, + PAIR_START_PATH, + PAIR_STATUS_PATH, +} from "./pairing-constants.js"; import { createPairConfirmHandler, createPairStartHandler, + createPairStatusHandler, type PairConfirmRuntimeOptions, type PairStartRuntimeOptions, + type PairStatusRuntimeOptions, } from "./pairing-route.js"; import { createInMemoryProxyTrustStore, @@ -66,6 +72,7 @@ type CreateProxyAppOptions = { relay?: RelayConnectRuntimeOptions; pairing?: { confirm?: PairConfirmRuntimeOptions; + status?: PairStatusRuntimeOptions; start?: PairStartRuntimeOptions; }; trustStore?: ProxyTrustStore; @@ -148,7 +155,9 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { createPairStartHandler({ logger, registryUrl: options.config.registryUrl, - issuerProxyUrl: options.config.pairingIssuerUrl, + registryInternalServiceId: options.config.registryInternalServiceId, + registryInternalServiceSecret: + options.config.registryInternalServiceSecret, trustStore, ...options.pairing?.start, }), @@ -157,11 +166,18 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { PAIR_CONFIRM_PATH, createPairConfirmHandler({ logger, - registryUrl: options.config.registryUrl, trustStore, ...options.pairing?.confirm, }), ); + app.post( + PAIR_STATUS_PATH, + createPairStatusHandler({ + logger, + trustStore, + ...options.pairing?.status, + }), + ); app.get( RELAY_CONNECT_PATH, createRelayConnectHandler({ diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 55c095f..37d6186 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -22,7 +22,8 
@@ export type ProxyWorkerBindings = { PROXY_TRUST_STATE?: ProxyTrustStateNamespace; REGISTRY_URL?: string; CLAWDENTITY_REGISTRY_URL?: string; - PAIRING_ISSUER_URL?: string; + REGISTRY_INTERNAL_SERVICE_ID?: string; + REGISTRY_INTERNAL_SERVICE_SECRET?: string; ENVIRONMENT?: string; ALLOW_ALL_VERIFIED?: string; CRL_REFRESH_INTERVAL_MS?: string; @@ -31,6 +32,12 @@ export type ProxyWorkerBindings = { AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: string; AGENT_RATE_LIMIT_WINDOW_MS?: string; INJECT_IDENTITY_INTO_MESSAGE?: string; + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT?: string; + RELAY_QUEUE_TTL_SECONDS?: string; + RELAY_RETRY_INITIAL_MS?: string; + RELAY_RETRY_MAX_MS?: string; + RELAY_RETRY_MAX_ATTEMPTS?: string; + RELAY_RETRY_JITTER_RATIO?: string; APP_VERSION?: string; PROXY_VERSION?: string; [key: string]: unknown; @@ -51,7 +58,8 @@ function toCacheKey(env: ProxyWorkerBindings): string { env.PROXY_TRUST_STATE === undefined ? "no-trust-do" : "has-trust-do", env.REGISTRY_URL, env.CLAWDENTITY_REGISTRY_URL, - env.PAIRING_ISSUER_URL, + env.REGISTRY_INTERNAL_SERVICE_ID, + env.REGISTRY_INTERNAL_SERVICE_SECRET, env.ENVIRONMENT, env.ALLOW_ALL_VERIFIED, env.CRL_REFRESH_INTERVAL_MS, @@ -60,6 +68,12 @@ function toCacheKey(env: ProxyWorkerBindings): string { env.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, env.AGENT_RATE_LIMIT_WINDOW_MS, env.INJECT_IDENTITY_INTO_MESSAGE, + env.RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + env.RELAY_QUEUE_TTL_SECONDS, + env.RELAY_RETRY_INITIAL_MS, + env.RELAY_RETRY_MAX_MS, + env.RELAY_RETRY_MAX_ATTEMPTS, + env.RELAY_RETRY_JITTER_RATIO, env.APP_VERSION, env.PROXY_VERSION, ]; diff --git a/apps/proxy/wrangler.jsonc b/apps/proxy/wrangler.jsonc index 4f04180..9d89ae5 100644 --- a/apps/proxy/wrangler.jsonc +++ b/apps/proxy/wrangler.jsonc @@ -53,13 +53,25 @@ ], "vars": { "ENVIRONMENT": "local", - "REGISTRY_URL": "https://dev.api.clawdentity.com", + "REGISTRY_URL": "https://dev.registry.clawdentity.com", "OPENCLAW_BASE_URL": "http://127.0.0.1:18789", - 
"INJECT_IDENTITY_INTO_MESSAGE": "true" + "INJECT_IDENTITY_INTO_MESSAGE": "true", + "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT": "500", + "RELAY_QUEUE_TTL_SECONDS": "3600", + "RELAY_RETRY_INITIAL_MS": "1000", + "RELAY_RETRY_MAX_MS": "30000", + "RELAY_RETRY_MAX_ATTEMPTS": "25", + "RELAY_RETRY_JITTER_RATIO": "0.2" } }, - "development": { - "name": "clawdentity-proxy-development", + "dev": { + "name": "clawdentity-proxy-dev", + "routes": [ + { + "pattern": "dev.proxy.clawdentity.com", + "custom_domain": true + } + ], "durable_objects": { "bindings": [ { @@ -84,12 +96,24 @@ ], "vars": { "ENVIRONMENT": "development", - "REGISTRY_URL": "https://dev.api.clawdentity.com", - "INJECT_IDENTITY_INTO_MESSAGE": "true" + "REGISTRY_URL": "https://dev.registry.clawdentity.com", + "INJECT_IDENTITY_INTO_MESSAGE": "true", + "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT": "500", + "RELAY_QUEUE_TTL_SECONDS": "3600", + "RELAY_RETRY_INITIAL_MS": "1000", + "RELAY_RETRY_MAX_MS": "30000", + "RELAY_RETRY_MAX_ATTEMPTS": "25", + "RELAY_RETRY_JITTER_RATIO": "0.2" } }, "production": { "name": "clawdentity-proxy", + "routes": [ + { + "pattern": "proxy.clawdentity.com", + "custom_domain": true + } + ], "durable_objects": { "bindings": [ { @@ -114,8 +138,14 @@ ], "vars": { "ENVIRONMENT": "production", - "REGISTRY_URL": "https://api.clawdentity.com", - "INJECT_IDENTITY_INTO_MESSAGE": "true" + "REGISTRY_URL": "https://registry.clawdentity.com", + "INJECT_IDENTITY_INTO_MESSAGE": "true", + "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT": "500", + "RELAY_QUEUE_TTL_SECONDS": "3600", + "RELAY_RETRY_INITIAL_MS": "1000", + "RELAY_RETRY_MAX_MS": "30000", + "RELAY_RETRY_MAX_ATTEMPTS": "25", + "RELAY_RETRY_JITTER_RATIO": "0.2" } } } diff --git a/apps/registry/.env.example b/apps/registry/.env.example index 48dcb85..267af15 100644 --- a/apps/registry/.env.example +++ b/apps/registry/.env.example @@ -1,5 +1,5 @@ # Registry local/development template -# For local Wrangler development, place active values in .dev.vars. 
+# For local Wrangler development, copy values into apps/registry/.env. # For cloud deploys, set secrets with: # wrangler secret put BOOTSTRAP_SECRET --env # wrangler secret put REGISTRY_SIGNING_KEY --env @@ -8,6 +8,9 @@ # Wrangler vars (non-secret) ENVIRONMENT=development APP_VERSION=local-dev +EVENT_BUS_BACKEND=memory +PROXY_URL=https://dev.proxy.clawdentity.com +REGISTRY_ISSUER_URL=https://dev.registry.clawdentity.com # Secrets BOOTSTRAP_SECRET=replace-with-random-secret diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index b0599a7..2240ca1 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -10,20 +10,21 @@ - Keep D1 database IDs version-controlled; manage secrets with `wrangler secret put`. - Keep `migrations_dir` aligned with Drizzle output directory (`drizzle`). - Prefer branded custom domains over `*.workers.dev` for public endpoints. - - Development: `dev.api.clawdentity.com` - - Production: `api.clawdentity.com` + - Development: `dev.registry.clawdentity.com` + - Production: `registry.clawdentity.com` ## Deployment Rules - Always deploy with explicit environment: `--env dev` or `--env production`. - Deploy scripts must run D1 migrations before Worker deployment. -- For local development, run local migrations before `wrangler dev --env dev` (use `pnpm -F @clawdentity/registry run dev:local`). +- For local development, run local migrations before `wrangler dev --env dev --port 8788` (use `pnpm -F @clawdentity/registry run dev:local`). - Verify `GET /health` returns `status: "ok"` and environment (`development` or `production`). ## Runtime and API - Preserve `/health` response contract: `{ status, version, environment }`. - Keep the worker entrypoint in `src/server.ts`; use `src/index.ts` only as the package export wrapper. - Keep environment variables non-secret in `wrangler.jsonc` and secret values out of git. 
-- Keep `.dev.vars` and `.env.example` synchronized when adding/changing runtime config fields (`ENVIRONMENT`, `APP_VERSION`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). +- Keep `.dev.vars` and `.env.example` synchronized when adding/changing runtime config fields (`ENVIRONMENT`, `APP_VERSION`, `PROXY_URL`, `EVENT_BUS_BACKEND`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). +- Use queue-backed event bus in `development`/`production` (`EVENT_BUS_BACKEND=queue` + `EVENT_BUS_QUEUE` binding) and memory backend in local development overrides (`EVENT_BUS_BACKEND=memory`). ## Validation - Validate config changes with `wrangler check` before deployment. diff --git a/apps/registry/drizzle/0004_internal_services.sql b/apps/registry/drizzle/0004_internal_services.sql new file mode 100644 index 0000000..b868def --- /dev/null +++ b/apps/registry/drizzle/0004_internal_services.sql @@ -0,0 +1,22 @@ +DROP TABLE IF EXISTS `proxy_pairing_keys`; +--> statement-breakpoint +CREATE TABLE `internal_services` ( + `id` text PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `secret_hash` text NOT NULL, + `secret_prefix` text NOT NULL, + `scopes_json` text NOT NULL, + `status` text DEFAULT 'active' NOT NULL, + `created_by` text NOT NULL, + `rotated_at` text, + `last_used_at` text, + `created_at` text NOT NULL, + `updated_at` text NOT NULL, + FOREIGN KEY (`created_by`) REFERENCES `humans`(`id`) ON UPDATE no action ON DELETE no action +); +--> statement-breakpoint +CREATE UNIQUE INDEX `internal_services_name_unique` ON `internal_services` (`name`); +--> statement-breakpoint +CREATE INDEX `idx_internal_services_secret_prefix` ON `internal_services` (`secret_prefix`); +--> statement-breakpoint +CREATE INDEX `idx_internal_services_status` ON `internal_services` (`status`); diff --git a/apps/registry/package.json b/apps/registry/package.json index bcdc99c..f91b9b5 100644 --- a/apps/registry/package.json +++ b/apps/registry/package.json @@ -15,8 +15,8 
@@ "build": "tsup", "format": "biome format .", "lint": "biome lint .", - "dev": "wrangler dev --env dev", - "dev:local": "wrangler d1 migrations apply clawdentity-db-dev --local --env dev && wrangler dev --env dev", + "dev": "wrangler dev --env dev --port 8788", + "dev:local": "wrangler d1 migrations apply clawdentity-db-dev --local --env dev && wrangler dev --env dev --port 8788", "deploy:dev": "wrangler d1 migrations apply clawdentity-db-dev --remote --env dev && wrangler deploy --env dev", "deploy:production": "wrangler d1 migrations apply clawdentity-db --remote --env production && wrangler deploy --env production", "db:generate": "drizzle-kit generate", diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index ea470e3..9de773e 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -50,6 +50,8 @@ - Validate payload strictly: `issuerOrigin` must be URL origin (`http`/`https`), `pkid` non-empty, `publicKeyX` non-empty, `expiresAt` valid future ISO timestamp. - Keep writes idempotent on (`issuer_origin`, `pkid`) and update key material/expiry when repeated registration arrives. - `GET /v1/proxy-pairing-keys/resolve` is public and returns only active (non-expired) key metadata needed for proxy ticket verification. +- `POST /internal/v1/proxy-pairing-keys` requires service auth and must resolve `ownerDid -> humans.id` before persisting `created_by`. +- `GET /internal/v1/proxy-pairing-keys/resolve` requires service auth and mirrors the same active-key lookup contract. - For unknown/expired keys, return `404 PROXY_PAIRING_KEY_NOT_FOUND`; do not leak extra owner data. ## Validation @@ -77,6 +79,12 @@ - Return `{ ownsAgent: true }` when the caller owns the agent and `{ ownsAgent: false }` for foreign or missing IDs. - Keep this endpoint ownership-only; do not return agent metadata. +## POST /internal/v1/ownership/check Contract +- Require service auth via internal service token middleware. 
+- Validate `ownerDid` and `agentDid` as DID strings (`human` and `agent` kinds respectively). +- Return `{ ownsAgent, agentStatus }` where `agentStatus` is `active | revoked | null`. +- Keep this endpoint internal-only for proxy service-to-service ownership checks. + ## POST /v1/invites Contract - Require PAT auth via `createApiKeyAuth`. - Enforce admin-only access with explicit `403 INVITE_CREATE_FORBIDDEN` for authenticated non-admin callers. @@ -90,6 +98,7 @@ - One-time semantics are enforced by guarded update (`redeemed_by IS NULL`); repeated redeem attempts must return explicit invite lifecycle errors. - Expired invites must be rejected with `INVITE_REDEEM_EXPIRED` before token issuance. - Successful redeem must create a new active user human and mint a PAT in the same mutation unit as invite consumption. +- Successful redeem response must include `proxyUrl` sourced from registry config (`PROXY_URL`) so onboarding clients can persist relay routing without prompting for proxy details. - Keep mutation flow transaction-first; on local fallback (no transaction support), apply compensation rollback so failed redeem attempts do not leave partially-created humans or consumed invites. ## POST /v1/me/api-keys Contract @@ -136,7 +145,7 @@ - Consume challenge with guarded state transition (`pending` -> `used`) in the same mutation unit as agent insert; reject zero-row updates as replayed challenge. - Use shared SDK datetime helpers (`nowIso`, `addSeconds`) for issuance/expiry math instead of ad-hoc `Date.now()` arithmetic in route logic. - Resolve signing material through a reusable signer helper (`registry-signer.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. -- Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.api.clawdentity.com`, `production` -> `https://api.clawdentity.com`) rather than request-origin inference. 
+- Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.registry.clawdentity.com`, `production` -> `https://registry.clawdentity.com`) rather than request-origin inference. - Bootstrap agent auth refresh material in the same mutation unit as agent creation by inserting an active `agent_auth_sessions` row. - Response shape is `{ agent, ait, agentAuth }` where `agentAuth` returns short-lived access credentials and rotating refresh credentials. diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index 51af0f2..7b8eead 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -32,9 +32,9 @@ const REGISTRY_ISSUER_BY_ENVIRONMENT: Record< RegistryConfig["ENVIRONMENT"], string > = { - development: "https://dev.api.clawdentity.com", - production: "https://api.clawdentity.com", - test: "https://dev.api.clawdentity.com", + development: "https://dev.registry.clawdentity.com", + production: "https://registry.clawdentity.com", + test: "https://dev.registry.clawdentity.com", }; type AgentRegistrationBody = { @@ -774,7 +774,12 @@ export { }; export function resolveRegistryIssuer( - environment: RegistryConfig["ENVIRONMENT"], + config: Pick, ): string { - return REGISTRY_ISSUER_BY_ENVIRONMENT[environment]; + const explicitIssuer = config.REGISTRY_ISSUER_URL?.trim(); + if (explicitIssuer && explicitIssuer.length > 0) { + return explicitIssuer; + } + + return REGISTRY_ISSUER_BY_ENVIRONMENT[config.ENVIRONMENT]; } diff --git a/apps/registry/src/auth/AGENTS.md b/apps/registry/src/auth/AGENTS.md index 248150c..3bf501b 100644 --- a/apps/registry/src/auth/AGENTS.md +++ b/apps/registry/src/auth/AGENTS.md @@ -30,3 +30,10 @@ - Keep access-token parsing (`clw_agt_`) centralized in `agent-auth-token.ts`; do not duplicate marker/format checks in route handlers. - `POST /v1/agents/auth/validate` must fail closed with `401` for missing/invalid/expired/revoked credentials. 
- Access validation must compare hashed token material with constant-time semantics and update `access_last_used_at` on successful validation. + +## Internal Service Auth Rules +- Keep proxy-to-registry internal auth in `service-auth.ts` with database-backed service records (`internal_services`). +- Keep scope normalization/parsing centralized in `internal-service-scopes.ts` so admin route payload validation and middleware config parsing cannot drift. +- Guard all internal routes (`/internal/v1/...`) with `createServiceAuth({ requiredScopes })` and the `INTERNAL_SERVICE_ID_HEADER`/`INTERNAL_SERVICE_SECRET_HEADER` names defined in `@clawdentity/sdk`. +- Persist per-service secrets with the `clw_srv_` marker, hash them before storing, and refresh `last_used_at` on every authenticated call. +- Fail closed when a service record is missing, disabled, or presents an invalid prefix/secret, and surface `INTERNAL_SERVICE_UNAUTHORIZED`/`INTERNAL_SERVICE_FORBIDDEN` codes accordingly. diff --git a/apps/registry/src/auth/agent-claw-auth.ts b/apps/registry/src/auth/agent-claw-auth.ts index 39a09f2..7c21245 100644 --- a/apps/registry/src/auth/agent-claw-auth.ts +++ b/apps/registry/src/auth/agent-claw-auth.ts @@ -93,7 +93,7 @@ export async function verifyAgentClawRequest(input: { const token = parseClawAuthorizationHeader( input.request.headers.get("authorization") ?? 
undefined, ); - const expectedIssuer = resolveRegistryIssuer(input.config.ENVIRONMENT); + const expectedIssuer = resolveRegistryIssuer(input.config); const verificationKeys = buildRegistryVerificationKeys( input.config.REGISTRY_SIGNING_KEYS, ); diff --git a/apps/registry/src/auth/internal-service-scopes.ts b/apps/registry/src/auth/internal-service-scopes.ts new file mode 100644 index 0000000..2d34b6a --- /dev/null +++ b/apps/registry/src/auth/internal-service-scopes.ts @@ -0,0 +1,61 @@ +import { AppError } from "@clawdentity/sdk"; + +export function normalizeInternalServiceScopes( + scopes: readonly string[], +): string[] { + return [ + ...new Set(scopes.map((scope) => scope.trim()).filter((scope) => scope)), + ]; +} + +export function parseInternalServiceScopesPayload(value: unknown): string[] { + if (!Array.isArray(value)) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const scopes = normalizeInternalServiceScopes( + value.filter((scope): scope is string => typeof scope === "string"), + ); + if (scopes.length === 0) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + return scopes; +} + +export function parseInternalServiceScopesJson(scopesJson: string): string[] { + let parsed: unknown; + try { + parsed = JSON.parse(scopesJson); + } catch { + throw new AppError({ + code: "INTERNAL_SERVICE_CONFIG_INVALID", + message: "Internal service scopes are invalid", + status: 500, + expose: true, + }); + } + + if (!Array.isArray(parsed)) { + throw new AppError({ + code: "INTERNAL_SERVICE_CONFIG_INVALID", + message: "Internal service scopes are invalid", + status: 500, + expose: true, + }); + } + + return normalizeInternalServiceScopes( + parsed.filter((scope): scope is string => typeof scope === "string"), + ); +} diff --git a/apps/registry/src/auth/service-auth.ts 
b/apps/registry/src/auth/service-auth.ts new file mode 100644 index 0000000..0b4455f --- /dev/null +++ b/apps/registry/src/auth/service-auth.ts @@ -0,0 +1,176 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { + AppError, + INTERNAL_SERVICE_ID_HEADER, + INTERNAL_SERVICE_SECRET_HEADER, +} from "@clawdentity/sdk"; +import { and, eq } from "drizzle-orm"; +import { createMiddleware } from "hono/factory"; +import { createDb } from "../db/client.js"; +import { internal_services } from "../db/schema.js"; +import { constantTimeEqual } from "./api-key-token.js"; +import { parseInternalServiceScopesJson } from "./internal-service-scopes.js"; + +export const INTERNAL_SERVICE_SECRET_MARKER = "clw_srv_"; +const INTERNAL_SERVICE_SECRET_LOOKUP_ENTROPY_LENGTH = 8; +const INTERNAL_SERVICE_SECRET_RANDOM_BYTES_LENGTH = 32; + +export type AuthenticatedService = { + id: string; + name: string; + scopes: string[]; +}; + +function unauthorizedError(message: string): AppError { + return new AppError({ + code: "INTERNAL_SERVICE_UNAUTHORIZED", + message, + status: 401, + expose: true, + }); +} + +function forbiddenError(message: string): AppError { + return new AppError({ + code: "INTERNAL_SERVICE_FORBIDDEN", + message, + status: 403, + expose: true, + }); +} + +function parseRequiredHeader(value: string | undefined, label: string): string { + const normalized = typeof value === "string" ? 
value.trim() : ""; + if (normalized.length === 0) { + throw unauthorizedError(`${label} header is required`); + } + + return normalized; +} + +function parseServiceSecret(secret: string): string { + if ( + !secret.startsWith(INTERNAL_SERVICE_SECRET_MARKER) || + secret.length <= INTERNAL_SERVICE_SECRET_MARKER.length + ) { + throw unauthorizedError("Service secret is invalid"); + } + + return secret; +} + +export function deriveInternalServiceSecretPrefix(secret: string): string { + const normalized = parseServiceSecret(secret); + const entropyPrefix = normalized.slice( + INTERNAL_SERVICE_SECRET_MARKER.length, + INTERNAL_SERVICE_SECRET_MARKER.length + + INTERNAL_SERVICE_SECRET_LOOKUP_ENTROPY_LENGTH, + ); + if (entropyPrefix.length === 0) { + throw unauthorizedError("Service secret is invalid"); + } + + return `${INTERNAL_SERVICE_SECRET_MARKER}${entropyPrefix}`; +} + +export async function hashInternalServiceSecret( + secret: string, +): Promise { + const normalized = parseServiceSecret(secret); + const digest = await crypto.subtle.digest( + "SHA-256", + new TextEncoder().encode(normalized), + ); + return Array.from(new Uint8Array(digest)) + .map((value) => value.toString(16).padStart(2, "0")) + .join(""); +} + +export function generateInternalServiceSecret(): string { + const randomBytes = crypto.getRandomValues( + new Uint8Array(INTERNAL_SERVICE_SECRET_RANDOM_BYTES_LENGTH), + ); + return `${INTERNAL_SERVICE_SECRET_MARKER}${encodeBase64url(randomBytes)}`; +} + +function assertScopes( + availableScopes: readonly string[], + requiredScopes: readonly string[], +): void { + if (requiredScopes.length === 0) { + return; + } + + const available = new Set(availableScopes); + for (const requiredScope of requiredScopes) { + if (!available.has(requiredScope)) { + throw forbiddenError("Internal service is missing required scope"); + } + } +} + +export function createServiceAuth(options?: { + requiredScopes?: readonly string[]; +}) { + const requiredScopes = 
[...(options?.requiredScopes ?? [])]; + return createMiddleware<{ + Bindings: { + DB: D1Database; + }; + Variables: { + service: AuthenticatedService; + }; + }>(async (c, next) => { + const serviceId = parseRequiredHeader( + c.req.header(INTERNAL_SERVICE_ID_HEADER), + INTERNAL_SERVICE_ID_HEADER, + ); + const serviceSecret = parseRequiredHeader( + c.req.header(INTERNAL_SERVICE_SECRET_HEADER), + INTERNAL_SERVICE_SECRET_HEADER, + ); + const secretPrefix = deriveInternalServiceSecretPrefix(serviceSecret); + const secretHash = await hashInternalServiceSecret(serviceSecret); + + const db = createDb(c.env.DB); + const rows = await db + .select({ + id: internal_services.id, + name: internal_services.name, + secretHash: internal_services.secret_hash, + scopesJson: internal_services.scopes_json, + }) + .from(internal_services) + .where( + and( + eq(internal_services.id, serviceId), + eq(internal_services.secret_prefix, secretPrefix), + eq(internal_services.status, "active"), + ), + ) + .limit(1); + + const row = rows[0]; + if (!row || !constantTimeEqual(secretHash, row.secretHash)) { + throw unauthorizedError("Service credentials are invalid"); + } + + const scopes = parseInternalServiceScopesJson(row.scopesJson); + assertScopes(scopes, requiredScopes); + + c.set("service", { + id: row.id, + name: row.name, + scopes, + }); + + await db + .update(internal_services) + .set({ + last_used_at: new Date().toISOString(), + }) + .where(eq(internal_services.id, row.id)); + + await next(); + }); +} diff --git a/apps/registry/src/db/schema.ts b/apps/registry/src/db/schema.ts index 4aeb86b..a488017 100644 --- a/apps/registry/src/db/schema.ts +++ b/apps/registry/src/db/schema.ts @@ -176,24 +176,27 @@ export const invites = sqliteTable("invites", { created_at: text("created_at").notNull(), }); -export const proxy_pairing_keys = sqliteTable( - "proxy_pairing_keys", +export const internal_services = sqliteTable( + "internal_services", { id: text("id").primaryKey(), - issuer_origin: 
text("issuer_origin").notNull(), - pkid: text("pkid").notNull(), - public_key_x: text("public_key_x").notNull(), + name: text("name").notNull().unique(), + secret_hash: text("secret_hash").notNull(), + secret_prefix: text("secret_prefix").notNull(), + scopes_json: text("scopes_json").notNull(), + status: text("status", { enum: ["active", "revoked"] }) + .notNull() + .default("active"), created_by: text("created_by") .notNull() .references(() => humans.id), - expires_at: text("expires_at").notNull(), + rotated_at: text("rotated_at"), + last_used_at: text("last_used_at"), created_at: text("created_at").notNull(), + updated_at: text("updated_at").notNull(), }, (table) => [ - uniqueIndex("idx_proxy_pairing_keys_issuer_pkid").on( - table.issuer_origin, - table.pkid, - ), - index("idx_proxy_pairing_keys_expires_at").on(table.expires_at), + index("idx_internal_services_secret_prefix").on(table.secret_prefix), + index("idx_internal_services_status").on(table.status), ], ); diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 9ba9bd3..49c5480 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -1,16 +1,19 @@ import { ADMIN_BOOTSTRAP_PATH, + ADMIN_INTERNAL_SERVICES_PATH, AGENT_AUTH_REFRESH_PATH, AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, canonicalizeAgentRegistrationProof, encodeBase64url, generateUlid, + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, INVITES_PATH, INVITES_REDEEM_PATH, ME_API_KEYS_PATH, makeAgentDid, makeHumanDid, + REGISTRY_METADATA_PATH, } from "@clawdentity/protocol"; import { encodeEd25519SignatureBase64url, @@ -2404,6 +2407,37 @@ describe("GET /health", () => { }); }); +describe(`GET ${REGISTRY_METADATA_PATH}`, () => { + it("returns environment metadata including resolved proxy URL", async () => { + const res = await createRegistryApp().request( + `https://registry.example.test${REGISTRY_METADATA_PATH}`, + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "development", + 
APP_VERSION: "sha-meta-123", + PROXY_URL: "https://dev.proxy.clawdentity.com", + }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + status: string; + environment: string; + version: string; + registryUrl: string; + proxyUrl: string; + }; + expect(body).toEqual({ + status: "ok", + environment: "development", + version: "sha-meta-123", + registryUrl: "https://registry.example.test", + proxyUrl: "https://dev.proxy.clawdentity.com", + }); + }); +}); + describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { it("returns 503 when bootstrap secret is not configured", async () => { const { database } = createFakeDb([]); @@ -3030,7 +3064,7 @@ describe("GET /v1/crl", () => { const claims = await verifyCRL({ token: body.crl, - expectedIssuer: "https://dev.api.clawdentity.com", + expectedIssuer: "https://dev.registry.clawdentity.com", registryKeys: keysBody.keys .filter((key) => key.status === "active") .map((key) => ({ @@ -3704,11 +3738,13 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { name: string; token: string; }; + proxyUrl: string; }; expect(redeemBody.human.displayName).toBe("Invitee Alpha"); expect(redeemBody.human.role).toBe("user"); expect(redeemBody.apiKey.name).toBe("primary-invite-key"); expect(redeemBody.apiKey.token.startsWith("clw_pat_")).toBe(true); + expect(redeemBody.proxyUrl).toBe("https://dev.proxy.clawdentity.com"); expect(humanInserts).toHaveLength(1); expect(apiKeyInserts).toHaveLength(1); @@ -4606,6 +4642,41 @@ describe("GET /v1/agents/:id/ownership", () => { }); }); +describe("internal service-auth routes", () => { + it("returns 401 when internal service credential headers are missing", async () => { + const res = await createRegistryApp().request( + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { 
+ error: { code: string }; + }; + expect(body.error.code).toBe("INTERNAL_SERVICE_UNAUTHORIZED"); + }); + + // Service-scope and payload-validation integration is covered by + // dedicated auth + route-level tests that exercise real D1-backed flows. + it("requires PAT auth for admin internal service endpoints", async () => { + const res = await createRegistryApp().request( + ADMIN_INTERNAL_SERVICES_PATH, + { + method: "GET", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + expect(res.status).toBe(401); + }); +}); + describe("DELETE /v1/agents/:id", () => { it("returns 401 when PAT is missing", async () => { const agentId = generateUlid(1700200000000); @@ -5100,7 +5171,7 @@ describe("POST /v1/agents/:id/reissue", () => { const claims = await verifyAIT({ token: body.ait, - expectedIssuer: "https://dev.api.clawdentity.com", + expectedIssuer: "https://dev.registry.clawdentity.com", registryKeys: keysBody.keys .filter((key) => key.status === "active") .map((key) => ({ @@ -5259,7 +5330,7 @@ describe("POST /v1/agents/:id/reissue", () => { const claims = await verifyAIT({ token: body.ait, - expectedIssuer: "https://dev.api.clawdentity.com", + expectedIssuer: "https://dev.registry.clawdentity.com", registryKeys: [ { kid: "reg-key-1", @@ -6014,7 +6085,7 @@ describe("POST /v1/agents", () => { const claims = await verifyAIT({ token: registerBody.ait, - expectedIssuer: "https://dev.api.clawdentity.com", + expectedIssuer: "https://dev.registry.clawdentity.com", registryKeys: keysBody.keys .filter((key) => key.status === "active") .map((key) => ({ @@ -6027,7 +6098,7 @@ describe("POST /v1/agents", () => { })), }); - expect(claims.iss).toBe("https://dev.api.clawdentity.com"); + expect(claims.iss).toBe("https://dev.registry.clawdentity.com"); expect(claims.sub).toBe(registerBody.agent.did); expect(claims.ownerDid).toBe(registerBody.agent.ownerDid); expect(claims.name).toBe(registerBody.agent.name); @@ -6136,7 +6207,7 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { 
const refreshTokenHash = await hashAgentToken(refreshToken); const ait = await signAIT({ claims: { - iss: "https://dev.api.clawdentity.com", + iss: "https://dev.registry.clawdentity.com", sub: agentDid, ownerDid: makeHumanDid(generateUlid(Date.now() + 2)), name: "agent-refresh-01", diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 46288a3..cf85d13 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,30 +1,38 @@ import { ADMIN_BOOTSTRAP_PATH, + ADMIN_INTERNAL_SERVICES_PATH, AGENT_AUTH_REFRESH_PATH, AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, generateUlid, + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, INVITES_PATH, INVITES_REDEEM_PATH, ME_API_KEYS_PATH, makeHumanDid, - PROXY_PAIRING_KEYS_PATH, - PROXY_PAIRING_KEYS_RESOLVE_PATH, + parseDid, + parseUlid, + REGISTRY_METADATA_PATH, } from "@clawdentity/protocol"; import { AppError, + createEventEnvelope, createHonoErrorHandler, + createInMemoryEventBus, createLogger, + createQueueEventBus, createRequestContextMiddleware, createRequestLoggingMiddleware, + type EventBus, nowIso, parseRegistryConfig, + type QueuePublisher, type RegistryConfig, shouldExposeVerboseErrors, signAIT, signCRL, } from "@clawdentity/sdk"; -import { and, desc, eq, gt, isNull, lt } from "drizzle-orm"; +import { and, desc, eq, isNull, lt } from "drizzle-orm"; import { Hono } from "hono"; import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; import { @@ -78,6 +86,14 @@ import { generateApiKeyToken, hashApiKeyToken, } from "./auth/api-key-token.js"; +import { parseInternalServiceScopesPayload } from "./auth/internal-service-scopes.js"; +import { + type AuthenticatedService, + createServiceAuth, + deriveInternalServiceSecretPrefix, + generateInternalServiceSecret, + hashInternalServiceSecret, +} from "./auth/service-auth.js"; import { createDb } from "./db/client.js"; import { agent_auth_events, @@ -86,8 +102,8 @@ import { agents, api_keys, humans, + internal_services, 
invites, - proxy_pairing_keys, revocations, } from "./db/schema.js"; import { @@ -116,6 +132,10 @@ type Bindings = { DB: D1Database; ENVIRONMENT: string; APP_VERSION?: string; + PROXY_URL?: string; + REGISTRY_ISSUER_URL?: string; + EVENT_BUS_BACKEND?: "memory" | "queue"; + EVENT_BUS_QUEUE?: QueuePublisher; BOOTSTRAP_SECRET?: string; REGISTRY_SIGNING_KEY?: string; REGISTRY_SIGNING_KEYS?: string; @@ -130,8 +150,25 @@ const CRL_TTL_SECONDS = REGISTRY_CACHE_MAX_AGE_SECONDS + REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS + CRL_EXPIRY_SAFETY_BUFFER_SECONDS; +const PROXY_URL_BY_ENVIRONMENT: Record = + { + development: "https://dev.proxy.clawdentity.com", + production: "https://proxy.clawdentity.com", + test: "https://dev.proxy.clawdentity.com", + }; // Deterministic bootstrap identity guarantees one-time admin creation under races. const BOOTSTRAP_ADMIN_HUMAN_ID = "00000000000000000000000000"; +const REGISTRY_SERVICE_EVENT_VERSION = "v1"; + +const AGENT_AUTH_EVENT_NAME_BY_TYPE: Record< + "issued" | "refreshed" | "revoked" | "refresh_rejected", + string +> = { + issued: "agent.auth.issued", + refreshed: "agent.auth.refreshed", + revoked: "agent.auth.revoked", + refresh_rejected: "agent.auth.refresh_rejected", +}; type OwnedAgent = { id: string; @@ -204,6 +241,7 @@ type RegistryRateLimitRuntimeOptions = { type CreateRegistryAppOptions = { rateLimit?: RegistryRateLimitRuntimeOptions; + eventBus?: EventBus; }; function crlBuildError(options: { @@ -499,118 +537,247 @@ function parseAgentAccessHeaderToken(token: string | undefined): string { } } -function parseIssuerOrigin(value: unknown): string { - if (typeof value !== "string") { +function parseInternalServiceName(value: unknown): string { + const normalized = typeof value === "string" ? 
value.trim() : ""; + if (!/^[a-z0-9][a-z0-9-_]{1,63}$/i.test(normalized)) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + return normalized; +} + +function parseInternalServiceCreatePayload(payload: unknown): { + name: string; + scopes: string[]; +} { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", status: 400, expose: true, }); } - let parsed: URL; + const value = payload as Record; + return { + name: parseInternalServiceName(value.name), + scopes: parseInternalServiceScopesPayload(value.scopes), + }; +} + +function parseInternalServicePathId(input: { + id: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): string { + const candidate = input.id.trim(); try { - parsed = new URL(value.trim()); + return parseUlid(candidate).value; } catch { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "INTERNAL_SERVICE_INVALID_PATH", + message: + input.environment === "production" + ? "Request could not be processed" + : "Internal service path is invalid", status: 400, - expose: true, + expose: input.environment !== "production", + details: + input.environment === "production" + ? 
undefined + : { + fieldErrors: { id: ["id must be a valid ULID"] }, + formErrors: [], + }, }); } +} - if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { +function parseInternalServiceRotatePayload(payload: unknown): { + scopes?: string[]; +} { + if (payload === undefined || payload === null) { + return {}; + } + if (typeof payload !== "object" || Array.isArray(payload)) { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", status: 400, expose: true, }); } - return parsed.origin; + const value = payload as Record; + if (value.scopes === undefined) { + return {}; + } + + return { + scopes: parseInternalServiceScopesPayload(value.scopes), + }; } -function parseProxyPairingKeyRegisterPayload(payload: unknown): { - issuerOrigin: string; - pkid: string; - publicKeyX: string; - expiresAt: string; -} { - if (!payload || typeof payload !== "object" || Array.isArray(payload)) { +function resolveEventBusBackend( + config: RegistryConfig, +): NonNullable { + if (config.EVENT_BUS_BACKEND === "memory") { + return "memory"; + } + + if (config.EVENT_BUS_BACKEND === "queue") { + return "queue"; + } + + return config.ENVIRONMENT === "development" || + config.ENVIRONMENT === "production" + ? 
"queue" + : "memory"; +} + +function resolveRegistryEventBus(input: { + config: RegistryConfig; + bindings: Bindings; + explicitBus?: EventBus; +}): EventBus { + if (input.explicitBus !== undefined) { + return input.explicitBus; + } + + const backend = resolveEventBusBackend(input.config); + if (backend === "memory") { + return createInMemoryEventBus(); + } + + const queue = input.bindings.EVENT_BUS_QUEUE; + if (queue === undefined) { + throw new AppError({ + code: "CONFIG_VALIDATION_FAILED", + message: "Registry configuration is invalid", + status: 500, + expose: true, + details: { + fieldErrors: { + EVENT_BUS_QUEUE: [ + "EVENT_BUS_QUEUE is required when EVENT_BUS_BACKEND is queue", + ], + }, + formErrors: [], + }, + }); + } + + return createQueueEventBus(queue); +} + +function parseHumanDid(value: unknown): string { + if (typeof value !== "string") { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", status: 400, expose: true, }); } - const value = payload as Record; - const pkid = typeof value.pkid === "string" ? value.pkid.trim() : ""; - const publicKeyX = - typeof value.publicKeyX === "string" ? value.publicKeyX.trim() : ""; - const expiresAt = - typeof value.expiresAt === "string" ? 
value.expiresAt.trim() : ""; - const issuerOrigin = parseIssuerOrigin(value.issuerOrigin); - - if (pkid.length === 0 || publicKeyX.length === 0 || expiresAt.length === 0) { + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "human") { + throw new Error("invalid"); + } + } catch { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", status: 400, expose: true, }); } - const expiresAtMillis = Date.parse(expiresAt); - if (!Number.isFinite(expiresAtMillis)) { + if (candidate.length === 0) { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", status: 400, expose: true, }); } - if (expiresAtMillis <= Date.now()) { + return candidate; +} + +function parseAgentDid(value: unknown): string { + if (typeof value !== "string") { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", status: 400, expose: true, }); } - return { - issuerOrigin, - pkid, - publicKeyX, - expiresAt: new Date(expiresAtMillis).toISOString(), - }; + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + throw new Error("invalid"); + } + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + if (candidate.length === 0) { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + return candidate; } -function parseProxyPairingKeyResolveQuery(requestUrl: string): { - issuerOrigin: string; - pkid: string; +function 
parseInternalOwnershipCheckPayload(payload: unknown): { + ownerDid: string; + agentDid: string; } { - const url = new URL(requestUrl); - const issuerOrigin = parseIssuerOrigin(url.searchParams.get("issuerOrigin")); - const pkid = url.searchParams.get("pkid")?.trim() ?? ""; - if (pkid.length === 0) { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key query is invalid", + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + let ownerDid: string; + try { + ownerDid = parseHumanDid(value.ownerDid); + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", status: 400, expose: true, }); } return { - issuerOrigin, - pkid, + ownerDid, + agentDid: parseAgentDid(value.agentDid), }; } @@ -622,7 +789,10 @@ async function insertAgentAuthEvent(input: { reason?: string; metadata?: Record; createdAt?: string; + eventBus?: EventBus; + initiatedByAccountId?: string | null; }): Promise { + const createdAt = input.createdAt ?? nowIso(); await input.db.insert(agent_auth_events).values({ id: generateUlid(Date.now()), agent_id: input.agentId, @@ -631,8 +801,40 @@ async function insertAgentAuthEvent(input: { reason: input.reason ?? null, metadata_json: input.metadata === undefined ? null : JSON.stringify(input.metadata), - created_at: input.createdAt ?? 
nowIso(), + created_at: createdAt, }); + + if (input.eventBus === undefined) { + return; + } + + const eventData: Record = { + agentId: input.agentId, + sessionId: input.sessionId, + }; + if (input.reason !== undefined) { + eventData.reason = input.reason; + } + if (input.metadata !== undefined) { + eventData.metadata = input.metadata; + } + + try { + await input.eventBus.publish( + createEventEnvelope({ + type: AGENT_AUTH_EVENT_NAME_BY_TYPE[input.eventType], + version: REGISTRY_SERVICE_EVENT_VERSION, + initiatedByAccountId: input.initiatedByAccountId ?? null, + timestampUtc: createdAt, + data: eventData, + }), + ); + } catch (error) { + logger.warn("registry.event_bus.publish_failed", { + eventType: input.eventType, + errorName: error instanceof Error ? error.name : "unknown", + }); + } } async function resolveInviteRedeemStateError(input: { @@ -755,8 +957,14 @@ function adminBootstrapAlreadyCompletedError(): AppError { }); } +function resolveProxyUrl(config: RegistryConfig): string { + return config.PROXY_URL ?? PROXY_URL_BY_ENVIRONMENT[config.ENVIRONMENT]; +} + function createRegistryApp(options: CreateRegistryAppOptions = {}) { let cachedConfig: RegistryConfig | undefined; + let cachedEventBus: EventBus | undefined; + let cachedEventBusKey: string | undefined; function getConfig(bindings: Bindings): RegistryConfig { if (cachedConfig) { @@ -767,9 +975,37 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { return cachedConfig; } + function getEventBus(bindings: Bindings): EventBus { + if (options.eventBus !== undefined) { + return options.eventBus; + } + + const config = getConfig(bindings); + const resolvedBackend = resolveEventBusBackend(config); + const key = `${config.ENVIRONMENT}|${resolvedBackend}|${ + bindings.EVENT_BUS_QUEUE === undefined ? 
"no-queue" : "has-queue" + }`; + if (cachedEventBus && cachedEventBusKey === key) { + return cachedEventBus; + } + + const resolved = resolveRegistryEventBus({ + config, + bindings, + explicitBus: options.eventBus, + }); + cachedEventBus = resolved; + cachedEventBusKey = key; + return resolved; + } + const app = new Hono<{ Bindings: Bindings; - Variables: { requestId: string; human: AuthenticatedHuman }; + Variables: { + requestId: string; + human: AuthenticatedHuman; + service: AuthenticatedService; + }; }>(); const rateLimitOptions = options.rateLimit; const resolveRateLimit = createInMemoryRateLimit({ @@ -820,6 +1056,17 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { }); }); + app.get(REGISTRY_METADATA_PATH, (c) => { + const config = getConfig(c.env); + return c.json({ + status: "ok", + environment: config.ENVIRONMENT, + version: config.APP_VERSION ?? "0.0.0", + registryUrl: c.req.url ? new URL(c.req.url).origin : undefined, + proxyUrl: resolveProxyUrl(config), + }); + }); + app.post(ADMIN_BOOTSTRAP_PATH, async (c) => { const config = getConfig(c.env); const expectedBootstrapSecret = requireBootstrapSecret( @@ -997,7 +1244,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const claims = buildCrlClaims({ rows, environment: config.ENVIRONMENT, - issuer: resolveRegistryIssuer(config.ENVIRONMENT), + issuer: resolveRegistryIssuer(config), nowSeconds, }); const crl = await signCRL({ @@ -1040,94 +1287,233 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { return c.json(mapResolvedAgentRow(row)); }); - app.post(PROXY_PAIRING_KEYS_PATH, createApiKeyAuth(), async (c) => { + app.post(ADMIN_INTERNAL_SERVICES_PATH, createApiKeyAuth(), async (c) => { + const human = c.get("human"); + if (human.role !== "admin") { + throw new AppError({ + code: "INTERNAL_SERVICE_CREATE_FORBIDDEN", + message: "Admin role is required", + status: 403, + expose: true, + }); + } + let payload: unknown; try { payload = await 
c.req.json(); } catch { throw new AppError({ - code: "PROXY_PAIRING_KEY_INVALID", - message: "Pairing key payload is invalid", + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", status: 400, expose: true, }); } - const parsed = parseProxyPairingKeyRegisterPayload(payload); - const human = c.get("human"); + const parsed = parseInternalServiceCreatePayload(payload); const db = createDb(c.env.DB); - const createdAt = nowIso(); - - await db - .insert(proxy_pairing_keys) - .values({ - id: generateUlid(Date.now()), - issuer_origin: parsed.issuerOrigin, - pkid: parsed.pkid, - public_key_x: parsed.publicKeyX, - created_by: human.id, - expires_at: parsed.expiresAt, - created_at: createdAt, + const existingRows = await db + .select({ + id: internal_services.id, }) - .onConflictDoUpdate({ - target: [proxy_pairing_keys.issuer_origin, proxy_pairing_keys.pkid], - set: { - public_key_x: parsed.publicKeyX, - created_by: human.id, - expires_at: parsed.expiresAt, - created_at: createdAt, - }, + .from(internal_services) + .where(eq(internal_services.name, parsed.name)) + .limit(1); + if (existingRows[0]) { + throw new AppError({ + code: "INTERNAL_SERVICE_ALREADY_EXISTS", + message: "Internal service already exists", + status: 409, + expose: true, }); + } + + const secret = generateInternalServiceSecret(); + const secretHash = await hashInternalServiceSecret(secret); + const secretPrefix = deriveInternalServiceSecretPrefix(secret); + const createdAt = nowIso(); + const serviceId = generateUlid(Date.now()); + await db.insert(internal_services).values({ + id: serviceId, + name: parsed.name, + secret_hash: secretHash, + secret_prefix: secretPrefix, + scopes_json: JSON.stringify(parsed.scopes), + status: "active", + created_by: human.id, + rotated_at: null, + last_used_at: null, + created_at: createdAt, + updated_at: createdAt, + }); return c.json( { - key: { - issuerOrigin: parsed.issuerOrigin, - pkid: parsed.pkid, - expiresAt: parsed.expiresAt, + 
internalService: { + id: serviceId, + name: parsed.name, + scopes: parsed.scopes, + status: "active", + createdAt, + updatedAt: createdAt, + rotatedAt: null, + lastUsedAt: null, + secret, }, }, 201, ); }); - app.get(PROXY_PAIRING_KEYS_RESOLVE_PATH, async (c) => { - const query = parseProxyPairingKeyResolveQuery(c.req.url); - const db = createDb(c.env.DB); - const now = nowIso(); - - const rows = await db - .select({ - issuerOrigin: proxy_pairing_keys.issuer_origin, - pkid: proxy_pairing_keys.pkid, - publicKeyX: proxy_pairing_keys.public_key_x, - expiresAt: proxy_pairing_keys.expires_at, - }) - .from(proxy_pairing_keys) - .where( - and( - eq(proxy_pairing_keys.issuer_origin, query.issuerOrigin), - eq(proxy_pairing_keys.pkid, query.pkid), - gt(proxy_pairing_keys.expires_at, now), - ), - ) - .limit(1); - - const row = rows[0]; - if (!row) { + app.get(ADMIN_INTERNAL_SERVICES_PATH, createApiKeyAuth(), async (c) => { + const human = c.get("human"); + if (human.role !== "admin") { throw new AppError({ - code: "PROXY_PAIRING_KEY_NOT_FOUND", - message: "Pairing key is not available", - status: 404, + code: "INTERNAL_SERVICE_LIST_FORBIDDEN", + message: "Admin role is required", + status: 403, expose: true, }); } + const db = createDb(c.env.DB); + const rows = await db + .select({ + id: internal_services.id, + name: internal_services.name, + scopesJson: internal_services.scopes_json, + status: internal_services.status, + createdAt: internal_services.created_at, + updatedAt: internal_services.updated_at, + rotatedAt: internal_services.rotated_at, + lastUsedAt: internal_services.last_used_at, + }) + .from(internal_services) + .orderBy(desc(internal_services.created_at), desc(internal_services.id)); + return c.json({ - key: row, + internalServices: rows.map((row) => ({ + id: row.id, + name: row.name, + scopes: JSON.parse(row.scopesJson) as string[], + status: row.status, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + rotatedAt: row.rotatedAt, + lastUsedAt: 
row.lastUsedAt, + })), }); }); + app.post( + `${ADMIN_INTERNAL_SERVICES_PATH}/:id/rotate`, + createApiKeyAuth(), + async (c) => { + const config = getConfig(c.env); + const human = c.get("human"); + if (human.role !== "admin") { + throw new AppError({ + code: "INTERNAL_SERVICE_ROTATE_FORBIDDEN", + message: "Admin role is required", + status: 403, + expose: true, + }); + } + + const serviceId = parseInternalServicePathId({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + + let payload: unknown = {}; + try { + const rawBody = await c.req.text(); + if (rawBody.trim().length > 0) { + payload = JSON.parse(rawBody); + } + } catch { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const parsedPayload = parseInternalServiceRotatePayload(payload); + const db = createDb(c.env.DB); + const rows = await db + .select({ + id: internal_services.id, + name: internal_services.name, + scopesJson: internal_services.scopes_json, + status: internal_services.status, + }) + .from(internal_services) + .where(eq(internal_services.id, serviceId)) + .limit(1); + const service = rows[0]; + if (!service) { + throw new AppError({ + code: "INTERNAL_SERVICE_NOT_FOUND", + message: "Internal service was not found", + status: 404, + expose: true, + }); + } + if (service.status !== "active") { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID_STATE", + message: "Internal service cannot be rotated", + status: 409, + expose: true, + }); + } + + const scopes = + parsedPayload.scopes ?? 
+ ((JSON.parse(service.scopesJson) as unknown[]).filter( + (scope): scope is string => + typeof scope === "string" && scope.trim().length > 0, + ) as string[]); + if (scopes.length === 0) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const secret = generateInternalServiceSecret(); + const secretHash = await hashInternalServiceSecret(secret); + const secretPrefix = deriveInternalServiceSecretPrefix(secret); + const rotatedAt = nowIso(); + await db + .update(internal_services) + .set({ + secret_hash: secretHash, + secret_prefix: secretPrefix, + scopes_json: JSON.stringify(scopes), + rotated_at: rotatedAt, + updated_at: rotatedAt, + }) + .where(eq(internal_services.id, service.id)); + + return c.json({ + internalService: { + id: service.id, + name: service.name, + scopes, + status: "active", + rotatedAt, + updatedAt: rotatedAt, + secret, + }, + }); + }, + ); + app.get("/v1/me", createApiKeyAuth(), (c) => { return c.json({ human: c.get("human") }); }); @@ -1358,6 +1744,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { name: parsedPayload.apiKeyName, token: apiKeyToken, }, + proxyUrl: resolveProxyUrl(config), }, 201, ); @@ -1551,6 +1938,45 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { }); }); + app.post( + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, + createServiceAuth({ + requiredScopes: ["identity.read"], + }), + async (c) => { + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + const parsed = parseInternalOwnershipCheckPayload(payload); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + ownerDid: humans.did, + status: agents.status, + }) + .from(agents) + .innerJoin(humans, eq(agents.owner_id, humans.id)) + 
.where(eq(agents.did, parsed.agentDid)) + .limit(1); + + const row = rows[0]; + return c.json({ + ownsAgent: row !== undefined && row.ownerDid === parsed.ownerDid, + agentStatus: row?.status ?? null, + }); + }, + ); + app.post(AGENT_REGISTRATION_CHALLENGE_PATH, createApiKeyAuth(), async (c) => { const config = getConfig(c.env); const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); @@ -1649,7 +2075,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const registration = buildAgentRegistrationFromParsed({ parsedBody, ownerDid: human.did, - issuer: resolveRegistryIssuer(config.ENVIRONMENT), + issuer: resolveRegistryIssuer(config), }); const signer = await resolveRegistrySigner(config); const ait = await signAIT({ @@ -1734,6 +2160,8 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { metadata: { actor: "agent_registration", }, + eventBus: getEventBus(c.env), + initiatedByAccountId: human.did, }); } catch (error) { if (options.rollbackOnAgentInsertFailure) { @@ -1896,6 +2324,8 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { sessionId: existingSession.id, eventType: "refresh_rejected", reason: "invalid_refresh_token", + eventBus: getEventBus(c.env), + initiatedByAccountId: claims.ownerDid, }); throw agentAuthRefreshRejectedError({ code: "AGENT_AUTH_REFRESH_INVALID", @@ -1920,6 +2350,8 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { eventType: "revoked", reason: "refresh_token_expired", createdAt: revokedAt, + eventBus: getEventBus(c.env), + initiatedByAccountId: claims.ownerDid, }); throw agentAuthRefreshRejectedError({ code: "AGENT_AUTH_REFRESH_EXPIRED", @@ -1968,6 +2400,8 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { sessionId: existingSession.id, eventType: "refreshed", createdAt: refreshedAt, + eventBus: getEventBus(c.env), + initiatedByAccountId: claims.ownerDid, }); }; @@ -2145,6 +2579,8 @@ function createRegistryApp(options: 
CreateRegistryAppOptions = {}) { eventType: "revoked", reason: "owner_auth_revoke", createdAt: revokedAt, + eventBus: getEventBus(c.env), + initiatedByAccountId: human.did, }); }; @@ -2244,6 +2680,8 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { eventType: "revoked", reason: "agent_revoked", createdAt: revokedAt, + eventBus: getEventBus(c.env), + initiatedByAccountId: human.did, }); } }; @@ -2307,7 +2745,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { framework: existingAgent.framework, publicKey: existingAgent.public_key, previousExpiresAt: existingAgent.expires_at, - issuer: resolveRegistryIssuer(config.ENVIRONMENT), + issuer: resolveRegistryIssuer(config), }); const signer = await resolveRegistrySigner(config); const ait = await signAIT({ diff --git a/apps/registry/wrangler.jsonc b/apps/registry/wrangler.jsonc index f60b6bb..ab184b7 100644 --- a/apps/registry/wrangler.jsonc +++ b/apps/registry/wrangler.jsonc @@ -6,6 +6,7 @@ "compatibility_flags": ["nodejs_compat"], "env": { "dev": { + "name": "clawdentity-registry-dev", "d1_databases": [ { "binding": "DB", @@ -16,15 +17,26 @@ ], "routes": [ { - "pattern": "dev.api.clawdentity.com", + "pattern": "dev.registry.clawdentity.com", "custom_domain": true } ], + "queues": { + "producers": [ + { + "binding": "EVENT_BUS_QUEUE", + "queue": "clawdentity-events-dev" + } + ] + }, "vars": { - "ENVIRONMENT": "development" + "ENVIRONMENT": "development", + "PROXY_URL": "https://dev.proxy.clawdentity.com", + "EVENT_BUS_BACKEND": "queue" } }, "production": { + "name": "clawdentity-registry", "d1_databases": [ { "binding": "DB", @@ -35,12 +47,22 @@ ], "routes": [ { - "pattern": "api.clawdentity.com", + "pattern": "registry.clawdentity.com", "custom_domain": true } ], + "queues": { + "producers": [ + { + "binding": "EVENT_BUS_QUEUE", + "queue": "clawdentity-events" + } + ] + }, "vars": { - "ENVIRONMENT": "production" + "ENVIRONMENT": "production", + "PROXY_URL": 
"https://proxy.clawdentity.com", + "EVENT_BUS_BACKEND": "queue" } } } diff --git a/package.json b/package.json index 7a52095..d5135bb 100644 --- a/package.json +++ b/package.json @@ -17,9 +17,11 @@ "affected:typecheck:local": "nx affected -t typecheck --base=origin/main --head=HEAD", "affected:test:local": "nx affected -t lint,format,typecheck,test --base=origin/main --head=HEAD", "affected:ci": "nx affected -t lint,format,typecheck,test,build --base=$NX_BASE --head=$NX_HEAD", + "dev:registry": "pnpm -F @clawdentity/registry run dev", "dev:registry:local": "pnpm -F @clawdentity/registry run dev:local", "dev:proxy": "pnpm -F @clawdentity/proxy run dev", - "dev:proxy:development": "pnpm -F @clawdentity/proxy run dev:development", + "dev:proxy:dev": "pnpm -F @clawdentity/proxy run dev:dev", + "dev:proxy:local": "pnpm -F @clawdentity/proxy run dev:local", "dev:proxy:fresh": "pnpm -F @clawdentity/proxy run dev:fresh" }, "devDependencies": { diff --git a/packages/connector/AGENTS.md b/packages/connector/AGENTS.md index 692bb65..691843e 100644 --- a/packages/connector/AGENTS.md +++ b/packages/connector/AGENTS.md @@ -9,10 +9,17 @@ - Reuse shared protocol validators (`parseDid`, `parseUlid`) instead of duplicating DID/ULID logic. - Keep reconnect and heartbeat behavior deterministic and testable via dependency injection (`webSocketFactory`, `fetchImpl`, clock/random). - Keep local OpenClaw delivery concerns in `src/client.ts`; do not spread HTTP delivery logic across modules. -- Keep local OpenClaw restart handling bounded: retry only transient delivery failures with capped backoff and an overall retry budget so connector ack behavior remains compatible with relay DO delivery timeouts. +- Keep inbound connector delivery durable: acknowledge proxy delivery only after payload persistence to local inbox (`agents//inbound-inbox/index.json`), then replay asynchronously to OpenClaw hook. 
+- Keep local inbox storage portable and inspectable (`index.json` + `events.jsonl`) with atomic index writes (`.tmp` + rename); do not introduce runtime-specific persistence dependencies for connector inbox state. +- Keep replay behavior restart-safe: on runtime boot, replay pending inbox entries in background before relying on new WebSocket traffic. +- Keep local OpenClaw replay backoff bounded and deterministic (`CONNECTOR_INBOUND_RETRY_*` / `CONNECTOR_INBOUND_REPLAY_*`) with structured logging for replay success/failure. - Refresh agent access credentials at runtime startup when cached access tokens are missing or near expiry before attempting relay WebSocket connection, while persisting refreshed auth atomically to `registry-auth.json`. +- Sync `registry-auth.json` from disk before runtime auth refresh/retry decisions so external `agent auth refresh` updates are picked up without requiring a connector restart. +- Accept base proxy websocket URLs (`ws://host:port` / `wss://host`) and normalize them to relay connect path (`/v1/relay/connect`) before connector dial; avoid requiring callers to know the relay path details. +- Regenerate relay WebSocket auth headers (timestamp/nonce/signature) on every reconnect attempt; never reuse a previously-signed header set across retries. ## Testing Rules - `src/frames.test.ts` must cover roundtrip serialization and explicit invalid-frame failures. - Client tests must mock WebSocket/fetch and verify heartbeat ack, delivery forwarding, reconnect, and outbound queue flush behavior. +- Inbox tests must cover persistence, dedupe by request id, cap enforcement, and replay state transitions (`markReplayFailure`/`markDelivered`). - Keep tests fully offline and deterministic (fake timers where timing matters). 
diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md new file mode 100644 index 0000000..ba7f72f --- /dev/null +++ b/packages/connector/src/AGENTS.md @@ -0,0 +1,25 @@ +# AGENTS.md (packages/connector/src) + +## Source Layout +- Keep frame schema definitions in `frames.ts` and validate every inbound/outbound frame through parser helpers. +- Keep websocket lifecycle + ack behavior in `client.ts`. +- Keep local runtime orchestration (`/v1/outbound`, `/v1/status`, auth refresh, replay loop) in `runtime.ts`. +- Keep durable inbound storage logic isolated in `inbound-inbox.ts`. + +## Inbound Durability Rules +- Connector must persist inbound relay payloads before sending `deliver_ack accepted=true`. +- Persist connector inbox state as atomic JSON index + append-only JSONL events under `agents//inbound-inbox/`. +- Inbox dedupe key is request/frame id; duplicates must not create extra pending entries. +- Replay must continue after runtime restarts by loading pending entries from inbox index at startup. +- Do not drop pending entries on transient replay failures; reschedule with bounded backoff. + +## Replay/Health Rules +- Keep replay configuration environment-driven via `CONNECTOR_INBOUND_*` vars with safe defaults from `constants.ts`. +- `/v1/status` must include websocket state and inbound replay health (`pendingCount`, `oldestPendingAt`, replay activity/error, hook status). +- On inbox/status read failures, return explicit structured errors instead of crashing runtime. + +## Testing Rules +- `inbound-inbox.test.ts` must cover persistence, dedupe, cap enforcement, and replay bookkeeping transitions. 
+- `client.test.ts` must cover both delivery modes: + - direct local OpenClaw delivery fallback + - injected inbound persistence handler ack path diff --git a/packages/connector/src/client.test.ts b/packages/connector/src/client.test.ts index 0c59a8f..99f13e1 100644 --- a/packages/connector/src/client.test.ts +++ b/packages/connector/src/client.test.ts @@ -236,6 +236,57 @@ describe("ConnectorClient", () => { client.disconnect(); }); + it("acks success when inbound delivery handler persists payload", async () => { + const sockets: MockWebSocket[] = []; + const fetchMock = vi.fn(); + const inboundDeliverHandler = vi.fn(async () => ({ accepted: true })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + inboundDeliverHandler, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { message: "persist me" }, + }), + ); + + await vi.waitFor(() => { + expect(inboundDeliverHandler).toHaveBeenCalledTimes(1); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + expect(fetchMock).not.toHaveBeenCalled(); + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + it("retries transient local openclaw failures and eventually acks success", async () => { const sockets: MockWebSocket[] = []; const fetchMock = vi @@ 
-328,6 +379,46 @@ describe("ConnectorClient", () => { client.disconnect(); }); + it("refreshes connection headers on reconnect attempts", async () => { + const sockets: MockWebSocket[] = []; + const dialHeaders: Record[] = []; + let nonceCounter = 0; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 0, + reconnectMaxDelayMs: 0, + reconnectJitterRatio: 0, + connectionHeadersProvider: () => ({ + "x-claw-nonce": `nonce-${++nonceCounter}`, + }), + webSocketFactory: (url, headers) => { + dialHeaders.push(headers); + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + await vi.waitFor(() => { + expect(sockets).toHaveLength(1); + }); + expect(dialHeaders[0]["x-claw-nonce"]).toBe("nonce-1"); + + sockets[0].open(); + sockets[0].failClose(1006, "network down"); + + await vi.waitFor(() => { + expect(sockets).toHaveLength(2); + }); + expect(dialHeaders[1]["x-claw-nonce"]).toBe("nonce-2"); + + client.disconnect(); + }); + it("queues outbound enqueue frames until connected", async () => { const sockets: MockWebSocket[] = []; diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index 70c6338..d617bb5 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -56,6 +56,9 @@ export type ConnectorClientHooks = { export type ConnectorClientOptions = { connectorUrl: string; connectionHeaders?: Record; + connectionHeadersProvider?: + | (() => Record | Promise>) + | undefined; openclawBaseUrl: string; openclawHookToken?: string; openclawHookPath?: string; @@ -77,6 +80,9 @@ export type ConnectorClientOptions = { fetchImpl?: typeof fetch; logger?: Logger; hooks?: ConnectorClientHooks; + inboundDeliverHandler?: + | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) + | undefined; now?: () => number; random?: () 
=> number; ulidFactory?: (time?: number) => string; @@ -202,6 +208,9 @@ function normalizeConnectionHeaders( export class ConnectorClient { private readonly connectorUrl: string; private readonly connectionHeaders: Record; + private readonly connectionHeadersProvider: + | (() => Record | Promise>) + | undefined; private readonly openclawHookUrl: string; private readonly openclawHookToken?: string; private readonly heartbeatIntervalMs: number; @@ -222,6 +231,9 @@ export class ConnectorClient { private readonly fetchImpl: typeof fetch; private readonly logger: Logger; private readonly hooks: ConnectorClientHooks; + private readonly inboundDeliverHandler: + | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) + | undefined; private readonly now: () => number; private readonly random: () => number; private readonly ulidFactory: (time?: number) => string; @@ -238,6 +250,7 @@ export class ConnectorClient { this.connectionHeaders = normalizeConnectionHeaders( options.connectionHeaders, ); + this.connectionHeadersProvider = options.connectionHeadersProvider; this.openclawHookToken = options.openclawHookToken; this.heartbeatIntervalMs = options.heartbeatIntervalMs ?? DEFAULT_HEARTBEAT_INTERVAL_MS; @@ -290,6 +303,7 @@ export class ConnectorClient { options.logger ?? createLogger({ service: "connector", module: "client" }); this.hooks = options.hooks ?? {}; + this.inboundDeliverHandler = options.inboundDeliverHandler; this.now = options.now ?? Date.now; this.random = options.random ?? Math.random; this.ulidFactory = options.ulidFactory ?? 
generateUlid; @@ -306,7 +320,7 @@ export class ConnectorClient { } this.started = true; - this.connectSocket(); + void this.connectSocket(); } disconnect(): void { @@ -346,14 +360,30 @@ export class ConnectorClient { return frame; } - private connectSocket(): void { + private async connectSocket(): Promise { this.clearReconnectTimeout(); + let connectionHeaders = this.connectionHeaders; + if (this.connectionHeadersProvider) { + try { + connectionHeaders = normalizeConnectionHeaders( + await this.connectionHeadersProvider(), + ); + } catch (error) { + this.logger.warn("connector.websocket.create_failed", { + reason: sanitizeErrorReason(error), + }); + this.scheduleReconnect(); + return; + } + } + + if (!this.started) { + return; + } + try { - this.socket = this.webSocketFactory( - this.connectorUrl, - this.connectionHeaders, - ); + this.socket = this.webSocketFactory(this.connectorUrl, connectionHeaders); } catch (error) { this.logger.warn("connector.websocket.create_failed", { reason: sanitizeErrorReason(error), @@ -424,7 +454,7 @@ export class ConnectorClient { this.reconnectAttempt += 1; this.reconnectTimeout = setTimeout(() => { - this.connectSocket(); + void this.connectSocket(); }, delayMs); } @@ -534,6 +564,47 @@ export class ConnectorClient { } private async handleDeliverFrame(frame: DeliverFrame): Promise { + if (this.inboundDeliverHandler !== undefined) { + try { + const result = await this.inboundDeliverHandler(frame); + const ackFrame: DeliverAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "deliver_ack", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + ackId: frame.id, + accepted: result.accepted, + reason: result.reason, + }; + + this.sendFrame(ackFrame); + if (result.accepted) { + this.hooks.onDeliverSucceeded?.(frame); + } else { + this.hooks.onDeliverFailed?.( + frame, + new Error( + result.reason ?? 
+ "Inbound delivery was rejected by runtime handler", + ), + ); + } + } catch (error) { + const ackFrame: DeliverAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "deliver_ack", + id: this.makeFrameId(), + ts: this.makeTimestamp(), + ackId: frame.id, + accepted: false, + reason: sanitizeErrorReason(error), + }; + this.sendFrame(ackFrame); + this.hooks.onDeliverFailed?.(frame, error); + } + return; + } + try { await this.deliverToLocalOpenclawWithRetry(frame); const ackFrame: DeliverAckFrame = { diff --git a/packages/connector/src/constants.ts b/packages/connector/src/constants.ts index 70aefeb..2f3442c 100644 --- a/packages/connector/src/constants.ts +++ b/packages/connector/src/constants.ts @@ -18,9 +18,17 @@ export const DEFAULT_RECONNECT_JITTER_RATIO = 0.2; export const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; export const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; +export const DEFAULT_CONNECTOR_STATUS_PATH = "/v1/status"; export const DEFAULT_RELAY_DELIVER_TIMEOUT_MS = 15_000; export const DEFAULT_OPENCLAW_DELIVER_RETRY_BUDGET_MS = DEFAULT_RELAY_DELIVER_TIMEOUT_MS - 1_000; +export const DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES = 100_000; +export const DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES = 2 * 1024 * 1024 * 1024; +export const DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = 2_000; +export const DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE = 25; +export const DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS = 1_000; +export const DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS = 60_000; +export const DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR = 2; export const AGENT_ACCESS_HEADER = "x-claw-agent-access"; diff --git a/packages/connector/src/inbound-inbox.test.ts b/packages/connector/src/inbound-inbox.test.ts new file mode 100644 index 0000000..2379f47 --- /dev/null +++ b/packages/connector/src/inbound-inbox.test.ts @@ -0,0 +1,215 @@ +import { mkdirSync, mkdtempSync, readFileSync, rmSync } from "node:fs"; +import { tmpdir } from 
"node:os"; +import { join } from "node:path"; +import { afterEach, describe, expect, it } from "vitest"; +import { + createConnectorInboundInbox, + resolveConnectorInboundInboxDir, +} from "./inbound-inbox.js"; + +function createSandbox(): { cleanup: () => void; rootDir: string } { + const rootDir = mkdtempSync(join(tmpdir(), "clawdentity-connector-inbox-")); + return { + rootDir, + cleanup: () => { + rmSync(rootDir, { force: true, recursive: true }); + }, + }; +} + +afterEach(() => { + // no-op hook for symmetry and future timer cleanup +}); + +describe("ConnectorInboundInbox", () => { + it("persists and deduplicates inbound frames", async () => { + const sandbox = createSandbox(); + + try { + const inbox = createConnectorInboundInbox({ + configDir: sandbox.rootDir, + agentName: "alpha", + maxPendingMessages: 100, + maxPendingBytes: 1024 * 1024, + }); + + const first = await inbox.enqueue({ + v: 1, + type: "deliver", + id: "01HXYZTESTDELIVER000000000000", + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "hello" }, + }); + expect(first.accepted).toBe(true); + expect(first.duplicate).toBe(false); + + const second = await inbox.enqueue({ + v: 1, + type: "deliver", + id: "01HXYZTESTDELIVER000000000000", + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "hello" }, + }); + expect(second.accepted).toBe(true); + expect(second.duplicate).toBe(true); + expect(second.pendingCount).toBe(1); + + const snapshot = await inbox.getSnapshot(); + expect(snapshot.pendingCount).toBe(1); + expect(snapshot.pendingBytes).toBeGreaterThan(0); + + const inboxDir = resolveConnectorInboundInboxDir({ + configDir: sandbox.rootDir, + agentName: "alpha", + }); + const indexPath = join(inboxDir, "index.json"); + const eventsPath = join(inboxDir, "events.jsonl"); + + const indexRaw = readFileSync(indexPath, "utf8"); + 
expect(indexRaw).toContain("pendingByRequestId"); + const eventsRaw = readFileSync(eventsPath, "utf8"); + expect(eventsRaw).toContain("inbound_persisted"); + expect(eventsRaw).toContain("inbound_duplicate"); + } finally { + sandbox.cleanup(); + } + }); + + it("enforces inbox size and message caps", async () => { + const sandbox = createSandbox(); + + try { + const inbox = createConnectorInboundInbox({ + configDir: sandbox.rootDir, + agentName: "alpha", + maxPendingMessages: 1, + maxPendingBytes: 64, + }); + + const accepted = await inbox.enqueue({ + v: 1, + type: "deliver", + id: "01HXYZTESTDELIVER000000000001", + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "small" }, + }); + expect(accepted.accepted).toBe(true); + + const rejectedByCount = await inbox.enqueue({ + v: 1, + type: "deliver", + id: "01HXYZTESTDELIVER000000000002", + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "next" }, + }); + expect(rejectedByCount.accepted).toBe(false); + expect(rejectedByCount.reason).toContain("message cap"); + + const sandbox2 = createSandbox(); + try { + const byteCapped = createConnectorInboundInbox({ + configDir: sandbox2.rootDir, + agentName: "beta", + maxPendingMessages: 100, + maxPendingBytes: 8, + }); + const rejectedByBytes = await byteCapped.enqueue({ + v: 1, + type: "deliver", + id: "01HXYZTESTDELIVER000000000003", + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "this is too large" }, + }); + expect(rejectedByBytes.accepted).toBe(false); + expect(rejectedByBytes.reason).toContain("byte cap"); + } finally { + sandbox2.cleanup(); + } + } finally { + sandbox.cleanup(); + } + }); + + it("replays bookkeeping updates pending entries", async () => { + const sandbox = createSandbox(); + + try { + const inbox = 
createConnectorInboundInbox({ + configDir: sandbox.rootDir, + agentName: "alpha", + maxPendingMessages: 100, + maxPendingBytes: 1024 * 1024, + }); + + await inbox.enqueue({ + v: 1, + type: "deliver", + id: "01HXYZTESTDELIVER000000000004", + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "hello" }, + }); + + const dueNow = await inbox.listDuePending({ + nowMs: Date.now(), + limit: 10, + }); + expect(dueNow).toHaveLength(1); + expect(dueNow[0]?.requestId).toBe("01HXYZTESTDELIVER000000000004"); + + await inbox.markReplayFailure({ + requestId: "01HXYZTESTDELIVER000000000004", + errorMessage: "hook unavailable", + nextAttemptAt: new Date(Date.now() + 60_000).toISOString(), + }); + + const dueLater = await inbox.listDuePending({ + nowMs: Date.now(), + limit: 10, + }); + expect(dueLater).toHaveLength(0); + + await inbox.markDelivered("01HXYZTESTDELIVER000000000004"); + const snapshot = await inbox.getSnapshot(); + expect(snapshot.pendingCount).toBe(0); + } finally { + sandbox.cleanup(); + } + }); + + it("gracefully handles missing index file", async () => { + const sandbox = createSandbox(); + + try { + const inboxDir = resolveConnectorInboundInboxDir({ + configDir: sandbox.rootDir, + agentName: "alpha", + }); + mkdirSync(inboxDir, { recursive: true }); + + const inbox = createConnectorInboundInbox({ + configDir: sandbox.rootDir, + agentName: "alpha", + maxPendingMessages: 100, + maxPendingBytes: 1024 * 1024, + }); + + const snapshot = await inbox.getSnapshot(); + expect(snapshot.pendingCount).toBe(0); + expect(snapshot.pendingBytes).toBe(0); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/packages/connector/src/inbound-inbox.ts b/packages/connector/src/inbound-inbox.ts new file mode 100644 index 0000000..47ea1f4 --- /dev/null +++ b/packages/connector/src/inbound-inbox.ts @@ -0,0 +1,514 @@ +import { + appendFile, + mkdir, + readFile, + rename, + writeFile, +} from 
"node:fs/promises"; +import { dirname, join } from "node:path"; +import type { DeliverFrame } from "./frames.js"; + +const INBOUND_INBOX_DIR_NAME = "inbound-inbox"; +const INBOUND_INBOX_INDEX_FILE_NAME = "index.json"; +const INBOUND_INBOX_EVENTS_FILE_NAME = "events.jsonl"; +const INBOUND_INBOX_SCHEMA_VERSION = 1; + +type InboundInboxIndexFile = { + version: number; + pendingBytes: number; + pendingByRequestId: Record; + updatedAt: string; +}; + +type InboundInboxEvent = { + details?: Record; + requestId?: string; + type: + | "inbound_persisted" + | "inbound_duplicate" + | "replay_succeeded" + | "replay_failed" + | "inbox_pruned"; +}; + +export type ConnectorInboundInboxItem = { + attemptCount: number; + fromAgentDid: string; + id: string; + lastAttemptAt?: string; + lastError?: string; + nextAttemptAt: string; + payload: unknown; + payloadBytes: number; + receivedAt: string; + requestId: string; + toAgentDid: string; +}; + +export type ConnectorInboundInboxSnapshot = { + nextAttemptAt?: string; + oldestPendingAt?: string; + pendingBytes: number; + pendingCount: number; +}; + +export type ConnectorInboundInboxEnqueueResult = { + accepted: boolean; + duplicate: boolean; + pendingCount: number; + reason?: string; +}; + +export type ConnectorInboundInboxOptions = { + agentName: string; + configDir: string; + maxPendingBytes: number; + maxPendingMessages: number; +}; + +function nowIso(): string { + return new Date().toISOString(); +} + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +function parsePendingItem( + value: unknown, +): ConnectorInboundInboxItem | undefined { + if (!isRecord(value)) { + return undefined; + } + + const id = typeof value.id === "string" ? value.id.trim() : ""; + const requestId = + typeof value.requestId === "string" ? value.requestId.trim() : ""; + const fromAgentDid = + typeof value.fromAgentDid === "string" ? 
value.fromAgentDid.trim() : ""; + const toAgentDid = + typeof value.toAgentDid === "string" ? value.toAgentDid.trim() : ""; + const receivedAt = + typeof value.receivedAt === "string" ? value.receivedAt.trim() : ""; + const nextAttemptAt = + typeof value.nextAttemptAt === "string" ? value.nextAttemptAt.trim() : ""; + const attemptCount = + typeof value.attemptCount === "number" && + Number.isInteger(value.attemptCount) + ? value.attemptCount + : NaN; + const payloadBytes = + typeof value.payloadBytes === "number" && + Number.isInteger(value.payloadBytes) + ? value.payloadBytes + : NaN; + + if ( + id.length === 0 || + requestId.length === 0 || + fromAgentDid.length === 0 || + toAgentDid.length === 0 || + receivedAt.length === 0 || + nextAttemptAt.length === 0 || + !Number.isFinite(attemptCount) || + attemptCount < 0 || + !Number.isFinite(payloadBytes) || + payloadBytes < 0 + ) { + return undefined; + } + + const lastError = + typeof value.lastError === "string" ? value.lastError : undefined; + const lastAttemptAt = + typeof value.lastAttemptAt === "string" ? 
value.lastAttemptAt : undefined; + + return { + id, + requestId, + fromAgentDid, + toAgentDid, + payload: value.payload, + payloadBytes, + receivedAt, + nextAttemptAt, + attemptCount, + lastError, + lastAttemptAt, + }; +} + +function toDefaultIndexFile(): InboundInboxIndexFile { + return { + version: INBOUND_INBOX_SCHEMA_VERSION, + pendingBytes: 0, + pendingByRequestId: {}, + updatedAt: nowIso(), + }; +} + +function normalizeIndexFile(raw: unknown): InboundInboxIndexFile { + if (!isRecord(raw)) { + throw new Error("Inbound inbox index root must be an object"); + } + + const pendingByRequestIdRaw = raw.pendingByRequestId; + if (!isRecord(pendingByRequestIdRaw)) { + throw new Error("Inbound inbox index pendingByRequestId must be an object"); + } + + const pendingByRequestId: Record = {}; + let pendingBytes = 0; + for (const [requestId, candidate] of Object.entries(pendingByRequestIdRaw)) { + const entry = parsePendingItem(candidate); + if (entry === undefined || entry.requestId !== requestId) { + continue; + } + pendingByRequestId[requestId] = entry; + pendingBytes += entry.payloadBytes; + } + + return { + version: + typeof raw.version === "number" && Number.isFinite(raw.version) + ? raw.version + : INBOUND_INBOX_SCHEMA_VERSION, + pendingBytes, + pendingByRequestId, + updatedAt: + typeof raw.updatedAt === "string" && raw.updatedAt.trim().length > 0 + ? 
raw.updatedAt + : nowIso(), + }; +} + +function toComparableTimeMs(value: string): number { + const parsed = Date.parse(value); + if (Number.isFinite(parsed)) { + return parsed; + } + + return Number.MAX_SAFE_INTEGER; +} + +export class ConnectorInboundInbox { + private readonly agentName: string; + private readonly eventsPath: string; + private readonly indexPath: string; + private readonly maxPendingBytes: number; + private readonly maxPendingMessages: number; + private readonly inboxDir: string; + + private writeChain: Promise = Promise.resolve(); + + constructor(options: ConnectorInboundInboxOptions) { + this.agentName = options.agentName; + this.inboxDir = join( + options.configDir, + "agents", + this.agentName, + INBOUND_INBOX_DIR_NAME, + ); + this.indexPath = join(this.inboxDir, INBOUND_INBOX_INDEX_FILE_NAME); + this.eventsPath = join(this.inboxDir, INBOUND_INBOX_EVENTS_FILE_NAME); + this.maxPendingBytes = options.maxPendingBytes; + this.maxPendingMessages = options.maxPendingMessages; + } + + async enqueue( + frame: DeliverFrame, + ): Promise { + return await this.withWriteLock(async () => { + const index = await this.loadIndex(); + const existing = index.pendingByRequestId[frame.id]; + if (existing !== undefined) { + await this.appendEvent({ + type: "inbound_duplicate", + requestId: frame.id, + }); + return { + accepted: true, + duplicate: true, + pendingCount: Object.keys(index.pendingByRequestId).length, + }; + } + + const payloadBytes = Buffer.byteLength( + JSON.stringify(frame.payload ?? 
null), + "utf8", + ); + + const pendingCount = Object.keys(index.pendingByRequestId).length; + if (pendingCount >= this.maxPendingMessages) { + return { + accepted: false, + duplicate: false, + pendingCount, + reason: "connector inbound inbox is full (message cap reached)", + }; + } + + if (index.pendingBytes + payloadBytes > this.maxPendingBytes) { + return { + accepted: false, + duplicate: false, + pendingCount, + reason: "connector inbound inbox is full (byte cap reached)", + }; + } + + const pendingItem: ConnectorInboundInboxItem = { + id: frame.id, + requestId: frame.id, + fromAgentDid: frame.fromAgentDid, + toAgentDid: frame.toAgentDid, + payload: frame.payload, + payloadBytes, + receivedAt: nowIso(), + nextAttemptAt: nowIso(), + attemptCount: 0, + }; + + index.pendingByRequestId[pendingItem.requestId] = pendingItem; + index.pendingBytes += pendingItem.payloadBytes; + index.updatedAt = nowIso(); + await this.saveIndex(index); + await this.appendEvent({ + type: "inbound_persisted", + requestId: pendingItem.requestId, + details: { + payloadBytes, + fromAgentDid: pendingItem.fromAgentDid, + toAgentDid: pendingItem.toAgentDid, + }, + }); + + return { + accepted: true, + duplicate: false, + pendingCount: Object.keys(index.pendingByRequestId).length, + }; + }); + } + + async listDuePending(input: { + limit: number; + nowMs: number; + }): Promise { + const index = await this.loadIndex(); + const due = Object.values(index.pendingByRequestId) + .filter((item) => toComparableTimeMs(item.nextAttemptAt) <= input.nowMs) + .sort((left, right) => { + const leftNext = toComparableTimeMs(left.nextAttemptAt); + const rightNext = toComparableTimeMs(right.nextAttemptAt); + if (leftNext !== rightNext) { + return leftNext - rightNext; + } + + return ( + toComparableTimeMs(left.receivedAt) - + toComparableTimeMs(right.receivedAt) + ); + }); + + return due.slice(0, Math.max(1, input.limit)); + } + + async markDelivered(requestId: string): Promise { + await this.withWriteLock(async 
() => { + const index = await this.loadIndex(); + const entry = index.pendingByRequestId[requestId]; + if (entry === undefined) { + return; + } + + delete index.pendingByRequestId[requestId]; + index.pendingBytes = Math.max(0, index.pendingBytes - entry.payloadBytes); + index.updatedAt = nowIso(); + await this.saveIndex(index); + await this.appendEvent({ + type: "replay_succeeded", + requestId, + }); + }); + } + + async markReplayFailure(input: { + errorMessage: string; + nextAttemptAt: string; + requestId: string; + }): Promise { + await this.withWriteLock(async () => { + const index = await this.loadIndex(); + const entry = index.pendingByRequestId[input.requestId]; + if (entry === undefined) { + return; + } + + entry.attemptCount += 1; + entry.lastError = input.errorMessage; + entry.lastAttemptAt = nowIso(); + entry.nextAttemptAt = input.nextAttemptAt; + index.updatedAt = nowIso(); + await this.saveIndex(index); + await this.appendEvent({ + type: "replay_failed", + requestId: input.requestId, + details: { + attemptCount: entry.attemptCount, + nextAttemptAt: input.nextAttemptAt, + errorMessage: input.errorMessage, + }, + }); + }); + } + + async pruneDelivered(): Promise { + await this.withWriteLock(async () => { + const index = await this.loadIndex(); + const beforeCount = Object.keys(index.pendingByRequestId).length; + if (beforeCount === 0) { + return; + } + + const after: Record = {}; + let pendingBytes = 0; + for (const [requestId, entry] of Object.entries( + index.pendingByRequestId, + )) { + if (entry.attemptCount < 0) { + continue; + } + + after[requestId] = entry; + pendingBytes += entry.payloadBytes; + } + + index.pendingByRequestId = after; + index.pendingBytes = pendingBytes; + index.updatedAt = nowIso(); + await this.saveIndex(index); + await this.appendEvent({ + type: "inbox_pruned", + details: { + beforeCount, + afterCount: Object.keys(after).length, + }, + }); + }); + } + + async getSnapshot(): Promise { + const index = await this.loadIndex(); + 
const entries = Object.values(index.pendingByRequestId); + if (entries.length === 0) { + return { + pendingCount: 0, + pendingBytes: index.pendingBytes, + }; + } + + entries.sort((left, right) => { + return ( + toComparableTimeMs(left.receivedAt) - + toComparableTimeMs(right.receivedAt) + ); + }); + + const nextAttemptAt = entries + .map((entry) => entry.nextAttemptAt) + .sort( + (left, right) => toComparableTimeMs(left) - toComparableTimeMs(right), + )[0]; + + return { + pendingCount: entries.length, + pendingBytes: index.pendingBytes, + oldestPendingAt: entries[0]?.receivedAt, + nextAttemptAt, + }; + } + + private async withWriteLock(fn: () => Promise): Promise { + const previous = this.writeChain; + let release: (() => void) | undefined; + this.writeChain = new Promise((resolve) => { + release = resolve; + }); + + await previous; + try { + return await fn(); + } finally { + release?.(); + } + } + + private async loadIndex(): Promise { + await mkdir(this.inboxDir, { recursive: true }); + + let raw: string; + try { + raw = await readFile(this.indexPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return toDefaultIndexFile(); + } + + throw error; + } + + if (raw.trim().length === 0) { + return toDefaultIndexFile(); + } + + const parsed = JSON.parse(raw) as unknown; + return normalizeIndexFile(parsed); + } + + private async saveIndex(index: InboundInboxIndexFile): Promise { + await mkdir(dirname(this.indexPath), { recursive: true }); + + const payload = { + ...index, + version: INBOUND_INBOX_SCHEMA_VERSION, + updatedAt: nowIso(), + } satisfies InboundInboxIndexFile; + + const tmpPath = `${this.indexPath}.tmp-${Date.now()}`; + await writeFile(tmpPath, `${JSON.stringify(payload, null, 2)}\n`, "utf8"); + await rename(tmpPath, this.indexPath); + } + + private async appendEvent(event: InboundInboxEvent): Promise { + await mkdir(dirname(this.eventsPath), { 
recursive: true }); + await appendFile( + this.eventsPath, + `${JSON.stringify({ ...event, at: nowIso() })}\n`, + "utf8", + ); + } +} + +export function createConnectorInboundInbox( + options: ConnectorInboundInboxOptions, +): ConnectorInboundInbox { + return new ConnectorInboundInbox(options); +} + +export function resolveConnectorInboundInboxDir(input: { + agentName: string; + configDir: string; +}): string { + return join( + input.configDir, + "agents", + input.agentName, + INBOUND_INBOX_DIR_NAME, + ); +} diff --git a/packages/connector/src/index.ts b/packages/connector/src/index.ts index 961a229..306b5ac 100644 --- a/packages/connector/src/index.ts +++ b/packages/connector/src/index.ts @@ -10,7 +10,15 @@ export { CONNECTOR_FRAME_VERSION, CONNECTOR_VERSION, DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, + DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, + DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, + DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, + DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, + DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_CONNECTOR_STATUS_PATH, DEFAULT_HEARTBEAT_INTERVAL_MS, DEFAULT_OPENCLAW_BASE_URL, DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, @@ -46,7 +54,15 @@ export { parseFrame, serializeFrame, } from "./frames.js"; - +export type { + ConnectorInboundInboxItem, + ConnectorInboundInboxSnapshot, +} from "./inbound-inbox.js"; +export { + ConnectorInboundInbox, + createConnectorInboundInbox, + resolveConnectorInboundInboxDir, +} from "./inbound-inbox.js"; export type { ConnectorRuntimeHandle, StartConnectorRuntimeInput, diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index ab39ee0..560586b 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -1,5 +1,5 @@ import { randomBytes } from "node:crypto"; -import { mkdir, rename, writeFile } from "node:fs/promises"; 
+import { mkdir, readFile, rename, writeFile } from "node:fs/promises"; import { createServer, type IncomingMessage, @@ -9,6 +9,7 @@ import { dirname, join } from "node:path"; import { decodeBase64url, encodeBase64url, + RELAY_CONNECT_PATH, RELAY_RECIPIENT_AGENT_DID_HEADER, } from "@clawdentity/protocol"; import { @@ -25,10 +26,23 @@ import { ConnectorClient, type ConnectorWebSocket } from "./client.js"; import { AGENT_ACCESS_HEADER, DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, + DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, + DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, + DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, + DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, + DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_CONNECTOR_STATUS_PATH, DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, DEFAULT_OPENCLAW_HOOK_PATH, } from "./constants.js"; +import { + type ConnectorInboundInboxSnapshot, + createConnectorInboundInbox, +} from "./inbound-inbox.js"; type ConnectorRuntimeCredentials = { accessExpiresAt?: string; @@ -138,6 +152,10 @@ function normalizeWebSocketUrl(urlInput: string | undefined): string { throw new Error("Proxy websocket URL must use ws:// or wss://"); } + if (parsed.pathname === "/") { + parsed.pathname = RELAY_CONNECT_PATH; + } + return parsed.toString(); } @@ -165,6 +183,187 @@ function resolveOpenclawHookToken(input?: string): string | undefined { return value; } +function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { + const normalizedBase = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`; + const normalizedHookPath = hookPath.startsWith("/") + ? 
hookPath.slice(1) + : hookPath; + return new URL(normalizedHookPath, normalizedBase).toString(); +} + +function parsePositiveIntEnv( + key: string, + fallback: number, + minimum = 1, +): number { + const raw = process.env[key]?.trim(); + if (!raw) { + return fallback; + } + + const parsed = Number.parseInt(raw, 10); + if (!Number.isFinite(parsed) || parsed < minimum) { + return fallback; + } + + return parsed; +} + +function sanitizeErrorReason(error: unknown): string { + if (!(error instanceof Error)) { + return "Unknown error"; + } + + return error.message.trim().slice(0, 240) || "Unknown error"; +} + +class LocalOpenclawDeliveryError extends Error { + readonly retryable: boolean; + + constructor(input: { message: string; retryable: boolean }) { + super(input.message); + this.name = "LocalOpenclawDeliveryError"; + this.retryable = input.retryable; + } +} + +type InboundReplayPolicy = { + batchSize: number; + inboxMaxBytes: number; + inboxMaxMessages: number; + replayIntervalMs: number; + retryBackoffFactor: number; + retryInitialDelayMs: number; + retryMaxDelayMs: number; +}; + +type InboundReplayStatus = { + lastReplayAt?: string; + lastReplayError?: string; + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + replayerActive: boolean; +}; + +type InboundReplayView = { + lastReplayAt?: string; + lastReplayError?: string; + pending: ConnectorInboundInboxSnapshot; + replayerActive: boolean; + openclawHook: { + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + url: string; + }; +}; + +function loadInboundReplayPolicy(): InboundReplayPolicy { + const retryBackoffFactor = Number.parseFloat( + process.env.CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR ?? 
"", + ); + + return { + inboxMaxMessages: parsePositiveIntEnv( + "CONNECTOR_INBOUND_INBOX_MAX_MESSAGES", + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, + ), + inboxMaxBytes: parsePositiveIntEnv( + "CONNECTOR_INBOUND_INBOX_MAX_BYTES", + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, + ), + replayIntervalMs: parsePositiveIntEnv( + "CONNECTOR_INBOUND_REPLAY_INTERVAL_MS", + DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, + ), + batchSize: parsePositiveIntEnv( + "CONNECTOR_INBOUND_REPLAY_BATCH_SIZE", + DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, + ), + retryInitialDelayMs: parsePositiveIntEnv( + "CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS", + DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, + ), + retryMaxDelayMs: parsePositiveIntEnv( + "CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS", + DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, + ), + retryBackoffFactor: + Number.isFinite(retryBackoffFactor) && retryBackoffFactor >= 1 + ? retryBackoffFactor + : DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, + }; +} + +function computeReplayDelayMs(input: { + attemptCount: number; + policy: InboundReplayPolicy; +}): number { + const exponent = Math.max(0, input.attemptCount - 1); + const delay = Math.min( + input.policy.retryMaxDelayMs, + Math.floor( + input.policy.retryInitialDelayMs * + input.policy.retryBackoffFactor ** exponent, + ), + ); + return Math.max(1, delay); +} + +async function deliverToOpenclawHook(input: { + fetchImpl: typeof fetch; + openclawHookToken?: string; + openclawHookUrl: string; + payload: unknown; + requestId: string; +}): Promise { + const controller = new AbortController(); + const timeoutHandle = setTimeout(() => { + controller.abort(); + }, DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS); + + const headers: Record = { + "content-type": "application/json", + "x-request-id": input.requestId, + }; + if (input.openclawHookToken !== undefined) { + headers["x-openclaw-token"] = input.openclawHookToken; + } + + try { + const response = await input.fetchImpl(input.openclawHookUrl, { + 
method: "POST", + headers, + body: JSON.stringify(input.payload), + signal: controller.signal, + }); + if (!response.ok) { + throw new LocalOpenclawDeliveryError({ + message: `Local OpenClaw hook rejected payload with status ${response.status}`, + retryable: + response.status >= 500 || + response.status === 404 || + response.status === 429, + }); + } + } catch (error) { + if (error instanceof Error && error.name === "AbortError") { + throw new LocalOpenclawDeliveryError({ + message: "Local OpenClaw hook request timed out", + retryable: true, + }); + } + if (error instanceof LocalOpenclawDeliveryError) { + throw error; + } + throw new LocalOpenclawDeliveryError({ + message: sanitizeErrorReason(error), + retryable: true, + }); + } finally { + clearTimeout(timeoutHandle); + } +} + function toInitialAuthBundle( credentials: ConnectorRuntimeCredentials, ): AgentAuthBundle { @@ -297,6 +496,89 @@ async function writeRegistryAuthAtomic(input: { await rename(tmpPath, targetPath); } +function parseRegistryAuthFromDisk( + payload: unknown, +): AgentAuthBundle | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const tokenType = payload.tokenType; + const accessToken = payload.accessToken; + const accessExpiresAt = payload.accessExpiresAt; + const refreshToken = payload.refreshToken; + const refreshExpiresAt = payload.refreshExpiresAt; + + if ( + tokenType !== "Bearer" || + typeof accessToken !== "string" || + typeof accessExpiresAt !== "string" || + typeof refreshToken !== "string" || + typeof refreshExpiresAt !== "string" + ) { + return undefined; + } + + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + }; +} + +async function readRegistryAuthFromDisk(input: { + configDir: string; + agentName: string; + logger: Logger; +}): Promise { + const authPath = join( + input.configDir, + AGENTS_DIR_NAME, + input.agentName, + REGISTRY_AUTH_FILENAME, + ); + + let raw: string; + try { + raw = await readFile(authPath, 
"utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return undefined; + } + + input.logger.warn("connector.runtime.registry_auth_read_failed", { + authPath, + reason: error instanceof Error ? error.message : "unknown", + }); + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + input.logger.warn("connector.runtime.registry_auth_invalid_json", { + authPath, + }); + return undefined; + } + + const auth = parseRegistryAuthFromDisk(parsed); + if (auth === undefined) { + input.logger.warn("connector.runtime.registry_auth_invalid_shape", { + authPath, + }); + } + return auth; +} + async function readRequestJson(req: IncomingMessage): Promise { const chunks: Buffer[] = []; let totalBytes = 0; @@ -386,7 +668,37 @@ export async function startConnectorRuntime( let currentAuth = toInitialAuthBundle(input.credentials); - if (shouldRefreshAccessToken(currentAuth, Date.now())) { + const syncAuthFromDisk = async (): Promise => { + const diskAuth = await readRegistryAuthFromDisk({ + configDir: input.configDir, + agentName: input.agentName, + logger, + }); + if (!diskAuth) { + return; + } + + if ( + diskAuth.accessToken === currentAuth.accessToken && + diskAuth.accessExpiresAt === currentAuth.accessExpiresAt && + diskAuth.refreshToken === currentAuth.refreshToken && + diskAuth.refreshExpiresAt === currentAuth.refreshExpiresAt + ) { + return; + } + + currentAuth = diskAuth; + logger.info("connector.runtime.registry_auth_synced", { + agentName: input.agentName, + }); + }; + + const refreshCurrentAuthIfNeeded = async (): Promise => { + await syncAuthFromDisk(); + if (!shouldRefreshAccessToken(currentAuth, Date.now())) { + return; + } + currentAuth = await refreshAgentAuthWithClawProof({ registryUrl: input.registryUrl, ait: input.credentials.ait, @@ -399,33 +711,162 @@ export async function startConnectorRuntime( agentName: 
input.agentName, auth: currentAuth, }); - } + }; + + await refreshCurrentAuthIfNeeded(); const wsUrl = normalizeWebSocketUrl(input.proxyWebsocketUrl); const wsParsed = new URL(wsUrl); - const upgradeHeaders = await buildUpgradeHeaders({ - wsUrl: wsParsed, - ait: input.credentials.ait, - accessToken: currentAuth.accessToken, - secretKey, + const openclawBaseUrl = resolveOpenclawBaseUrl(input.openclawBaseUrl); + const openclawHookPath = resolveOpenclawHookPath(input.openclawHookPath); + const openclawHookToken = resolveOpenclawHookToken(input.openclawHookToken); + const openclawHookUrl = toOpenclawHookUrl(openclawBaseUrl, openclawHookPath); + const inboundReplayPolicy = loadInboundReplayPolicy(); + const inboundInbox = createConnectorInboundInbox({ + configDir: input.configDir, + agentName: input.agentName, + maxPendingMessages: inboundReplayPolicy.inboxMaxMessages, + maxPendingBytes: inboundReplayPolicy.inboxMaxBytes, }); + const inboundReplayStatus: InboundReplayStatus = { + replayerActive: false, + }; + let runtimeStopping = false; + let replayInFlight = false; + let replayIntervalHandle: ReturnType | undefined; + + const resolveUpgradeHeaders = async (): Promise> => { + await refreshCurrentAuthIfNeeded(); + return buildUpgradeHeaders({ + wsUrl: wsParsed, + ait: input.credentials.ait, + accessToken: currentAuth.accessToken, + secretKey, + }); + }; + + const replayPendingInboundMessages = async (): Promise => { + if (runtimeStopping || replayInFlight) { + return; + } + + replayInFlight = true; + inboundReplayStatus.replayerActive = true; + + try { + const dueItems = await inboundInbox.listDuePending({ + nowMs: Date.now(), + limit: inboundReplayPolicy.batchSize, + }); + for (const pending of dueItems) { + inboundReplayStatus.lastAttemptAt = new Date().toISOString(); + try { + await deliverToOpenclawHook({ + fetchImpl, + openclawHookUrl, + openclawHookToken, + requestId: pending.requestId, + payload: pending.payload, + }); + await 
inboundInbox.markDelivered(pending.requestId); + inboundReplayStatus.lastReplayAt = new Date().toISOString(); + inboundReplayStatus.lastReplayError = undefined; + inboundReplayStatus.lastAttemptStatus = "ok"; + logger.info("connector.inbound.replay_succeeded", { + requestId: pending.requestId, + attemptCount: pending.attemptCount + 1, + }); + } catch (error) { + const reason = sanitizeErrorReason(error); + const retryable = + error instanceof LocalOpenclawDeliveryError + ? error.retryable + : true; + const nextAttemptAt = new Date( + Date.now() + + computeReplayDelayMs({ + attemptCount: pending.attemptCount + 1, + policy: inboundReplayPolicy, + }) * + (retryable ? 1 : 10), + ).toISOString(); + await inboundInbox.markReplayFailure({ + requestId: pending.requestId, + errorMessage: reason, + nextAttemptAt, + }); + inboundReplayStatus.lastReplayError = reason; + inboundReplayStatus.lastAttemptStatus = "failed"; + logger.warn("connector.inbound.replay_failed", { + requestId: pending.requestId, + attemptCount: pending.attemptCount + 1, + retryable, + nextAttemptAt, + reason, + }); + } + } + } finally { + replayInFlight = false; + inboundReplayStatus.replayerActive = false; + } + }; + + const readInboundReplayView = async (): Promise => { + const pending = await inboundInbox.getSnapshot(); + return { + pending, + replayerActive: inboundReplayStatus.replayerActive || replayInFlight, + lastReplayAt: inboundReplayStatus.lastReplayAt, + lastReplayError: inboundReplayStatus.lastReplayError, + openclawHook: { + url: openclawHookUrl, + lastAttemptAt: inboundReplayStatus.lastAttemptAt, + lastAttemptStatus: inboundReplayStatus.lastAttemptStatus, + }, + }; + }; const connectorClient = new ConnectorClient({ connectorUrl: wsParsed.toString(), - connectionHeaders: upgradeHeaders, - openclawBaseUrl: resolveOpenclawBaseUrl(input.openclawBaseUrl), - openclawHookPath: resolveOpenclawHookPath(input.openclawHookPath), - openclawHookToken: resolveOpenclawHookToken(input.openclawHookToken), + 
connectionHeadersProvider: resolveUpgradeHeaders, + openclawBaseUrl, + openclawHookPath, + openclawHookToken, fetchImpl, logger, + inboundDeliverHandler: async (frame) => { + const persisted = await inboundInbox.enqueue(frame); + if (!persisted.accepted) { + logger.warn("connector.inbound.persist_rejected", { + requestId: frame.id, + reason: persisted.reason ?? "inbox limit reached", + pendingCount: persisted.pendingCount, + }); + return { + accepted: false, + reason: persisted.reason, + }; + } + + logger.info("connector.inbound.persisted", { + requestId: frame.id, + duplicate: persisted.duplicate, + pendingCount: persisted.pendingCount, + }); + void replayPendingInboundMessages(); + return { accepted: true }; + }, webSocketFactory: createWebSocketFactory(), }); const outboundBaseUrl = normalizeOutboundBaseUrl(input.outboundBaseUrl); const outboundPath = normalizeOutboundPath(input.outboundPath); + const statusPath = DEFAULT_CONNECTOR_STATUS_PATH; const outboundUrl = new URL(outboundPath, outboundBaseUrl).toString(); const relayToPeer = async (request: OutboundRelayRequest): Promise => { + await syncAuthFromDisk(); const peerUrl = new URL(request.peerProxyUrl); const body = JSON.stringify(request.payload ?? {}); const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; @@ -475,7 +916,10 @@ export async function startConnectorRuntime( await executeWithAgentAuthRefreshRetry({ key: refreshKey, shouldRetry: isRetryableRelayAuthError, - getAuth: async () => currentAuth, + getAuth: async () => { + await syncAuthFromDisk(); + return currentAuth; + }, persistAuth: async (nextAuth) => { currentAuth = nextAuth; await writeRegistryAuthAtomic({ @@ -501,6 +945,52 @@ export async function startConnectorRuntime( ? 
new URL(req.url, outboundBaseUrl).pathname : "/"; + if (requestPath === statusPath) { + if (req.method !== "GET") { + res.statusCode = 405; + res.setHeader("allow", "GET"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + let inboundReplayView: InboundReplayView; + try { + inboundReplayView = await readInboundReplayView(); + } catch (error) { + logger.warn("connector.status.inbound_inbox_unavailable", { + reason: sanitizeErrorReason(error), + }); + writeJson(res, 500, { + status: "error", + error: { + code: "CONNECTOR_INBOUND_INBOX_UNAVAILABLE", + message: "Connector inbound inbox status is unavailable", + }, + outboundUrl, + websocketUrl: wsUrl, + websocketConnected: connectorClient.isConnected(), + }); + return; + } + writeJson(res, 200, { + status: "ok", + outboundUrl, + websocketUrl: wsUrl, + websocketConnected: connectorClient.isConnected(), + inboundInbox: { + pendingCount: inboundReplayView.pending.pendingCount, + pendingBytes: inboundReplayView.pending.pendingBytes, + oldestPendingAt: inboundReplayView.pending.oldestPendingAt, + nextAttemptAt: inboundReplayView.pending.nextAttemptAt, + replayerActive: inboundReplayView.replayerActive, + lastReplayAt: inboundReplayView.lastReplayAt, + lastReplayError: inboundReplayView.lastReplayError, + }, + openclawHook: inboundReplayView.openclawHook, + }); + return; + } + if (requestPath !== outboundPath) { writeJson(res, 404, { error: "Not Found" }); return; @@ -552,6 +1042,11 @@ export async function startConnectorRuntime( }); const stop = async (): Promise => { + runtimeStopping = true; + if (replayIntervalHandle !== undefined) { + clearInterval(replayIntervalHandle); + replayIntervalHandle = undefined; + } connectorClient.disconnect(); await new Promise((resolve, reject) => { server.close((error) => { @@ -578,6 +1073,11 @@ export async function startConnectorRuntime( }); connectorClient.connect(); + await inboundInbox.pruneDelivered(); + void replayPendingInboundMessages(); + 
replayIntervalHandle = setInterval(() => { + void replayPendingInboundMessages(); + }, inboundReplayPolicy.replayIntervalMs); logger.info("connector.runtime.started", { outboundUrl, diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index af30a9f..b10ca06 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -19,7 +19,8 @@ - Keep T02 canonicalization minimal and deterministic; replay/skew/nonce policy enforcement is handled in later tickets (`T07`, `T08`, `T09`). - Define shared API route fragments in protocol exports (for example `ADMIN_BOOTSTRAP_PATH`) so CLI/SDK/apps avoid hardcoded duplicate endpoint literals. - Keep lifecycle route constants together in `endpoints.ts` (e.g., `ADMIN_BOOTSTRAP_PATH`, `AGENT_REGISTRATION_CHALLENGE_PATH`, `AGENT_AUTH_REFRESH_PATH`, `AGENT_AUTH_VALIDATE_PATH`, `ME_API_KEYS_PATH`) so registry, proxy, and CLI stay contract-synchronized. -- Keep proxy pairing key route constants in protocol exports (`PROXY_PAIRING_KEYS_PATH`, `PROXY_PAIRING_KEYS_RESOLVE_PATH`) so registry write/read APIs and proxy verification paths remain synchronized. +- Keep protocol route constants scoped to active contracts only; remove deprecated endpoint exports immediately when a flow is retired. +- Keep internal identity route constants in protocol exports (`INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH`) so service-to-service ownership checks stay synchronized. - Keep relay contract constants in protocol exports (`RELAY_CONNECT_PATH`, `RELAY_RECIPIENT_AGENT_DID_HEADER`) so connector and hook routing stay synchronized across apps. - Keep registration-proof canonicalization in protocol exports (`canonicalizeAgentRegistrationProof`) so CLI signing and registry verification use an identical message format. - Keep optional proof fields deterministic in canonical strings (empty-string placeholders) to avoid default-value mismatches between clients and server. 
diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index 1a1ec4e..a2cdebd 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -1,11 +1,13 @@ export const ADMIN_BOOTSTRAP_PATH = "/v1/admin/bootstrap"; +export const ADMIN_INTERNAL_SERVICES_PATH = "/v1/admin/internal-services"; export const AGENT_REGISTRATION_CHALLENGE_PATH = "/v1/agents/challenge"; export const AGENT_AUTH_REFRESH_PATH = "/v1/agents/auth/refresh"; export const AGENT_AUTH_VALIDATE_PATH = "/v1/agents/auth/validate"; export const INVITES_PATH = "/v1/invites"; export const INVITES_REDEEM_PATH = "/v1/invites/redeem"; export const ME_API_KEYS_PATH = "/v1/me/api-keys"; -export const PROXY_PAIRING_KEYS_PATH = "/v1/proxy-pairing-keys"; -export const PROXY_PAIRING_KEYS_RESOLVE_PATH = "/v1/proxy-pairing-keys/resolve"; +export const REGISTRY_METADATA_PATH = "/v1/metadata"; +export const INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH = + "/internal/v1/identity/agent-ownership"; export const RELAY_CONNECT_PATH = "/v1/relay/connect"; export const RELAY_RECIPIENT_AGENT_DID_HEADER = "x-claw-recipient-agent-did"; diff --git a/packages/protocol/src/index.test.ts b/packages/protocol/src/index.test.ts index 00dcade..1432303 100644 --- a/packages/protocol/src/index.test.ts +++ b/packages/protocol/src/index.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "vitest"; import { ADMIN_BOOTSTRAP_PATH, + ADMIN_INTERNAL_SERVICES_PATH, AGENT_AUTH_REFRESH_PATH, AGENT_AUTH_VALIDATE_PATH, AGENT_NAME_REGEX, @@ -15,6 +16,7 @@ import { decodeBase64url, encodeBase64url, generateUlid, + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, INVITES_PATH, INVITES_REDEEM_PATH, MAX_AGENT_DESCRIPTION_LENGTH, @@ -23,13 +25,12 @@ import { makeAgentDid, makeHumanDid, PROTOCOL_VERSION, - PROXY_PAIRING_KEYS_PATH, - PROXY_PAIRING_KEYS_RESOLVE_PATH, ProtocolParseError, parseAitClaims, parseCrlClaims, parseDid, parseUlid, + REGISTRY_METADATA_PATH, RELAY_CONNECT_PATH, 
RELAY_RECIPIENT_AGENT_DID_HEADER, validateAgentName, @@ -42,15 +43,16 @@ describe("protocol", () => { it("exports shared endpoint constants", () => { expect(ADMIN_BOOTSTRAP_PATH).toBe("/v1/admin/bootstrap"); + expect(ADMIN_INTERNAL_SERVICES_PATH).toBe("/v1/admin/internal-services"); expect(AGENT_REGISTRATION_CHALLENGE_PATH).toBe("/v1/agents/challenge"); expect(AGENT_AUTH_REFRESH_PATH).toBe("/v1/agents/auth/refresh"); expect(AGENT_AUTH_VALIDATE_PATH).toBe("/v1/agents/auth/validate"); expect(INVITES_PATH).toBe("/v1/invites"); expect(INVITES_REDEEM_PATH).toBe("/v1/invites/redeem"); expect(ME_API_KEYS_PATH).toBe("/v1/me/api-keys"); - expect(PROXY_PAIRING_KEYS_PATH).toBe("/v1/proxy-pairing-keys"); - expect(PROXY_PAIRING_KEYS_RESOLVE_PATH).toBe( - "/v1/proxy-pairing-keys/resolve", + expect(REGISTRY_METADATA_PATH).toBe("/v1/metadata"); + expect(INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH).toBe( + "/internal/v1/identity/agent-ownership", ); expect(RELAY_CONNECT_PATH).toBe("/v1/relay/connect"); expect(RELAY_RECIPIENT_AGENT_DID_HEADER).toBe("x-claw-recipient-agent-did"); diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 4a974be..6e16e42 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -22,14 +22,15 @@ export type { ClawDidKind } from "./did.js"; export { makeAgentDid, makeHumanDid, parseDid } from "./did.js"; export { ADMIN_BOOTSTRAP_PATH, + ADMIN_INTERNAL_SERVICES_PATH, AGENT_AUTH_REFRESH_PATH, AGENT_AUTH_VALIDATE_PATH, AGENT_REGISTRATION_CHALLENGE_PATH, + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, INVITES_PATH, INVITES_REDEEM_PATH, ME_API_KEYS_PATH, - PROXY_PAIRING_KEYS_PATH, - PROXY_PAIRING_KEYS_RESOLVE_PATH, + REGISTRY_METADATA_PATH, RELAY_CONNECT_PATH, RELAY_RECIPIENT_AGENT_DID_HEADER, } from "./endpoints.js"; diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index 1f35ed8..ac15b6e 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -17,6 +17,7 @@ - `http/sign` + `http/verify`: PoP 
request signing and verification that binds method, path+query, timestamp, nonce, and body hash. - `security/nonce-cache`: in-memory TTL nonce replay protection keyed by `agentDid + nonce`. - `agent-auth-client`: shared agent auth refresh client + retry orchestration (`executeWithAgentAuthRefreshRetry`) for CLI/runtime integrations. +- `event-bus`: shared event envelope + transport abstractions (`createInMemoryEventBus`, `createQueueEventBus`) for environment-based async delivery. - `testing/*`: shared deterministic test fixtures (e.g. AIT claims) for app/package tests. - Tests should prove tamper cases (payload change, header kid swap, signature corruption). @@ -41,11 +42,13 @@ - Nonce cache accept path must prune expired entries across all agent buckets to keep memory bounded under high-cardinality agent traffic. - Nonce cache must validate the top-level input shape before reading fields so invalid JS callers receive structured `AppError`s instead of runtime `TypeError`s. - Registry config parsing must validate `REGISTRY_SIGNING_KEYS` as JSON before runtime use so keyset endpoints fail fast with `CONFIG_VALIDATION_FAILED` on malformed key documents. +- Registry config parsing must validate `PROXY_URL` as an absolute URL so invite onboarding responses can safely publish proxy routing hints. - Registry keyset validation must reject duplicate `kid` values and malformed `x` key material (non-base64url or non-32-byte Ed25519) so verifier behavior cannot become order-dependent. - Use `RuntimeEnvironment` + `shouldExposeVerboseErrors` from `runtime-environment` for environment-based error-detail behavior; do not duplicate ad-hoc `NODE_ENV`/string checks. - Keep `agent-auth-client` runtime-portable (no Node-only filesystem APIs); delegate persistence/locking to callers. - Keep refresh retry policy strict: a single refresh attempt and a single request retry on retryable auth failures. 
- Keep per-agent refresh single-flight keyed by explicit caller-provided key to avoid duplicate refresh races. +- Keep event envelopes versioned and explicit (`id`, `version`, `timestampUtc`, `initiatedByAccountId`, `data`) with past-tense event names. - Keep shared test fixtures in `src/testing/*` and consume via `@clawdentity/sdk/testing` to avoid copy/paste helpers across apps. ## Testing Rules diff --git a/packages/sdk/src/config.test.ts b/packages/sdk/src/config.test.ts index f1354b1..dbefe7c 100644 --- a/packages/sdk/src/config.test.ts +++ b/packages/sdk/src/config.test.ts @@ -9,6 +9,18 @@ describe("config helpers", () => { }); }); + it("parses EVENT_BUS_BACKEND when provided", () => { + expect( + parseRegistryConfig({ + ENVIRONMENT: "development", + EVENT_BUS_BACKEND: "queue", + }), + ).toEqual({ + ENVIRONMENT: "development", + EVENT_BUS_BACKEND: "queue", + }); + }); + it("parses REGISTRY_SIGNING_KEYS into validated key entries", () => { const config = parseRegistryConfig({ ENVIRONMENT: "development", @@ -46,6 +58,30 @@ describe("config helpers", () => { }); }); + it("parses PROXY_URL when provided", () => { + expect( + parseRegistryConfig({ + ENVIRONMENT: "development", + PROXY_URL: "https://dev.proxy.clawdentity.com", + }), + ).toEqual({ + ENVIRONMENT: "development", + PROXY_URL: "https://dev.proxy.clawdentity.com", + }); + }); + + it("parses REGISTRY_ISSUER_URL when provided", () => { + expect( + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_ISSUER_URL: "http://host.docker.internal:8788", + }), + ).toEqual({ + ENVIRONMENT: "development", + REGISTRY_ISSUER_URL: "http://host.docker.internal:8788", + }); + }); + it("throws AppError when APP_VERSION is empty", () => { try { parseRegistryConfig({ @@ -59,6 +95,19 @@ describe("config helpers", () => { } }); + it("throws AppError when REGISTRY_ISSUER_URL is invalid", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + REGISTRY_ISSUER_URL: "not-a-url", + }); + throw new 
Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); + it("throws AppError on invalid registry config", () => { try { parseRegistryConfig({ ENVIRONMENT: "local" }); @@ -69,6 +118,19 @@ describe("config helpers", () => { } }); + it("throws AppError on invalid EVENT_BUS_BACKEND", () => { + try { + parseRegistryConfig({ + ENVIRONMENT: "development", + EVENT_BUS_BACKEND: "invalid", + }); + throw new Error("expected parseRegistryConfig to throw"); + } catch (error) { + expect(error).toBeInstanceOf(AppError); + expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); + } + }); + it("throws AppError when REGISTRY_SIGNING_KEYS is invalid JSON", () => { try { parseRegistryConfig({ diff --git a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index 846489b..4c86c81 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -5,6 +5,7 @@ import { runtimeEnvironmentValues } from "./runtime-environment.js"; const environmentSchema = z.enum(runtimeEnvironmentValues); const registrySigningKeyStatusSchema = z.enum(["active", "revoked"]); +const registryEventBusBackendSchema = z.enum(["memory", "queue"]); const ED25519_PUBLIC_KEY_LENGTH = 32; const registrySigningPublicKeySchema = z @@ -89,6 +90,9 @@ const registrySigningKeysEnvSchema = z export const registryConfigSchema = z.object({ ENVIRONMENT: environmentSchema, APP_VERSION: z.string().min(1).optional(), + PROXY_URL: z.string().url().optional(), + REGISTRY_ISSUER_URL: z.string().url().optional(), + EVENT_BUS_BACKEND: registryEventBusBackendSchema.optional(), BOOTSTRAP_SECRET: z.string().min(1).optional(), REGISTRY_SIGNING_KEY: z.string().min(1).optional(), REGISTRY_SIGNING_KEYS: registrySigningKeysEnvSchema.optional(), diff --git a/packages/sdk/src/event-bus.test.ts b/packages/sdk/src/event-bus.test.ts new file mode 100644 index 0000000..fe58837 --- /dev/null 
+++ b/packages/sdk/src/event-bus.test.ts @@ -0,0 +1,63 @@ +import { describe, expect, it, vi } from "vitest"; +import { + createEventEnvelope, + createInMemoryEventBus, + createQueueEventBus, +} from "./event-bus.js"; + +describe("event bus", () => { + it("creates a normalized event envelope", () => { + const event = createEventEnvelope({ + type: "agent.auth.issued", + initiatedByAccountId: "did:claw:human:01HXYZ", + data: { + agentDid: "did:claw:agent:01HABC", + }, + }); + + expect(event.id.length).toBeGreaterThan(0); + expect(event.version).toBe("v1"); + expect(event.timestampUtc.length).toBeGreaterThan(0); + expect(event.type).toBe("agent.auth.issued"); + expect(event.initiatedByAccountId).toBe("did:claw:human:01HXYZ"); + expect(event.data).toEqual({ + agentDid: "did:claw:agent:01HABC", + }); + }); + + it("publishes events to in-memory subscribers", async () => { + const bus = createInMemoryEventBus(); + const subscriber = vi.fn(); + bus.subscribe(subscriber); + const event = createEventEnvelope({ + type: "agent.auth.refreshed", + data: { + agentDid: "did:claw:agent:01HABC", + }, + }); + + await bus.publish(event); + + expect(bus.publishedEvents).toEqual([event]); + expect(subscriber).toHaveBeenCalledTimes(1); + expect(subscriber).toHaveBeenCalledWith(event); + }); + + it("serializes events when using queue event bus", async () => { + const queue = { + send: vi.fn(async (_message: string) => undefined), + }; + const bus = createQueueEventBus(queue); + const event = createEventEnvelope({ + type: "agent.auth.revoked", + data: { + agentDid: "did:claw:agent:01HABC", + }, + }); + + await bus.publish(event); + + expect(queue.send).toHaveBeenCalledTimes(1); + expect(queue.send).toHaveBeenCalledWith(JSON.stringify(event)); + }); +}); diff --git a/packages/sdk/src/event-bus.ts b/packages/sdk/src/event-bus.ts new file mode 100644 index 0000000..4deb7d0 --- /dev/null +++ b/packages/sdk/src/event-bus.ts @@ -0,0 +1,119 @@ +import { nowIso } from "./datetime.js"; + +const 
DEFAULT_EVENT_VERSION = "v1"; + +export type EventEnvelope> = { + id: string; + type: string; + version: string; + timestampUtc: string; + initiatedByAccountId: string | null; + data: TData; +}; + +export type EventEnvelopeInput> = { + id?: string; + type: string; + version?: string; + timestampUtc?: string; + initiatedByAccountId?: string | null; + data: TData; +}; + +export type EventBus = { + publish>( + event: EventEnvelope, + ): Promise; +}; + +export type EventHandler = ( + event: EventEnvelope>, +) => Promise | void; + +export type InMemoryEventBus = EventBus & { + subscribe(handler: EventHandler): () => void; + readonly publishedEvents: readonly EventEnvelope>[]; +}; + +export type QueuePublisher = { + send(message: string): Promise; +}; + +function createEventId(): string { + if (typeof crypto?.randomUUID === "function") { + return crypto.randomUUID(); + } + + const random = Math.random().toString(36).slice(2, 10); + return `${Date.now()}-${random}`; +} + +function normalizeRequiredString(value: string, fieldName: string): string { + const normalized = value.trim(); + if (normalized.length > 0) { + return normalized; + } + + throw new Error(`${fieldName} must be a non-empty string`); +} + +function normalizeOptionalString( + value: string | undefined, + fallback: string, +): string { + if (value === undefined) { + return fallback; + } + + const normalized = value.trim(); + return normalized.length > 0 ? normalized : fallback; +} + +export function createEventEnvelope>( + input: EventEnvelopeInput, +): EventEnvelope { + return { + id: normalizeOptionalString(input.id, createEventId()), + type: normalizeRequiredString(input.type, "type"), + version: normalizeOptionalString(input.version, DEFAULT_EVENT_VERSION), + timestampUtc: normalizeOptionalString(input.timestampUtc, nowIso()), + initiatedByAccountId: input.initiatedByAccountId ?? 
null, + data: input.data, + }; +} + +export function createInMemoryEventBus(): InMemoryEventBus { + const handlers = new Set(); + const publishedEvents: EventEnvelope>[] = []; + + return { + async publish>( + event: EventEnvelope, + ): Promise { + const normalized = event as EventEnvelope>; + publishedEvents.push(normalized); + for (const handler of handlers) { + await handler(normalized); + } + }, + subscribe(handler: EventHandler): () => void { + handlers.add(handler); + return () => { + handlers.delete(handler); + }; + }, + get publishedEvents() { + return publishedEvents; + }, + }; +} + +export function createQueueEventBus(queue: QueuePublisher): EventBus { + return { + async publish>( + event: EventEnvelope, + ): Promise { + await queue.send(JSON.stringify(event)); + }, + }; +} diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index d0f4ff1..29e534a 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -6,7 +6,10 @@ import { addSeconds, CrlJwtError, createCrlCache, + createEventEnvelope, + createInMemoryEventBus, createNonceCache, + createRegistryIdentityClient, DEFAULT_CRL_MAX_AGE_MS, DEFAULT_CRL_REFRESH_INTERVAL_MS, DEFAULT_NONCE_TTL_MS, @@ -16,6 +19,8 @@ import { encodeEd25519SignatureBase64url, executeWithAgentAuthRefreshRetry, generateEd25519Keypair, + INTERNAL_SERVICE_ID_HEADER, + INTERNAL_SERVICE_SECRET_HEADER, parseRegistryConfig, REQUEST_ID_HEADER, resolveRequestId, @@ -47,6 +52,26 @@ describe("sdk", () => { expect(shouldExposeVerboseErrors("test")).toBe(true); expect(REQUEST_ID_HEADER).toBe("x-request-id"); expect(AppError).toBeTypeOf("function"); + const eventBus = createInMemoryEventBus(); + const event = createEventEnvelope({ + type: "agent.auth.issued", + data: { agentDid: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4" }, + }); + const identityClient = createRegistryIdentityClient({ + registryUrl: "https://registry.clawdentity.com", + serviceId: "svc-proxy", + serviceSecret: "clw_srv_secret", 
+ fetchImpl: (async () => + Response.json( + { ownsAgent: true, agentStatus: "active" }, + { status: 200 }, + )) as typeof fetch, + }); + expect(eventBus).toBeDefined(); + expect(event.type).toBe("agent.auth.issued"); + expect(identityClient).toBeDefined(); + expect(INTERNAL_SERVICE_ID_HEADER).toBe("x-claw-service-id"); + expect(INTERNAL_SERVICE_SECRET_HEADER).toBe("x-claw-service-secret"); }); it("exports agent auth refresh retry helpers", async () => { diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 3943178..6bc6e01 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -35,6 +35,19 @@ export { verifyEd25519, } from "./crypto/ed25519.js"; export { addSeconds, isExpired, nowIso } from "./datetime.js"; +export type { + EventBus, + EventEnvelope, + EventEnvelopeInput, + EventHandler, + InMemoryEventBus, + QueuePublisher, +} from "./event-bus.js"; +export { + createEventEnvelope, + createInMemoryEventBus, + createQueueEventBus, +} from "./event-bus.js"; export { AppError, createHonoErrorHandler, @@ -66,6 +79,12 @@ export type { export { CrlJwtError, signCRL, verifyCRL } from "./jwt/crl-jwt.js"; export type { Logger } from "./logging.js"; export { createLogger, createRequestLoggingMiddleware } from "./logging.js"; +export type { AgentOwnershipResult } from "./registry-identity-client.js"; +export { + createRegistryIdentityClient, + INTERNAL_SERVICE_ID_HEADER, + INTERNAL_SERVICE_SECRET_HEADER, +} from "./registry-identity-client.js"; export type { RequestContextVariables } from "./request-context.js"; export { createRequestContextMiddleware, diff --git a/packages/sdk/src/registry-identity-client.test.ts b/packages/sdk/src/registry-identity-client.test.ts new file mode 100644 index 0000000..780889c --- /dev/null +++ b/packages/sdk/src/registry-identity-client.test.ts @@ -0,0 +1,114 @@ +import { makeAgentDid, makeHumanDid } from "@clawdentity/protocol"; +import { describe, expect, it, vi } from "vitest"; +import type { AppError 
} from "./exceptions.js"; +import { + createRegistryIdentityClient, + INTERNAL_SERVICE_ID_HEADER, + INTERNAL_SERVICE_SECRET_HEADER, +} from "./registry-identity-client.js"; + +describe("registry identity client", () => { + it("checks ownership with service credential headers", async () => { + const fetchImpl = vi.fn(async () => + Response.json( + { ownsAgent: true, agentStatus: "active" }, + { status: 200 }, + ), + ); + const client = createRegistryIdentityClient({ + fetchImpl: fetchImpl as unknown as typeof fetch, + registryUrl: "https://registry.clawdentity.com", + serviceId: "01JTESTSERVICE1234567890AB", + serviceSecret: "clw_srv_secret", + }); + + const result = await client.checkAgentOwnership({ + ownerDid: makeHumanDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + agentDid: makeAgentDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + }); + + expect(result).toEqual({ + ownsAgent: true, + agentStatus: "active", + }); + expect(fetchImpl).toHaveBeenCalledTimes(1); + const [, init] = fetchImpl.mock.calls[0] ?? []; + const headers = new Headers((init as RequestInit | undefined)?.headers); + expect(headers.get(INTERNAL_SERVICE_ID_HEADER)).toBe( + "01JTESTSERVICE1234567890AB", + ); + expect(headers.get(INTERNAL_SERVICE_SECRET_HEADER)).toBe("clw_srv_secret"); + }); + + it("maps unauthorized responses to service-auth errors", async () => { + const client = createRegistryIdentityClient({ + fetchImpl: (async () => + Response.json( + { + error: { + code: "INTERNAL_SERVICE_UNAUTHORIZED", + message: "service secret is invalid", + }, + }, + { status: 401 }, + )) as typeof fetch, + registryUrl: "https://registry.clawdentity.com", + serviceId: "svc-proxy", + serviceSecret: "bad-secret", + }); + + await expect( + client.checkAgentOwnership({ + ownerDid: makeHumanDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + agentDid: makeAgentDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + }), + ).rejects.toMatchObject({ + code: "IDENTITY_SERVICE_UNAUTHORIZED", + status: 503, + } satisfies Partial); + }); + + it("maps network failures to 
unavailable errors", async () => { + const client = createRegistryIdentityClient({ + fetchImpl: (async () => { + throw new Error("network error"); + }) as typeof fetch, + registryUrl: "https://registry.clawdentity.com", + serviceId: "svc-proxy", + serviceSecret: "secret", + }); + + await expect( + client.checkAgentOwnership({ + ownerDid: makeHumanDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + agentDid: makeAgentDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + }), + ).rejects.toMatchObject({ + code: "IDENTITY_SERVICE_UNAVAILABLE", + status: 503, + } satisfies Partial); + }); + + it("rejects invalid ownership response payloads", async () => { + const client = createRegistryIdentityClient({ + fetchImpl: (async () => + Response.json( + { ownsAgent: "yes", agentStatus: "active" }, + { status: 200 }, + )) as typeof fetch, + registryUrl: "https://registry.clawdentity.com", + serviceId: "svc-proxy", + serviceSecret: "secret", + }); + + await expect( + client.checkAgentOwnership({ + ownerDid: makeHumanDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + agentDid: makeAgentDid("01HF7YAT31JZHSMW1CG6Q6MHB7"), + }), + ).rejects.toMatchObject({ + code: "IDENTITY_SERVICE_INVALID_RESPONSE", + status: 503, + } satisfies Partial); + }); +}); diff --git a/packages/sdk/src/registry-identity-client.ts b/packages/sdk/src/registry-identity-client.ts new file mode 100644 index 0000000..a138ef4 --- /dev/null +++ b/packages/sdk/src/registry-identity-client.ts @@ -0,0 +1,233 @@ +import { + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, + parseDid, +} from "@clawdentity/protocol"; +import { AppError } from "./exceptions.js"; + +export const INTERNAL_SERVICE_ID_HEADER = "x-claw-service-id"; +export const INTERNAL_SERVICE_SECRET_HEADER = "x-claw-service-secret"; + +export type AgentOwnershipStatus = "active" | "revoked" | null; + +export type AgentOwnershipResult = { + ownsAgent: boolean; + agentStatus: AgentOwnershipStatus; +}; + +type RegistryErrorEnvelope = { + error?: { + code?: string; + message?: string; + }; +}; + +type 
RegistryIdentityClientInput = { + fetchImpl?: typeof fetch; + registryUrl: string; + serviceId: string; + serviceSecret: string; +}; + +function normalizeRegistryUrl(registryUrl: string): string { + const normalized = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + return new URL(normalized).toString(); +} + +function toIdentityPathWithQuery(urlString: string): string { + const parsed = new URL(urlString); + return `${parsed.pathname}${parsed.search}`; +} + +function parseRegistryErrorEnvelope(payload: unknown): RegistryErrorEnvelope { + if (typeof payload !== "object" || payload === null) { + return {}; + } + + const error = (payload as { error?: unknown }).error; + if (typeof error !== "object" || error === null) { + return {}; + } + + return { + error: { + code: + typeof (error as { code?: unknown }).code === "string" + ? (error as { code: string }).code + : undefined, + message: + typeof (error as { message?: unknown }).message === "string" + ? (error as { message: string }).message + : undefined, + }, + }; +} + +async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +function parseOwnershipResponse(payload: unknown): AgentOwnershipResult { + if (typeof payload !== "object" || payload === null) { + throw new AppError({ + code: "IDENTITY_SERVICE_INVALID_RESPONSE", + message: "Registry identity response is invalid", + status: 503, + expose: true, + }); + } + + const ownsAgent = (payload as { ownsAgent?: unknown }).ownsAgent; + const agentStatus = (payload as { agentStatus?: unknown }).agentStatus; + + if ( + typeof ownsAgent !== "boolean" || + !( + agentStatus === "active" || + agentStatus === "revoked" || + agentStatus === null + ) + ) { + throw new AppError({ + code: "IDENTITY_SERVICE_INVALID_RESPONSE", + message: "Registry identity response is invalid", + status: 503, + expose: true, + }); + } + + return { + ownsAgent, + agentStatus, + }; +} + +function 
validateServiceIdentity(input: RegistryIdentityClientInput): void { + const serviceId = input.serviceId.trim(); + const serviceSecret = input.serviceSecret.trim(); + + if (serviceId.length === 0 || serviceSecret.length === 0) { + throw new AppError({ + code: "IDENTITY_SERVICE_CONFIG_INVALID", + message: "Registry internal service credentials are not configured", + status: 500, + expose: true, + }); + } +} + +function validateOwnershipInput(input: { + ownerDid: string; + agentDid: string; +}): void { + try { + if (parseDid(input.ownerDid).kind !== "human") { + throw new Error("invalid owner did"); + } + if (parseDid(input.agentDid).kind !== "agent") { + throw new Error("invalid agent did"); + } + } catch { + throw new AppError({ + code: "IDENTITY_SERVICE_INVALID_INPUT", + message: "Ownership input is invalid", + status: 400, + expose: true, + }); + } +} + +export function createRegistryIdentityClient( + input: RegistryIdentityClientInput, +) { + const fetchImpl = input.fetchImpl ?? fetch; + if (typeof fetchImpl !== "function") { + throw new AppError({ + code: "IDENTITY_SERVICE_CONFIG_INVALID", + message: "fetch implementation is required", + status: 500, + expose: true, + }); + } + + const registryUrl = normalizeRegistryUrl(input.registryUrl); + validateServiceIdentity(input); + + return { + async checkAgentOwnership(payload: { + ownerDid: string; + agentDid: string; + }): Promise { + validateOwnershipInput(payload); + + const requestUrl = new URL( + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH.slice(1), + registryUrl, + ).toString(); + const requestBody = JSON.stringify({ + ownerDid: payload.ownerDid, + agentDid: payload.agentDid, + }); + + let response: Response; + try { + response = await fetchImpl(requestUrl, { + method: "POST", + headers: { + "content-type": "application/json", + [INTERNAL_SERVICE_ID_HEADER]: input.serviceId.trim(), + [INTERNAL_SERVICE_SECRET_HEADER]: input.serviceSecret.trim(), + }, + body: requestBody, + }); + } catch { + throw new AppError({ + 
code: "IDENTITY_SERVICE_UNAVAILABLE", + message: "Registry identity service is unavailable", + status: 503, + expose: true, + }); + } + + const responseBody = await parseJsonResponse(response); + if (response.status === 401 || response.status === 403) { + const parsedError = parseRegistryErrorEnvelope(responseBody); + throw new AppError({ + code: "IDENTITY_SERVICE_UNAUTHORIZED", + message: + parsedError.error?.message ?? + "Registry internal service authorization failed", + status: 503, + expose: true, + details: { + registryCode: parsedError.error?.code, + }, + }); + } + + if (!response.ok) { + const parsedError = parseRegistryErrorEnvelope(responseBody); + throw new AppError({ + code: "IDENTITY_SERVICE_UNAVAILABLE", + message: + parsedError.error?.message ?? + "Registry identity service is unavailable", + status: 503, + expose: true, + details: { + status: response.status, + registryCode: parsedError.error?.code, + pathWithQuery: toIdentityPathWithQuery(requestUrl), + }, + }); + } + + return parseOwnershipResponse(responseBody); + }, + }; +} diff --git a/packages/sdk/src/testing/ait-fixtures.test.ts b/packages/sdk/src/testing/ait-fixtures.test.ts index eead148..12eb7bd 100644 --- a/packages/sdk/src/testing/ait-fixtures.test.ts +++ b/packages/sdk/src/testing/ait-fixtures.test.ts @@ -10,7 +10,7 @@ describe("buildTestAitClaims", () => { nowSeconds: 1_700_000_000, }); - expect(claims.iss).toBe("https://api.clawdentity.com"); + expect(claims.iss).toBe("https://registry.clawdentity.com"); expect(parseDid(claims.sub).kind).toBe("agent"); expect(parseDid(claims.ownerDid).kind).toBe("human"); expect(parseUlid(parseDid(claims.sub).ulid).timestampMs).toBe( diff --git a/packages/sdk/src/testing/ait-fixtures.ts b/packages/sdk/src/testing/ait-fixtures.ts index 80c933c..077ed2b 100644 --- a/packages/sdk/src/testing/ait-fixtures.ts +++ b/packages/sdk/src/testing/ait-fixtures.ts @@ -18,7 +18,7 @@ export type BuildTestAitClaimsInput = { }; const DEFAULT_SEED_MS = 
1_700_000_000_000; -const DEFAULT_ISSUER = "https://api.clawdentity.com"; +const DEFAULT_ISSUER = "https://registry.clawdentity.com"; const DEFAULT_NAME = "Proxy Agent"; const DEFAULT_FRAMEWORK = "openclaw"; const DEFAULT_DESCRIPTION = "test agent"; From 7269dc49a2f3f818ddbe5d6067313ec92cc189c3 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 21:11:07 +0530 Subject: [PATCH 094/190] feat: harden relay onboarding, pairing, and skill state discovery --- ARCHITECTURE.md | 555 +++++++++++++++ LICENSE | 21 + PRD.md | 243 ------- README.md | 663 +++--------------- apps/cli/package.json | 2 +- apps/cli/src/AGENTS.md | 2 + apps/cli/src/commands/AGENTS.md | 5 + apps/cli/src/commands/openclaw.test.ts | 147 ++++ apps/cli/src/commands/openclaw.ts | 331 ++++++++- apps/cli/src/commands/pair.test.ts | 105 ++- apps/cli/src/commands/pair.ts | 100 ++- apps/openclaw-skill/AGENTS.md | 4 +- apps/openclaw-skill/skill/SKILL.md | 153 +++- .../skill/examples/openclaw-relay-sample.json | 5 + .../skill/examples/peers-sample.json | 10 + .../references/clawdentity-environment.md | 53 ++ .../skill/references/clawdentity-protocol.md | 149 ++-- .../skill/references/clawdentity-registry.md | 11 + apps/proxy/.env.example | 2 +- apps/proxy/src/AGENTS.md | 2 + apps/proxy/src/config.test.ts | 30 + apps/proxy/src/config.ts | 56 +- apps/proxy/src/node-server.ts | 5 +- apps/proxy/src/pairing-route.ts | 5 +- apps/proxy/src/pairing-ticket.ts | 13 +- apps/proxy/src/proxy-trust-state.ts | 22 +- apps/proxy/src/proxy-trust-store.ts | 32 +- apps/proxy/src/worker.test.ts | 64 +- apps/proxy/src/worker.ts | 4 +- apps/registry/.env.example | 4 +- apps/registry/src/AGENTS.md | 1 + apps/registry/src/server.test.ts | 50 +- apps/registry/src/server.ts | 4 +- apps/registry/wrangler.jsonc | 2 + assets/banner.png | Bin 0 -> 167993 bytes packages/sdk/AGENTS.md | 1 + packages/sdk/src/config.test.ts | 49 ++ packages/sdk/src/config.ts | 75 +- sparkling-sauteeing-brook.md | 364 ---------- 39 files changed, 2009 
insertions(+), 1335 deletions(-) create mode 100644 ARCHITECTURE.md create mode 100644 LICENSE delete mode 100644 PRD.md create mode 100644 apps/openclaw-skill/skill/examples/openclaw-relay-sample.json create mode 100644 apps/openclaw-skill/skill/examples/peers-sample.json create mode 100644 apps/openclaw-skill/skill/references/clawdentity-environment.md create mode 100644 assets/banner.png delete mode 100644 sparkling-sauteeing-brook.md diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md new file mode 100644 index 0000000..d180e28 --- /dev/null +++ b/ARCHITECTURE.md @@ -0,0 +1,555 @@ +# Clawdentity Architecture + +Deep technical reference for Clawdentity's protocol, verification pipeline, security model, and deployment. + +For an overview and quick start, see [README.md](./README.md). + +--- + +## Table of Contents + +- [Agent-to-Agent Communication: Complete Flow](#agent-to-agent-communication-complete-flow) + - [Overview](#overview) + - [Step 1: Human Onboarding (Invite-Gated)](#step-1-human-onboarding-invite-gated) + - [Step 2: Agent Identity Creation (Challenge-Response)](#step-2-agent-identity-creation-challenge-response) + - [Step 3: Peer Routing Setup](#step-3-peer-routing-setup-out-of-band-metadata) + - [Step 4: First Message](#step-4-first-message-bob--alice) +- [Verification Pipeline](#verification-pipeline) +- [Operator Controls](#operator-controls) +- [What Gets Shared](#what-gets-shared-and-what-never-should) +- [Core Features (MVP)](#core-features-mvp) +- [Discovery Mechanisms](#discovery-how-first-contact-happens) +- [Security Architecture](#security-architecture-mvp) +- [Deployment](#deployment) +- [MVP Goals](#mvp-goals) + +--- + +## Why This Exists (OpenClaw Reality) + +OpenClaw webhooks are a great transport layer, but they authenticate using a **single shared webhook token**. OpenClaw requires `hooks.token` when hooks are enabled, and inbound calls must provide the token (e.g., `Authorization: Bearer ...` or `x-openclaw-token: ...`). 
+OpenClaw docs: https://docs.openclaw.ai/automation/webhook + +That means "just replace Bearer with Claw" does **not** work without upstream changes. + +### MVP Integration Approach (No OpenClaw Fork) + +For MVP, Clawdentity runs as a **proxy/sidecar** in front of OpenClaw: + +``` +Caller Agent + | + | Authorization: Claw + X-Claw-Proof/Nonce/Timestamp + v +Clawdentity Proxy (verifies identity + trust policy + rate limits) + | + | x-openclaw-token: (internal only) + v +OpenClaw Gateway (normal /hooks/agent handling) +``` + +**What happens to the OpenClaw hooks token?** + +- It stays **private** on the gateway host. +- Only the proxy uses it to forward requests to OpenClaw. +- You never share it with other humans/agents. + +--- + +## Agent-to-Agent Communication: Complete Flow + +This section walks through **every step** from zero to two OpenClaw agents exchanging their first message. Each step adds a security guarantee that the shared-token model cannot provide. + +### Overview + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ CLAWDENTITY REGISTRY │ +│ │ +│ Issues identities (AIT) · Publishes revocation list (CRL) │ +│ Validates agent auth · Manages invite-gated onboarding │ +└───────────────┬─────────────────────────────────┬──────────────────────┘ + │ │ + issues AIT + auth issues AIT + auth + │ │ + ┌───────────▼──────────┐ ┌───────────▼──────────┐ + │ AGENT ALICE │ │ AGENT BOB │ + │ (OpenClaw + keys) │ │ (OpenClaw + keys) │ + │ │ │ │ + │ Ed25519 keypair │ │ Ed25519 keypair │ + │ AIT (signed passport)│ │ AIT (signed passport│ + │ Auth tokens │ │ Auth tokens │ + └───────────┬───────────┘ └──────────┬───────────┘ + │ │ + signs every request signs every request + with private key with private key + │ │ + ┌───────────▼──────────┐ ┌───────────▼──────────┐ + │ ALICE'S PROXY │◄─────────│ Bob sends signed │ + │ (Cloudflare Worker) │ HTTP POST│ request to Alice │ + │ │ │ │ + │ Verifies identity │ └──────────────────────┘ + │ Checks 
revocation │ + │ Enforces trust pairs │ + │ Rejects replays │ + │ Rate limits per agent│ + └───────────┬───────────┘ + │ + only verified requests + reach OpenClaw + │ + ┌───────────▼──────────┐ + │ ALICE'S OPENCLAW │ + │ (localhost, private) │ + │ │ + │ Receives message │ + │ Never exposed to │ + │ public internet │ + └───────────────────────┘ +``` + +### Step 1: Human Onboarding (Invite-Gated) + +An admin creates an invite code. A new operator redeems it to get API access. + +``` +Admin Registry + │ │ + │ clawdentity invite create │ + │──────────────────────────────►│ Generates clw_inv_ + │◄──────────────────────────────│ Stores with optional expiry + │ │ + │ Shares invite code │ + │ out-of-band (email, etc.) │ + │ │ + +New Operator Registry + │ │ + │ clawdentity invite redeem │ + │──────────────────────────────►│ Creates human account + │◄──────────────────────────────│ Issues API key (shown once) + │ │ + │ Stores API key locally │ +``` + +**Security:** Invite codes are single-use and time-limited. One agent per invite prevents bulk abuse. + +### Step 2: Agent Identity Creation (Challenge-Response) + +The operator creates an agent identity. The private key **never leaves the machine**. + +``` +CLI (operator's machine) Registry + │ │ + │ 1. Generate Ed25519 keypair │ + │ (secret.key stays local) │ + │ │ + │ 2. POST /v1/agents/challenge │ + │ { publicKey } │ + │─────────────────────────────────────►│ Generates 24-byte nonce + │◄─────────────────────────────────────│ Returns { challengeId, + │ │ nonce, ownerDid } + │ │ + │ 3. Sign canonical proof with │ + │ private key (proves ownership) │ + │ │ + │ 4. 
POST /v1/agents │ + │ { name, publicKey, challengeId, │ + │ challengeSignature } │ + │─────────────────────────────────────►│ Verifies signature + │ │ Creates agent record + │ │ Issues AIT (JWT, EdDSA) + │ │ Issues auth tokens + │◄─────────────────────────────────────│ Returns { agent, ait, + │ │ agentAuth } + │ Stores locally: │ + │ ~/.clawdentity/agents// │ + │ ├── secret.key (private, 0600) │ + │ ├── public.key │ + │ ├── ait.jwt (signed passport) │ + │ ├── identity.json │ + │ └── registry-auth.json │ +``` + +**Security:** Challenge-response proves the operator holds the private key without ever transmitting it. The 5-minute challenge window prevents delayed replay. Each challenge is single-use. + +**What's in the AIT (Agent Identity Token):** + +| Claim | Purpose | +|-------|---------| +| `sub` | Agent DID (`did:claw:agent:`) — unique identity | +| `ownerDid` | Human DID — who owns this agent | +| `cnf.jwk.x` | Agent's public key — for verifying PoP signatures | +| `jti` | Token ID — for revocation tracking | +| `iss` | Registry URL — who vouches for this identity | +| `exp` | Expiry — credential lifetime (1-90 days) | + +### Step 3: Peer Routing Setup (Out-of-Band Metadata) + +Operators exchange peer metadata out-of-band (alias, DID, proxy URL). No relay invite code is required. + +``` +Alice's Operator Bob's Operator + │ │ + │ Shares metadata out-of-band ─────────►│ + │ alias, DID, proxy URL │ + │ │ + │ │ clawdentity openclaw setup + │ │ bob --peer-alias alice + │ │ --peer-did did:claw:agent:... + │ │ --peer-proxy-url https://alice-proxy/hooks/agent + │ │ + │ │ Stores peer in peers.json: + │ │ { "alice": { + │ │ "did": "did:claw:agent:...", + │ │ "proxyUrl": "https://..." + │ │ }} + │ │ + │ │ Installs relay transform + │ │ Configures OpenClaw hooks +``` + +**Security:** Setup uses only public peer metadata (DID + proxy URL + alias). No keys, tokens, or secrets are exchanged. 
Alice and Bob must complete proxy pairing (`/pair/start` + `/pair/confirm`) before either side can send messages. + +### Step 4: First Message (Bob → Alice) + +Bob's OpenClaw triggers the relay. Every request is cryptographically signed. + +``` +Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's OpenClaw + │ │ │ │ + │ Hook trigger: │ │ │ + │ { peer: "alice", │ │ │ + │ message: "Hi!" } │ │ │ + │─────────────────────►│ │ │ + │ │ │ │ + │ 1. Load Bob's credentials │ │ + │ (secret.key, ait.jwt) │ │ + │ 2. Look up "alice" in │ │ + │ peers.json → proxy URL │ │ + │ 3. Sign HTTP request: │ │ + │ ┌─────────────────────┐ │ │ + │ │ Canonical string: │ │ │ + │ │ POST /hooks/agent │ │ │ + │ │ timestamp: │ │ │ + │ │ nonce: │ │ │ + │ │ body-sha256: │ │ │ + │ │ │ │ │ + │ │ Ed25519.sign(canon, │ │ │ + │ │ secretKey) → proof │ │ │ + │ └─────────────────────┘ │ │ + │ 4. Send signed request: │ │ + │ POST https://alice-proxy/hooks/agent │ + │ Authorization: Claw │ │ + │ X-Claw-Timestamp: │ │ + │ X-Claw-Nonce: │ │ + │ X-Claw-Body-SHA256: │ │ + │ X-Claw-Proof: │ │ + │ X-Claw-Agent-Access: │ │ + │ │─────────────────────►│ │ + │ │ │ │ + │ │ VERIFICATION PIPELINE │ + │ │ ───────────────────── │ + │ │ ① Verify AIT signature │ + │ │ (registry EdDSA keys) │ + │ │ ② Check timestamp skew │ + │ │ (max ±300 seconds) │ + │ │ ③ Verify PoP signature │ + │ │ (Ed25519 from AIT cnf key) │ + │ │ ④ Reject nonce replay │ + │ │ (per-agent nonce cache) │ + │ │ ⑤ Check CRL revocation │ + │ │ (signed list from registry) │ + │ │ ⑥ Enforce trust pair │ + │ │ (is Bob trusted for Alice?) │ + │ │ ⑦ Validate agent access token │ + │ │ (POST to registry) │ + │ │ │ │ + │ │ │ ALL CHECKS PASSED │ + │ │ │ │ + │ │ │ Forward to OpenClaw: │ + │ │ │ POST /hooks/agent │ + │ │ │ x-openclaw-token: │ + │ │ │──────────────────────►│ + │ │ │ │ Message + │ │ │◄──────────────────────│ delivered! 
+ │ │◄─────────────────────│ 202 │ + │◄─────────────────────│ │ │ +``` + +--- + +## Verification Pipeline + +### What Gets Verified (and When It Fails) + +| Check | Failure | HTTP Status | Meaning | +|-------|---------|-------------|---------| +| AIT signature | `PROXY_AUTH_INVALID_AIT` | 401 | Token is forged or tampered | +| Timestamp skew | `PROXY_AUTH_TIMESTAMP_SKEW` | 401 | Request is too old or clock is wrong | +| PoP signature | `PROXY_AUTH_INVALID_PROOF` | 401 | Sender doesn't hold the private key | +| Nonce replay | `PROXY_AUTH_REPLAY` | 401 | Same request was sent twice | +| CRL revocation | `PROXY_AUTH_REVOKED` | 401 | Agent identity has been revoked | +| Trust policy | `PROXY_AUTH_FORBIDDEN` | 403 | Agent is valid but not trusted for this recipient | +| Agent access token | `PROXY_AGENT_ACCESS_INVALID` | 401 | Session token expired or revoked | +| Rate limit | `PROXY_RATE_LIMIT_EXCEEDED` | 429 | Too many requests from this agent | + +--- + +## Operator Controls + +### Sender Side Operator (Owner/Admin) + +- Action: `clawdentity agent revoke ` +- Scope: **global** (registry-level identity revocation) +- Effect: every receiving proxy rejects that revoked token once CRL refreshes. +- Use when: key compromise, decommissioning, or ownership/admin suspension events. + +### Receiver Side Operator (Callee Gateway Owner) + +- Action: remove/deny trusted caller pair in local proxy trust state (or keep approval-required first contact) +- Scope: **local only** (that specific gateway/proxy) +- Effect: caller is blocked on this gateway immediately, but remains valid elsewhere unless globally revoked. +- Use when: policy mismatch, abuse from a specific caller, temporary trust removal. + +### Key Distinction + +- **Global revoke** = sender owner/admin authority at registry. +- **Local block** = receiver operator authority at their own gateway. +- Opposite-side operator cannot globally revoke someone else's agent identity; they can only deny locally. 
+ +### Incident Response Pattern + +1. Receiver blocks caller locally for immediate containment. +2. Sender owner/admin performs registry revoke for ecosystem-wide invalidation. +3. Proxies return: + - `401` for invalid/expired/revoked identity + - `403` for valid identity that is not trusted locally for the target recipient + +--- + +## What Gets Shared (and What Never Should) + +- Shared **in-band** on each request: **AIT + PoP proof headers** +- Shared publicly: registry signing public keys + CRL (signed, cacheable) +- **Never shared**: the agent's **private key** or identity folder + +--- + +## Core Features (MVP) + +### 1) Identity Issuance and Verification + +- Handled by: `apps/registry`, `packages/sdk` +- Registry issues signed AITs tied to agent DID + owner DID. +- Registry publishes verification material (`/.well-known/claw-keys.json`) and signed CRL. +- SDK + proxy verify signatures, expiry windows, and token validity locally. + +### 2) Request-Level Proof and Replay Protection + +- Handled by: `packages/sdk`, `apps/proxy` +- Each request carries PoP-bound headers: + - `Authorization: Claw ` + - `X-Claw-Timestamp` + - `X-Claw-Nonce` + - `X-Claw-Body-SHA256` + - `X-Claw-Proof` +- Proxy rejects tampered payloads, nonce replays, and stale timestamps. + +### 3) Proxy Enforcement Before OpenClaw + +- Handled by: `apps/proxy` +- Proxy Worker verifies AIT + CRL + PoP before forwarding to OpenClaw. +- Enforces durable trust pairs for sender/recipient DID. +- Applies per-agent rate limiting. +- Keeps `hooks.token` private and only injects it internally during forward. +- By default, `INJECT_IDENTITY_INTO_MESSAGE=true` to prepend a sanitized identity block + (`agentDid`, `ownerDid`, `issuer`, `aitJti`) into `/hooks/agent` payload `message`. + Set `INJECT_IDENTITY_INTO_MESSAGE=false` to keep payloads unchanged. + +### 4) Operator Lifecycle Tooling (CLI) + +- Handled by: `apps/cli` +- `clawdentity agent create` for local keypair + registry registration. 
+- `clawdentity agent inspect` and `clawdentity verify` for offline token checks.
+- `clawdentity agent revoke` for kill switch workflows.
+- `clawdentity api-key create` to mint a new PAT (token shown once).
+- `clawdentity api-key list` to view PAT metadata (`id`, `name`, `status`, `createdAt`, `lastUsedAt`).
+- `clawdentity api-key revoke <id>` to invalidate a PAT without rotating unrelated keys.
+- `clawdentity share` for contact-card exchange (DID, verify URL, endpoint).
+- `clawdentity connector start <name>` to run local relay connector runtime.
+- `clawdentity connector service install <name>` to configure connector autostart after reboot/login (`launchd` on macOS, `systemd --user` on Linux).
+- `clawdentity connector service uninstall <name>` to remove connector autostart service.
+- `clawdentity skill install` to install/update OpenClaw relay skill artifacts under `~/.openclaw`.
+
+### 5) Onboarding and Control Model
+
+- Handled by: `apps/registry`, `apps/cli`
+- Invite-gated registration model with admin-issued invite codes.
+- One-agent-per-invite policy for simple quota and abuse control.
+- Feature work follows a deployment-first gate tracked in GitHub issues.
+
+### 6) Discovery and First-Contact Options
+
+- Handled by: `apps/registry`, `apps/proxy`, `apps/cli`
+- Out-of-band contact card sharing.
+- Registry `gateway_hint` resolution.
+- Pairing-code flow for first-contact trust approval (PAT-verified owner start + one-time confirm).
+ +--- + +## OpenClaw Skill Install + +Expected operator flow starts from the CLI command: + +```bash +clawdentity skill install +``` + +Installer logic prepares OpenClaw runtime artifacts automatically: +- `~/.openclaw/skills/clawdentity-openclaw-relay/SKILL.md` +- `~/.openclaw/skills/clawdentity-openclaw-relay/references/*` +- `~/.openclaw/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` +- `~/.openclaw/hooks/transforms/relay-to-peer.mjs` + +Install is idempotent and logs deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). +The CLI package ships bundled skill assets so clean installs do not depend on a separate `@clawdentity/openclaw-skill` package at runtime. + +### CLI npm Release (Manual) + +- GitHub workflow: `.github/workflows/publish-cli.yml` +- Trigger: `workflow_dispatch` with inputs: + - `version` (semver, required) + - `dist_tag` (default `latest`) +- Required GitHub secret: `NPM_TOKEN` +- Publish target: npm package `clawdentity` +- Workflow runs CLI lint/typecheck/test/build before publishing. + +--- + +## Discovery (How First Contact Happens) + +MVP supports three ways to "find" another agent: + +1. **Out-of-band share**: human shares a contact card (verify link + endpoint URL) +2. **Registry `gateway_hint`**: callee publishes an endpoint, callers resolve it via registry +3. **Pairing code** (proxy): "Approve first contact" to establish a mutual trusted agent pair + +No one shares keys/files between agents. Identity is presented per request. + +--- + +## Security Architecture (MVP) + +### Trust Boundaries and Sensitive Assets + +- **Agent private key**: secret, local only, never leaves agent machine. +- **Registry signing key**: secret, server-side only, signs AIT and CRL. +- **OpenClaw `hooks.token`**: secret, only present on gateway host/proxy. +- **AIT + PoP headers**: transmitted per request, safe to share in-band. + +### Threats Addressed + +- Do not expose OpenClaw webhooks directly to the public internet. 
Follow OpenClaw guidance (loopback, tailnet, trusted reverse proxy). + Docs: https://docs.openclaw.ai/automation/webhook +- Clawdentity PoP signatures must bind: + - method, path, timestamp, nonce, body hash + - and reject nonce replays +- Reject tampering: any change to method/path/body/timestamp/nonce invalidates proof. +- Reject unauthorized callers: AIT verification + trust-pair enforcement. +- Reject compromised identities quickly: CRL-based revocation checks. +- Contain abuse: per-agent rate limits at proxy boundary. + +### Security Guarantees and Limits + +- Guarantees: + - caller identity can be cryptographically verified + - caller ownership is traceable via token claims + - revocation can be enforced without rotating shared OpenClaw token +- Limits: + - if the endpoint that holds the agent private key is compromised, attacker can sign as that agent until revocation + - if CRL refresh is delayed, enforcement follows configured staleness policy (`fail-open` or `fail-closed`) + +### Safe Defaults and Operator Guidance + +- Treat any identity fields (agent name/description) as untrusted input; never allow prompt injection via identity metadata. +- Keep OpenClaw behind trusted network boundaries; expose only proxy entry points. +- Rotate PATs and audit trusted pair entries regularly. +- Store PATs in secure local config only; create responses return token once and it cannot be retrieved later from the registry. +- Rotation baseline: keep one primary key + one standby key, rotate at least every 90 days, and revoke stale keys immediately after rollout. 
+ +--- + +## Deployment + +### Repo Layout + +Nx monorepo with pnpm workspaces: + +``` +clawdentity/ +├── apps/ +│ ├── registry/ — Identity registry (Cloudflare Worker) +│ │ Issues AITs, serves CRL + public keys +│ │ Worker config: apps/registry/wrangler.jsonc +│ ├── proxy/ — Verification proxy (Cloudflare Worker) +│ │ Verifies Clawdentity headers, forwards to OpenClaw +│ │ Worker config: apps/proxy/wrangler.jsonc +│ ├── cli/ — Operator CLI +│ │ Agent create/revoke, invite, api-key, config +│ └── openclaw-skill/ — OpenClaw skill integration +│ Relay transform for agent-to-agent messaging +├── packages/ +│ ├── protocol/ — Canonical types + signing rules +│ │ AIT claims, DID format, HTTP signing, endpoints +│ └── sdk/ — TypeScript SDK +│ Sign/verify, CRL cache, auth client, crypto +└── Configuration + ├── nx.json — Monorepo task orchestration + ├── pnpm-workspace.yaml + └── tsconfig.base.json +``` + +### Proxy Worker Local Runs + +- Development env (`ENVIRONMENT=development`): `pnpm dev:proxy` +- Local env (`ENVIRONMENT=local`): `pnpm dev:proxy:local` +- Fresh deploy-like env: `pnpm dev:proxy:fresh` +- Development deploy command: `pnpm -F @clawdentity/proxy run deploy:dev` +- Production deploy command: `pnpm -F @clawdentity/proxy run deploy:production` +- Environment intent: `local` is local Wrangler development only; `development` and `production` are cloud deployment environments. 
+ +### Registry Worker Local Runs + +- Development env (`ENVIRONMENT=development`): `pnpm dev:registry` +- Development env with local D1 migration apply: `pnpm dev:registry:local` + +### Deploy Automation + +- GitHub workflow: `.github/workflows/deploy-develop.yml` +- Trigger: push to `develop` +- Runs full quality gates, then deploys: + - registry (`apps/registry`, env `dev`) with D1 migrations + - proxy (`apps/proxy`, env `dev`) +- Health checks must pass with `version == $GITHUB_SHA` for: + - `https://dev.registry.clawdentity.com/health` + - deployed proxy `/health` URL (workers.dev URL extracted from wrangler output, or optional `PROXY_HEALTH_URL` secret override) +- Required GitHub secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID` + +--- + +## MVP Goals + +1. **Create agent identity** (local keypair + registry-issued AIT) +2. **Send signed requests** (PoP per request, replay-resistant) +3. **Verify locally** (signature + expiry + cached CRL) +4. **Kill switch** (revoke → proxy rejects within CRL refresh window) +5. 
**Discovery** (share endpoint + verify link; optional pairing code) + +--- + +## Further Reading + +- **[README.md](./README.md)** — overview, quick start, and comparison +- **[PRD.md](./PRD.md)** — MVP product requirements and rollout strategy +- **Execution and issue governance:** [GitHub issue tracker](https://github.com/vrknetha/clawdentity/issues/74) diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f795f30 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Ravi Kiran Vemula + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/PRD.md b/PRD.md deleted file mode 100644 index 4d4dac0..0000000 --- a/PRD.md +++ /dev/null @@ -1,243 +0,0 @@ -# PRD — Clawdentity MVP (v0.1) - -**Last updated:** 2026-02-17 -**Owner:** Ravi Kiran Vemula -**Status:** Ready for execution (deployment-first gate enabled) -**Primary target:** OpenClaw Gateway webhooks (`/hooks/*`) -**OpenClaw docs reference:** https://docs.openclaw.ai/automation/webhook - ---- - -## 1) Problem - -OpenClaw webhooks are authenticated via a **single shared webhook token** (`hooks.token`). When hooks are enabled, OpenClaw requires this token on inbound requests (e.g., `Authorization: Bearer ` or `x-openclaw-token: `). This creates: - -- **No identity:** receiver can’t prove which agent called -- **No accountability:** can’t answer “who owns this agent?” -- **No kill switch:** compromise persists unless token rotated everywhere - ---- - -## 2) Goal (MVP) - -Deliver a minimal identity layer that answers: - -> “Who is this agent, who owns it, and is it revoked?” - -…and make it work with OpenClaw **without forking OpenClaw**. - ---- - -## 3) MVP approach (OpenClaw-aligned) - -Because OpenClaw requires `hooks.token` and expects Bearer/token auth for `/hooks/*`, MVP uses a **Clawdentity Proxy** in front of OpenClaw: - -- External callers authenticate to the proxy using **Clawdentity identity headers** -- Proxy verifies identity locally (AIT signature + expiry + cached CRL + PoP) -- Proxy forwards the request to OpenClaw with `x-openclaw-token: ` - -**Important:** The OpenClaw webhook token is never shared externally. 
- ---- - -## 4) Users - -### Personas -1) **Gateway Operator:** runs OpenClaw; wants to allow only verified callers -2) **Agent Developer:** needs simple tooling to sign outbound calls -3) **Relying-party Service:** wants local verification + revocation checks - ---- - -## 5) Scope - -### In scope (MVP) -- **Registry** - - Create agent identity: register public key, issue AIT - - Cloudflare Worker runtime config lives at `apps/registry/wrangler.jsonc` - - Publish registry signing public keys (`/.well-known/claw-keys.json`) - - Revoke agent → CRL - - CRL endpoint (signed) - - Optional: `gateway_hint` storage + public resolve - -- **SDK (TypeScript)** - - Generate/load agent keypair - - Sign requests (PoP) with replay protection - - Verify AIT (offline signature verification) - - CRL caching & revocation checks - -- **CLI** - - Create agent (`claw agent create`) - - Revoke agent (`claw agent revoke`) - - Inspect token (`claw agent inspect`) - - Verify token (`claw verify`) - - Personal PAT lifecycle (`clawdentity api-key create|list|revoke`) - - Share contact card (`claw share`) - - CLI skill install path (`clawdentity skill install`) that prepares OpenClaw relay skill artifacts automatically - -- **Proxy** - - Verify inbound Clawdentity headers - - Enforce durable trust-pair rules for sender/recipient agent DIDs - - Rate-limit per verified agent DID - - Forward to OpenClaw `/hooks/agent` with `x-openclaw-token` - -- **Discovery** - - Share-by-contact-card (verify link + endpoint) - - Resolve `gateway_hint` from registry (optional) - - Pairing code (`/pair/start` + `/pair/confirm`) for trust bootstrap - -- **Onboarding / access control** - - Invite-gated user registration (`register --invite`) - - One agent slot per invite code - - Admin invite management workflow - -### Out of scope (MVP) -- Organizations, org roles -- Public search discovery (`/discover?q=`), badges -- WebSocket revocation push (polling only) -- Permissions/scopes/delegation chains -- “Immutable 
signed audit log” claims - ---- - -## 6) Functional requirements - -### 6.1 AIT (Agent Identity Token) -- JWT (JWS), `alg=EdDSA`, `typ=AIT` -- Payload must include: - - `iss`, `sub` (agent DID), `owner` (human DID) - - `agent_pubkey` or `cnf` - - `iat`, `nbf`, `exp`, `jti` - - `name` (strict validation), `framework` -- **One active AIT per agent DID** - - Reissue/rotate automatically revokes the previous `jti` - -### 6.2 CRL (Revocation List) -- Signed token (JWT JWS), `typ=CRL` -- Contains list of revoked `jti`s (+ metadata) -- Clients cache and refresh at default **300 seconds** -- Configurable stale behavior: fail-open vs fail-closed - -### 6.3 PoP request signing (replay-resistant) -Headers required: -- `Authorization: Claw ` -- `X-Claw-Timestamp: ` -- `X-Claw-Nonce: ` -- `X-Claw-Body-SHA256: ` -- `X-Claw-Proof: ` - -Verifier must enforce: -- timestamp max skew (default 300s) -- nonce replay cache (default 5 minutes) -- proof signature verifies against pubkey in AIT -- reject if AIT is revoked - -### 6.4 Proxy → OpenClaw forwarding -- Proxy forwards to `${openclawBaseUrl}/hooks/agent` -- Adds OpenClaw hook token internally: - - `x-openclaw-token: ` - -### 6.5 OpenClaw behavioral constraints -- `/hooks/agent` is async and returns **202** (job started) -- Multi-turn continuity uses `sessionKey` field - -### 6.6 Invite-gated user model -- Bootstrap creates the first admin + PAT -- Admin creates invite codes with optional expiry -- Registration requires a valid invite code -- One invite maps to one agent slot -- Admin may suspend a human, which triggers agent revocation flow - -### 6.7 Personal PAT lifecycle (post-bootstrap) -- Authenticated humans can mint additional PATs for safe key rotation. -- Registry exposes: - - `POST /v1/me/api-keys` (create, plaintext token returned once) - - `GET /v1/me/api-keys` (metadata only) - - `DELETE /v1/me/api-keys/:id` (revoke) -- Revoked PATs must fail auth with `401 API_KEY_REVOKED`. 
-- Unrelated active PATs must continue to authenticate after targeted key revocation. - -### 6.8 OpenClaw skill install command -- Installer runs explicitly via `clawdentity skill install`. -- Installer must prepare these artifacts without manual copy steps: - - `SKILL.md` - - `references/*` - - `relay-to-peer.mjs` in managed skill path and hooks transform path -- Runtime installs must not depend on sibling workspace packages; required skill assets are bundled with the CLI package. -- Re-running install must be idempotent and safe. -- Missing source artifacts must fail with actionable errors. - -### 6.9 Deployment and release automation -- `develop` deploy workflow must deploy both registry and proxy after full quality gates pass. -- Registry deploy must run D1 migrations before Worker deploy. -- Registry and proxy `/health` checks must validate: - - `status = "ok"` - - `environment = "development"` - - `version = git commit SHA` passed via `APP_VERSION`. -- CLI release must use manual GitHub workflow dispatch with explicit semver version input. -- Published npm package must be `clawdentity` and must not include `workspace:*` runtime dependencies. 
- ---- - -## 7) Non-functional requirements - -- **Setup time:** < 10 minutes to first verified call -- **Propagation:** revocation enforced within CRL refresh window -- **Reliability:** verifier works when registry is temporarily unavailable (uses cached keys/CRL) -- **Security:** replay protection must be implemented (nonce + cache) - ---- - -## 8) Success criteria - -- Valid caller → proxy forwards → OpenClaw returns 202 -- Invalid/expired/revoked token → proxy returns 401 -- Valid but not trusted for recipient pair → proxy returns 403 -- Replay within time window is rejected (nonce reuse) -- Revocation causes rejection within next CRL refresh - ---- - -## 9) Rollout plan - -1) Establish workspace and deployment baseline -2) Deploy and verify `develop` baseline environments and health checks (registry + proxy) -3) Establish manual npm release gate for `clawdentity` CLI package -4) Execute MVP feature backlog after deployment and release gates pass -5) Execute Phase 2/3 enhancements from HLD after MVP stability - ---- - -## 10) Execution plan - -Execution sequencing, dependency management, and wave planning are maintained in the GitHub issue tracker. - -Primary tracker: https://github.com/vrknetha/clawdentity/issues/74. - -Governance rules: -- Treat GitHub issues as the source of truth for rollout order and blockers. -- Record dependency or wave changes in tracker issues at the time of change. -- Keep this PRD and `README.md` aligned with tracker-level execution decisions. 
- ---- - -## 11) Deferred items (post-MVP) - -- Web UI for revocation operations -- Pairing flow automation beyond base implementation -- Outbound relay and contact book -- Conversation threading headers (`X-Claw-Conversation-Id`, `X-Claw-Reply-To`) -- OpenClaw skill and optional first-class agent channel - ---- - -## 12) Verification plan - -1) Unit tests for protocol helpers, SDK crypto, JWT handling, nonce cache, CRL cache -2) Integration tests for registry routes (Workers emulator) and proxy pipeline -3) E2E flow: CLI create -> signed call -> proxy verify -> OpenClaw `202` -4) Revocation propagation test within CRL refresh window -5) Replay attack rejection via nonce reuse checks -6) CI gate: lint -> typecheck -> test -> build -7) Deploy gate: registry and proxy health checks validate `APP_VERSION == github.sha` in `develop` -8) Release gate: `publish-cli.yml` validates CLI package and publishes `clawdentity` with npm provenance diff --git a/README.md b/README.md index 00ddf89..1bd2f8f 100644 --- a/README.md +++ b/README.md @@ -1,633 +1,160 @@ -# Clawdentity +

+ Clawdentity +

-Verified identity + revocation for AI agents — starting with **OpenClaw**. +

Clawdentity

-Clawdentity solves one question for agent-to-agent / agent-to-service calls: +

+ Verified identity + instant revocation for AI agents — starting with OpenClaw. +

-> **“Who is this agent, who owns it, and is it revoked?”** - -It does this with: - -- **AIT (Agent Identity Token)**: a registry-signed passport (JWT / EdDSA) -- **PoP (Proof-of-Possession)**: every request is signed with the agent’s private key -- **CRL (Revocation List)**: a signed revocation feed clients cache and refresh +

+ npm version + MIT License + Node 22+ + TypeScript +

--- -## Problem statement - -OpenClaw webhook auth is built around a shared gateway token. That works for transport, but not for identity-aware agent systems. -In practice, identity is flat: any caller with the shared token looks the same to the gateway. - -Current pain points: +## The Problem -- **Shared-secret blast radius:** if one token leaks, any caller can impersonate a trusted agent until rotation. -- **No per-agent identity:** receivers cannot prove which exact agent sent a request or who owns it. -- **Weak revocation model:** disabling one compromised agent means rotating shared credentials across integrations. -- **No local trust policy:** gateway operators cannot reliably enforce “who is allowed” per caller identity. +OpenClaw webhook auth uses a **single shared gateway token**. That works for transport, but breaks down for identity-aware agent systems: -What Clawdentity adds: - -- Verifiable per-agent identity (AIT + PoP) -- Fast revocation propagation (signed CRL + cache refresh) -- Proxy-side policy enforcement (trust pairs + rate limits + replay protection) - ---- +- **Shared-secret blast radius** — if one token leaks, any caller can impersonate a trusted agent until rotation +- **No per-agent identity** — receivers cannot prove which exact agent sent a request or who owns it +- **Weak revocation** — disabling one compromised agent means rotating shared credentials across all integrations +- **No local trust policy** — gateway operators cannot enforce "who is allowed" per caller identity +- **Public exposure trade-off** — for agent-to-agent communication, you need a public endpoint; without a proxy layer, that means exposing OpenClaw directly or sharing the webhook token with every caller -## Why this exists (OpenClaw reality) +## What Clawdentity Does -OpenClaw webhooks are a great transport layer, but they authenticate using a **single shared webhook token**. 
OpenClaw requires `hooks.token` when hooks are enabled, and inbound calls must provide the token (e.g., `Authorization: Bearer ...` or `x-openclaw-token: ...`). -OpenClaw docs: https://docs.openclaw.ai/automation/webhook +Clawdentity works **with** OpenClaw (not a fork) and adds the missing identity layer for agent-to-agent trust: -That means “just replace Bearer with Claw” does **not** work without upstream changes. +- **Per-agent identity** — each agent gets a unique DID and registry-signed passport (AIT) +- **Request-level signing** — every request is cryptographically signed with a proof-of-possession (PoP) header +- **Instant revocation** — revoke one agent via signed CRL without rotating any shared tokens +- **Proxy enforcement** — trust-pair policies, per-agent rate limits, and replay protection at the gateway boundary +- **OpenClaw stays private** — the proxy is the only public-facing endpoint; your OpenClaw instance stays on localhost and the webhook token is never shared +- **QR-code pairing** — one-scan first-contact trust approval between agents -### MVP integration approach (no OpenClaw fork) - -For MVP, Clawdentity runs as a **proxy/sidecar** in front of OpenClaw: +## How It Works ``` Caller Agent - | - | Authorization: Claw + X-Claw-Proof/Nonce/Timestamp - v -Clawdentity Proxy (verifies identity + trust policy + rate limits) - | - | x-openclaw-token: (internal only) - v -OpenClaw Gateway (normal /hooks/agent handling) -``` - -**What happens to the OpenClaw hooks token?** - -- It stays **private** on the gateway host. -- Only the proxy uses it to forward requests to OpenClaw. -- You never share it with other humans/agents. - ---- - -## How it works (end-to-end) - -### 1) Agent identity provisioning - -- Operator runs CLI to create an agent identity. -- Registry stores the public key and issues a signed AIT. -- Agent keeps private key locally; registry never sees it. 
- -### 2) Outbound request signing - -- SDK creates PoP headers for each request: - - `Authorization: Claw ` - - `X-Claw-Timestamp` - - `X-Claw-Nonce` - - `X-Claw-Body-SHA256` - - `X-Claw-Proof` -- Proof signature is bound to method, path, timestamp, nonce, and body hash. - -### 3) Proxy verification pipeline - -- Proxy validates AIT signature against registry keys. -- Proxy checks AIT expiry and CRL revocation status. -- Proxy verifies PoP signature against the key in the token. -- Proxy rejects replay via timestamp skew + nonce cache. -- Proxy enforces trust-pair policy and rate limits. - -### 4) Forward to OpenClaw - -- Only verified and authorized requests are forwarded. -- Proxy injects internal `x-openclaw-token` to call OpenClaw `/hooks/agent`. -- OpenClaw continues normal processing and returns `202` for async handling. - -### 5) Revocation flow - -- Agent owner or admin revokes an agent at the registry. -- Registry publishes revocation in signed CRL. -- Proxy cache refresh picks up revocation and starts rejecting requests from revoked AITs. - ---- - -## Agent-to-Agent Communication: Complete Flow - -This section walks through **every step** from zero to two OpenClaw agents exchanging their first message. Each step adds a security guarantee that the shared-token model cannot provide. 
- -### Overview - -``` -┌─────────────────────────────────────────────────────────────────────────┐ -│ CLAWDENTITY REGISTRY │ -│ │ -│ Issues identities (AIT) · Publishes revocation list (CRL) │ -│ Validates agent auth · Manages invite-gated onboarding │ -└───────────────┬─────────────────────────────────┬──────────────────────┘ - │ │ - issues AIT + auth issues AIT + auth - │ │ - ┌───────────▼──────────┐ ┌───────────▼──────────┐ - │ AGENT ALICE │ │ AGENT BOB │ - │ (OpenClaw + keys) │ │ (OpenClaw + keys) │ - │ │ │ │ - │ Ed25519 keypair │ │ Ed25519 keypair │ - │ AIT (signed passport)│ │ AIT (signed passport│ - │ Auth tokens │ │ Auth tokens │ - └───────────┬───────────┘ └──────────┬───────────┘ - │ │ - signs every request signs every request - with private key with private key - │ │ - ┌───────────▼──────────┐ ┌───────────▼──────────┐ - │ ALICE'S PROXY │◄─────────│ Bob sends signed │ - │ (Cloudflare Worker) │ HTTP POST│ request to Alice │ - │ │ │ │ - │ Verifies identity │ └──────────────────────┘ - │ Checks revocation │ - │ Enforces trust pairs │ - │ Rejects replays │ - │ Rate limits per agent│ - └───────────┬───────────┘ - │ - only verified requests - reach OpenClaw - │ - ┌───────────▼──────────┐ - │ ALICE'S OPENCLAW │ - │ (localhost, private) │ - │ │ - │ Receives message │ - │ Never exposed to │ - │ public internet │ - └───────────────────────┘ + │ + │ Authorization: Claw + │ + X-Claw-Proof / Nonce / Timestamp + ▼ +Clawdentity Proxy ← verifies identity, trust policy, rate limits + │ + │ x-openclaw-token: (internal only) + ▼ +OpenClaw Gateway ← localhost only, never exposed ``` -### Step 1: Human Onboarding (Invite-Gated) +1. **Provision** — create an agent identity (Ed25519 keypair + registry-issued AIT) +2. **Sign** — SDK signs every outbound request with the agent's private key +3. **Verify** — proxy validates AIT + PoP + CRL + trust pair before forwarding +4. 
**Forward** — only verified requests reach OpenClaw on localhost; your instance is never directly reachable from the internet -An admin creates an invite code. A new operator redeems it to get API access. +## Quick Start -``` -Admin Registry - │ │ - │ clawdentity invite create │ - │──────────────────────────────►│ Generates clw_inv_ - │◄──────────────────────────────│ Stores with optional expiry - │ │ - │ Shares invite code │ - │ out-of-band (email, etc.) │ - │ │ - -New Operator Registry - │ │ - │ clawdentity invite redeem │ - │──────────────────────────────►│ Creates human account - │◄──────────────────────────────│ Issues API key (shown once) - │ │ - │ Stores API key locally │ -``` +Have an invite code (`clw_inv_...`) ready, then prompt your OpenClaw agent: -**Security:** Invite codes are single-use and time-limited. One agent per invite prevents bulk abuse. +> Set up Clawdentity relay -### Step 2: Agent Identity Creation (Challenge-Response) +The agent runs the full onboarding sequence — install, identity creation, relay configuration, and readiness checks. It will ask for your invite code and agent name. -The operator creates an agent identity. The private key **never leaves the machine**. +
+Manual CLI setup -``` -CLI (operator's machine) Registry - │ │ - │ 1. Generate Ed25519 keypair │ - │ (secret.key stays local) │ - │ │ - │ 2. POST /v1/agents/challenge │ - │ { publicKey } │ - │─────────────────────────────────────►│ Generates 24-byte nonce - │◄─────────────────────────────────────│ Returns { challengeId, - │ │ nonce, ownerDid } - │ │ - │ 3. Sign canonical proof with │ - │ private key (proves ownership) │ - │ │ - │ 4. POST /v1/agents │ - │ { name, publicKey, challengeId, │ - │ challengeSignature } │ - │─────────────────────────────────────►│ Verifies signature - │ │ Creates agent record - │ │ Issues AIT (JWT, EdDSA) - │ │ Issues auth tokens - │◄─────────────────────────────────────│ Returns { agent, ait, - │ │ agentAuth } - │ Stores locally: │ - │ ~/.clawdentity/agents// │ - │ ├── secret.key (private, 0600) │ - │ ├── public.key │ - │ ├── ait.jwt (signed passport) │ - │ ├── identity.json │ - │ └── registry-auth.json │ -``` +```bash +# Install the CLI +npm install -g clawdentity -**Security:** Challenge-response proves the operator holds the private key without ever transmitting it. The 5-minute challenge window prevents delayed replay. Each challenge is single-use. 
+# Initialize config +clawdentity config init -**What's in the AIT (Agent Identity Token):** +# Redeem an invite (sets API key) +clawdentity invite redeem --display-name "Your Name" -| Claim | Purpose | -|-------|---------| -| `sub` | Agent DID (`did:claw:agent:`) — unique identity | -| `ownerDid` | Human DID — who owns this agent | -| `cnf.jwk.x` | Agent's public key — for verifying PoP signatures | -| `jti` | Token ID — for revocation tracking | -| `iss` | Registry URL — who vouches for this identity | -| `exp` | Expiry — credential lifetime (1-90 days) | +# Create an agent identity +clawdentity agent create --framework openclaw -### Step 3: Peer Routing Setup (Out-of-Band Metadata) +# Configure the relay +clawdentity openclaw setup -Operators exchange peer metadata out-of-band (alias, DID, proxy URL). No relay invite code is required. +# Install the skill artifact +clawdentity skill install +# Verify everything works +clawdentity openclaw doctor ``` -Alice's Operator Bob's Operator - │ │ - │ Shares metadata out-of-band ─────────►│ - │ alias, DID, proxy URL │ - │ │ - │ │ clawdentity openclaw setup - │ │ bob --peer-alias alice - │ │ --peer-did did:claw:agent:... - │ │ --peer-proxy-url https://alice-proxy/hooks/agent - │ │ - │ │ Stores peer in peers.json: - │ │ { "alice": { - │ │ "did": "did:claw:agent:...", - │ │ "proxyUrl": "https://..." - │ │ }} - │ │ - │ │ Installs relay transform - │ │ Configures OpenClaw hooks -``` - -**Security:** Setup uses only public peer metadata (DID + proxy URL + alias). No keys, tokens, or secrets are exchanged. Alice and Bob must complete proxy pairing (`/pair/start` + `/pair/confirm`) before either side can send messages. -### Step 4: First Message (Bob → Alice) +
-Bob's OpenClaw triggers the relay. Every request is cryptographically signed. - -``` -Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's OpenClaw - │ │ │ │ - │ Hook trigger: │ │ │ - │ { peer: "alice", │ │ │ - │ message: "Hi!" } │ │ │ - │─────────────────────►│ │ │ - │ │ │ │ - │ 1. Load Bob's credentials │ │ - │ (secret.key, ait.jwt) │ │ - │ 2. Look up "alice" in │ │ - │ peers.json → proxy URL │ │ - │ 3. Sign HTTP request: │ │ - │ ┌─────────────────────┐ │ │ - │ │ Canonical string: │ │ │ - │ │ POST /hooks/agent │ │ │ - │ │ timestamp: │ │ │ - │ │ nonce: │ │ │ - │ │ body-sha256: │ │ │ - │ │ │ │ │ - │ │ Ed25519.sign(canon, │ │ │ - │ │ secretKey) → proof │ │ │ - │ └─────────────────────┘ │ │ - │ 4. Send signed request: │ │ - │ POST https://alice-proxy/hooks/agent │ - │ Authorization: Claw │ │ - │ X-Claw-Timestamp: │ │ - │ X-Claw-Nonce: │ │ - │ X-Claw-Body-SHA256: │ │ - │ X-Claw-Proof: │ │ - │ X-Claw-Agent-Access: │ │ - │ │─────────────────────►│ │ - │ │ │ │ - │ │ VERIFICATION PIPELINE │ - │ │ ───────────────────── │ - │ │ ① Verify AIT signature │ - │ │ (registry EdDSA keys) │ - │ │ ② Check timestamp skew │ - │ │ (max ±300 seconds) │ - │ │ ③ Verify PoP signature │ - │ │ (Ed25519 from AIT cnf key) │ - │ │ ④ Reject nonce replay │ - │ │ (per-agent nonce cache) │ - │ │ ⑤ Check CRL revocation │ - │ │ (signed list from registry) │ - │ │ ⑥ Enforce trust pair │ - │ │ (is Bob trusted for Alice?) │ - │ │ ⑦ Validate agent access token │ - │ │ (POST to registry) │ - │ │ │ │ - │ │ │ ALL CHECKS PASSED │ - │ │ │ │ - │ │ │ Forward to OpenClaw: │ - │ │ │ POST /hooks/agent │ - │ │ │ x-openclaw-token: │ - │ │ │──────────────────────►│ - │ │ │ │ Message - │ │ │◄──────────────────────│ delivered! 
- │ │◄─────────────────────│ 202 │ - │◄─────────────────────│ │ │ -``` - -### Why This Beats Shared Tokens +## Shared Tokens vs Clawdentity | Property | Shared Webhook Token | Clawdentity | |----------|---------------------|-------------| | **Identity** | All callers look the same | Each agent has a unique DID and signed passport | -| **Accountability** | Cannot trace who sent what | Every request proves exactly which agent sent it | | **Blast radius** | One leak exposes everything | One compromised key only affects that agent | | **Revocation** | Rotate shared token = break all integrations | Revoke one agent instantly via CRL, others unaffected | | **Replay protection** | None | Timestamp + nonce + signature on every request | | **Tamper detection** | None | Body hash + PoP signature = any modification is detectable | | **Per-caller policy** | Not possible | Trust pairs by sender/recipient DID, rate limit per agent | | **Key exposure** | Token must be shared with every caller | Private key never leaves the agent's machine | +| **Network exposure** | OpenClaw must be reachable by callers; token shared with each | OpenClaw stays on localhost; only the proxy is public | -### What Gets Verified (and When It Fails) - -| Check | Failure | HTTP Status | Meaning | -|-------|---------|-------------|---------| -| AIT signature | `PROXY_AUTH_INVALID_AIT` | 401 | Token is forged or tampered | -| Timestamp skew | `PROXY_AUTH_TIMESTAMP_SKEW` | 401 | Request is too old or clock is wrong | -| PoP signature | `PROXY_AUTH_INVALID_PROOF` | 401 | Sender doesn't hold the private key | -| Nonce replay | `PROXY_AUTH_REPLAY` | 401 | Same request was sent twice | -| CRL revocation | `PROXY_AUTH_REVOKED` | 401 | Agent identity has been revoked | -| Trust policy | `PROXY_AUTH_FORBIDDEN` | 403 | Agent is valid but not trusted for this recipient | -| Agent access token | `PROXY_AGENT_ACCESS_INVALID` | 401 | Session token expired or revoked | -| Rate limit | `PROXY_RATE_LIMIT_EXCEEDED` | 429 
| Too many requests from this agent | - ---- +## Security Highlights -## Operator controls on both ends +- **Private keys never leave your machine** — generated and stored locally, never transmitted +- **Ed25519 + EdDSA** — modern, fast elliptic-curve cryptography +- **Per-request proof-of-possession** — every HTTP call is signed with method, path, body hash, timestamp, and nonce +- **Replay protection** — timestamp skew check + per-agent nonce cache +- **Instant revocation** — signed CRL propagation; proxy rejects revoked agents on next refresh +- **Trust pairs** — receiver operators control which agents are allowed, per-DID -### Sender side operator (owner/admin) +## Self-Hosting -- Action: `clawdentity agent revoke ` -- Scope: **global** (registry-level identity revocation) -- Effect: every receiving proxy rejects that revoked token once CRL refreshes. -- Use when: key compromise, decommissioning, or ownership/admin suspension events. +Clawdentity runs on **Cloudflare Workers** with **D1** for storage: -### Receiver side operator (callee gateway owner) +| Component | Role | +|-----------|------| +| **Registry** (`apps/registry`) | Issues AITs, serves public keys + CRL, manages invites | +| **Proxy** (`apps/proxy`) | Verifies identity headers, enforces trust policy, forwards to OpenClaw | -- Action: remove/deny trusted caller pair in local proxy trust state (or keep approval-required first contact) -- Scope: **local only** (that specific gateway/proxy) -- Effect: caller is blocked on this gateway immediately, but remains valid elsewhere unless globally revoked. -- Use when: policy mismatch, abuse from a specific caller, temporary trust removal. - -### Key distinction - -- **Global revoke** = sender owner/admin authority at registry. -- **Local block** = receiver operator authority at their own gateway. -- Opposite-side operator cannot globally revoke someone else's agent identity; they can only deny locally. - -### Incident response pattern - -1. 
Receiver blocks caller locally for immediate containment. -2. Sender owner/admin performs registry revoke for ecosystem-wide invalidation. -3. Proxies return: - - `401` for invalid/expired/revoked identity - - `403` for valid identity that is not trusted locally for the target recipient - ---- +Both are Cloudflare Workers deployed with `wrangler`. See [ARCHITECTURE.md](./ARCHITECTURE.md) for full deployment instructions, environment configuration, and CI/CD setup. -## What gets shared (and what never should) - -- ✅ Shared **in-band** on each request: **AIT + PoP proof headers** -- ✅ Shared publicly: registry signing public keys + CRL (signed, cacheable) -- ❌ Never shared: the agent’s **private key** or identity folder - ---- - -## Repo layout - -Nx monorepo with pnpm workspaces: +## Project Structure ``` clawdentity/ ├── apps/ │ ├── registry/ — Identity registry (Cloudflare Worker) -│ │ Issues AITs, serves CRL + public keys -│ │ Worker config: apps/registry/wrangler.jsonc │ ├── proxy/ — Verification proxy (Cloudflare Worker) -│ │ Verifies Clawdentity headers, forwards to OpenClaw -│ │ Worker config: apps/proxy/wrangler.jsonc -│ ├── cli/ — Operator CLI -│ │ Agent create/revoke, invite, api-key, config -│ └── openclaw-skill/ — OpenClaw skill integration -│ Relay transform for agent-to-agent messaging +│ ├── cli/ — Operator CLI (npm: clawdentity) +│ └── openclaw-skill/ — OpenClaw relay skill integration ├── packages/ │ ├── protocol/ — Canonical types + signing rules -│ │ AIT claims, DID format, HTTP signing, endpoints -│ └── sdk/ — TypeScript SDK -│ Sign/verify, CRL cache, auth client, crypto -└── Configuration - ├── nx.json — Monorepo task orchestration - ├── pnpm-workspace.yaml - └── tsconfig.base.json -``` - ---- - -## Core features (MVP) - -### 1) Identity issuance and verification - -- Handled by: `apps/registry`, `packages/sdk` -- Registry issues signed AITs tied to agent DID + owner DID. 
-- Registry publishes verification material (`/.well-known/claw-keys.json`) and signed CRL. -- SDK + proxy verify signatures, expiry windows, and token validity locally. - -### 2) Request-level proof and replay protection - -- Handled by: `packages/sdk`, `apps/proxy` -- Each request carries PoP-bound headers: - - `Authorization: Claw ` - - `X-Claw-Timestamp` - - `X-Claw-Nonce` - - `X-Claw-Body-SHA256` - - `X-Claw-Proof` -- Proxy rejects tampered payloads, nonce replays, and stale timestamps. - -### 3) Proxy enforcement before OpenClaw - -- Handled by: `apps/proxy` -- Proxy Worker verifies AIT + CRL + PoP before forwarding to OpenClaw. -- Enforces durable trust pairs for sender/recipient DID. -- Applies per-agent rate limiting. -- Keeps `hooks.token` private and only injects it internally during forward. -- By default, `INJECT_IDENTITY_INTO_MESSAGE=true` to prepend a sanitized identity block - (`agentDid`, `ownerDid`, `issuer`, `aitJti`) into `/hooks/agent` payload `message`. - Set `INJECT_IDENTITY_INTO_MESSAGE=false` to keep payloads unchanged. - -### Proxy Worker local runs - -- Development env (`ENVIRONMENT=development`): `pnpm dev:proxy` -- Local env (`ENVIRONMENT=local`): `pnpm dev:proxy:local` -- Fresh deploy-like env: `pnpm dev:proxy:fresh` -- Development deploy command: `pnpm -F @clawdentity/proxy run deploy:dev` -- Production deploy command: `pnpm -F @clawdentity/proxy run deploy:production` -- Environment intent: `local` is local Wrangler development only; `development` and `production` are cloud deployment environments. 
- -### Registry Worker local runs - -- Development env (`ENVIRONMENT=development`): `pnpm dev:registry` -- Development env with local D1 migration apply: `pnpm dev:registry:local` - -### Develop deployment automation - -- GitHub workflow: `.github/workflows/deploy-develop.yml` -- Trigger: push to `develop` -- Runs full quality gates, then deploys: - - registry (`apps/registry`, env `dev`) with D1 migrations - - proxy (`apps/proxy`, env `dev`) -- Health checks must pass with `version == $GITHUB_SHA` for: - - `https://dev.registry.clawdentity.com/health` - - deployed proxy `/health` URL (workers.dev URL extracted from wrangler output, or optional `PROXY_HEALTH_URL` secret override) -- Required GitHub secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID` - -### 4) Operator lifecycle tooling (CLI) - -- Handled by: `apps/cli` -- `clawdentity agent create` for local keypair + registry registration. -- `clawdentity agent inspect` and `clawdentity verify` for offline token checks. -- `clawdentity agent revoke` for kill switch workflows. -- `clawdentity api-key create` to mint a new PAT (token shown once). -- `clawdentity api-key list` to view PAT metadata (`id`, `name`, `status`, `createdAt`, `lastUsedAt`). -- `clawdentity api-key revoke ` to invalidate a PAT without rotating unrelated keys. -- `clawdentity share` for contact-card exchange (DID, verify URL, endpoint). -- `clawdentity connector start ` to run local relay connector runtime. -- `clawdentity connector service install ` to configure connector autostart after reboot/login (`launchd` on macOS, `systemd --user` on Linux). -- `clawdentity connector service uninstall ` to remove connector autostart service. -- `clawdentity skill install` to install/update OpenClaw relay skill artifacts under `~/.openclaw`. - -### 5) Onboarding and control model - -- Handled by: `apps/registry`, `apps/cli` -- Invite-gated registration model with admin-issued invite codes. 
-- One-agent-per-invite policy for simple quota and abuse control. -- Feature work follows a deployment-first gate tracked in GitHub issues. - -### 6) Discovery and first-contact options - -- Handled by: `apps/registry`, `apps/proxy`, `apps/cli` -- Out-of-band contact card sharing. -- Registry `gateway_hint` resolution. -- Pairing-code flow for first-contact trust approval (PAT-verified owner start + one-time confirm). - ---- - -## OpenClaw skill install (CLI command) - -Expected operator flow starts from the CLI command: - -```bash -clawdentity skill install +│ └── sdk/ — TypeScript SDK (sign, verify, CRL, auth) +└── nx.json — Nx monorepo orchestration ``` -Installer logic prepares OpenClaw runtime artifacts automatically: -- `~/.openclaw/skills/clawdentity-openclaw-relay/SKILL.md` -- `~/.openclaw/skills/clawdentity-openclaw-relay/references/*` -- `~/.openclaw/skills/clawdentity-openclaw-relay/relay-to-peer.mjs` -- `~/.openclaw/hooks/transforms/relay-to-peer.mjs` - -Install is idempotent and logs deterministic per-artifact outcomes (`installed`, `updated`, `unchanged`). -The CLI package ships bundled skill assets so clean installs do not depend on a separate `@clawdentity/openclaw-skill` package at runtime. - -### CLI npm release (manual) - -- GitHub workflow: `.github/workflows/publish-cli.yml` -- Trigger: `workflow_dispatch` with inputs: - - `version` (semver, required) - - `dist_tag` (default `latest`) -- Required GitHub secret: `NPM_TOKEN` -- Publish target: npm package `clawdentity` -- Workflow runs CLI lint/typecheck/test/build before publishing. - ---- - -## MVP goals - -1. **Create agent identity** (local keypair + registry-issued AIT) -2. **Send signed requests** (PoP per request, replay-resistant) -3. **Verify locally** (signature + expiry + cached CRL) -4. **Kill switch** (revoke → proxy rejects within CRL refresh window) -5. 
**Discovery** (share endpoint + verify link; optional pairing code) - ---- - -## Discovery (how first contact happens) +## Contributing -MVP supports three ways to “find” another agent: +This repo uses a **deployment-first gate** tracked in [GitHub Issues](https://github.com/vrknetha/clawdentity/issues): -1. **Out-of-band share**: human shares a contact card (verify link + endpoint URL) -2. **Registry `gateway_hint`**: callee publishes an endpoint, callers resolve it via registry -3. **Pairing code** (proxy): “Approve first contact” to establish a mutual trusted agent pair +1. Pick an open issue and confirm dependencies/blockers. +2. Implement in a feature branch with tests. +3. Open a PR to `develop`. -No one shares keys/files between agents. Identity is presented per request. - ---- - -## Security architecture (MVP) - -### Trust boundaries and sensitive assets - -- **Agent private key**: secret, local only, never leaves agent machine. -- **Registry signing key**: secret, server-side only, signs AIT and CRL. -- **OpenClaw `hooks.token`**: secret, only present on gateway host/proxy. -- **AIT + PoP headers**: transmitted per request, safe to share in-band. - -### Threats addressed - -- Do not expose OpenClaw webhooks directly to the public internet. Follow OpenClaw guidance (loopback, tailnet, trusted reverse proxy). - Docs: https://docs.openclaw.ai/automation/webhook -- Clawdentity PoP signatures must bind: - - method, path, timestamp, nonce, body hash - - and reject nonce replays -- Reject tampering: any change to method/path/body/timestamp/nonce invalidates proof. -- Reject unauthorized callers: AIT verification + trust-pair enforcement. -- Reject compromised identities quickly: CRL-based revocation checks. -- Contain abuse: per-agent rate limits at proxy boundary. 
- -### Security guarantees and limits - -- Guarantees: - - caller identity can be cryptographically verified - - caller ownership is traceable via token claims - - revocation can be enforced without rotating shared OpenClaw token -- Limits: - - if the endpoint that holds the agent private key is compromised, attacker can sign as that agent until revocation - - if CRL refresh is delayed, enforcement follows configured staleness policy (`fail-open` or `fail-closed`) - -### Safe defaults and operator guidance - -- Treat any identity fields (agent name/description) as untrusted input; never allow prompt injection via identity metadata. -- Keep OpenClaw behind trusted network boundaries; expose only proxy entry points. -- Rotate PATs and audit trusted pair entries regularly. -- Store PATs in secure local config only; create responses return token once and it cannot be retrieved later from the registry. -- Rotation baseline: keep one primary key + one standby key, rotate at least every 90 days, and revoke stale keys immediately after rollout. - ---- - -## Documentation - -- **PRD:** see [`PRD.md`](./PRD.md) (MVP product requirements + rollout strategy) -- **Execution and issue governance source of truth:** GitHub issue tracker, starting at https://github.com/vrknetha/clawdentity/issues/74. - ---- - -## Contributing / Execution - -This repo is delivered through small GitHub issues with a **deployment-first gate**: - -1. Pick an active GitHub issue and confirm dependencies/blockers in the tracker. -2. Implement in a feature branch with tests/docs updates. -3. Run required validation commands. -4. Open a PR to `develop` and post implementation evidence back on the issue. - -### Governance expectations - -- Keep issue status aligned with reality (`OPEN` while active, close with evidence when complete). -- Use GitHub issues as the only source of truth for order, dependencies, and waves. -- If rollout sequencing changes, update both tracker issues and docs in the same change. 
+## License ---- +[MIT](./LICENSE) -## License +## Deep Docs -TBD. +- **[ARCHITECTURE.md](./ARCHITECTURE.md)** — full protocol flows, verification pipeline, security architecture, deployment details +- **[PRD.md](./PRD.md)** — MVP product requirements and rollout strategy diff --git a/apps/cli/package.json b/apps/cli/package.json index 3b08f18..e3fff3f 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,6 +1,6 @@ { "name": "clawdentity", - "version": "0.0.20", + "version": "0.0.23", "type": "module", "publishConfig": { "access": "public" diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 62f2af3..ec3d3d6 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -21,6 +21,8 @@ - `pair` command logic should stay in `commands/pair.ts`; keep proxy pairing bootstrap (`/pair/start`, `/pair/confirm`) CLI-driven with local AIT + PoP proof headers and one-time ticket QR support (`--qr`, `--qr-file`). - `pair start`/`pair confirm` must send profile metadata (`initiatorProfile`/`responderProfile`) with both `agentName` and `humanName`. - Pairing must fail fast with `CLI_PAIR_HUMAN_NAME_MISSING` when local config does not include `humanName`. +- Pairing ticket parsing must normalize pasted input (trim, remove markdown backticks, collapse whitespace) before confirm/status requests so wrapped terminal/UI copies do not fail at proxy. +- `pair confirm`/`pair status` must fail fast on local issuer mismatch: ticket `iss` must match configured proxy origin, with explicit remediation in the CLI error. - Pairing peer persistence must write explicit peer metadata (`agentName`, `humanName`) in `~/.clawdentity/peers.json`; do not collapse profile metadata into a single `name` field. - `openclaw setup` peers snapshot sync must preserve `agentName`/`humanName` fields from `~/.clawdentity/peers.json`. 
- `connector start ` must validate local agent material (`identity.json`, `ait.jwt`, `secret.key`, `registry-auth.json`) before starting runtime and must fail with stable CLI errors when files are missing/invalid. diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index b96ed54..4107efb 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -88,10 +88,15 @@ ## OpenClaw Diagnostic Command Rules - `openclaw doctor` must stay read-only and validate required local state: resolved CLI config (`registryUrl` + `apiKey` + `proxyUrl` unless env override), selected agent marker, local agent credentials, peers map integrity (and requested `--peer` alias), transform presence, hook mapping, OpenClaw base URL resolution, and connector runtime websocket readiness. - `openclaw doctor` must validate hook-session safety invariants (`hooks.defaultSessionKey`, `hooks.allowRequestSessionKey=false`, required `hooks.allowedSessionKeyPrefixes`) and fail with deterministic remediation when drifted. +- `openclaw doctor` must validate OpenClaw gateway auth readiness (`gateway.auth.mode` and required credential) so websocket auth drift is surfaced before relay tests. - `openclaw doctor` must validate pending OpenClaw gateway device approvals (`/devices/pending.json`) so `pairing required` conditions are surfaced before relay tests. - `openclaw doctor` must validate connector inbound durability state (`state.connectorInboundInbox`) from connector `/v1/status` so queued local replay backlog is visible to operators. - `openclaw doctor` must validate local OpenClaw hook replay health (`state.openclawHookHealth`) from connector `/v1/status` and fail when connector reports replay failures with pending inbox backlog. - `openclaw setup` must attempt automatic recovery for pending OpenClaw gateway device approvals before failing checklist validation, so normal onboarding does not require manual `openclaw devices approve` steps. 
+- `openclaw setup` must ensure OpenClaw gateway auth token mode is deterministic (`gateway.auth.mode=token` with token when mode is unset/token), so first-time control UI/device auth remains stable across restarts. +- `openclaw setup` must avoid rewriting `openclaw.json` when effective hook/gateway config is already current, to reduce unnecessary OpenClaw restart churn. +- `openclaw setup` must run a short post-config-change connector stability window (detached/existing runtimes) and auto-restart once when connector drops during deferred OpenClaw restarts. +- Detached connector fallback must write stdout/stderr logs to `~/.clawdentity/run/connector-.{stdout,stderr}.log` so runtime exits are diagnosable. - `openclaw doctor` must treat malformed/unreadable CLI config as a failed diagnostic check, not a thrown exception, so full per-check output remains available. - Relay hook mapping validation must require the expected mapping path (`send-to-peer`) and only accept optional `id` when it matches `clawdentity-send-to-peer`. - `openclaw doctor` must print deterministic check IDs and actionable fix hints for each failed check. 
diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index b9b7592..7eebab9 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -145,10 +145,17 @@ describe("openclaw command helpers", () => { const copiedTransform = readFileSync(result.transformTargetPath, "utf8"); expect(copiedTransform).toContain("relay(ctx)"); + expect(result.openclawConfigChanged).toBe(true); const openclawConfig = JSON.parse( readFileSync(result.openclawConfigPath, "utf8"), ) as { + gateway?: { + auth?: { + mode?: string; + token?: string; + }; + }; hooks: { enabled?: boolean; token?: string; @@ -166,6 +173,11 @@ describe("openclaw command helpers", () => { expect(openclawConfig.hooks.allowRequestSessionKey).toBe(false); expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("main"); + expect(openclawConfig.gateway?.auth?.mode).toBe("token"); + expect(typeof openclawConfig.gateway?.auth?.token).toBe("string"); + expect(openclawConfig.gateway?.auth?.token?.length ?? 
0).toBeGreaterThan( + 0, + ); expect( openclawConfig.hooks.mappings?.some( (mapping) => @@ -246,6 +258,40 @@ describe("openclaw command helpers", () => { } }); + it("does not rewrite OpenClaw config when setup state is already current", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousGatewayToken = process.env.OPENCLAW_GATEWAY_TOKEN; + delete process.env.OPENCLAW_GATEWAY_TOKEN; + const preconfiguredOpenclawJson = + '{"hooks":{"enabled":true,"token":"hook-token","defaultSessionKey":"main","allowRequestSessionKey":false,"allowedSessionKeyPrefixes":["hook:","main"],"mappings":[{"id":"clawdentity-send-to-peer","match":{"path":"send-to-peer"},"action":"agent","wakeMode":"now","transform":{"module":"relay-to-peer.mjs"}}]},"gateway":{"auth":{"mode":"token","token":"gateway-token"}}}\n'; + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + preconfiguredOpenclawJson, + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigChanged).toBe(false); + expect(readFileSync(result.openclawConfigPath, "utf8")).toBe( + preconfiguredOpenclawJson, + ); + } finally { + if (previousGatewayToken === undefined) { + delete process.env.OPENCLAW_GATEWAY_TOKEN; + } else { + process.env.OPENCLAW_GATEWAY_TOKEN = previousGatewayToken; + } + sandbox.cleanup(); + } + }); + it("supports setup-only mode without runtime startup", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); @@ -269,6 +315,44 @@ describe("openclaw command helpers", () => { } }); + it("syncs gateway auth token from OPENCLAW_GATEWAY_TOKEN during setup", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousGatewayToken = process.env.OPENCLAW_GATEWAY_TOKEN; + 
process.env.OPENCLAW_GATEWAY_TOKEN = "gateway-token-from-env"; + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + gateway?: { + auth?: { + mode?: string; + token?: string; + }; + }; + }; + + expect(openclawConfig.gateway?.auth?.mode).toBe("token"); + expect(openclawConfig.gateway?.auth?.token).toBe( + "gateway-token-from-env", + ); + } finally { + if (previousGatewayToken === undefined) { + delete process.env.OPENCLAW_GATEWAY_TOKEN; + } else { + process.env.OPENCLAW_GATEWAY_TOKEN = previousGatewayToken; + } + sandbox.cleanup(); + } + }); + it("auto-recovers setup checklist when OpenClaw has pending gateway device approvals", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); @@ -1335,6 +1419,69 @@ describe("openclaw command helpers", () => { } }); + it("fails doctor when gateway auth token mode is configured without token", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + gateway?: { + auth?: { + mode?: string; + token?: string; + }; + }; + }; + openclawConfig.gateway = { + ...(openclawConfig.gateway ?? {}), + auth: { + ...(openclawConfig.gateway?.auth ?? 
{}), + mode: "token", + }, + }; + if (openclawConfig.gateway?.auth) { + delete openclawConfig.gateway.auth.token; + } + writeFileSync( + openclawConfigPath, + `${JSON.stringify(openclawConfig, null, 2)}\n`, + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.gatewayAuth" && + check.status === "fail" && + check.message.includes("gateway.auth.token is missing"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + it("fails doctor hook health check when connector reports replay failures", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 45cfd5d..f32fcbf 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -1,6 +1,6 @@ import { spawn } from "node:child_process"; import { randomBytes } from "node:crypto"; -import { existsSync } from "node:fs"; +import { closeSync, existsSync, openSync } from "node:fs"; import { chmod, copyFile, mkdir, readFile, writeFile } from "node:fs/promises"; import { homedir } from "node:os"; import { dirname, join, resolve as resolvePath } from "node:path"; @@ -58,6 +58,8 @@ const CONNECTOR_HOST_DOCKER = "host.docker.internal"; const CONNECTOR_HOST_DOCKER_GATEWAY = "gateway.docker.internal"; const CONNECTOR_HOST_LINUX_BRIDGE = "172.17.0.1"; const CONNECTOR_RUN_DIR_NAME = "run"; +const CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX = "stdout.log"; +const CONNECTOR_DETACHED_STDERR_FILE_SUFFIX = "stderr.log"; const INVITE_CODE_PREFIX = "clawd1_"; const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; const 
FILE_MODE = 0o600; @@ -70,8 +72,12 @@ const OPENCLAW_PAIRING_COMMAND_HINT = "Run QR pairing first: clawdentity pair start --qr and clawdentity pair confirm --qr-file "; const OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT = "Run: clawdentity openclaw setup (auto-recovers pending OpenClaw gateway device approvals)"; +const OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT = + "Run: clawdentity openclaw setup (ensures gateway auth mode/token are configured)"; const OPENCLAW_GATEWAY_APPROVAL_COMMAND = "openclaw"; const OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS = 10_000; +const OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS = 20; +const OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS = 1_000; const textEncoder = new TextEncoder(); const textDecoder = new TextDecoder(); @@ -205,6 +211,7 @@ export type OpenclawSetupResult = { openclawBaseUrl: string; connectorBaseUrl: string; relayRuntimeConfigPath: string; + openclawConfigChanged: boolean; }; type OpenclawRuntimeMode = "auto" | "service" | "detached"; @@ -244,6 +251,7 @@ type OpenclawDoctorCheckId = | "state.hookMapping" | "state.hookToken" | "state.hookSessionRouting" + | "state.gatewayAuth" | "state.gatewayDevicePairing" | "state.openclawBaseUrl" | "state.connectorRuntime" @@ -1348,6 +1356,48 @@ async function waitForConnectorConnected(input: { return latest; } +function sleepMilliseconds(durationMs: number): Promise { + return new Promise((resolve) => { + setTimeout(resolve, durationMs); + }); +} + +async function monitorConnectorStabilityWindow(input: { + connectorBaseUrl: string; + fetchImpl: typeof fetch; + durationSeconds: number; + pollIntervalMs: number; +}): Promise { + if (input.durationSeconds <= 0) { + return fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + } + + const deadline = Date.now() + input.durationSeconds * 1000; + let latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + if (!latest.connected) { + return 
latest; + } + + while (Date.now() < deadline) { + await sleepMilliseconds(input.pollIntervalMs); + latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + if (!latest.connected) { + return latest; + } + } + + return latest; +} + function resolveConnectorRunDir(homeDir: string): string { return join(homeDir, CLAWDENTITY_DIR_NAME, CONNECTOR_RUN_DIR_NAME); } @@ -1356,6 +1406,21 @@ function resolveConnectorPidPath(homeDir: string, agentName: string): string { return join(resolveConnectorRunDir(homeDir), `connector-${agentName}.pid`); } +function resolveDetachedConnectorLogPath( + homeDir: string, + agentName: string, + stream: "stdout" | "stderr", +): string { + const suffix = + stream === "stdout" + ? CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX + : CONNECTOR_DETACHED_STDERR_FILE_SUFFIX; + return join( + resolveConnectorRunDir(homeDir), + `connector-${agentName}.${suffix}`, + ); +} + async function readConnectorPidFile( pidPath: string, ): Promise { @@ -1436,16 +1501,40 @@ async function startDetachedConnectorRuntime(input: { "--openclaw-base-url", input.openclawBaseUrl, ]; - const child = spawn(process.execPath, args, { - detached: true, - stdio: "ignore", - env: process.env, - }); - child.unref(); - await writeSecureFile( - resolveConnectorPidPath(input.homeDir, input.agentName), - `${child.pid}\n`, + const stdoutLogPath = resolveDetachedConnectorLogPath( + input.homeDir, + input.agentName, + "stdout", ); + const stderrLogPath = resolveDetachedConnectorLogPath( + input.homeDir, + input.agentName, + "stderr", + ); + const stdoutFd = openSync(stdoutLogPath, "a"); + const stderrFd = openSync(stderrLogPath, "a"); + + try { + const child = spawn(process.execPath, args, { + detached: true, + stdio: ["ignore", stdoutFd, stderrFd], + env: process.env, + }); + child.unref(); + await writeSecureFile( + resolveConnectorPidPath(input.homeDir, input.agentName), + `${child.pid}\n`, + ); + 
logger.info("cli.openclaw.setup.detached_runtime_started", { + agentName: input.agentName, + pid: child.pid, + stdoutLogPath, + stderrLogPath, + }); + } finally { + closeSync(stdoutFd); + closeSync(stderrFd); + } } async function startSetupConnectorRuntime(input: { @@ -1710,6 +1799,45 @@ function generateOpenclawHookToken(): string { return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); } +function generateOpenclawGatewayToken(): string { + return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); +} + +function parseGatewayAuthMode( + value: unknown, +): "token" | "password" | "trusted-proxy" | undefined { + if (typeof value !== "string") { + return undefined; + } + const normalized = value.trim().toLowerCase(); + if ( + normalized === "token" || + normalized === "password" || + normalized === "trusted-proxy" + ) { + return normalized; + } + return undefined; +} + +function resolveEnvOpenclawGatewayToken(): string | undefined { + if ( + typeof process.env.OPENCLAW_GATEWAY_TOKEN === "string" && + process.env.OPENCLAW_GATEWAY_TOKEN.trim().length > 0 + ) { + return process.env.OPENCLAW_GATEWAY_TOKEN.trim(); + } + return undefined; +} + +function resolveGatewayAuthToken(existingToken?: string): string { + return ( + resolveEnvOpenclawGatewayToken() ?? + existingToken ?? + generateOpenclawGatewayToken() + ); +} + function upsertRelayHookMapping( mappingsValue: unknown, ): Record[] { @@ -1763,7 +1891,7 @@ function upsertRelayHookMapping( async function patchOpenclawConfig( openclawConfigPath: string, hookToken?: string, -): Promise<{ hookToken: string }> { +): Promise<{ hookToken: string; configChanged: boolean }> { let config: unknown; try { config = await readJsonFile(openclawConfigPath); @@ -1810,19 +1938,42 @@ async function patchOpenclawConfig( ); hooks.mappings = upsertRelayHookMapping(hooks.mappings); + const gateway = isRecord(config.gateway) ? { ...config.gateway } : {}; + const gatewayAuth = isRecord(gateway.auth) ? 
{ ...gateway.auth } : {}; + const configuredGatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); + if (configuredGatewayAuthMode === undefined) { + gatewayAuth.mode = "token"; + } + + const effectiveGatewayAuthMode = + parseGatewayAuthMode(gatewayAuth.mode) ?? "token"; + if (effectiveGatewayAuthMode === "token") { + const existingGatewayAuthToken = + typeof gatewayAuth.token === "string" && + gatewayAuth.token.trim().length > 0 + ? gatewayAuth.token.trim() + : undefined; + gatewayAuth.token = resolveGatewayAuthToken(existingGatewayAuthToken); + } + gateway.auth = gatewayAuth; + const nextConfig = { ...config, hooks, + gateway, }; - - await writeFile( - openclawConfigPath, - `${JSON.stringify(nextConfig, null, 2)}\n`, - "utf8", - ); + const configChanged = JSON.stringify(config) !== JSON.stringify(nextConfig); + if (configChanged) { + await writeFile( + openclawConfigPath, + `${JSON.stringify(nextConfig, null, 2)}\n`, + "utf8", + ); + } return { hookToken: resolvedHookToken, + configChanged, }; } @@ -2417,6 +2568,91 @@ export async function runOpenclawDoctor( }), ); } + + const gateway = isRecord(openclawConfig.gateway) + ? openclawConfig.gateway + : {}; + const gatewayAuth = isRecord(gateway.auth) ? gateway.auth : {}; + const gatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); + const gatewayAuthToken = + typeof gatewayAuth.token === "string" && + gatewayAuth.token.trim().length > 0 + ? gatewayAuth.token.trim() + : undefined; + const gatewayAuthPassword = + typeof gatewayAuth.password === "string" && + gatewayAuth.password.trim().length > 0 + ? 
gatewayAuth.password.trim() + : undefined; + + if (gatewayAuthMode === "token") { + if (gatewayAuthToken === undefined) { + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.token is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with token mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } + } else if (gatewayAuthMode === "password") { + if (gatewayAuthPassword === undefined) { + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.password is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with password mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } + } else if (gatewayAuthMode === "trusted-proxy") { + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with trusted-proxy mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.mode is missing or unsupported in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath }, + }), + ); + } } catch { checks.push( toDoctorCheck({ @@ -2451,6 +2687,17 @@ export async function 
runOpenclawDoctor( details: { openclawConfigPath }, }), ); + checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); } const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); @@ -3185,6 +3432,7 @@ export async function setupOpenclawRelay( openclawBaseUrl, connectorBaseUrl, relayRuntimeConfigPath, + openclawConfigChanged: patchedOpenclawConfig.configChanged, }; } @@ -3271,12 +3519,13 @@ export async function setupOpenclawSelfReady( agentName: string, options: OpenclawSetupOptions, ): Promise { + const normalizedAgentName = assertValidAgentName(agentName); const resolvedHomeDir = resolveHomeDir(options.homeDir); const resolvedOpenclawDir = resolveOpenclawDir( options.openclawDir, resolvedHomeDir, ); - const setup = await setupOpenclawRelay(agentName, { + const setup = await setupOpenclawRelay(normalizedAgentName, { ...options, homeDir: resolvedHomeDir, openclawDir: resolvedOpenclawDir, @@ -3308,8 +3557,8 @@ export async function setupOpenclawSelfReady( const waitTimeoutSeconds = parseWaitTimeoutSeconds( options.waitTimeoutSeconds, ); - const runtime = await startSetupConnectorRuntime({ - agentName: assertValidAgentName(agentName), + let runtime = await startSetupConnectorRuntime({ + agentName: normalizedAgentName, homeDir: resolvedHomeDir, openclawBaseUrl: setup.openclawBaseUrl, connectorBaseUrl: setup.connectorBaseUrl, @@ -3325,6 +3574,48 @@ export async function setupOpenclawSelfReady( gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, }); + const requiresStabilityGuard = + setup.openclawConfigChanged && + (runtime.runtimeMode === "existing" || runtime.runtimeMode === "detached"); + if (requiresStabilityGuard) { + const stabilityWindowSeconds = 
Math.min( + waitTimeoutSeconds, + OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS, + ); + const stableStatus = await monitorConnectorStabilityWindow({ + connectorBaseUrl: setup.connectorBaseUrl, + fetchImpl, + durationSeconds: stabilityWindowSeconds, + pollIntervalMs: OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS, + }); + + if (!stableStatus.connected) { + logger.warn("cli.openclaw.setup.connector_dropped_post_config_change", { + agentName: normalizedAgentName, + connectorBaseUrl: setup.connectorBaseUrl, + connectorStatusUrl: stableStatus.statusUrl, + reason: stableStatus.reason, + previousRuntimeMode: runtime.runtimeMode, + stabilityWindowSeconds, + }); + runtime = await startSetupConnectorRuntime({ + agentName: normalizedAgentName, + homeDir: resolvedHomeDir, + openclawBaseUrl: setup.openclawBaseUrl, + connectorBaseUrl: setup.connectorBaseUrl, + mode: resolvedMode, + waitTimeoutSeconds, + fetchImpl, + }); + await assertSetupChecklistHealthy({ + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + includeConnectorRuntimeCheck: true, + gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, + }); + } + } + return { ...setup, ...runtime, diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts index 5166af3..36145ba 100644 --- a/apps/cli/src/commands/pair.test.ts +++ b/apps/cli/src/commands/pair.test.ts @@ -304,6 +304,103 @@ describe("pair command helpers", () => { }); }); + it("fails confirm when ticket issuer does not match configured proxy URL", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + + await expect( + confirmPairing( + "beta", + { + ticket, + }, + { + fetchImpl: (async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://beta.proxy.example", + }, + { status: 200 }, + ); + } + return Response.json({}, { status: 
200 }); + }) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ), + ).rejects.toMatchObject({ + code: "CLI_PAIR_TICKET_ISSUER_MISMATCH", + }); + }); + + it("normalizes wrapped tickets before pair status request", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const wrappedTicket = `\`\n${ticket.slice(0, 18)}\n${ticket.slice(18)}\n\``; + const fetchImpl = vi.fn(async (url: string, init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + const requestBody = JSON.parse(String(init?.body ?? 
"{}")) as { + ticket?: string; + }; + expect(requestBody.ticket).toBe(ticket); + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + const result = await getPairingStatus( + "alpha", + { + ticket: wrappedTicket, + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.status).toBe("pending"); + }); + it("confirms pairing with qr-file ticket decode", async () => { const fixture = await createPairFixture(); const unlinkImpl = vi.fn(async () => undefined); @@ -318,7 +415,7 @@ describe("pair command helpers", () => { return Response.json( { status: "ok", - proxyUrl: "https://beta.proxy.example", + proxyUrl: "https://alpha.proxy.example", }, { status: 200 }, ); @@ -366,7 +463,7 @@ describe("pair command helpers", () => { ); expect(result.paired).toBe(true); - expect(result.proxyUrl).toBe("https://beta.proxy.example/"); + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); expect(result.peerAlias).toBe("peer-11111111"); const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; const headers = new Headers(init?.headers); @@ -425,7 +522,7 @@ describe("pair command helpers", () => { return Response.json( { status: "ok", - proxyUrl: "https://beta.proxy.example", + proxyUrl: "https://alpha.proxy.example", }, { status: 200 }, ); @@ -748,7 +845,7 @@ describe("pair command output", () => { return Response.json( { status: "ok", - proxyUrl: "https://beta.proxy.example", + proxyUrl: "https://alpha.proxy.example", }, { status: 200 
}, ); diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 7ac5dbb..56c7ffe 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -220,7 +220,15 @@ function parsePeerProfile(payload: unknown): PeerProfile { } function parsePairingTicket(value: unknown): string { - const ticket = parseNonEmptyString(value); + let ticket = parseNonEmptyString(value); + while (ticket.startsWith("`")) { + ticket = ticket.slice(1); + } + while (ticket.endsWith("`")) { + ticket = ticket.slice(0, -1); + } + ticket = ticket.trim().replace(/\s+/gu, ""); + if (!ticket.startsWith(PAIRING_TICKET_PREFIX)) { throw createCliError( "CLI_PAIR_CONFIRM_TICKET_INVALID", @@ -228,10 +236,6 @@ function parsePairingTicket(value: unknown): string { ); } - return ticket; -} - -function parsePairingTicketIssuerOrigin(ticket: string): string { const encodedPayload = ticket.slice(PAIRING_TICKET_PREFIX.length); if (encodedPayload.length === 0) { throw createCliError( @@ -240,9 +244,14 @@ function parsePairingTicketIssuerOrigin(ticket: string): string { ); } - let payloadRaw: string; try { - payloadRaw = new TextDecoder().decode(decodeBase64url(encodedPayload)); + const payloadRaw = new TextDecoder().decode( + decodeBase64url(encodedPayload), + ); + const payload = JSON.parse(payloadRaw); + if (!isRecord(payload)) { + throw new Error("invalid payload"); + } } catch { throw createCliError( "CLI_PAIR_CONFIRM_TICKET_INVALID", @@ -250,6 +259,14 @@ function parsePairingTicketIssuerOrigin(ticket: string): string { ); } + return ticket; +} + +function parsePairingTicketIssuerOrigin(ticket: string): string { + const normalizedTicket = parsePairingTicket(ticket); + const encodedPayload = normalizedTicket.slice(PAIRING_TICKET_PREFIX.length); + const payloadRaw = new TextDecoder().decode(decodeBase64url(encodedPayload)); + let payload: unknown; try { payload = JSON.parse(payloadRaw); @@ -287,6 +304,34 @@ function parsePairingTicketIssuerOrigin(ticket: string): string 
{ return issuerUrl.origin; } +function assertTicketIssuerMatchesProxy(input: { + ticket: string; + proxyUrl: string; + context: "confirm" | "status"; +}): void { + const issuerOrigin = parsePairingTicketIssuerOrigin(input.ticket); + + let proxyOrigin: string; + try { + proxyOrigin = new URL(input.proxyUrl).origin; + } catch { + throw createCliError( + "CLI_PAIR_PROXY_URL_INVALID", + "Configured proxyUrl is invalid. Run `clawdentity config set proxyUrl ` and retry.", + ); + } + + if (issuerOrigin === proxyOrigin) { + return; + } + + const command = input.context === "confirm" ? "pair confirm" : "pair status"; + throw createCliError( + "CLI_PAIR_TICKET_ISSUER_MISMATCH", + `Pairing ticket was issued by ${issuerOrigin}, but current proxy URL is ${proxyOrigin}. Run \`clawdentity config set proxyUrl ${issuerOrigin}\` and retry \`${command}\`.`, + ); +} + function parseAitAgentDid(ait: string): string { const parts = ait.split("."); if (parts.length < 2) { @@ -810,6 +855,21 @@ function mapConfirmPairError(status: number, payload: unknown): string { return "Pairing ticket has expired"; } + if (code === "PROXY_PAIR_TICKET_INVALID_ISSUER") { + return message + ? `Pair confirm failed: ticket issuer does not match this proxy (${message}). Use the same proxy URL where the ticket was issued.` + : "Pair confirm failed: ticket issuer does not match this proxy. Use the same proxy URL where the ticket was issued."; + } + + if ( + code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || + code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" + ) { + return message + ? `Pair confirm request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` + : "Pair confirm request is invalid (400): pairing ticket is malformed. Re-copy the full ticket/QR without truncation."; + } + if (status === 400) { return message ? 
`Pair confirm request is invalid (400): ${message}` @@ -845,6 +905,21 @@ function mapStatusPairError(status: number, payload: unknown): string { : "Pair status request is forbidden (403)."; } + if (code === "PROXY_PAIR_TICKET_INVALID_ISSUER") { + return message + ? `Pair status failed: ticket issuer does not match this proxy (${message}). Use the same proxy URL where the ticket was issued.` + : "Pair status failed: ticket issuer does not match this proxy. Use the same proxy URL where the ticket was issued."; + } + + if ( + code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || + code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" + ) { + return message + ? `Pair status request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` + : "Pair status request is invalid (400): pairing ticket is malformed. Re-copy the full ticket/QR without truncation."; + } + if (status === 400) { return message ? `Pair status request is invalid (400): ${message}` @@ -1448,6 +1523,12 @@ export async function confirmPairing( ticket = parsePairingTicket(qrDecodeImpl(new Uint8Array(imageBytes))); } + ticket = parsePairingTicket(ticket); + assertTicketIssuerMatchesProxy({ + ticket, + proxyUrl, + context: "confirm", + }); const { ait, secretKey } = await readAgentProofMaterial( normalizedAgentName, @@ -1547,6 +1628,11 @@ async function getPairingStatusOnce( }); const ticket = parsePairingTicket(options.ticket); + assertTicketIssuerMatchesProxy({ + ticket, + proxyUrl, + context: "status", + }); const { ait, secretKey } = await readAgentProofMaterial( agentName, dependencies, diff --git a/apps/openclaw-skill/AGENTS.md b/apps/openclaw-skill/AGENTS.md index 665e68a..6ac0461 100644 --- a/apps/openclaw-skill/AGENTS.md +++ b/apps/openclaw-skill/AGENTS.md @@ -7,6 +7,8 @@ ## Filesystem Contracts - Peer routing map lives at `~/.clawdentity/peers.json` by default. 
+- In profile-mounted/containerized runs, skill behavior must support profile-local Clawdentity state at `/.clawdentity` when `~/.clawdentity` is absent. +- When profile-local state is detected, command execution must use `HOME=` so CLI resolves a single consistent state root. - `openclaw setup` must project peer + relay runtime snapshots into OpenClaw-local transform directory so containerized gateways can read relay state without mounting `~/.clawdentity`: - `/hooks/transforms/clawdentity-peers.json` - `/hooks/transforms/clawdentity-relay.json` @@ -28,7 +30,7 @@ - return `null` after successful relay so local handling is skipped - If `payload.peer` is absent, return payload unchanged. - Keep setup flow CLI-driven via `clawdentity openclaw setup`; do not add `configure-hooks.sh`. -- Keep setup flow fully automated via CLI: `openclaw setup` provisions/retains `hooks.token`, starts connector runtime, auto-recovers pending gateway device approvals when possible, verifies websocket readiness, and fails fast only when unrecoverable drift remains. +- Keep setup flow fully automated via CLI: `openclaw setup` provisions/retains `hooks.token`, stabilizes OpenClaw `gateway.auth` token mode for deterministic UI/device auth, starts connector runtime, auto-recovers pending gateway device approvals when possible, verifies websocket readiness, and fails fast only when unrecoverable drift remains. - Keep setup/doctor expectations aligned with connector durable inbox semantics: connector can acknowledge persisted inbound relay messages before local OpenClaw hook delivery, with replay status exposed via `/v1/status` and doctor checks. - Keep `connector start` documented as advanced/manual recovery only; never require it in the default onboarding flow. 
- Keep setup/doctor path resolution compatible with OpenClaw runtime env overrides: diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index bce7f3e..f3f4068 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -1,6 +1,6 @@ --- name: clawdentity_openclaw_relay -description: This skill should be used when the user asks to "set up Clawdentity relay", "pair two agents", "verify an agent token", "rotate API key", "refresh agent auth", "revoke an agent", "troubleshoot relay", "uninstall connector service", or needs OpenClaw relay onboarding, lifecycle management, or pairing workflows. +description: This skill should be used when the user asks to "set up Clawdentity relay", "pair two agents", "verify an agent token", "rotate API key", "refresh agent auth", "revoke an agent", "troubleshoot relay", "uninstall connector service", "check relay health", "run relay doctor", "test relay connection", "send relay test", "install relay skill", "bootstrap registry", "create onboarding invite", "decommission agent", or needs OpenClaw relay onboarding, lifecycle management, or pairing workflows. version: 0.3.0 --- @@ -16,6 +16,30 @@ After setup, this skill also covers lifecycle operations: token refresh, API key Relay invite codes are not part of this flow. +## State Discovery First (required before asking for onboarding inputs) + +Always detect existing local state before asking for invite code, API key, or peer setup. + +1. Resolve OpenClaw state root. +- Default: `~/.openclaw` +- Respect env overrides: `OPENCLAW_STATE_DIR`, legacy `CLAWDBOT_STATE_DIR`, `OPENCLAW_HOME` + +2. Resolve Clawdentity state root using this order. +- Primary: `~/.clawdentity` +- Fallback: `/.clawdentity` + +3. If fallback exists and primary is missing: +- Run all `clawdentity ...` commands with `HOME=` so CLI resolves the same state root as OpenClaw profile. + +4. 
Run readiness probe before asking questions: +- `clawdentity openclaw doctor --json` + +5. Behavior gate from doctor output: +- If doctor is healthy: do not ask for onboarding invite/API key; proceed directly with requested relay/pairing action. +- If doctor is unhealthy: ask only for the minimum missing input required by failed checks. + +Never claim that no local relay setup exists until this discovery flow is complete. + ## Filesystem Truth (must be used exactly) ### OpenClaw state files @@ -104,9 +128,17 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See ### OpenClaw relay setup - `clawdentity skill install` +- `clawdentity skill install --openclaw-dir ` +- `clawdentity skill install --skill-package-root ` +- `clawdentity skill install --json` - `clawdentity openclaw setup ` - `clawdentity openclaw setup --transform-source ` - `clawdentity openclaw setup --openclaw-dir --openclaw-base-url ` +- `clawdentity openclaw setup --runtime-mode ` +- `clawdentity openclaw setup --wait-timeout-seconds ` (default 30) +- `clawdentity openclaw setup --no-runtime-start` + +Use `--no-runtime-start` when the connector runs as a separate container or process. ### OpenClaw diagnostics - `clawdentity openclaw doctor` @@ -143,6 +175,20 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - `clawdentity admin bootstrap --bootstrap-secret ` - `clawdentity admin bootstrap --bootstrap-secret --display-name --api-key-name --registry-url ` +### Command idempotency + +| Command | Idempotent? 
| Note | +|---|---|---| +| `config init` | Yes | Safe to re-run | +| `invite redeem` | **No** | One-time; invite consumed on success | +| `agent create` | No | Fails if agent directory exists | +| `openclaw setup` | Yes | Primary reconciliation re-entry point | +| `skill install` | Yes | Reports: installed/updated/unchanged | +| `pair start` | No | Creates new ticket each time; old ticket expires | +| `pair confirm` | No | Ticket consumed on success | +| `connector service install` | Yes | Idempotent | +| `connector service uninstall` | Yes | Idempotent | + ## Journey (strict order) 1. Validate prerequisites. @@ -154,9 +200,10 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - `npm install -g clawdentity@latest` - Confirm local agent name. - Confirm local human display name for onboarding. -- Check local API key status with `clawdentity config get apiKey`. -- If API key is missing, ask for onboarding invite `clw_inv_...` and continue with invite redeem. -- Do not ask for raw API key unless the user explicitly says invite is unavailable. +- Check existing relay state first using **State Discovery First** above. +- Check local API key status with `clawdentity config get apiKey` only after state root resolution is confirmed. +- If API key is missing and doctor indicates onboarding is incomplete, ask for onboarding invite `clw_inv_...` and continue with invite redeem. +- Do not ask for raw API key unless the user explicitly says invite is unavailable and onboarding invite cannot be provided. - Confirm OpenClaw path/base URL only if non-default. - Do not ask for pairing inputs before onboarding is complete. @@ -178,11 +225,13 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - `API key saved to local config` - `Human name: ` - Stop and fix if this step fails. Do not proceed to pairing. +- **Validate:** `clawdentity config get apiKey` returns a non-empty value. 5. Create local OpenClaw agent identity. 
- Run `clawdentity agent create --framework openclaw`. - Optionally add `--ttl-days ` to control token lifetime. - Run `clawdentity agent inspect `. +- **Validate:** `~/.clawdentity/agents//ait.jwt` and `secret.key` exist and are non-empty. 6. Configure relay setup. - Run: @@ -197,30 +246,15 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - runtime mode/status - websocket status `connected` - setup checklist is healthy (fails fast when hook/device/runtime prerequisites drift) +- **Validate:** run `clawdentity openclaw doctor --json` and confirm all check entries have `status: "pass"`. If any check has `status: "fail"`, use `checkId` to look up remediation in `references/clawdentity-protocol.md` § Doctor Check Reference. +- If setup throws `CLI_OPENCLAW_SETUP_CHECKLIST_FAILED`, parse `details.firstFailedCheckId` for targeted remediation. 7. Validate readiness. -- `clawdentity openclaw setup` already runs an internal checklist and auto-recovers pending OpenClaw gateway device approvals when possible. +- `clawdentity openclaw setup` already runs an internal checklist, stabilizes OpenClaw gateway auth token mode, and auto-recovers pending OpenClaw gateway device approvals when possible. - Run `clawdentity openclaw doctor` only for diagnostics or CI reporting. - Use `--json` for machine-readable output. - Use `--peer ` to validate a specific peer exists after pairing. 
-- Doctor check IDs and remediation: - -| Check ID | Validates | Remediation on Failure | -|----------|-----------|----------------------| -| `config.registry` | `registryUrl`, `apiKey`, and `proxyUrl` in config (or proxy env override) | `clawdentity config init` or `invite redeem` | -| `state.selectedAgent` | Agent marker at `~/.clawdentity/openclaw-agent-name` | `clawdentity openclaw setup ` | -| `state.credentials` | `ait.jwt` and `secret.key` exist and non-empty | `clawdentity agent create ` or `agent auth refresh ` | -| `state.peers` | Peers config valid; requested `--peer` alias exists | `clawdentity pair start` / `pair confirm` (optional until pairing) | -| `state.transform` | Relay transform artifacts in OpenClaw hooks dir | Reinstall skill package or `openclaw setup ` | -| `state.hookMapping` | `send-to-peer` hook mapping in OpenClaw config | `clawdentity openclaw setup ` | -| `state.hookToken` | Hooks enabled with token in OpenClaw config | `clawdentity openclaw setup ` then restart OpenClaw | -| `state.hookSessionRouting` | `hooks.defaultSessionKey`, `hooks.allowRequestSessionKey=false`, and required prefixes (`hook:`, default session key) | `clawdentity openclaw setup ` then restart OpenClaw | -| `state.gatewayDevicePairing` | Pending OpenClaw device approvals (prevents `pairing required` websocket errors) | Re-run `clawdentity openclaw setup ` so setup auto-recovers approvals | -| `state.openclawBaseUrl` | OpenClaw base URL resolvable | `clawdentity openclaw setup --openclaw-base-url ` | -| `state.connectorRuntime` | Local connector runtime reachable and websocket-connected | `clawdentity openclaw setup ` | -| `state.connectorInboundInbox` | Connector local inbound inbox backlog and replay queue state (`/v1/status`) | Re-run `clawdentity openclaw setup ` and verify connector runtime health | -| `state.openclawHookHealth` | Connector replay status for local OpenClaw hook delivery (`/v1/status`) | Re-run `clawdentity openclaw setup ` and restart OpenClaw 
if hook replay stays failed | - +- Doctor check IDs and remediation are in `references/clawdentity-protocol.md` § Doctor Check Reference. - At this point the agent is ready to start pairing or accept pairing. 8. Pairing phase (separate from onboarding). @@ -246,7 +280,10 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - If initiator started without `--wait`, initiator must run: - `clawdentity pair status --ticket --wait` - This persists the peer on initiator after responder confirmation. +- Default wait timeout is 300 seconds with 3-second polling. +- If `CLI_PAIR_STATUS_WAIT_TIMEOUT` is thrown: the responder did not confirm in time. Recovery: re-run `clawdentity pair start --qr --wait` to generate a new ticket. - Confirm pairing success, then run `clawdentity openclaw relay test`. +- **Validate:** `~/.clawdentity/peers.json` contains the new peer alias entry. 9. Post-pairing verification. - Run `clawdentity verify ` to confirm the local agent token is valid. @@ -257,6 +294,47 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - `state=queued` is not a pairing failure. The proxy retries delivery automatically while the message is within queue TTL/retry limits. - Note: `relay test` runs preflight doctor checks before sending the probe. +## Peer Recognition & Messaging + +After pairing, agents receive relayed messages from their paired peers. +Identity injection is enabled by default (`INJECT_IDENTITY_INTO_MESSAGE=true` on proxy). + +**Do not reply to peer messages in chat.** Chat replies stay local and never reach the peer. All replies to a peer must be sent via the relay webhook with `"peer": ""` in the payload. + +### Recognizing incoming messages +- Relayed messages contain a `[Clawdentity Identity]` block prepended to the message body. +- Extract `agentDid` from this block to identify the sender. 
+- See `references/clawdentity-protocol.md` § Identity Injection for the exact block format and field definitions. +- The connector `deliver` frame also exposes `fromAgentDid` for programmatic access. +- When you see a `[Clawdentity Identity]` block, the message is from a peer. Do not answer in chat. Follow the reply flow below. + +### Looking up peer identity +- Map sender `agentDid` to a known peer by matching against `did` in `~/.clawdentity/peers.json`. +- Each peer entry contains: + - `did` — the peer's agent DID (match key) + - `agentName` — the peer's agent name + - `humanName` — the human behind the peer agent + - `proxyUrl` — the peer's proxy endpoint +- The peer alias is the key of that entry in `peers.json` (e.g., `"beta"`). Use this alias in the reply payload. + +### Sending messages to a peer +This is the only way to reply to a peer or send a new message. Chat replies do not reach peers. +- Include `"peer": ""` in the webhook payload to route via relay. +- Use the alias resolved from the lookup step above. +- Use `agentName` or `humanName` in the message text for natural addressing. +- The relay transform strips `peer` from the payload and routes to the connector. + +### Reply flow +When a relayed peer message arrives: +1. Extract `agentDid` from the `[Clawdentity Identity]` block. +2. Look up the peer alias by matching `agentDid` against `did` in `~/.clawdentity/peers.json`. +3. Compose a webhook payload with `"peer": ""` set to the resolved alias. +4. The relay delivers the payload to the peer. Do not also reply in chat. + +### Peer validation +- `clawdentity openclaw doctor --peer ` confirms a specific peer is reachable. +- `clawdentity openclaw relay test --peer ` sends a test probe to the peer. + ## Lifecycle Management ### Token expiry recovery @@ -287,6 +365,11 @@ Note: Registry operators must run `admin bootstrap` before creating invites. See - Uses cached registry keys (1h TTL) and CRL (15min TTL). 
- Exit code 1 on verification failure or revocation. +### Periodic health checks +- Run `clawdentity openclaw doctor` periodically to detect stale credentials, expired AIT, or drifted runtime. +- Run `clawdentity agent inspect ` to check token expiry. +- If AIT is within 24 hours of expiry, proactively run `clawdentity agent auth refresh `. + ## Required Question Policy Ask only when missing: @@ -308,24 +391,33 @@ Do not suggest switching endpoints unless user explicitly asks for endpoint chan - `CLI_CONNECTOR_MISSING_AGENT_MATERIAL`: agent credentials missing. Rerun `clawdentity agent create ` or `clawdentity agent auth refresh `. ### Pairing errors -- `pair start` 403 (`PROXY_PAIR_OWNERSHIP_FORBIDDEN`): initiator ownership check failed. Recreate/refresh the local agent identity. -- `pair start` 503 (`PROXY_PAIR_OWNERSHIP_UNAVAILABLE`): registry ownership validation is unavailable. Check proxy/registry service auth configuration. -- `pair confirm` 404 (`PROXY_PAIR_TICKET_NOT_FOUND`): ticket is invalid or expired. Request a new ticket from initiator. -- `pair confirm` 410 (`PROXY_PAIR_TICKET_EXPIRED`): ticket has expired. Request a new ticket. +- `PROXY_PAIR_TICKET_NOT_FOUND`: ticket invalid or expired. Request a new ticket from initiator. +- `PROXY_PAIR_TICKET_EXPIRED`: ticket has expired. Request a new ticket. +- `CLI_PAIR_STATUS_WAIT_TIMEOUT`: responder did not confirm in time. Re-run `pair start`. - `CLI_PAIR_CONFIRM_INPUT_CONFLICT`: cannot provide both `--ticket` and `--qr-file`. Use one path only. - `CLI_PAIR_PROXY_URL_MISMATCH`: local `proxyUrl` does not match registry metadata. Rerun `clawdentity invite redeem `. - Responder shows peer but initiator does not: - Cause: initiator started pairing without `--wait`. - Fix: run `clawdentity pair status --ticket --wait` on initiator. +- For complete pairing error codes, read `references/clawdentity-protocol.md` § Pairing Error Codes. 
### Setup errors - `405 Method Not Allowed` on hook path: rerun `clawdentity openclaw setup ` and restart OpenClaw. - `CLI_OPENCLAW_MISSING_AGENT_CREDENTIALS` or `CLI_OPENCLAW_EMPTY_AGENT_CREDENTIALS`: agent credentials missing or empty. Rerun `agent create` or `agent auth refresh`. +- `CLI_OPENCLAW_SETUP_CHECKLIST_FAILED`: post-setup checklist reported a failing check. Parse `details.firstFailedCheckId` and apply remediation from the doctor check table in `references/clawdentity-protocol.md`. Common failing checks: + - `state.connectorRuntime` → rerun `openclaw setup ` + - `state.gatewayDevicePairing` → rerun `openclaw setup ` (auto-approval) + - `state.gatewayAuth` → rerun `openclaw setup ` (auto-configures gateway auth mode/token) + - `state.hookToken` → rerun `openclaw setup ` then restart OpenClaw ### Credential expiry - Agent AIT expired: run `clawdentity agent auth refresh `, then rerun `clawdentity openclaw setup `. - API key invalid (401 on registry calls): rotate with `api-key create` then `config set apiKey`. +### Network connectivity +- `CLI_PAIR_REQUEST_FAILED` or `CLI_ADMIN_BOOTSTRAP_REQUEST_FAILED`: proxy/registry unreachable. Check DNS, firewall rules, and URL with `clawdentity config show`. +- If running on an air-gapped machine, confirm proxy/registry URLs resolve to reachable endpoints. + ### General recovery - Report exact missing file/value. - Fix only failing input/config. 
@@ -337,7 +429,10 @@ Do not suggest switching endpoints unless user explicitly asks for endpoint chan | File | Purpose | |------|---------| -| `references/clawdentity-protocol.md` | Peer-map schema, pairing contract, connector handoff envelope, proxy URL resolution, pairing error codes, cache files, peer alias derivation | -| `references/clawdentity-registry.md` | Admin bootstrap, API key lifecycle, agent revocation, auth refresh | +| `references/clawdentity-protocol.md` | Peer-map schema, pairing contract, connector handoff, error codes, Docker guidance, doctor checks, identity injection | +| `references/clawdentity-registry.md` | Admin bootstrap, API key lifecycle, agent revocation, auth refresh, connector errors | +| `references/clawdentity-environment.md` | Complete environment variable reference for all CLI overrides | +| `examples/peers-sample.json` | Valid peers.json example with one peer entry | +| `examples/openclaw-relay-sample.json` | Relay runtime config example | Directive: read the reference files before troubleshooting relay contract, connector handoff failures, or registry/admin operations. 
diff --git a/apps/openclaw-skill/skill/examples/openclaw-relay-sample.json b/apps/openclaw-skill/skill/examples/openclaw-relay-sample.json new file mode 100644 index 0000000..be5ef78 --- /dev/null +++ b/apps/openclaw-skill/skill/examples/openclaw-relay-sample.json @@ -0,0 +1,5 @@ +{ + "openclawBaseUrl": "http://127.0.0.1:18789", + "openclawHookToken": "", + "updatedAt": "2026-02-15T20:00:00.000Z" +} diff --git a/apps/openclaw-skill/skill/examples/peers-sample.json b/apps/openclaw-skill/skill/examples/peers-sample.json new file mode 100644 index 0000000..cbb8647 --- /dev/null +++ b/apps/openclaw-skill/skill/examples/peers-sample.json @@ -0,0 +1,10 @@ +{ + "peers": { + "beta": { + "did": "did:claw:agent:01HEXAMPLE", + "proxyUrl": "https://proxy.clawdentity.com/hooks/agent", + "agentName": "beta", + "humanName": "Ira" + } + } +} diff --git a/apps/openclaw-skill/skill/references/clawdentity-environment.md b/apps/openclaw-skill/skill/references/clawdentity-environment.md new file mode 100644 index 0000000..b8b7ec2 --- /dev/null +++ b/apps/openclaw-skill/skill/references/clawdentity-environment.md @@ -0,0 +1,53 @@ +# Clawdentity Environment Variable Reference + +## Purpose + +Complete reference for CLI environment variable overrides. When env overrides are present, config-file URL mismatches are not blockers. 
+
+## CLI Environment Variables
+
+| Variable | Purpose | Used By |
+|---|---|---|
+| `CLAWDENTITY_PROXY_URL` | Override proxy URL | pair, connector |
+| `CLAWDENTITY_PROXY_WS_URL` | Override proxy WebSocket URL | connector |
+| `CLAWDENTITY_REGISTRY_URL` | Override registry URL | config |
+| `CLAWDENTITY_CONNECTOR_BASE_URL` | Override connector bind URL | connector |
+| `CLAWDENTITY_CONNECTOR_OUTBOUND_PATH` | Override outbound path | relay transform |
+| `CLAWDENTITY_AGENT_NAME` | Override agent name resolution | openclaw, transform |
+| `OPENCLAW_BASE_URL` | Override OpenClaw upstream URL | openclaw setup |
+| `OPENCLAW_HOOK_TOKEN` | Override hook auth token | openclaw setup |
+| `OPENCLAW_GATEWAY_TOKEN` | Override gateway auth token | openclaw setup |
+| `OPENCLAW_CONFIG_PATH` | Override OpenClaw config file path | openclaw |
+| `OPENCLAW_STATE_DIR` | Override OpenClaw state directory | openclaw |
+| `OPENCLAW_HOME` | Override OpenClaw home directory (used when explicit config/state overrides are unset) | openclaw |
+
+## Profile-Local State Resolution
+
+In profile-mounted/containerized OpenClaw environments, Clawdentity state may be stored at:
+- `<profile-root>/.clawdentity`
+
+instead of:
+- `~/.clawdentity`
+
+If `~/.clawdentity` is missing but `<profile-root>/.clawdentity` exists, run CLI commands with:
+- `HOME=<profile-root>`
+
+This makes `clawdentity` resolve the correct profile-local state root.
+ +## Legacy Environment Variables + +| Variable | Replaced By | +|---|---| +| `CLAWDBOT_CONFIG_PATH` | `OPENCLAW_CONFIG_PATH` | +| `CLAWDBOT_STATE_DIR` | `OPENCLAW_STATE_DIR` | + +## Proxy Server Environment Variables + +These variables configure the Clawdentity proxy server (operator-facing, not CLI): + +| Variable | Purpose | Default | +|---|---|---| +| `INJECT_IDENTITY_INTO_MESSAGE` | Enable/disable identity block injection into relayed messages | `true` | +| `RELAY_QUEUE_MAX_MESSAGES_PER_AGENT` | Max queued messages per agent | `500` | +| `RELAY_QUEUE_TTL_SECONDS` | Queue message time-to-live | `3600` | +| `RELAY_RETRY_INITIAL_MS` | Initial retry delay for relay delivery | `1000` | diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index 36713ee..86a4117 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -6,31 +6,7 @@ Define the exact runtime contract used by `relay-to-peer.mjs`. 
## Filesystem Paths -### OpenClaw files -- `/openclaw.json` (legacy filenames may exist: `clawdbot.json`, `moldbot.json`, `moltbot.json`) -- `/hooks/transforms/relay-to-peer.mjs` -- `/hooks/transforms/clawdentity-relay.json` -- `/hooks/transforms/clawdentity-peers.json` -- `/skills/clawdentity-openclaw-relay/SKILL.md` -- env overrides: - - `OPENCLAW_CONFIG_PATH`, `CLAWDBOT_CONFIG_PATH` - - `OPENCLAW_STATE_DIR`, `CLAWDBOT_STATE_DIR` - - `OPENCLAW_HOME` (used when explicit config/state overrides are unset) - -### Clawdentity files -- `~/.clawdentity/config.json` -- `~/.clawdentity/agents//secret.key` -- `~/.clawdentity/agents//public.key` -- `~/.clawdentity/agents//identity.json` -- `~/.clawdentity/agents//registry-auth.json` -- `~/.clawdentity/agents//ait.jwt` -- `~/.clawdentity/peers.json` -- `~/.clawdentity/openclaw-agent-name` -- `~/.clawdentity/openclaw-relay.json` -- `~/.clawdentity/openclaw-connectors.json` -- `~/.clawdentity/pairing/` (ephemeral QR PNG storage, auto-cleaned after 900s) -- `~/.clawdentity/cache/registry-keys.json` (1-hour TTL, used by `verify`) -- `~/.clawdentity/cache/crl-claims.json` (15-minute TTL, used by `verify`) +Canonical paths are defined in SKILL.md § Filesystem Truth. Refer there for all path contracts. ## Setup Input Contract @@ -214,32 +190,82 @@ Known defaults: Recovery: rerun onboarding (`clawdentity invite redeem --display-name `) so local config aligns to registry metadata. +## Identity Injection + +When identity injection is enabled (proxy env `INJECT_IDENTITY_INTO_MESSAGE`, default `true`), the proxy prepends an identity block to the `message` field of relayed payloads. + +### Block format + +``` +[Clawdentity Identity] +agentDid: did:claw:agent:01H... +ownerDid: did:claw:human:01H... +issuer: https://registry.clawdentity.com +aitJti: 01H... +``` + +The block is separated from the original message by a blank line (`\n\n`). 
+ +### Field definitions + +| Field | Description | +|---|---| +| `agentDid` | Sender agent DID — use to identify the peer | +| `ownerDid` | DID of the human who owns the sender agent | +| `issuer` | Registry URL that issued the sender's AIT | +| `aitJti` | Unique JTI claim from the sender's AIT | + +### Programmatic access + +The connector `deliver` frame includes `fromAgentDid` as a top-level field. Inbound inbox items (`ConnectorInboundInboxItem`) also expose `fromAgentDid` for programmatic sender identification without parsing the identity block. + ## Pairing Error Codes ### `pair start` errors -| HTTP Status | Error Code | Meaning | -|-------------|-----------|---------| -| 403 | `PROXY_PAIR_OWNERSHIP_FORBIDDEN` | Initiator ownership check failed | -| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | -| — | `CLI_PAIR_AGENT_NOT_FOUND` | Agent ait.jwt or secret.key missing/empty | -| — | `CLI_PAIR_HUMAN_NAME_MISSING` | Local config is missing `humanName`; set via invite redeem or config | -| — | `CLI_PAIR_PROXY_URL_REQUIRED` | Proxy URL could not be resolved | -| — | `CLI_PAIR_START_INVALID_TTL` | ttlSeconds must be a positive integer | -| — | `CLI_PAIR_INVALID_PROXY_URL` | Proxy URL is invalid | -| — | `CLI_PAIR_REQUEST_FAILED` | Unable to connect to proxy URL | +| HTTP Status | Error Code | Meaning | Recovery | +|---|---|---|---| +| 403 | `PROXY_PAIR_OWNERSHIP_FORBIDDEN` | Initiator ownership check failed | Recreate/refresh the local agent identity | +| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | Check proxy/registry service auth configuration | +| — | `CLI_PAIR_AGENT_NOT_FOUND` | Agent ait.jwt or secret.key missing/empty | Run `agent create` or `agent auth refresh` | +| — | `CLI_PAIR_HUMAN_NAME_MISSING` | Local config is missing `humanName` | Set via `invite redeem` or config | +| — | `CLI_PAIR_PROXY_URL_REQUIRED` | Proxy URL could not be resolved | Run `invite redeem` or set 
`CLAWDENTITY_PROXY_URL` | +| — | `CLI_PAIR_START_INVALID_TTL` | ttlSeconds must be a positive integer | Use valid `--ttl-seconds` value | +| — | `CLI_PAIR_INVALID_PROXY_URL` | Proxy URL is invalid | Fix proxy URL in config | +| — | `CLI_PAIR_REQUEST_FAILED` | Unable to connect to proxy URL | Check DNS, firewall, proxy URL | +| — | `CLI_PAIR_START_FAILED` | Generic pair start failure | Retry; check proxy connectivity | +| — | `CLI_PAIR_PROFILE_INVALID` | Name too long, contains control characters, or empty | Fix agent or human name | ### `pair confirm` errors -| HTTP Status | Error Code | Meaning | -|-------------|-----------|---------| -| 404 | `PROXY_PAIR_TICKET_NOT_FOUND` | Pairing ticket is invalid or expired | -| 410 | `PROXY_PAIR_TICKET_EXPIRED` | Pairing ticket has expired | -| — | `CLI_PAIR_CONFIRM_TICKET_REQUIRED` | Either --ticket or --qr-file is required | -| — | `CLI_PAIR_CONFIRM_INPUT_CONFLICT` | Cannot provide both --ticket and --qr-file | -| — | `CLI_PAIR_CONFIRM_TICKET_INVALID` | Pairing ticket is invalid | -| — | `CLI_PAIR_CONFIRM_QR_FILE_NOT_FOUND` | QR file not found | -| — | `CLI_PAIR_CONFIRM_QR_NOT_FOUND` | No pairing QR code found in image | +| HTTP Status | Error Code | Meaning | Recovery | +|---|---|---|---| +| 404 | `PROXY_PAIR_TICKET_NOT_FOUND` | Pairing ticket is invalid or expired | Request new ticket from initiator | +| 410 | `PROXY_PAIR_TICKET_EXPIRED` | Pairing ticket has expired | Request new ticket | +| — | `CLI_PAIR_CONFIRM_TICKET_REQUIRED` | Either --ticket or --qr-file is required | Provide one input path | +| — | `CLI_PAIR_CONFIRM_INPUT_CONFLICT` | Cannot provide both --ticket and --qr-file | Use one input path only | +| — | `CLI_PAIR_CONFIRM_TICKET_INVALID` | Pairing ticket is invalid | Get new ticket from initiator | +| — | `CLI_PAIR_CONFIRM_QR_FILE_NOT_FOUND` | QR file not found | Verify file path | +| — | `CLI_PAIR_CONFIRM_QR_NOT_FOUND` | No pairing QR code found in image | Request new QR from initiator | +| — | 
`CLI_PAIR_CONFIRM_FAILED` | Generic pair confirm failure | Retry with new ticket | +| — | `CLI_PAIR_CONFIRM_QR_FILE_INVALID` | QR image file corrupt or unsupported | Request new QR from initiator | +| — | `CLI_PAIR_CONFIRM_QR_FILE_REQUIRED` | QR path unusable | Verify file path and format | + +### `pair status` errors + +| HTTP Status | Error Code | Meaning | Recovery | +|---|---|---|---| +| — | `CLI_PAIR_STATUS_FAILED` | Generic pair status failure | Retry | +| — | `CLI_PAIR_STATUS_WAIT_TIMEOUT` | Wait polling timed out | Generate new ticket via `pair start` | +| — | `CLI_PAIR_STATUS_FORBIDDEN` | 403 on status check — ownership mismatch | Verify correct agent | +| — | `CLI_PAIR_STATUS_TICKET_REQUIRED` | Missing ticket argument | Provide `--ticket ` | + +### Peer persistence errors + +| Error Code | Meaning | Recovery | +|---|---|---| +| `CLI_PAIR_PEERS_CONFIG_INVALID` | `peers.json` corrupt or invalid structure | Delete `peers.json` and re-pair | +| `CLI_PAIR_PEER_ALIAS_INVALID` | Derived alias fails validation | Re-pair with valid agent DID | ## Cache Files @@ -261,3 +287,38 @@ When `pair confirm` saves a new peer, alias is derived automatically: 5. Fallback alias is `peer` if DID is not a valid agent DID. Alias validation: `[a-zA-Z0-9._-]`, max 128 characters. + +## Container Environments + +When running in Docker or similar container runtimes: + +- `openclaw setup` writes Docker-aware endpoint candidates into `clawdentity-relay.json`: + - `host.docker.internal`, `gateway.docker.internal`, Linux bridge (`172.17.0.1`), default gateway, and loopback. + - Candidates are attempted in order by the relay transform. +- Use `--no-runtime-start` when the connector runs as a separate container or process. +- Required env overrides for container networking: + - `OPENCLAW_BASE_URL` — point to OpenClaw inside/outside the container network. + - `CLAWDENTITY_CONNECTOR_BASE_URL` — point to the connector's bind address from the transform's perspective. 
+- Port allocation: each agent gets its own connector port starting from `19400`. + - Port assignment is tracked in `~/.clawdentity/openclaw-connectors.json`. + +## Doctor Check Reference + +Run `clawdentity openclaw doctor --json` for machine-readable diagnostics. + +| Check ID | Validates | Remediation on Failure | +|---|---|---| +| `config.registry` | `registryUrl`, `apiKey`, and `proxyUrl` in config (or proxy env override) | `clawdentity config init` or `invite redeem` | +| `state.selectedAgent` | Agent marker at `~/.clawdentity/openclaw-agent-name` | `clawdentity openclaw setup ` | +| `state.credentials` | `ait.jwt` and `secret.key` exist and non-empty | `clawdentity agent create ` or `agent auth refresh ` | +| `state.peers` | Peers config valid; requested `--peer` alias exists | `clawdentity pair start` / `pair confirm` (optional until pairing) | +| `state.transform` | Relay transform artifacts in OpenClaw hooks dir | Reinstall skill package or `openclaw setup ` | +| `state.hookMapping` | `send-to-peer` hook mapping in OpenClaw config | `clawdentity openclaw setup ` | +| `state.hookToken` | Hooks enabled with token in OpenClaw config | `clawdentity openclaw setup ` then restart OpenClaw | +| `state.hookSessionRouting` | `hooks.defaultSessionKey`, `hooks.allowRequestSessionKey=false`, and required prefixes | `clawdentity openclaw setup ` then restart OpenClaw | +| `state.gatewayAuth` | OpenClaw `gateway.auth` readiness (`mode` + required credential) | `clawdentity openclaw setup ` to re-sync gateway auth | +| `state.gatewayDevicePairing` | Pending OpenClaw device approvals | Re-run `clawdentity openclaw setup ` so setup auto-recovers approvals | +| `state.openclawBaseUrl` | OpenClaw base URL resolvable | `clawdentity openclaw setup --openclaw-base-url ` | +| `state.connectorRuntime` | Local connector runtime reachable and websocket-connected | `clawdentity openclaw setup ` | +| `state.connectorInboundInbox` | Connector local inbound inbox backlog and replay 
queue state | Re-run `clawdentity openclaw setup ` and verify connector runtime health | +| `state.openclawHookHealth` | Connector replay status for local OpenClaw hook delivery | Re-run `clawdentity openclaw setup ` and restart OpenClaw if hook replay stays failed | diff --git a/apps/openclaw-skill/skill/references/clawdentity-registry.md b/apps/openclaw-skill/skill/references/clawdentity-registry.md index 8a45180..e084385 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-registry.md +++ b/apps/openclaw-skill/skill/references/clawdentity-registry.md @@ -173,3 +173,14 @@ Admin-only. Creates a registry invite code (`clw_inv_...`) for onboarding new us | 401 | Authentication failed | | 403 | Requires admin access | | 400 | Invalid request | + +## Connector Errors + +| Error Code | Meaning | Recovery | +|---|---|---| +| `CLI_CONNECTOR_SERVICE_PLATFORM_INVALID` | Invalid platform argument | Use `auto`, `launchd`, or `systemd` | +| `CLI_CONNECTOR_SERVICE_PLATFORM_UNSUPPORTED` | OS unsupported for selected platform | Use a supported platform (macOS: launchd, Linux: systemd) | +| `CLI_CONNECTOR_SERVICE_INSTALL_FAILED` | Service install failed | Check permissions, systemd/launchd status | +| `CLI_CONNECTOR_PROXY_URL_REQUIRED` | Proxy URL unresolvable | Run `invite redeem` or set `CLAWDENTITY_PROXY_URL` / `CLAWDENTITY_PROXY_WS_URL` | +| `CLI_CONNECTOR_INVALID_REGISTRY_AUTH` | `registry-auth.json` corrupt or invalid | Run `clawdentity agent auth refresh ` | +| `CLI_CONNECTOR_INVALID_AGENT_IDENTITY` | `identity.json` corrupt or invalid | Re-create agent with `clawdentity agent create ` | diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 2c6d2a0..5f67499 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -3,7 +3,7 @@ # OPENCLAW_BASE_URL is optional for relay-mode proxy operation. 
# OPENCLAW_BASE_URL=http://127.0.0.1:18789 -# Runtime vars +# Runtime vars (required at startup) ENVIRONMENT=development APP_VERSION=local-dev REGISTRY_URL=https://dev.registry.clawdentity.com diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index dd48ebb..e3f39f7 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -4,6 +4,7 @@ - Keep `index.ts` as runtime bootstrap surface and version export. - Keep version resolution in `index.ts` deterministic: prefer `APP_VERSION`, then `PROXY_VERSION`, then fallback constant for local/dev defaults. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. +- Keep startup fail-fast env validation in `config.ts` and enforce it from runtime boot (`startProxyServer` + worker runtime build) so missing registry/service credentials fail immediately. - Keep agent DID rate-limit env parsing in `config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE`, `AGENT_RATE_LIMIT_WINDOW_MS`) and validate as positive integers. - Keep HTTP app composition in `server.ts`. - Keep Cloudflare Worker fetch startup in `worker.ts`. @@ -50,6 +51,7 @@ - `/pair/confirm` requires `responderProfile.{agentName,humanName}` - `/pair/status` returns stored profile fields for initiator and responder - Keep pairing tickets issuer-authenticated via local signature in `/pair/start`; `/pair/confirm` must consume only locally stored tickets in single-proxy mode. +- Keep ticket parsing tolerant for operator copy/paste paths: normalize surrounding markdown/backticks and whitespace before parse + trust-store lookup in both in-memory and Durable Object backends. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. 
- Keep `/hooks/agent` trust check explicit: sender/recipient pair must be authorized by trust state before relay delivery. diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 3a0d991..01eb7e1 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -163,6 +163,36 @@ describe("proxy config", () => { }), ).toThrow(ProxyConfigError); }); + + it("fails when requireRuntimeKeys is enabled and required env vars are missing", () => { + expect(() => + parseProxyConfig( + { + ENVIRONMENT: "local", + }, + { + requireRuntimeKeys: true, + }, + ), + ).toThrow(ProxyConfigError); + }); + + it("passes requireRuntimeKeys check when all required env vars are present", () => { + const config = parseProxyConfig( + { + ENVIRONMENT: "local", + REGISTRY_URL: "https://registry.example.test", + REGISTRY_INTERNAL_SERVICE_ID: "svc-proxy-registry", + REGISTRY_INTERNAL_SERVICE_SECRET: "secret-proxy-registry", + }, + { + requireRuntimeKeys: true, + }, + ); + + expect(config.environment).toBe("local"); + expect(config.registryUrl).toBe("https://registry.example.test"); + }); }); describe("proxy config loading", () => { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 1948947..450caa2 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -170,6 +170,9 @@ export const proxyConfigSchema = z.object({ }); export type ProxyConfig = z.infer; +type ParseProxyConfigOptions = { + requireRuntimeKeys?: boolean; +}; type RuntimeEnvInput = { LISTEN_PORT?: unknown; @@ -533,9 +536,56 @@ function loadOpenclawBaseUrlFromFallback( } } -export function parseProxyConfig(env: unknown): ProxyConfig { +const REQUIRED_PROXY_RUNTIME_ENV_KEYS: readonly { + key: string; + aliases: readonly (keyof RuntimeEnvInput)[]; +}[] = [ + { + key: "ENVIRONMENT", + aliases: ["ENVIRONMENT"], + }, + { + key: "REGISTRY_URL", + aliases: ["REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL"], + }, + { + key: "REGISTRY_INTERNAL_SERVICE_ID", + aliases: 
["REGISTRY_INTERNAL_SERVICE_ID"], + }, + { + key: "REGISTRY_INTERNAL_SERVICE_SECRET", + aliases: ["REGISTRY_INTERNAL_SERVICE_SECRET"], + }, +]; + +function assertRequiredProxyRuntimeKeys(env: RuntimeEnvInput): void { + const fieldErrors: Record = {}; + for (const requiredKey of REQUIRED_PROXY_RUNTIME_ENV_KEYS) { + const value = firstNonEmpty(env, requiredKey.aliases); + if (value !== undefined) { + continue; + } + + fieldErrors[requiredKey.key] = [`${requiredKey.key} is required`]; + } + + if (Object.keys(fieldErrors).length > 0) { + throw toConfigValidationError({ + fieldErrors, + formErrors: [], + }); + } +} + +export function parseProxyConfig( + env: unknown, + options: ParseProxyConfigOptions = {}, +): ProxyConfig { const inputEnv: RuntimeEnvInput = isRuntimeEnvInput(env) ? env : {}; assertNoDeprecatedAllowAllVerified(inputEnv); + if (options.requireRuntimeKeys === true) { + assertRequiredProxyRuntimeKeys(inputEnv); + } const parsedRuntimeEnv = proxyRuntimeEnvSchema.safeParse( normalizeRuntimeEnv(inputEnv), @@ -622,9 +672,9 @@ export function parseProxyConfig(env: unknown): ProxyConfig { export function loadProxyConfig( env: unknown = resolveDefaultEnv(), - options: ProxyConfigLoadOptions = {}, + options: ProxyConfigLoadOptions & ParseProxyConfigOptions = {}, ): ProxyConfig { const mergedEnv = loadEnvWithDotEnvFallback(env, options); loadOpenclawBaseUrlFromFallback(mergedEnv, options); - return parseProxyConfig(mergedEnv); + return parseProxyConfig(mergedEnv, options); } diff --git a/apps/proxy/src/node-server.ts b/apps/proxy/src/node-server.ts index e233720..3aa5f07 100644 --- a/apps/proxy/src/node-server.ts +++ b/apps/proxy/src/node-server.ts @@ -48,7 +48,10 @@ export function startProxyServer( options: StartProxyServerOptions = {}, ): StartedProxyServer { const config = - options.config ?? loadProxyConfig(options.env ?? resolveDefaultNodeEnv()); + options.config ?? + loadProxyConfig(options.env ?? 
resolveDefaultNodeEnv(), { + requireRuntimeKeys: true, + }); const logger = resolveLogger(options.logger); const trustStoreResolution = resolveNodeTrustStore({ environment: config.environment, diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 758bceb..08f5c00 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -15,6 +15,7 @@ import { import { createPairingTicket, createPairingTicketSigningKey, + normalizePairingTicketText, PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; @@ -410,7 +411,7 @@ export function createPairConfirmHandler( "responderProfile", ); - const ticket = body.ticket.trim(); + const ticket = normalizePairingTicketText(body.ticket); try { parsePairingTicket(ticket); } catch (error) { @@ -487,7 +488,7 @@ export function createPairStatusHandler( }); } - const ticket = body.ticket.trim(); + const ticket = normalizePairingTicketText(body.ticket); const status = await options.trustStore .getPairingTicketStatus({ ticket, diff --git a/apps/proxy/src/pairing-ticket.ts b/apps/proxy/src/pairing-ticket.ts index abd80f0..2efca82 100644 --- a/apps/proxy/src/pairing-ticket.ts +++ b/apps/proxy/src/pairing-ticket.ts @@ -45,6 +45,17 @@ function utf8Decode(value: Uint8Array): string { return new TextDecoder().decode(value); } +export function normalizePairingTicketText(value: string): string { + let normalized = value.trim(); + while (normalized.startsWith("`")) { + normalized = normalized.slice(1); + } + while (normalized.endsWith("`")) { + normalized = normalized.slice(0, -1); + } + return normalized.trim().replace(/\s+/gu, ""); +} + function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } @@ -205,7 +216,7 @@ export async function createPairingTicket(input: { } export function parsePairingTicket(ticket: string): PairingTicketPayload { - const trimmedTicket = ticket.trim(); + const trimmedTicket = 
normalizePairingTicketText(ticket); if (!trimmedTicket.startsWith(PAIRING_TICKET_PREFIX)) { throw new PairingTicketParseError( "PROXY_PAIR_TICKET_INVALID_FORMAT", diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index 348fdcb..cd1bf5e 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -1,4 +1,5 @@ import { + normalizePairingTicketText, PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; @@ -183,9 +184,10 @@ export class ProxyTrustState { const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( body.expiresAtMs, ); + const ticket = normalizePairingTicketText(body.ticket); let parsedTicket: ReturnType; try { - parsedTicket = parsePairingTicket(body.ticket); + parsedTicket = parsePairingTicket(ticket); } catch (error) { if (error instanceof PairingTicketParseError) { return toErrorResponse({ @@ -224,7 +226,7 @@ export class ProxyTrustState { const expirableState = await this.loadExpirableState(); expirableState.pairingTickets[parsedTicket.kid] = { - ticket: body.ticket, + ticket, initiatorAgentDid: body.initiatorAgentDid, initiatorProfile, issuerProxyUrl: parsedTicket.iss, @@ -238,7 +240,7 @@ export class ProxyTrustState { }); return Response.json({ - ticket: body.ticket, + ticket, expiresAtMs: normalizedExpiresAtMs, initiatorAgentDid: body.initiatorAgentDid, initiatorProfile, @@ -266,9 +268,10 @@ export class ProxyTrustState { }); } + const ticket = normalizePairingTicketText(body.ticket); let parsedTicket: ReturnType; try { - parsedTicket = parsePairingTicket(body.ticket); + parsedTicket = parsePairingTicket(ticket); } catch (error) { if (error instanceof PairingTicketParseError) { return toErrorResponse({ @@ -285,7 +288,7 @@ export class ProxyTrustState { const expirableState = await this.loadExpirableState(); const stored = expirableState.pairingTickets[parsedTicket.kid]; - if (!stored || stored.ticket !== body.ticket) { + if (!stored || stored.ticket !== 
ticket) { return toErrorResponse({ code: "PROXY_PAIR_TICKET_NOT_FOUND", message: "Pairing ticket not found", @@ -327,7 +330,7 @@ export class ProxyTrustState { delete expirableState.pairingTickets[parsedTicket.kid]; expirableState.confirmedPairingTickets[parsedTicket.kid] = { - ticket: body.ticket, + ticket, expiresAtMs: stored.expiresAtMs, initiatorAgentDid: stored.initiatorAgentDid, initiatorProfile: stored.initiatorProfile, @@ -365,9 +368,10 @@ export class ProxyTrustState { } const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); + const ticket = normalizePairingTicketText(body.ticket); let parsedTicket: ReturnType; try { - parsedTicket = parsePairingTicket(body.ticket); + parsedTicket = parsePairingTicket(ticket); } catch (error) { if (error instanceof PairingTicketParseError) { return toErrorResponse({ @@ -383,7 +387,7 @@ export class ProxyTrustState { const expirableState = await this.loadExpirableState(); const pending = expirableState.pairingTickets[parsedTicket.kid]; - if (pending && pending.ticket === body.ticket) { + if (pending && pending.ticket === ticket) { if (pending.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { delete expirableState.pairingTickets[parsedTicket.kid]; await this.saveExpirableStateAndSchedule(expirableState, { @@ -407,7 +411,7 @@ export class ProxyTrustState { } const confirmed = expirableState.confirmedPairingTickets[parsedTicket.kid]; - if (confirmed && confirmed.ticket === body.ticket) { + if (confirmed && confirmed.ticket === ticket) { if (confirmed.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { delete expirableState.confirmedPairingTickets[parsedTicket.kid]; await this.saveExpirableStateAndSchedule(expirableState, { diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 411d120..cc0be12 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -1,5 +1,6 @@ import { PROXY_TRUST_DO_NAME } from 
"./pairing-constants.js"; import { + normalizePairingTicketText, PairingTicketParseError, parsePairingTicket, } from "./pairing-ticket.js"; @@ -195,24 +196,27 @@ export function createDurableProxyTrustStore( ): ProxyTrustStore { return { async createPairingTicket(input) { + const ticket = normalizePairingTicketText(input.ticket); return callDurableState( namespace, TRUST_STORE_ROUTES.createPairingTicket, - input, + { ...input, ticket }, ); }, async confirmPairingTicket(input) { + const ticket = normalizePairingTicketText(input.ticket); return callDurableState( namespace, TRUST_STORE_ROUTES.confirmPairingTicket, - input, + { ...input, ticket }, ); }, async getPairingTicketStatus(input) { + const ticket = normalizePairingTicketText(input.ticket); return callDurableState( namespace, TRUST_STORE_ROUTES.getPairingTicketStatus, - input, + { ...input, ticket }, ); }, async isAgentKnown(agentDid) { @@ -323,11 +327,12 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { expiresAtMs: number; } { const nowMs = input.nowMs ?? Date.now(); - const parsedTicket = parseStoredTicket(input.ticket); + const normalizedTicket = normalizePairingTicketText(input.ticket); + const parsedTicket = parseStoredTicket(normalizedTicket); cleanup(nowMs, parsedTicket.kid); const stored = pairingTickets.get(parsedTicket.kid); - if (!stored || stored.ticket !== input.ticket) { + if (!stored || stored.ticket !== normalizedTicket) { throw new ProxyTrustStoreError({ code: "PROXY_PAIR_TICKET_NOT_FOUND", message: "Pairing ticket not found", @@ -369,11 +374,12 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { input: PairingTicketStatusInput, ): PairingTicketStatusResult { const nowMs = input.nowMs ?? 
Date.now(); - const parsedTicket = parseStoredTicket(input.ticket); + const normalizedTicket = normalizePairingTicketText(input.ticket); + const parsedTicket = parseStoredTicket(normalizedTicket); cleanup(nowMs, parsedTicket.kid); const pending = pairingTickets.get(parsedTicket.kid); - if (pending && pending.ticket === input.ticket) { + if (pending && pending.ticket === normalizedTicket) { if (pending.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { pairingTickets.delete(parsedTicket.kid); throw new ProxyTrustStoreError({ @@ -394,7 +400,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { } const confirmed = confirmedPairingTickets.get(parsedTicket.kid); - if (confirmed && confirmed.ticket === input.ticket) { + if (confirmed && confirmed.ticket === normalizedTicket) { if (confirmed.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { confirmedPairingTickets.delete(parsedTicket.kid); throw new ProxyTrustStoreError({ @@ -437,7 +443,8 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { const nowMs = input.nowMs ?? 
Date.now(); cleanup(nowMs); - const parsedTicket = parseStoredTicket(input.ticket); + const ticket = normalizePairingTicketText(input.ticket); + const parsedTicket = parseStoredTicket(ticket); const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( input.expiresAtMs, ); @@ -459,7 +466,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { } pairingTickets.set(parsedTicket.kid, { - ticket: input.ticket, + ticket, initiatorAgentDid: input.initiatorAgentDid, initiatorProfile: input.initiatorProfile, issuerProxyUrl: parsedTicket.iss, @@ -468,7 +475,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { confirmedPairingTickets.delete(parsedTicket.kid); return { - ticket: input.ticket, + ticket, expiresAtMs: normalizedExpiresAtMs, initiatorAgentDid: input.initiatorAgentDid, initiatorProfile: input.initiatorProfile, @@ -499,8 +506,9 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { confirmedPair.initiatorAgentDid, ); pairingTickets.delete(ticketKid); + const ticket = normalizePairingTicketText(input.ticket); confirmedPairingTickets.set(ticketKid, { - ticket: input.ticket, + ticket, initiatorAgentDid: confirmedPair.initiatorAgentDid, initiatorProfile: confirmedPair.initiatorProfile, responderAgentDid: confirmedPair.responderAgentDid, diff --git a/apps/proxy/src/worker.test.ts b/apps/proxy/src/worker.test.ts index a8ad685..8ac4089 100644 --- a/apps/proxy/src/worker.test.ts +++ b/apps/proxy/src/worker.test.ts @@ -24,14 +24,26 @@ function createExecutionContext(): ExecutionContext { } as unknown as ExecutionContext; } +function createRequiredBindings( + overrides: ProxyWorkerBindings = {}, +): ProxyWorkerBindings { + return { + ENVIRONMENT: "local", + REGISTRY_URL: "https://registry.example.test", + REGISTRY_INTERNAL_SERVICE_ID: "svc-proxy-registry", + REGISTRY_INTERNAL_SERVICE_SECRET: "secret-proxy-registry", + ...overrides, + }; +} + describe("proxy worker", () => { it("serves /health with parsed runtime config from 
bindings", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - { + createRequiredBindings({ APP_VERSION: "sha-worker-123", ENVIRONMENT: "local", - } satisfies ProxyWorkerBindings, + }), createExecutionContext(), ); @@ -51,9 +63,9 @@ describe("proxy worker", () => { it("allows local startup without trust DO binding", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - { + createRequiredBindings({ ENVIRONMENT: "local", - } satisfies ProxyWorkerBindings, + }), createExecutionContext(), ); @@ -71,10 +83,10 @@ describe("proxy worker", () => { it("allows development startup when trust DO binding exists", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - { + createRequiredBindings({ ENVIRONMENT: "development", PROXY_TRUST_STATE: createTrustStateNamespace(), - } satisfies ProxyWorkerBindings, + }), createExecutionContext(), ); @@ -90,9 +102,9 @@ describe("proxy worker", () => { it("fails startup in development when trust DO binding is missing", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - { + createRequiredBindings({ ENVIRONMENT: "development", - } satisfies ProxyWorkerBindings, + }), createExecutionContext(), ); @@ -114,9 +126,9 @@ describe("proxy worker", () => { it("fails startup in production when trust DO binding is missing", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - { + createRequiredBindings({ ENVIRONMENT: "production", - } satisfies ProxyWorkerBindings, + }), createExecutionContext(), ); @@ -138,8 +150,26 @@ describe("proxy worker", () => { it("returns config validation error for malformed OPENCLAW_BASE_URL", async () => { const response = await worker.fetch( new Request("https://proxy.example.test/health"), - { + createRequiredBindings({ OPENCLAW_BASE_URL: "bad-url", + }), + 
createExecutionContext(), + ); + + expect(response.status).toBe(500); + const payload = (await response.json()) as { + error: { + code: string; + }; + }; + expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + }); + + it("fails startup when required runtime bindings are missing", async () => { + const response = await worker.fetch( + new Request("https://proxy.example.test/health"), + { + ENVIRONMENT: "local", } satisfies ProxyWorkerBindings, createExecutionContext(), ); @@ -148,8 +178,20 @@ describe("proxy worker", () => { const payload = (await response.json()) as { error: { code: string; + details: { + fieldErrors?: Record; + }; }; }; expect(payload.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(payload.error.details.fieldErrors?.REGISTRY_URL?.[0]).toBe( + "REGISTRY_URL is required", + ); + expect( + payload.error.details.fieldErrors?.REGISTRY_INTERNAL_SERVICE_ID?.[0], + ).toBe("REGISTRY_INTERNAL_SERVICE_ID is required"); + expect( + payload.error.details.fieldErrors?.REGISTRY_INTERNAL_SERVICE_SECRET?.[0], + ).toBe("REGISTRY_INTERNAL_SERVICE_SECRET is required"); }); }); diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 37d6186..fec844a 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -87,7 +87,9 @@ function buildRuntime(env: ProxyWorkerBindings): CachedProxyRuntime { return cachedRuntime; } - const config = parseProxyConfig(env); + const config = parseProxyConfig(env, { + requireRuntimeKeys: true, + }); const trustStoreResolution = resolveWorkerTrustStore({ environment: config.environment, trustStateNamespace: env.PROXY_TRUST_STATE, diff --git a/apps/registry/.env.example b/apps/registry/.env.example index 267af15..2bbbd2d 100644 --- a/apps/registry/.env.example +++ b/apps/registry/.env.example @@ -5,14 +5,14 @@ # wrangler secret put REGISTRY_SIGNING_KEY --env # wrangler secret put REGISTRY_SIGNING_KEYS --env -# Wrangler vars (non-secret) +# Wrangler vars (required at startup) ENVIRONMENT=development 
APP_VERSION=local-dev EVENT_BUS_BACKEND=memory PROXY_URL=https://dev.proxy.clawdentity.com REGISTRY_ISSUER_URL=https://dev.registry.clawdentity.com -# Secrets +# Secrets (required at startup) BOOTSTRAP_SECRET=replace-with-random-secret REGISTRY_SIGNING_KEY=replace-with-base64url-ed25519-private-key REGISTRY_SIGNING_KEYS=[{"kid":"reg-key-1","alg":"EdDSA","crv":"Ed25519","x":"replace-with-base64url-ed25519-public-key","status":"active"}] diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 9de773e..b9a6ae5 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -10,6 +10,7 @@ ## Health Contract - `/health` must return HTTP 200 with `{ status, version, environment }` on valid config. - Invalid runtime config must fail through the shared error handler and return `CONFIG_VALIDATION_FAILED`. +- Runtime startup config must fail fast for non-test environments when required keys are missing (`PROXY_URL`, `REGISTRY_ISSUER_URL`, `EVENT_BUS_BACKEND`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). ## Admin Bootstrap Contract - `POST /v1/admin/bootstrap` is a one-time bootstrap endpoint gated by `BOOTSTRAP_SECRET`. 
diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 49c5480..b7d4ed8 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -2417,6 +2417,19 @@ describe(`GET ${REGISTRY_METADATA_PATH}`, () => { ENVIRONMENT: "development", APP_VERSION: "sha-meta-123", PROXY_URL: "https://dev.proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), }, ); @@ -4506,7 +4519,24 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "production" }, + { + DB: database, + ENVIRONMENT: "production", + PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, ); expect(res.status).toBe(400); @@ -5582,6 +5612,20 @@ describe("POST /v1/agents", () => { { DB: database, ENVIRONMENT: "production", + PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), }, ); @@ -5619,6 +5663,10 @@ describe("POST /v1/agents", () => { { DB: database, ENVIRONMENT: "production", + 
PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index cf85d13..79400b6 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -971,7 +971,9 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { return cachedConfig; } - cachedConfig = parseRegistryConfig(bindings); + cachedConfig = parseRegistryConfig(bindings, { + requireRuntimeKeys: true, + }); return cachedConfig; } diff --git a/apps/registry/wrangler.jsonc b/apps/registry/wrangler.jsonc index ab184b7..9f0b051 100644 --- a/apps/registry/wrangler.jsonc +++ b/apps/registry/wrangler.jsonc @@ -32,6 +32,7 @@ "vars": { "ENVIRONMENT": "development", "PROXY_URL": "https://dev.proxy.clawdentity.com", + "REGISTRY_ISSUER_URL": "https://dev.registry.clawdentity.com", "EVENT_BUS_BACKEND": "queue" } }, @@ -62,6 +63,7 @@ "vars": { "ENVIRONMENT": "production", "PROXY_URL": "https://proxy.clawdentity.com", + "REGISTRY_ISSUER_URL": "https://registry.clawdentity.com", "EVENT_BUS_BACKEND": "queue" } } diff --git a/assets/banner.png b/assets/banner.png new file mode 100644 index 0000000000000000000000000000000000000000..2edb691ebdb2133a92ac4be44209d1896766056a GIT binary patch literal 167993 zcmeGD#Sg_zMusFfp9p2wt z&r|2z=MOmF?y9bu`OwwVHPe0d)HOZPn(7KTm=u@@2naYzin3o35K#V`B88x%{x|er z%@-mdA|NQqO6&OkI)3px=NR!?j|MeYop$x?aJ-n^9f8ad_@<-;z3~w+La~@n8MI_n z2yg-NUk#exgY9d`6dd?&4XEK?9h}E3!)*a2(|=@=Q-xC;IO(O4Xwj1p8u`Ge{?f&y zp2gqSI?Q`c?w(cys%L`dOr1u?M_XCXUUD^tkJe3jH1fp!zKN#kW}JYINri+I&^{nk zciYF`?c8;y)m?UH+Z)mn{c-t75VWKus$oy1yTj*M1N*5oJ94NO^q22uW=@M`Y$d`( zmuuQSO=P}%&vq6DDrA{dl?1Fi97f^h^Rq{0ylsD3pNb%V`~m&Y$_~mmzqzoL$Xd+~ zes6m)KihjN;#YDqp?B7<)T}i0dmdhQsTy<1N1~^j99&taU!6x~J2HKX)i+GCU1-^r 
zzM}{%9p~C-isk!8b2P>^V0&?hrQ0RzXw&r3Hfw(mm`A*1Cg5|B%8*v>TR%1J-p9|p|W25{RBLBmVRcs*w?f2u)lv7sdB_QVv0BTn6+g(bv*T&3R$FX< zR+ikHTV5^E6v*@Ma{T14IJnVFykpFo!_fP+W!%;-IG~3+jOE|K4zMWQwu}Mthk;e4 zCur5S)wcb>-sK5+{PWSa*0#Oo3Fy3D?>ewgdJ4XqXYxA|4S?%=aK8LrkD~i&<0Orn zTy}UZP}}ksHVzb2*|+3Xxviv;#=l6mMZn|^dB_`AySg{If5dSbVm-R2HwR~;>N=AM zzi!6+0BfNFC{Puhr4rZXUp%&%QaU$8X$v(cx~eVc|6=PuA+j$h8V49=49Xp(MuGV` zb7UZTFpR;a*7g#0Bul3URpwE3rWX^{OaFP8h%ATryo>gkMB)*!ow%6U{8eL@j39CbG~8Om0_tm+W20{8|wag7YnAZIj;1 zx8z}Fqym6FFQs?9hk*1qt!^*j)yJ&3uHT+9U8zpa8TFdSp(D<^DOGPVG7A3E4u-5| z0Zg+lS@L#R7m&k15t*v|4KJUd7h(}MmeyXLd*+JJmqYxj&9!@MlS zUfi_jt2a{JXC)p}h9+>Q9n?-rZdxbq0+;vV*2(53%_4cNCZ~}DWfc7mnL;{GHzy{- z%&^orHrY#Jv!INwJiFCe9Y(Htb7=Y|18?XkvO*VI#BLa3;Y zwW(zs$u9uNRfBas2$s}KA>}G1EQ2pab+SJB&Y;P`n^Ov`9skjdlh>6rGig|Ma_!fH zc{MRkj?(?duh#1&jY^ffx*!{Cu5GbVgbdRF?rA_@QSqbEdj=xy3yUQ)l}lRyrWO@@ z^4rY&$72xuUxPW3l0g=;lYS+BRk>H;n^oZFD~)W;h`n! z{!ugMS8xa`)zsHh?MP^?mA-(ul zURL)wWfZb&2=#H<>Y#E>$1#sH%xX8?r+E9^&@OgG|FjSi7H!O=T}}s}v5jiTg^hAxOx){}#uQRa-+`RSBMHF;Q&+0Btq4KW)(ylk?^tc^X%Trzm{C-fg&u3AFIl$}M7lmj)CY7ep zK{6g#dZD={N7m^*#{!4aQp0-w0W$4e6|l2aK)B0O#jWV$OKDyC=|=l$bzK0zxFmjk z_|1!$mYhw|byU9FDkF-h zKfUXA|6vstl@>U0E|{}r)impVz0rGVx=EeMD4Cf*;#~!Wa=&~7E-cki1<}29!_9Sh zN*)Z0spBhkK6Z`Ws+o?q9KO;w>vOCZ;W;g(AdX>CSG{AD-}bcz6>j&L2B;Z|teTa9 zi`!fQDB>t!v!J!CeApUBGRJ&o?_}zO!Hwu3rgXz8w;?C%yI9rJpI?@?!jsH85Slzt z<88K#Mz&O1Ck>yP(`u1V>Ur;4#k3p)yHNI=^5>FsFbDzl#s+eCawY0eMyG_*uS=m4 zE8N~Hq<`UnB>5sIV~i)=?O0uZ#`_vNeW9Rcf7XOSgRw?j1U+4YH}lh9O+NFX%PDR2 zAbt%&-6hU%&N8Zpmq*Y)W5*OT*X5M6MinzYV>||5J$2nT3B7AABHFOKQaVC~ZZvdG z;H@7`%DzF+iExRW)ULnos}8DQ_}}pN&GRfj*mf`J^zbl11pBLWE82pqZ?wnFI%{65tS@Xswl`yKX2~X+wk=JqVhc8rOauZX*{Q zwUsp^@m$5}It?bR8vTs+2Jv4Dv@3(M72o0UkguLpavHCnA)u?9wu}j8jfHD`cNjTX zQ5N_X?R`wK-Ccm~mv_ovmiKyj=J{;any5K8%^_Dm8FRFprf2^)`wD*PQo@%`F>heX z*vTkwhEIC6T2|-7hN2Pz$Jzl|4(hL!a*#< za=90>Crr67zx2PM=NAlqV%nX1LTeU}lb86O>p9fm?Y9-Bf9G5xxyvVM9dibia4k=% z=Z)JHk9@UgPBcu+^M6urZZ9vw%lKry!_mnh{B0d@SK=bR^GXZ8@+YfUOh=6N{?QtG 
zn+j-q(-h2;>b?}ssU@c%((-9?xsKE^HcaQQ-S((=VAbxVUc*P81-sjJwiL+Ljd%~s z)?pPMR!3@e3ClbvM(l)+6*YYS1qU>y{}!SE%+Z3mubC201^3(d3tTHVLLLtxM0Oql zKXwU z&JcwOL0E_k>hhByxLV|?BxM-D;^{KIshN-5;5{?Qf#mGf4^cvp^BA>uFm0cfvAcmf zm?|~39C=R-PBPQQR9zA4oQfW085Qh*&CU1!g5u2c?|pCsS!DaO&JG4EM!jR(@pP`4<>F{XUtHTw zK06eVufQVhIazke`K!UX-CY^zPVT=``}~Ejfc%ex56g?+_^2f77=5mH1?q_N@V?iy zKd^Lm4Jt7p`q(;>-I`tvD(sfwYF%gljG;S!l@;j+1w|jh+qKgT>dYSNcm}FdIeYYi z>({V6A1aj5RxiqKwjD8{&x_9EJFWW4lptu*_2)~}ld2x!)mAzvpk56%krJA(rh#QHb|!S@TgMM%3a(y26HJF2-ZMGYGOwHM)PM(YuZ?jjd$|0W}X zhQvW2zR~L2X1l#Z9qszuU&r=T!X{Smw-ddzEe-x1om-BXN+J?#4~2_eNe3v^y|4Q=aivdBe&hR# zpUpf8_&M8FQ1ax1!1GD2+RwuTwOi}R9#H1rHd=LtMK@$-S{eD|@a)5fG2Ezm`e=Zm^&y0;`n{Zw5mA(v$t8RRSZbpva2~N@8+5 zxE$wWx*e|6Q9I+!T%2CnV`nt>8LwbuOn(nv@(|pv4CAkGe0gkA$o#e(v}PKye;s2` zb>|LEpO-*yA3vd)aPRlK=O^I)7b;)uyp$)FB(Nq6U?1yBop-x}np{a(RKwgZ!h{Ur zTCmqDPYE0LuWm{frnJ$z@mz7?XL%ftLDZnFWZ44@3c(m)3jE0Z2rJ#RH)6W z2B#73+BXLZa+qI6^YYIYgk`|^A4 zGK`+d2||n8`$%1*p~06f`!A?Dk?bmI3RRIiD6Sd)SzPDNHq~FRP0LJT>w-!|1rJfl zO!lF*U-gRAjdK>K{@xuwC9nFfQjI&NH~SA-T(?BL?s=?(m}|s%pBC#mJgMQtyGXGC z>s$Q!9$2AE#~seV*-^-{!&8>vO9}taP2l=6@z;QQx^k$%R8(YEf=nUo$w&vQ{yhWB ztXn~A4y?^CjpNv=ea(Zx)7(?e$F=65af}y0#e^XoBCFD)9!SPEm$E{NRc+*~Ghq0| z{K`YIzKY59xmljUqq#Y#h0f1Z^9U@9nc3q>-E$yibb8;S zow>DRMs)8L0cMGpSB+A7{S>*S7E+5Y^$&c-JmL~?FZqHvC*%fcwU!EH`d;^IBpV9Fs(;^s_Dar*T0$lgK)quBPj7AT#ZS^7AKYww)k%>ZarBS2NE^| zbvX=YG;HjxOSVhCzt&v9{&~d`4Q;Qtdqo2sTaQf@pi$XOXN@K8T0q{Y4HS1JM{>O5)6}P~0`fr5z``24 zi`ESROxjV^iyQrh4nOXVibXp`)T@fc-Mm+z73ogDtH&P!( z=UHi%8RTDUV&IX1sOi3vuUEQ7tHu+poAB}$JR{z8+!#`R#veEOUj~sEomC_qGVH&( z0Nzol9iC<0vswPdFAUCY?0EPAq}gs6!O*r2c0$(7+*h$4f67!BH{=MYH^bFJSXla!TXILy@h-DG#xqV~)DGaQ3e;8b~1KOrs z<&Pf|8S{!SgO?xr86A4 z2McGwDBqv{y5L>)!7iXz>kiEW9tq&cPq{(Dn84#_=aD++%8UQ!rVkq&i{0%R-0Qr= zi_!L{Zaq-1?`b5DGs8;snZw)MoQYkBBLJRfM>p#C@xQ&OP?9u5f7G;p@s51sUj1Q4 zkhm%#&-KPY3+N&?u9BQQn~fb(tXS~%3h<;D~=0y->*mTyro z(`;a|b|OYF&LWChJoZ{EN9BC8NrO*}MzvhV!CW{JV{7`)+RH}#G$iFIkd{2Ww&iHu z*L9kBEG&}Ky4ptR`5LY!MS5s11c3|Ecl34_ZgzGDQ6kM*(2uuX0YeY}NlIC1s6wNn 
z-IC`z`jFrS%@R$>DJtA!Bh3_~Eqbclg`R4ENlFJogH723d=upzIl2SKj^D#mo~DF; zH@19HeQ6*S5y0;%Y*)HGknFrwKC`k|mzL^*^JQSc3X$$#cRFUvB^u{fsjIs!Tln@R z;6nhe>l0{J#yy%yR1=S0;*U^FT2x{7ecizKOPKl-O^+}uqSUH?HoRYda4!Drbn>#9#8Pno9X9*>z!rN| z>u5aF>#N8*FV*xLIoN=0^lw!F8#Lq7FIiops{MylLMrPaM(`m-{gr71UjUB)*_mkD zuHL$4E4~Nm+UdRZDjXY@AK8Fs{&wIVM;SD3Vn!ke`>2ZFAXC-1tMS55)H$P6o70cd z9=N{O^o*(}7@JfsGw~QPe6t<$oj^a}mf#Eim=AL+fMbHvG2UyI-8^UUscG#y3+1zC zbi?gck@dvg3TI6F-+O38WRa>lOsn0KnzSSc6W^XsJ`qk-bI zqQpOgQe!Y4Wkh!K*uT!Yf|4+@+Dd55H3K8_P4@>Ys(+G@ZocU+jc?xMLwL{e4rS|q zv{@Oyb)ObFjjJm%D+4Pn%o8M?jmC##`fla7T5&~KOrFe5`fyJC+a!z>X3|Vymh`D4 z-v3>^hr3mL?2iwXykF+H8=EF_2gtUcn!I8CvCdw%r{+}e^@C4@|52%I6xkm#A^7d^ zhb$1?8|uGr7Xl^-CfmYH(cPh_m6_ z84wC9nzW?>Y3z@WbBm-2Q0=iFg)vQpH4K?v>vkR z-6322nApOAH?H4R+K}6kCWN9Hd3{qkwX;h{bl>)8;oVnmx{7rJL^zcaSGEuFBIq_% zOUuygw?a%aE~tnSfl0AfEVQNb)rc8|$9AyaN5Mx|)&hN!lq($X^ChFs-LRzX=@kfu zKDA+=m|XtV84LhyeSP9UpwelfP;m#o^`h?<(So9D|DpKxG47l(#psohY1IP5Rx z)7Lf&3-M==7lMLTC(Ej2zIYgzVDr?T?)TmP;+VPXOp3B!ovBqa--ZOn9=P5Rpviaj zX=Z21K8c4}k`Q1!yv&s&J()JLy&5xrsPl*#`$K$6+`zp5L86GDZuNKko5=&1q4o2P z|GwhYd)0dRFyaN9-v|YNazCCc3gn>P&&`7x_rkg}R4&*je{b^YbY}f<-XRnJv(8ht ze$pLB23#pwHv?mkkQ0V&IJjhhej$xCBy0-Qul16?aKd zjC`M138~(mTn_V+i-S~e|C@;bzR z2Iwr+2Z=yF`9ggylGdM*3rx~~N`TZwg6cm%SZSy8pWaf8ZatPKnC#%YPn`{Mq?!-7 z7t&$hQ14k1(Y~Js`P&nr7neJpPvXyRAIuko@#UZSIcSSX$+d~xr>~6tao!Z|K6LcS zE6Q)x&6gC}e{;VE5oRRhcrO-31M7dnL_7Hd^GNv?1Qcmb&gqMdxlE7Gj;;&qlRY@5 z*9BiS@4nOdD;+kr18AP4@4^`9qvwM$$&za7d31<6)cA86EI9ye?R_=)*M=Fs zVIg#wO)OsL?;jfnCM36nUP$$vN|ss6*~OROeCcR(o*TVeeq43eJ7b#)sBbDtei%VB zsAsO}DA>HX*XJwxwXfCPwiT~8Zjb>WA@t<1`Q_=x63(5~FELA-vyPwa;s;W^Fk#Zl zu(_;17ItCda0jUYDYhJAa~9afZHG1hq3$l3QnIl_7`>YxoGNxg604oO^#kMwD17^}DOMZwvj!5XUCLu5{C)z06XfZ8EO!kM zMSlu&diQZCg}5=8l=8lNWjfi^^R-O7W)A~wxE3+KzKndC1LXZlExv}t?hH8{x^84Y zr)F|s?iW!$Xd$S_(^>A6))im1%DMETgrjK&_MU9EaNv27dLDURkT zoPAirr?Cn63xEVc(0)+Uz$KXr_lgUzG25$f>pbd>K{p1#?jl0W>ABL9LzSUP1Y(k+ 
zw?f(pH;uO=^T%EG;>RKXtMwI2kF}gEkeaRKB_ZcrMhhKdgPDDy;Wp_A!KURbP|*MCJxP}!&iokrw<(yi_SaS(h_SoDxe>&ERxicu?j>U&qv)#3;UIc z6qz=yjN255Qf3bwxG=B!A8oe>rQH6_vF;W-xrm6S zi2}9*%wC7uagS*|DRCMH9dM<_O6|R-cvi2Ua??J1sRJV2uX2-4tIF{8Pxt@aGWv8F zyv#eKe2CG)2ZCbT;Ja zZ6o|H9Pc&{^8o>O4sP+fAM=B$R2x|pl17-(|Bym}6ba0^EK7&MyMCbF>(S+wSH~E$ z7Xx2WzSUSPD*<)N!|&7NiUJg~P%m+5)x2S1m;Cwc!o7CjMT_RPN79YEoP{XrMq$ft zW<|!}!Bazp%Gu1sCW!?cz;fz1u|cnK__uZ3ZhwbyOmxkcM)2kASnsN<1&0sF*pU$g!0?EnPFK;KKuaE z8iEq9ca5vo3X98GH6}M-$;TQEJ~{xVW%8SWG>uM_{EEK4$0@Pe(5n1sl5%fjdLWJ! zO{w0a+)k)~-VsaU;Hy&F+^Xd9k%dSV4XmJ$hHuy8WTH8L!@TF(q(II+E12Vu4~&)T zSg7hsc7CCxU3UAAuH!xQd1XL?ad8x}u^0i^Gm%$Bg%4k+_{&)FOngs!e3({X!spuySXd3E#`g|oZ77z?eJj}Ur`=tpql zt`DX+1_9dUoHJNR8jde>y{bPZrS~CnKRpZ#p$+=YiQKa(ph;d;vjp7<$9iTruGUE~ zEYt=HbsghZ<|SOdsGM9S_!%3@pG~lEP1%^%+GeENX!p3jg&6s)$x+AR?gqRXJ6Y%b zonoPwcJUKmfp@7+b|s?Js;XX?F~q9kS(|!Z#$5AHjg2iit_2-=ynTGm>%cR!+eTkD zHJ+yZ(Cvz7MwMesGDFj;YxP8QhJwqs;Y%2x1CTxd9ckzJ*RH-16a^0MKyj93S zO8g=}P!!3+N_m-}V|nD6BP{o;VPtBICe~AMh;!bga>oegCdm8UKyE?%>Jn z)njcP%y2CN6*N%wuu_Ez;RCH9$yHT`k$=VY&eWhw62uutKblH&OyHmrA(LGmYwY+D zLq)8%f34E;_NFx`b3h_vEXo}1jWWw1*wCqpTrcJlf@Vq?y018_{8uxN?EFs00J)49nJ+EoiC47e!T zO;Nh3pMG;$r&6gZla9p{3<~v4*aFM8r?xX-+Pc&q-J7n;dyrISNX{G8ZD2! zD%hse>mq-<1h7p#nk~scllh|aQO>(jdB#~zUHqM*Sj1N#ZD^pXe`)L&zGor@u+C^d zYZZp&E90JV^}Etp54kpstd5!z{t_(J(|(OW(S*1Wrt6QqDg}e40<;ZKwvT6*ir=_3 z_F~2I*tUz+WK7!*PBW?aHk3NRE$-iy+Fm76$ zKzfM1zI9Gyic`d_*lS1Wbg4yU>vA%(41VzvH5LXDv=1b8tw&zVLzqqk`hy*e@`8Sk zzNXLq(ZRQ~>Opm0{>!=TMZdS(sW9Hc zPvt~^Vzq@hBTivC;van%*Y1}&VphD~BYfC}G)MGz zuV?cqm`FzjMNj2CQZ!ii^}c{7wV#}AX663ci53K#yQev{k2Fx>`R=%W3Z~IFC=J!? 
z5PDMt$C)j*xtBk(W9|+UT9JdqWMJmYYuu|P2X@UdCkl>qZ2xhi>eq9HYOx1^Uyamk zI5cJwHU=CuCSHD@uiMnxwTBcn*6yp*448I8UhPu`zL|Yy8N45mN46MNU-9^Luf z)aTzQWb3lFRFv*D8FRr`{0ApI_UFV_=459B=4;_&(L*QHx6`T?hqveJ5=*=5_~^=h zSj9zK(tG|JK`32_CF{m8VxS}`yIA9)t$aQcZr4`dL^wV6KUDF-lR4x%kAV9}=O-3r z06q72*zCR*_X-9R?75Di@m6ulLY&XQ|1%#k*p(mLJy-+pDb@>ta@pPXpaDeiXfJ zS`_V4E3>-Jm)Z54CxsrdnmKp%T6J@Cp2(}UDI>~3uI2o2sMq{TfbaL%8|eXZo-;al z`cU+E5y~9ML2PU)Yf{eg25e$LY$IQ?Vjys-PdS2=#)SEUqF8qeSN<7NJ6AWIo?-OLE`c6!P`iYR7?%lbLvS5TY^)`C$IgxjG*^9-;nvRfLeFxvRw5{W5QCXCMb&I9dZ^Lz zy>D)4#O)Z(%@^EA@eobL1uJQI`>I*Xyf8el+~$hK8ZkVn{!FXGKS?ektj_*14?bTX zYVf0Ql~C`u=fYWN){k0HxGYA=84;N4BfJO|k_bgv>$-ut-;I+09?wc)E~D2gZ| zz>aI}iewm#!r_2LbBB>%zI+#oJ36}6K{E)2?Vpw(UW~O_8;2t(7Zku#GBPG}DdG~V zHj_Ith5P9obk9t(pFqEyX>haTbn*f1b~{h9=UD4g<4c$;_AUgIeeG(y5+ z$xgt)!uTbdC0~)Jo(l>$tT;m9u7oA(JA|*37NM|-kOCI7JI-CfpdYhx2BA4WLU3DI z+)buAKI>%*zT>8%S3OO~v{KOG=lAmASjYut1K5vv6e&5vG#kw-qh)B|jw-L8%(HSt zGuNEzcckmq$H$)ZpJQ=pz?x@#RzgemHt))@WFs_a>+A5II+D1JAcBps?E*m+ zrWozW6-U1=omE!H^EiMGw-b{*e4QQsJAF@_Vfwk6bf$fAc*nLi#33}~`*Sc>M#cMU z;`{-F-S~t7rXBAHzm`F{N9ST> zU{La=VT-y;r>^iLNJ$TJ9<(ipy5ptZ;6`bizS|eRb97jnkam`SqsHf{RYWZb@-(Ou zRt)G_lWW6B>qlfeB}D`evxU6a8r`M<7iE{>Tw)ao3mRJO77rPQ<|O`$^o6Gy|e-m58B=P!|8o zIZ@Y>(pV`J$?7?82_mJFmfVpZT59^XojL80T!zxE+T`&Qssmwe zuMan}^h95jQ&6HZIifMQfZGA(s;an^{;6B-oX<$(9jTgF=ETQ2dx}MZGC_l$MKr{1 z@l|oY8G+B^%_s~|qNc|ukDA)de_kX!p950Gu*_tiGme=u`PFMbt~8;X5)9^6&aQM4Tw7^ zmirRW4igc=07O23{v706&uNHOiFm5aMOQG9cSMzHP%-FVtC@6HN9uW8EnqQto_y7S zB{DoZG)ut6$8EDFPOlpsX>ug!raG18bJya@$5>|)TWEK$Sm!e4=}*0|=4)v9HpqS( zC@U{#F&H%=cd>{T`Ey2T6yH10Y+SdSPzsSTW8t!LAJIoJNp=?LkGdbf>zd2*dZzp} z1d7Z1HRHsl-5i_io_QUQi|AjHaf|zpS?fL@1Hcd=yjOQ0Q#@Na2(dnR&~f##^d#q; zYa^}+B^ecAUkQkm8M_02=cqU1!UoEmAn;z5A5D3sTUKW{O3WGuOhnKBeot|w7e_() z%dv&vFuNk+=ZkL%mI`O!(#WCTcjx43yWkmgF~;i2AL>UIy}P0F+&N3UkI(CrRO#OB zGSI3uBlt_1vA$8A4I#wXi^OniO1c-HQhwE2+voRrcR9p3{=&(S2HEr@x2NPuuU#qA zA8Yf(^RbUdgk*f!6{GOnn%bQFeFe2pL88b6vd9yNL@DnXSo4n&=ObTZ{H!8BclW%3 
zX-M8YQjM}6J;P$QTtR4Dx>IeLTQu^m@x`kil1YykL#co*$M*J+8@!|aSVkE!*jl6b zx=?v5*RGj2T0WOuz3!^CyH|>|_T(@7lYX;tkzp1H4Y41yx}A^Lqs7dms*sRZD0SO+ z-|WvL$%d}%G*xL(6BnC1X2<%T8wQfgOsPT7K1^(e_K2qe<~+0zP5P5cM<^H6F_GT3 z*=TpZ(*-;>c4PekuNmyhXUDM6(oPN&bP8L%UUFL=3pDQ_jE*Bi^-mOWh>aO{oj)8X zb;jnLWmK=tUUaNndZls?Vp)9=X2cV}n>3TGd2viwu-t}IwsRUa{zv(lXv;I6*Sx? zhG1KES;Hn1O~V7EJdyDmeD>8SkLQr|hQx zpCreu>9-Tvy6sHkg}iY2Nyho*U0N5<>yp$StF24?W`B&+D?RM&iNwgB9;?}NYmCO! zlg4kU+cPnAC(f2x1%Nubgs5RmcoFrYyb%Og*MdbDeCyNU9r;n|A)1>P(0$QaJuSpy zwf6RIEl5UIx~H%+KL2~1LG4uz*6S`{Q|pxs_`XfZhb-@%nxV#K0tT>Xwu>F!k+dJJvky##wdt*k2NqW4f0`%o>#x4>n+|v*8gp z`y}MNk#S-WBg%le7=zRpfBDJAdHW**ieY~5l8SpXliHW zoXYx~@3?xvtkzzxw+GBgtn6-S(rQZGfjfNhBMHw0QiGiN-~IB%jtLkWY?97sRH`j3 z!(-FeyO>C*HS82IybCE0-$qzaJEj)zorh+sZKZ{zuO@wux)X4qj{NEQZsU^_jZfs+ zf=jX$}sLlofA6TsXnviA_%D=rVC?0|L5EhspNO=M(X*VS4LK! zwyoT1K6C6I405xuUiPzAXR|?{iZ%$_+XOom^Z`&!Cm-yURddT#w-ZF;tR`ub65Pc) zrKT7FakaT~j*`0dY>aMKE_D5H$@Mu~VifVF*wIR%$oaf66taFxc0S8i_C{L(ya~(U zSn*(J@TK>mA>=39e-YajYD8m2i+h zw{6w}6?NbG-qrSRbmtkWoG-tEmh%w5oKTAVXv6PhiGn$8Av;yK1$GGEOG<9C^yu|a zTsrhIZdxW$I3#E-t*leSoXynDcl9-~a zgMZ$-n1pv3frLyJHUE4j|E~ApzaAIizh3p*HqmzYnx5oenq0}7Py$n_R64CPf&E}t z=9kv0m)cLCeKQnN=N$vvwdTQ^VE3Jc&K}>}csdg`UB^Ti8)`8rJL4QeD|SDcdv*i=ym`HBE`F`YsE@;3p2Hfm?u#1^XX0f-d8 z{m~f7@0h-iI3Nc30mVKvHA>;L>@oX74(X2f2FZB&_e}G$dwMpgj zxgX^gU=ar_r}9zbaq|0%E55CziYrf}6BX_PH%0mXHIP}_(L;n{sX&txkC#@y1}db^ zH%W{MA)6(k*5nOL01>lC3nCb3ZZUTISD@pZ`u)soTW5 zTGS)n`^ZB`eq>E)p{ng=Hz-JG{AiH!Es7QHO7KD%zEWY%zPPfJO5S}`nF4v~W(}Kv zMR1`scY!;J3-P;l8FcH-E&fMH&uDwOomuu@Pv^PF@Ul*8l{2q?LoXEwo`Z=G!YC6a z_C4K;qCqvG=Ze^qTO@?xM$sNY){b?sN2}>E z7(z+<`#O=^I`vt&c5`{?A$oo}rK6`onM6^KKN2m+*RwxpPbbG74FivJ)_@*WwHD5%=iJB9 z`>ua*V$tR8m5V7tqLfT8BKRMW5LNQ-4o_)S!o7k?7;_6zvaiRXe6PVkK+arZ{>+cB z%U;t6?Q5o9)xP+&fu!sECiC$~aXh-e1i6h`>$T5u)#dkn;+}M|$XNHLSr^o% z$MqeNIw?x~X}x-!RemW1&5|W>6VNLTA`z512vf9< z87{H+KeWiP;nOM zYB@EzeW}c!PQ2X{eEX-|i&n2YP-||!Fr2uTI_gKd4Q((Xbgi#Wi;D0UeE%n8>2>M) z&cufKz2JZtmVkn`D_{L9j$P<0mgH@0vbEx$bBv#z)TkFrdcrQC#DFAAWg$u*F+0%I 
z=NY~53G-UW((QxD>m$ze8GOf8__L>=zRcaSCR}KNgZ%e)$D|=)gfI8<24e4aUxXPs zVxiE2xA!#Lf`~48QxNpQizb|yrr$TjxYAw1 zOtCPn;Q5uNf0NOJC{XxA%EFBEz#0$|9{;{b*%o1HyV+}{8^SVqcFti=7(>$gZZ&8_ z++5baMz%bfhflwq>9Xm4Qy#I!`>|yV=(q2w*NTRm>hv~J_u%LxOgW`^(7of0N4+s zq_^(i54lDz$Ti4Cu@wDHG*(i{1nlzP9lMKxZ(AEs`0VjPiuX728WD1wLwhYuqyHki z__Li0yVSbIqWkq5lAaX&ZipNfwAXVxrOCF<{Ox&Fo4RMZI-(@pvu*8+#i8ZxNG{ezuV(Yy5O+sLaYlvM$`+#A{3&CPPwRLr^1|qo|tk;vgnDidlRgk5(d(u!iPw*my zDP(@CD7rXlkr;*J>%?C>gOWVkeMSfLoowqn4Un&1gnP z(c_xKA4Dr_v~m=ab5@eA*?pl#mb@-1Gs0bLAF;0y*7rZRkPC-~bA|JMI5w>Ybsbn! zYe_F;n#QZD7f65DmeNtvg+E0$YuM+85p!_P&%aLpLhuxipAYJ9@K5o-i9U>*hdsMr zGhhT!c4hoMY{h>d8VekFumU_zM41Y>w&K|jcH>&zx3m1_0GXfE)c~IVlH$}!%#NNt z;6`;12{eCZo3Rf95)Zeo_0MMP7`Aesy#tS}$PmeLd5UDmGtIPV*~5` zAMm8+fYx_6^5iC?#?gxiPY(D{lK*(ZdiY#R1N*=qTbpRpXLUALvA<+GL}$OL@*HN9 z`F(%lvETWsUVWaM5x>`ek-rBLPfJ^k*k!WlaX?J#h0f5?U*fcbg`fZ3gf>rqKz7SL zenAU;7N;9|As;rmsH3r>8vEy*Hw5%IWTGS`-2IUd1$5{?w93Ge5dFK~S})1v1{JQ4 z?fW$kwji;Ypzk;?)7Rroi&Sso*5G48K)k0!kVQ)k6y2(dOQP-;mH~B28a6t%K^_>9 znO~;Gk`;vr0#zA|u6@U((8uTR+)4$}A44JT^@O7}FA+GVmZOla%3g{yxnQ({N&9M? 
zEtG63xXn|Uv2*zt26tj-+k$F=U7|Jk3xf41>3c2`nGOG>r+0QV74*zjo(aK8W4>mG?=E*~8o zv=-`7xJf;>`8(s**C%RGoP9TW4m;OZ?(M$0?(Uv1!u4q}K!NXZggBYMU^2e;@Baf^ zK%~EgGKDI`(}!kyWf-3TWUg0{6nYJPZM)BWworiRcwpbp!uUs@KIc|e-?s!itaKbt z9hjCJs!N5q|bHE5T;`2;*&$k79v{!AX0ti0d@9 zJgHrIOPJ#I{ENFtALU0WHAZ~a)U_jbCt)t*1#7)68y})+AP`~+iarD=2I`M=4e*{Exj0>;C<}I0mY3v-~U2Ep|?)W$e>M5RuGXTRp zyUqRN-RAX34LA>W@@kXqr^y7LaCI6tT`Zi<(mw|qxmN{;Z1Ffw0lE;2 zMy`Hh7X)<0hxH_55|h5^0l($hNsGlZHys;0hTQ*9y8QzI`b`?(0?CMiDK z4+m?%6>L2nBu2qD%%?hbO@OkpgEoRqBw2>b-9)mooxCB|D+uuGni;rtebRTPCsgL~yr~Zm%ig&)sFlC7Gz$a@DPL}V5PR*5X6?83dB**D| zSwXaq5-P8G%a|hQz0oiPJwjq8+OtJF7fnOpGT@3f5sUY%%a0qzh{hhnbP5T~?i z!6}#g{h)K?-IZX&$--7KJcFlSJm^}zf>#X$F5b500?{xkOXEvNOJZ$FKETB4b5lP| zs8MdO_BKb;wv}7q`>B)v3)-6>_Vu;c6z6SyDlEo-x%TO7b%<)QZiz?zS@VUCZ0k+C zOBM;tW0SfG>gVAwI_S;;eIW@j2QIId%Cd0v>XhqLKPGt9PNV7(-SL2O!=Zf(@J3J4 zk|3Z>@NYy3-LqvoHi>+-r zytScq;G3%W5?($w()89b&exBd+ywaQ|7wkc)kkLtFb)E4?s-5zCx5!r%g?y#Ni*ph zzJh?>t?qMbZ1`wM53FxXSiAT@gj_qi7)(2>$++D204`V-ZORCLdxZ}utNejET7PlRFCXGZKvoQUwMQCb4Wd#+QN$`L z9~!K8_hT#5QhGCo?#8qc2Mk_jKst+8&5zc@NThvoXnF`>hCW9SGg0ej*zRt12_LHP zcJP2ua6AljoHsjLO<(T2ys)GA-k3p70S+aP(7VGSgxx*J^rn`pAb-50krTtPJOQW?01UtIv5Tb!@5Yh>Tn-_}g#RO~O!&^^@VZ0c#`JOG!J4>S z#Q}?oG=qJ*A`FYFhX}UD;O>wqHV4V^YU^=Iv+V<&pWOmBxAWL)KGccZ-p-8<>ZF_N zPn*oKY4YG@#q7WBR_;EfrH9S~dBPFf!w+Ki0e5^pmZC2i$BM0SKc5xhD|zYwpkolo zgTP@36c>o8_DWqmMPy^DFMvpH(KOg)R|8Ig;_kQ}%crq3YnL+_r1=dkWB3Fjk)5uFv~NRPrYIzuPH5ldyVBxM`!8 zn+aa|lU|Zp$k!}>eauiV&(_Xn3yTa~zsA~v1J;j^li^pKqfb=pD-y%)Y+$%V^Mm&S zJ@&Uw-Q;Bjv721{zu_0uawHO?_y;J6Zc+e-pV1#qf ziI8BrYl_X^w}@lquVMXYTTRJV|Jzaqt($6F>ms2}KYD<-YH;%&+5 zc$RkJt;2t<0<0??n%M57T9tZkGgSmZ!0mJ+YkUW_yVJE(_#U^1XDBW8&ZU}ZrnVi}i>>z&RMU zRaWNP>XU$mWqMX|z4d4cZblvhpD<>v7n{iz4r$?n&o=PN|3%-a8enkcHy%Crj`7}L zpKVWmw21_7waDBeGIcheK8CfX{LZqdmG$s+s?>A)`x51J3b6T=u}gXMK?_kKdVp$` z89$-M1P}HY)>ZuD>0(lM08l6_<5hGV|1Nhdke zNMn!9*$+EC2O$3`#7I{1wA+c8^ zB%$@;0B10@;&2N;i(7xXo)coAoNQfSoH;N-28QFbvx$bL(*$mW)x__!99_rv7@-(C z*b&JBYG^aa(Kn8{8M)w9gz(B=r-o_@;H|H?D!FMfebNZxt$w6}vvz>5t6sY4oM5Y> 
ztO%uBBppH3m%|K~(O;?#Shk<5PeZv*S2qq@cqARBMFfiq4Rov*D)eFEANIcF)(%`( zhCwk_8nrToU$_qG!TkmS+wxlLF!@_S&c4T_%@}Lebl6)3cYFq5tkXp|i{KwOYiyra zO*WZsJ7`!ckXdZRcIMf-CftMyxn0Azo6?6K{38lo1vt89Br*~+48>MgUr@R3`j&lz z8}<=l%C7910jWM#4@=_rhmQKGA{!k^Oht;m)M@gK#f$Q;bDzsyHABz4ZrGdx)M_%p z7&NG#G6)t|{!an+Q%CgVTZm^27za+0pX za0gxcX=m@HG{=Qj0dX#(Rrs<|h!%Cfbmo$OdkEGK7Oz_e?joJ`rS#g|4kujYIu*t0?aV4YjnkSCXQZRCGU4tE#|&~+F6S{c_r*oiE=GW!@>L`O`jpE=zE+!Q7zY}j6} zd~vwJUvN!C*volQ%}X}Ku_J4t6$A|FHU@0G53KNG#YkOBPSux`>;1lK#)L1WNkxG| z-&n|XvzpnJHCtKssOOhB`a)x%)KV&(=05x(_?X+#|Gim9Yv<=6f4S^x$`D?UW$o`Y z2{&B&Y3Z&zCAVh|j}0Hs@$>mCKzU2I%2@S{VZ5Wy^Do1r5z4W;Hen1Cal@G7Bh#yDH2jXyp6tg(Gro76VGF@K(yp45S`nJ7gTx?L;JL;)&U7kh8)+JPVs?Hl=Ak6&L;qgP{cC?xD&yOVvBH}ONVZc04G{( zaWX~Uc8oZnG_*^3B~lyvpqF;Cy;>FC?oqdF-!cBEjWIM^HJ~XO;Kos+gOGOBIn+N~ z>6=`dKb-ZB;vxzJC%*MO+t^2$R)ocnfg?tP>>bY(?eP1728NRF{*X1SIip)G38rV>03*@^*pY}FCwh7!EI;FI~m-^yoEHMn?x^i zHgPm>6GJVc^Z4Kjxjhh}529-mU>K!Bmf3ZivA;o-sIk2UwR6`^wui6hd2KJ-YKQP% z%dE-~rk4pBqU-g3!DWeWPaN8_2Z-Ko$WU$jv?smHVT@3{rc^B~)3;{_Zi-XbPUBHK8U<({(M7Zw<$5A&qnYW@)|BJH=>FQRVGPd_^}eWlZ=phn@pt3tW0AG$%-CRahQ0}} zBEL7^7O3HgvPKN6pG|0vsZE7d6)>z^D>?V&c&-Eb5OAa7eW!Ep^bR%`NYRq1R9UY+ zO8YEDFoyM$?ge?9LY9jHRUR)mw3%kMbszORDfqI3(U8-XP*J^phJ}TV6{glYh+T0^ zou9*z1_ycdZFm2+w^5}qwl2lV=Gv|r+hRwokJ&ix3D<*^)kYlH#9rbr1J;FA%4Q5m z?5+bCUJ0nt=jo>&$}zd!FcLN-&`t!ycaXWOD1*+yHUG)PHg5yAq36ErbVcbnpllDH z3rWX~p0(5-B?j_sChBkz&xy9GdGB|Yig>@c<$=y1NcFW~2daOS+(nTd7`proB19O# zicrOgE-nTu*Ux}!Bqd-oam73Jr~T40L34a_mFI#$n4Nsb3bkzF7UuJjHU{5`6&>5i z)3{ySRAS*ardeG2pRc|fdiGn&Pn7KwgC(5kS&5K=K!(8{3liV;I=>}prH#U$!!u~` zZzEY>LTPVcyRB(=0cP`TIz;y|vr_CfjrmGbk2>{Il-AOABj`Te5eeJy%rnQ0zb}SY zVR#L!=tFH$K(NbigaH(l@E+l{KbrKXr=DIL>aV&uR*wl@`6(j2-L2>A*3Z!r{UCUp zoWxB$9b4-GZx!%W@`_}ehQ4-x9jFI~3V?6?faF+#q3Z9glWcqg>J#>kl5iL+AkNTy z-U5^StK?{XvQ30(*KVn?gv{J@h@l2Dc;)|aS#is=D^C8pSSH>xB|uO;I)<4(ub3Zq zhV-^@?G{TPYSK|-f!ab+yQ*uBqSjH#ZX{VCY7_~>v~DeOAcO|p4A#=QoJALaVm z+Ki$6tA2a9DP7s*-zyB!+zcA(X8!9N_4M7p0xXj6JS&E2;Hbu1-(yIKF)5bau_>FV zvn@T=)vEvK-kLiUOGI(KauQI2gK 
zh*Yye}v^`fA#)3#EeZ_3a(ws2jkrLAqkM(B)WKyW1p)-}n~l(O}Izw(6p? zXg}@|JCU0-=Y%kuj*t5nZ!1@oj|u!*mR`57>hu&=d)3IXc$@KB6i?EOtn$4m_|ghu zF^GP-s-3Y5Gq+l+7Rdy&+!S#^V+n(RqJ)w>rb;wEF?IB%l-`%&3A8xpX6w$j4)?({x@9=rQ81BJi*+A12JA z?5_T)o4<#yuaD-HbUS^y@TEy^W?gtSqglw`xosT$-RinRZ{nR4c-mdbn=rvE-gA(JCh9F>Hv7+FpWxnwLr)PqDOtyUU%)AYGYq)cKyR#0^3By3-++(RsY$S=&Yy1oEE}HLo9%Omf34xsa~q@&2Zf{ zWP(@ylcW>HM~3OrFuE^V+bD>oDn?tw5(nA!^ke5r{lXT~bz4^V^R}%PZ?m1((g$RQabbVvqbj@HuV7vyiH_LhA)iz$u z^p}`%;H0Zj=8o}I_=T!~O|r5cto1@8t^d;d&A)-oxPdAU#_+bPOyEu~?vPoTRbTG# zwu}?0U3!Se1TVre2H`=R1%}eOt2W1@M+C2>_X37E0i;ujp8ch22G;zHtpj|*@0)^3 zG*19ZjNklNam1GBf(u{vPK>9~V$=y9F8D@Rr&!Q3%|I8(xZssB+PZJw6=QcibsX*3 zZ@Ues>kdWpDvIKSHHY|nVnfgv`-W&@Ka+~DMOq@kKAp= zXiPg@`1Lr4@SODi;4pZ(WwVQp_6Xy1JfQ427-9_9KT+qekp5-^%aT}kPDPwgirFo| zy-u3j+^w24>MFZUz8L}53XJ=2GwYVI4dnHG0YF2?ZgqxUVCrG4WlX341l=ZH1lk2S zX*$@bxnaY#@)5P|)M0c8A~it4hUxW_hrY3p+hyAT;vVaksI~M`hvr(Tcf-Wo65(|V z8yE`NaVxTQ<0K3Q>+UX?s%US7ANqP~qk%M7v23Uz54kA(&e$b868Al-$;<0Ylg;ia zro^O+KKj0(j<6*{Gytg!)<2P**=YindDNs;|5+YwE%q?W!6I9O$c?Z7w~o2{(9(v+|<|mfPJr zn<==qgFIi?Z{>9>&w@B6$nuk`zY+Ppo4+Fg)wS}zF*HvTT1QK$;9r|FX8N{^FF zT=SVfXV-12xauDU-+geC+2;1{s#|8Bh471~PS(6F#a9rpDSJp_(W7siW~Hzjb=`9M zXUI#nzUjIk6D-=+5Z3+CMzGRbV4P;a$UvNpse4~=_E?y|AfSz-nVmz7b83ko!XvZDlhcSMRKe*%6{`YkfkTEf})7w0zM8k?=~t zx2}6*S;-6s6PXHr~`|@TlHnd@_7oO!B=dx8qv%v;v*ZOP~ z(}Jr?KerP3M7JrmhS17C&NXKqt?J8edlO3Oimp@7J&- zRM0Ov7z`g+W-2{0eARW6#t;(*c6{f6c3o7c)Mogz<<@xJAZq*8fVc-sPsG%e zdek)zwY?pl?#C4Pr1#T*f$*nm_*xtN2*@TiLVtK1zWg7q92g$?EhV zV?I@hz98Q#u)bv5(DQaWz>cwu@n}wwd#Vif}dZaenzY;MxNArw1i$ zfZ=;lkv0ArWYI}s*KYkaPW$YZVkU`X3hz2U*!tF}5yFrDVNJ-DZ>vk6(?bvCE%-de$*Z*@u7$*(kD^=+lV??Xps&fuMl-fChKfSkQezd_XAY-}|(@8eG|1}8Y z4yOBl^IU$)t`f!+L-e=O0;V@TaLutP9v`;zvQXHqwoFnw)M0suAXtDcz?DsDE`z** zyPsQ-83;G#^%dg%Z&>C378$LG zX~@$;XJWRzxTsye6zUV*vR_1-eI_r zl@`#?1wsuWs#AU)!9SNM$6)Q)JR#Nfubtby{5UbbnrxB0%+se0`aJ#CPdWP68><7Y zc+l;oCE|5?!f-NfHKk+QM&}wa2I7V$$gadhsNy~F*;x zo?c9Boi?B<+ksuh!-tjhWJyz*sKNp7gNQ6{tr2MSXGL}3T(=vAWq_Yc2wYJvcx6tU zv%Qn)46Af(hvX(hxyId~b*4 
zTGn%(PLUng&kdysiF4*^z}O(St6AG-3&#wxEp#n3P>^fWgXVz)EOFUotxW~Z#xOJ9((XxUsQ`zoZ83x&V`oczSqd~6Iu5opBDb2cY$>&;v;-&BNYvCk|B4_H3 zsvQIry|=84abE(~!9Q6LySmx7Q%j+Qe#+Iq)&KNl>1;mKn||{Fd1ckWp{MG2vr0$%+J*A{>A<=Y^g*lL`k1}!%FxYaxtbS9 zaYZwlF5D{)w~DYXxs{QrzgDF6M}Vs!E7lKH&DJwYb8~4^i0C`BiY&Epr?2N@Mfx1B z4z+|f)-b#>iIW*3w4n;#deLjqL7sDHRFw^DFD?AkY27}q@?`m!8=mrS-51)Em(W+V z<5zkcTLu%qtfn-y5wGO7-m7Sq-TIo>U-63l+457FC1loNwG;qXgm}#78m%_zG>5;w zuInSVV_6tXeX^x9YkUa?Fg<;R zaza&Av6kw#!I0|^*Rn144uLx#2iGXV^Pb0d6LtgvSxtTY^-X;A;QPI9Z#4=MB+>I( z%kr$!`le4kxt<8cl|^}f@nu7~9C6t~r!ki8O4q*7ZJe>w+xl04d9_ULqC!ae7`Ef9 zbsN{no^Fy&u!T=H@M?#blDDy){nknAa>^E%QWQt*u9o|%ay9?R3R5ak{ZcZjd2ScA zINib=w<79z^46eL9YLZvH+e)Sfexb>Uk}@|e$>L!#!9mo1Y(FSWQb;u)#o$e}4{`7>F0*&6dDMs8M5$CWyn>uVbj3x|hAoa<;W z$R5pj&KiF`5&ru7aX{3s`zg_h9_|baDcIx^Nq8dJ@eCID4%r&*JiaX*N$b^#D)1*( zCMp$=4bge3ggEpJKYG~X=dLinSf2m{j?ICB?Zx|u?sDVSaM(RlV|Xv${0&{4H}+T~ ziu3Sx5+4^`WJSk@?qZt@{;j`J=W7+zSzyNl*DNB7x^>j2Ss}i9)h{7RR6bnDS8$QI|_#Knc;oWf$DX@ z5W*|ic!p^nlk{$$N{2RvXS1!^m-aDQ$t(aU_@zeOjoNVdUlqF4wAZ}VeeUwtojUfu zGBIIsFQND44rA+rP3#)zbO#X$vfv+I44;dC-GMQelU_crgNdK0Ve|MsnSQc(TWKS` z2Ml@U4537A%xr9d|cv?Cn|KaIF*T8c~PM z(0#x|dO>A+?E==qdb}8-n^l@UUVn|_vakA_eJ?cTBwqb2ASU%*ZXMMrx<1j#J$P#X z(AT|j(}uH5mu5H8S%t?Yi1{SmaP)73t~SrgKa0}^T$D+0ge4+$H={oDx34+&XOv}0 zc6IJSSH^^a;-E$LT3V%CCb6eXp=RT?vfBlHeBIsC|1%zk983Zq!P6A4tA$w`i&OK? 
z6tY~bwOn5Mi_C9S=HNBQ@9smoW1eY?wom^0_Qz)3mbP4;#IY+=yU)uK>NJ>lxU}e6 z5}8?=F0)G0urWjZcMg_c8}!G1<`cuYxT=&xTlt(XU@WWOwUR$Hb&epgtWv@B{)YPR zg~)B7<5RUXj%ywnKF#lY90=wbgLEg|ek@M6vh5(chbvUCjTbg?I0#^7Kim4Y=T*5l zwo*vk$K@rsG4Nax!FXTCWnrLq96@{>K%U2+i9EEmr3hbjV9~69);c7D&pe6l54|$u z>8(nAnZ*&k*yvGp*P@K_yEe@RH)%<~erY|Hs4}jO;l)n-GR&}=x7(gq{_@6WGWhEM zp8d$eHJdhdv#sp7UG=ru^=_pb>nWv2EZw1LGSPS|N~3z+l(h}`u7^OLu1lD^&bBl{ z9lreEzOkq{;RXHk#`JyDoXbYp;VL=RS@6lm@3}1+w^&;IWI5%m48@0U5|wI zo}mWySQch7`ak$IoB$uVuKNA63II0GDQps(Jf8NCkF!J}Ge%W}Mq$Uz`ywA5BvC2m zFVZtO;R8dZv3Tv3UM_z+7@&&1nIci1FHY{MGK$YSRQ?JV|FY;-3`DjmlYJ?>M=3UQ zws26;_i-nJSi+lchrM0W-wBLi2wLt!xW9QnJYsu9KUlj_oBKV~HQF(|Qc*!QLtTsO z(|cm_)R=UUp*&^Q-i`+<%yT)q=$Ga)U>aY65+oLPwQXEois-kVUPqeVRF7wT z?4mx#Gb}F$m9Kc^Gw(Np`4h2x?I})KJNei=IlA~~6_{(CtZ4`}m8ZgFND8)Ui6u1D zFClxs@&Kt#{=o=+b#TBjxVuX`pthU7Rdto)f>)wcn(}H2`ZG{Ger-btM=_P@M~ASX z5?2No6Qo7D);Eqdrh1uC*|ReFLH%ahp$HkQGskcHcIq9dVt$VdbSudf?|c*UiE>@t z%3FZ>>VJ@}UQBQA>OMgz!+S{2TDE?fn>BfvFa0*3emd*$lb8$o?HyqdP?l)58C?9^ z>?W!qt}+Mb<-hJt=sE#wM7nt8wc2Fj0-AYP>vPi?MY}2n(whGo-zj{GiN52@@TsCq zVGt0?T=jH%pEs-3X}hh!?aacG%`r#lQx^Kz+05TkRu~wKVXO}t3=|iq_np8vEj@KF z{fDWXSrhi(*n_X5rE`gP7HeFr-T(ONUZRtC`Kqw)e6A_Vm8fo(>jA&2uP?dRKhv8m zf|#*Gs9l+UoIl+9!d^rB7?)aNi-3J5slQcE_^~dGhtRA4GKU#C?d!S0BmELV6~dmI z9M)zV(PFB&jf_|}gP~|%yIH-h1hVS!y@g3wC1fH#KF%PX6}rwNFH?vQ<+&ZzbKg%3 za`Vay&ek!&#lL3S+BLJPgDK*M(z08~Y-&kYMqzQ=++m3A$Cv+lx(GAjRgR723Xn z`eT?b=%bFFZdg*B)LOpa(w&AUnDk9pc;m0|>q;WN_&Ez+`6pPNw%@J)vBhVMbCk#7 zn!B}n@NZ@5YLLwm`30%wH>!ViP1!0S$^|F;`058_c+FWkHH%Mzud@CMPveibUxqs5KojRem7*FVxC!YRX z*3M3Kw8*@yflIk2diULqT`2WYDe6!Ov#bUKgZPM5q%1AJF$!zqu})1#u@fsmy|IOjFl-$hWx35X{3EHiP7&EqUoN^ z;Fdkh!d=X~DNW=fuHwFNh=HblT{WtnuyU+h>l9!BTXme)5!JTZu6hB(MX<2u5R0pn z97tFSKFcwi9)OJ!aL!N^1=4JUBu%mW@eD5XsQxEqS?+oOku5HMGVtjf#k4@d3lv3O z)_I&E9gl#7l|jzsMKi%JzPZrX({mMCRb1_K@TrO@sSnhg#8*45`&Z}%VcW)=5(Mnl zQ|&*Mh6~r-jwyi+IN;S)_Y2>?#|SR`RQ`cXmLI)z?9+Smd38s*F#S2pQlSCIr6|_) z%VWS@5yLJq9U^>VQkFPv!)ID9)W@9N_d3Ka-B8pnzAP~^-LUH~f5CKFjuWd 
z%@*CH7dgkl9%glP?Hr^0W2lDHw|R6v>%A-1hzlaQ2QHkR~b4fA7-!(D8q(t8 zdKfp(QfE7Zu&N%c3{5f4z&OV=pJMl_lh{3d3geTzNV5_0qZux|@Cq)x_$n9*Om=p` zSptrtybXF81gx4;ps}ocQSBi7%I~71&+&q-{vYeYsZM_vR06Xwxm%zoTzVgbIt3GF z@J~-ClM@S7`di~2FXZi^5ri|mXT0CQrRA8uIa7gGga>V!98=q9p|DN2Plq>Z-*X># zJm6xCrf)7UXZH?ozx6+-nba%pj+}KTal>w+>3;;&){_S34Pm+Ev>P4=-?$Bc=>j!= zOj|Kf=eCMJ+v!pp$7+!zUOBMF8tU;PU>_wILqbD$_V zuKzH{&O6_NNB`CDV*bnvNYfDtR-g=bm8a#ZhvjOw(yoGNyix(Zvy3M5%d#r<02>Em z33yT9C{6L!U-~J$<)iP#cxNBu$rx#pAt^WxFI~cmk3ETRee(18{?{MH1S7CC0k253 z8f&765;X`q$H|4em?4A-V`&<*3tes7#!v?_?oQTyGLpAmM#WH0Hjpi}+sjE_zl|Ns zII%{2_MWJs?<4+wtRr7^<5XE!_tSr7<(cLfn*Qt)9zFfK)*C&qM;!ajHjSCObqO2* zwB5vpifTtRvTmTKSzoX=aswZRai4zN%Qy73ST`t^rzZzz1N&DS2dAiJt301>^>QhC zc|wRuI_m0ec}i}^b6oRd@5AnU9>7Z%4#4L*3NYj-uyglqIP()fgzx|NzlGhSIgq6l zA#8mBkX3_si_J>&hc%?fmOT0)0~lvWILDQQ;l5w`b9mP;{WOk{gU>jooPp)#kAS47 z&*F{`UyIw{^)7t*cmDui`!~OjNl}0$N%@>aG=iE{b-no_TiMJPa`hs<2xmmJ18aY5 z#aP&;yIRM*-DN0Fl$rEFoBrCJXM6jrvAtqEfJ!VF%15`bZCP^Nj1sDt~Fv*kFbY1hUSeMPF{i61QyVs2 zf!Q-JAv<{j961=ua#nIzNAQ{_5gRnW^x2flgF!68(a{lZ_`o~y_MiR{W-il5=PXNsyS#5k<{t@_q zIX)}qaeUMbq*r~X)?DT7xZn&UB)}Xf=5ugf07(LtC17a+VEIeFixX{}0lWZXS$Wbe zO)zB)hnFuSKR5tVAV*QA%jcLdhI!6`ix)B4+Xqho&X;q`>bPO~m#?FdtC*n527=v% z@repnIbF^eSi!+|#<=GvK7?t)FguuHl%<#zIg+J%1;D`o#xS4dIDdG6xBt11;JaV@ zCayg89Cr41Q7r8%(bGg{8}C&XX((_di!tSCAvXpN0#+SP^tzHP)UUN}0^tH|?u0AL z%`dxxAU6I~U)4DF8x0Y|2A=fh&2IhH6}9(czvEPxQ(f-&Kv$<8F`*M#K zKu`O=)lb`sb@R2B)eYdZ+opc-$Zl<-9G5NfxD(68(CUb1ua9`o?OwmK&-A+|?i;s@ z`0cxobjF#)x#SM)p1~G!)3Iri!<9Aou%lHGc^I!tj=X-MTgy@YDp(R5tnv?$I6W7> zj1nh%SwwMw#VNMM^uQU?oMApKfXNPaZ@3;O?zjzmx7+~i?P5Bef#(I%MLrZjkrya9 zQ1AjsQXIW{85f^@5l3l?*JlSfKRv+3d5#N*SCEfW%w9c@%TGRo`Oy?bQ7mqcpplt zu73ZUF}~&`*f;}<0`u1{;ytnXCLg93wZ8Ve+#o`et_L;u0c`E!IE^X3R|8AWQ!A>yk0e4IeBOj zxl$xu9*A6gui0dRA!*QIzH)Hp< z>+sk=|4pQ?T?WgN_4DmG!<^?BjmFq{;dOlfZ~sG_{^1W|{F5KV&V9Gy=#M{#=YQpQ zF?;GcjP_3eqw%8nMaYd|^(;YN`6ZIo4*<7V&H*w*t>1~v*|u8!hov}Q*obrFvm?xk z8PY|HqTomYB#X)?96(Z@9z;^00K;s~k&ZKf7bp&9NcSddEtbK~MKHD+7;L$1prEyn 
z-Vwfs`5QPob@e$$ZTQeLEU|BL0_$hsH(~m(s(vPQ?_=VR-B)Ze4~Xc@bLPm~P`@0z zQ(Ya!_lS)Gwn{64z7FN}5vY%I%45r`w7RLcj%}=y7T%9e4?7Sk+i^KgR{we;8M@+P zHo&cIRosB8xnr@)?d$>NUnct4w9JiR;5sc&J_*0*AKq&3#@NNt{tSa#QIBdr5C;EH zPayiVa+gK|rzZy;$ zz?$O<+D)_`#Fsy|u^!74&cNmcaB>IZ2i}a!95_0fVm6!Ou$bW}pW!HdbG|h4T z!XZw)^=_p5JHWgsL+%VHP@dW=c!Bvi#pJrHF+Q=2>`iw8Z+_b@rR zhh&tNIux~-TYZ*c2U^K?je1^Q>nC2X_f@8)x<^@O2ff;8g3Wj3Nk)|2WfA4urU%v#FWs8f^i-6`T zg1N4!YR|nM%Gjd4C7p2jRiEfzVAm0~u?(&I(BA&g+-u6a{FIapn4SWb!53v(_TftJjMJDH=!>sUiaiZujT78yPUx)(pO1*rxo8A^%`DU=-tvU#6YU!6zAx_A(q!XPD1(9OiRO=LKf-0@Hbc z>1>XCKF4%A1>czf*$8Z&BTG^+BmiTJk~qLP*#0hX<|^PVci`yUDX=?k2KL6tMj5g+ zD+5PG`RqVm6aX*4`Qmj^zDBW-$d?N~E*N{)0YT2&?aY@C7Zh2W^M;Iy+=7>SFpg}_ z@zn2s8ka7d$1Gzwm>pqS6gc7>)1ttPm%%_@aLkJWJY|?&KE$(s@+Bl3$o40tc*Yhd z`l@|{)q|M;s^^&5q9tX|32epW4f9@M<#_bK%RarqNOBLywLTOzdnscZZR0Ce#1AOmCu&J~+&#+nusv}ME+SGG6uVxywaqmC}^E&C!AsHn>FyQ)d}D&SQH>p0KBd4bub0~{U9F+G}LIx8?O3LNG+ zrg@H8o?})NILfCum|n&KD}bA?#bq|b?C=2WaE?(9q)3q@DHu<{3n0rzV3QqWbB1hp zA3RIS=jDN|CLO-|%PY71@^w6pFN1&_>t776yj}(I7IN0rtC)+o{_*OBw-U21XL&nv zQRLX!-@~Oxp2Wld_V>X@BOLGoS7t{zoX>HX=Qy0taWtRdXg0@mo@2DTk8gkCPx1ZF zdNb!dw%iv6 zaQ9VsY*n^;P*mPF*_;06ThHa~x9b>DTSiTNzFY^be|@$n3b2Hsc@)Yon1_`5l;WbAH&hR?#7iTo(F&YIV3NgM>5NiFow}61!DwxI$=Y$)U)xCmP z{cO20E>eb5`@4ANfB5&{qYStH?2n&t4bi^dW`gUkng zU5Of?OZ%~B`0+?*-faG8UVmjPGHMaVemXk$)t5SLD2bbj@?>5#G9>iCQCqe)fB!RJx7`GO>vh1D z%gClVcmWh?fn+~JayUi)f*p_Qdy)fA@JL=PzS& zIK%AV3MSW_!YpOT(_~$_Wgri1@e8<%lYne}s*#uf!}3?$R%3SM64rX+Vyh_DDyXx( z9k^pAT>juN=fz5BH85131}u>7j&Rk{2+#hrU&ED$9>op+!B1oNEq5Ykz_{SZUp$Z5 zD;MzeSHFpG{mXxiY<7s<+iyjYmX&6#yhVBSoWfPalhyPASUo7%IU9QN(rasddcJD9L372$7IBTS zb^C_VOW*kGHAxUfJi^sFY6b*D&|2zf8VuCBs*^{lVXMRQ>=S&%LaF0yTm z8TmflBh|Lg7$E&2!cc{88)EYfcqw9V{M0$DTUG|SpydEVUX|!W#-OZB+puvZCRT4` z*38%|(h>!y!>TGP7owb1f^5}v3<*n6Ajf#JgY5C=aQ+|v3Qqi`pGWeRTQNJCm8MA; z_*37VXMq9O8djyr(sj5{gM5Jbh z6-4sO_4@486N}WkcW8)+nsS=v_`V>Puppbzlsin@il*2?JDhASha_(y~AEe zcr^;tL8z1Yf?P*uU39JioO#4a&*_EE!z%E=$`)RwEAUE`Ojtea)B+I^&t%vbp?}RG 
zXYeMyPT`}FpwiC6;|xWkF5kyHWYojW%6IC}$F1)!Viyj5^$sTfGUC@1eu@aJ6dHN( z?|f)2t>=;8lmJ{Xj*xKqVOulgyLWnOq3?pYRB77}^zB@@ z#ot)53uUCorY+Gm)c1*-$O&-!gvx6(n3_KD`AmU1y7p#WE>>Un?>|w`_nc~z9h;ZJ z?xwkiPF}lLqW)&8nAtV>;M=0riF*0_Z(r|@NLD;LI=J>CCx)?ss)Z^q(7@GM!9mgL z9a8fOB2st)NpzxBM(43md5a^vzG$4pmDPW}sNSA4u6uLmjt9(JiQ9P@5SoKX&sI%7 zgb1vU@<;j>#}Ei{i{XNd!s@))xXSHFrhJJQybWA1+2zTX3)p0S_=p$1ArupE{Ghv$ zHZl)?xQSKD$^{FuT5zksp{A$#b8pAOMcx6k=uLsTF9faFg|7x>D+x%DD zFWsVdwjtKl)>3-E7zI_xnlCvIZdUj4PrF+Kw+LrryD|69xPv3;&(I;uYtV_)_I`L! zyls^)x}sonwd#*cug=!wDTk#mLB->CF{_x z$mt9?u1ltOpKhV==@<(cBPC*@;gpwcrhkp33qZZF_D4gG zEWW(X!m|*s{wAc$`Pz&H`Z85N>SkfXz}L2Ce%P#w%}!_Ua&3iM1f)lW?2B4IOO<4N zJ-vZC(Wo`~5{>NfW2~dO^V8w#_g;X}$$g%sh+eVft>K5iTS#uDQo8>h9t?9BW8-5 z+Tnsoned=1qSGQo-gZ4k`5+^+P0j_mX& z({^lso($(olq>W?##P!@#E~ACmTxt5Gr*E!dzy#DdZRJ~{aVQ2SrH7oEo<~t$};&H%yBjLQA!_Nbc)SiQ+oXDHJ z&L-HFyD7{MHhy2QsQ>-oragx&LfHrX^Oi)1$ml?w+j8r`=du#Z)dq7DODm0Y4XttOhmLkgZoKjr_n^Rr1e=DlF(koJOSf6@PmmXN1?(g5=fA;PC*XvqJ-`pG%?^;2op-(q)I0uB zK_Vg^rB#|SxpOg|!r_hC`Hp6Bw`>+!&NXA8vw$6&w92!5$B;|d?9aef` zve(V(**?}Fo@50TndD{0v)bvKG^zp|=HXMP)0>e^DLFAn1!L7=giV5XA0jL~@Nl|_ zIL*G>ue%Hd?Y5EFipRq0v+=F?L>LQfw~H`fl&+VCj>gGT$hO^YZNV^R`>bcGLyT5E zNsk@9H&DmPEL*R0+@cR`Ro2M8^Wzbog~PKL`?X~;jJzKl^Rw7L-kTo@9a2f1>ey_##xb>3}r=x;AyS!?&3m(^gl# z5C;#ci@tpd_Ur4q|ldqA<=Kz)9|CZ^F=wM%5m z=FG=wF`y@$oPD*Z3`7&QW6$xMZnlwk^;}loO1j8i^=>45>_t9Xlju`4fUD!8T2Giy zUHS-M-iTcbCCkgcvMz(k)v4rbF?2 zquz{ISI)GvsDJ@o=t*2FW10`UHWVF*R2~a)gzV^Dq5qYN*j}b{R4&5=v!&%>(o$wZ zi6bH+<_POjs{C&z1-vPlMO=>Dspp66jfvzIFzmgV-G2>=?9JRx1C(A}obTN&P}Ay+ z00uzF=V#k+_fiZ34LAjo!G{*W`q1}&B*zZ&pd*vCot_#9X#S`^D8U{XR593VS&BFa zUx+K(nhA8jgVK+NiSihb z1iFt#L$n6#B*nZ;BaDH zJBU`4?y6pPbx}0#6Tl6pb}l=9g_wbLYH_D@^aS3n)s5z8I$syZC=;$zDtNZzNMgjY zGo($co|}kAvV@~+h3oKqBER@r$U?Nv0GOURPr?$q1T~$bnMADMu`9>6pj9<+G?2c} zBhlaD+?K+xJG2?Lj#K_CX~u+gov8V@XH+T4 z+W-0M+-lhQqA`;tPc58WyfSgCA?Z$0*$nM}F?WTV!lpj|c_bMC{mduVp3+5y`k@67 z>Sbb*6b|N{M;QilqmRrFV-3=!Em}(8K9hhIq!M%(KXm zA>XJ6;N0?jD-rGhtz>FV__el|Z}xi=8}|NQm$svtvVx0wUB$Od4jT>>n^F`lS##gw 
zG5HJ^?@SZEA!X&ZztrNYe3hb`4rPXl9!owZ?KW&_$8b3TE!3hXq;_nngSiyiRgXHrY=jVDCxmTD&c79~)cKQYDAaAJ>Mir8zlNzhN`q&|98Mrq*DJfy@KIGMxgh@ARQ$fx^__tIT3ZLaHN>D`a?T7v>gNlU{=9SzNJ@L#Cc=6lZ# zTMjaPWl`heq9PY@R@vvy? zLUZs*ymC@)gRZ(qPR}Jb2LjO0$!_Pdf%9h^i_oXlN`k;!F|0WJ7A6gJI#VSwD zI>>J=pV!cHmk^otERm&H%;u!lF;)UG$P<4KS z1dZ5)MD}K;A!SQptOLiI2>(uQT&6}^tl*%%sH0I9;1i3lI@3KlvPwU%TYjZn7}_~C zymPN6;^oJ?(!!c@e7buHBD_qxq zxz~UyvCLq5=roP)gPW+gmlLkxt8EGuVYPmNxj?HNSn!H2SZp;Xt&2N6Gqol*fe-2H z5qa8YvNm(GXAbh*dGwWDa^{rU?Kru?OEACTjgT?bg_Fg=W~ zUKmU*Rc}Bfj6!cu%#?m5#PNxRDt}^*bb4@F8VR0i_(1$f7E=2T_HKDw-?oU{T4IK; zfA#McP7v+XSXPRYe36nMciI)&&j%h0z!i&OqLN-6Km{9(w#nN5T!ww3++Ny&GpP~! z9{Ktt>0o_!EBk`jyq2Bu49sw{Z6ImYqqu#At8%8aZz3c>(n5`>a3@PueCK*AWL-K* z%{FdKl{j&3j+yXt-zWVly_X06GG*F|kJ+U&DELKA`L;-Lh_pTDA<{!&874YpF%y@H z!lW>BAW&}g51HIK*6m{rHIG;aOi{3uxz0WZf4 zW(nwiQaNPEl#ZP#XxQfY2&#NKMvBlbApu#|4cz?6;$m2e*D72$R2hKeuKsx*o8Go85=lbfZHhuWi6HUKW~+>f%H0>+{&#VQGLB`n!IUCg?KHlA!> z_MxNhbT8UQog1x2CU9!5saoT{HBws}uXU48{6P#PS8B1qRl_46jW-;(UbA@+CQ=$1 zlq6I7>auU?I~l14y{iS7VRcmc(yd|8SKQeVfM zEEj82euPo02`;wFKl%PWR9T?lqB}qDV$>}?ec3W?`>J0-x8`uRXQ}7TT9!3f_X=C5 z6nz~;%JMIBmgzkI<1Kaeu&PG==9CYhzHLf8*a#EWrHSW$51{9vKQpYWSN#(+R0g5F&L8e z@+Ts>B5jIa^}va|X&ZHX5|yixQfV{VbHw*Md>v6>GZ@Y3@{7Wof6j*609XI)QZxIae2A|fE=RSwJ4@JtV`x~~13`bQOs#40*% zGGc#ZnW;x_pbEN|9kComK=IHAqlmlVU92sO!M6{?!!yy0?+jj63+2o0E*E&zwnbW} z6NQOt5>Qb0@xBe^uap&LWEsMO?mD?N>&|Ykf$WqGd>6RgwP!I)N9ZaSZpXpSR3Jw^ zXBQsyoGDnA=&&M1e!KI|x^vlZOBZ7wsG>q`D=2?I*%H@`ICk65)RlE)Vcyz9#5Eo` zY(Jp1FT6Ya3RbL+jU7dn(bIy@L6X@0^s{^6^q@itV&a_pA4{MZGE#72?^yr9M?1 zt9h1P6skEerr1bs`EuFamilq^ekEZQm3{YC!`lUG@1kSLQJ*dIZ_oiy->%QSfA_VD z?Qk{1x$%2PFGQbc6~W|->y5V^v>KqU^B?z~2HB);E`QB%p$Pfkv;v(;b`M;Fc`T1~ zXa85j3;3e~Hxohs*DP_Ddf)uKJcuYvO^ytd&x~@iTa?G?yDknOEAF>qg=2m!KKU? 
z7hh8HmT{Z*ile(AV8jcfNUK!nrB2NtK+gXQ+f@%+iUMhk1h%T(Z4cqpTnIY9w2$a& z#Wgzw9lV{)mzK*2aqNy@<**v@Iwgr?5aYW_eB-2j?DD`2eQkvDS0b8w8MelOd7dyt zyAl|jvP$Hn?eoJ@(9wtJz?nr&>}`n%fW#SzXNAV-z^V_AtVctL#h(U6xp_QY*b`fK zzE%evvDXfuVVK1T@+o=+Mp^SBZ6sbnHnFP_=f_v}=Lx-Uqx?}+A|QY|E<^fT2NRFj zU<$J;R@f?1B{C>bbCsb-3(GO?pO|2#GV%ozW zZ~d0-IP3;^i7J`N#+C?d-pzN zlCsHwefG)MA%t-K)Da&2gjEMCSG^*);1g*Pt;Q0r(s*Pk@xK(0+Ww%76>%EOx7_u>1vlG+bu z_B%76oLP5b=%Z>tf2wpqnhfpYy#2Q_vm(3W`%(dKE?iOJdgb>QW;k_xEiH3cg)J4A ze`o}D3;WC%r1xDu)^$ytNmWc~X>?l5u%#y)cuO4kk0}33P944PB=D>w0LcB^& zm-S}*A52v?g@gLpJ)yFN_2HjdH!(nR`y(31Bd6Nj`Hr{pYC>Q(U#+Z`b)jOrHyF!4k_^~;;zt$_|*Yh6?$)S>pNxl zj;MUcXu+&S4$n?m=Ah)c4|AFf$!|oW8Gzsim|9B%(0pt538Jc_q9&w;J*~*IgVv)s z3B*pz!kg?WD_0&VX;#z62((gws$Q6+?}M&NGb_skPLzzCR0*dmfq4ac>Sa@+7Qob zLI@tgnRcwlZY4s#hQf`Qh%|{4Izw6fqIYYV!9nduISXNP5AkCRy|2SkSQ-8!>l@!Fnh+<|Ed zz$|rR)l05kFySFtdFIeg-=2?ynVtxlvi1}@4wo$=qn)&R&MZseThDX+K*a-R0gP7P zYxxie{eaosIN_NnbyDx{BZ<2aXYEdE)eruw_>4)me05LKnP@9y`cJ}CnW`seYFM{GrZ1?j52 za$yqRX)t*5C!-lkn};jX!O?$A4o=U~&-Cy&*`Mx+-}JUKKDIQkmY)he7^vj+6CSp- zJ^C-N`+`Rc?+j|H*J9hmA%eX{ihAV9r;C6ztGNpG;U--2|>s(8! z!Egq2d3L_~0RND083ff}rO&0iyxRI9yKa)qAHtfUEa_ob_|8Be$UA8jOuLc<(vK&7 znQYx`-s(ll<)$Bw;I)%F0bYixqEZDZKsISJrUY`L8~`pT{!JDU5vi*TpQ;-TMq0ab zWK_sDvP`P?ZvSM7(sWL2I$BEGT80SkwyxZB{Luo|x2Xp_5-^RhZxP^%VpvEi$b2X5 zzVlWFaft?dl2?vL<05dGGQCwH?%2!jp9CPh6gTs-fPxmj9B+NVf!aMYNTY<_rfm86}-7bG=DH zac7iI+N|BQ46gE?Av}26Q2WcYQhR6^;7EV8rurArFwO(4BA)#^40M z+`qy2RR8%=Wj!hnG@OY4v(h>8%ooKolN|n$`lw*6y5y;Ge1T@iFo)VqLEY<@lUt)f zS1t8x;vS{GX*^V}4LEuQux^2e=|~lEswz(;PJw)kH@}r_Ze1>LY&ul46Ar$7Ii@1k zAG+39Y-Pe;g+? 
z+P;|m87S(QMqdx9cr_o;&Q@>V;Xn}#fYura#}~rpy)FDc)D@+b`D~3@3_m%T$LW7| zNb0um^ZlrnTaD5=Q)!42x2~#&)_nNWFZG0lfq;e*u}@}M0MQ_b-yTu~zy-q%>->{_q1k<9GC~S)t{XC| z-jB0IV^@cq$|X(#>zZKp?Hd3uS?+YWfT?rGIS4L@7@!}sN+QILg%r20Jl%GRs?<}- zRjrSYiHngj?Cu;;cXjb*L(ZjlZztlgm0h25kVJxO|IU$3@mZv1m0U^0zF- z7wz^4w=6~LS~`cYiJE7OhDd7<&m_C7N?Qb-{N&h{mg+o=%OeuYAfi0<31Q-BOLrW# zVg}L3+gPGvm-P+0P=4z}GZS;_+Z@HPmXVX7gpC{{0Zc2lS&WD^IB}+Kqx9iC3-lYs zx7!joR<4VSJFU_=N}>oQmyAT(I~`{0>DTHP6Cwx!%DFWxAFDi!R)d^EAbvIRUUekmsdiT z&)GKg)c-L7Slg!wp0NNdJUZB_eck7B{B#o%xdF{19G3v!m26*rD1TUchxtX#d135u z5Eo#2B-ox%%>++~v3tF}!VG_<+U38it0caew`dCpELoaMuVI3(rcC<@Jp=?k<_>^* zKJuIexadZUEEY4vr-g6+0U9ovn8lw1)Z!iitluZ!16Y6ga*7ahV_e<+!$^>`$>Djc zW7}qB58(ZlVZgZ~;lO<{!1r$*lEjAt4jDF9yzJ_GKJ0WZcOC1?o14+y)!XauN91K1 z@_=>s3kN=aF$8MJ2a*PC1HGTX7oqZ5MRh>k5Vqrae`eJLD*a%~g&~O>rVyyB zSb(_=fFms}&H6!l;K|o)%CA#GKM1lNM+q^Y0{q))T;yX9amO~6Tw#w+wKCwIcG!{j zqfrKjDMlf?Eu##Mr_fj|`1A^Q=GDbANnLF=qXrSTEaTIla*Sz`F+k8mQhBA5i*}t$ z!E87qzz0Kl|Am;|`AEbP7<)pA*IDs8iP>Gqi1eY2=;6zS4*Yz6|nc?wfF;u-JU%nB$Xc4{$laIKokKu zJ_}}bO7jCKzm&9d;7xqQMV8O<4mig&G3E7iu_I`NaMJ)>>qgl;Qw{G0)-y3EzT{;X zM`B$kuoOR-8Fx0;!*y8N;zk40ZKTun-nxei`@VCfpZH|#qzSXHWWDasjkC;rXYlu- zGF!QsD8+#d(BykypQ+;VPST4@Zw2vI<vk=*(?Q&FQr z)pEc+gU@&)bs|DRb4cRA$7_Xx;wl)x9qO-Qu~@M}VH$cLutqxx->f z&I$2B_1D;|S?D65dg@AA{0F^b_Oy37K!o(zFhBGgz%G_WJfu8=*0x7k)>lR?pKf{e z&Wc;<4Up)k_tNV0wFe{&UIEb(f$pcet()y?v&%*A0mEQ+Hu!+IWz47AIQXrqKmCvg zq#$!AleV$Q??w`fs*8aanBp_H1nRTh4_knZJm1=ee=YhB4hLQMrs~I`vj;UH);;a? 
zY$y@K4$n+|f7Q!-s_jr{E27tA{(QN`_sqVT5PShp%UZ|=2tBl_1fs|x;4M325KGriN(_6^fQ z@TlVf`!+LtG14h?awpOMUx32@{wcH@xd{M#n;oV=+Ne$2@$=3jPPA5|V<{d;E*3-e zcmQ%n_L_s3VJqmEGqP12jI1UL#kDlRPHv`HmKmxJXOw#&zPy}%-LLvYpGVo1aWq(1 zmvcn4M^rA$GT#u*3QCCVvrd8U@9U2<&Q$-_Bg^8BgL~VAZKlX$h4yF80OLrXt@zY> z-79XIMSwjy=csk|h?5X-e~ZDHaVL#vBgR&%&mUC$+n z<6pYO5rfmK5Xxfs>b{am&`Z|6Ie~Baap>TBX_R|k#$JN$OPRYiDm%JhE)obx3>vy^q(B{?m;Jkg8Oc`nngvEnt7yWhVsa>j+hCB z?8mg1VQ2N&3QKzdz&jjiN<&C&e<0r%J97`GK3TtRewm{XXqf6UAmmeQ7k{mqt2G+~XAx+2F`Ha1{;Q0lyAc&DGG& zY=HT_5mmq}P0$6;*kdkv_2q`a=MN)+y**mP*U;Q=7zs3%LJg&?S@$28GSA-c6ILBQ zCps%Tur|t(auk*!ae3umYBDZ8bRk+d5I^R^9wM!qt@@AU-JJpV%Tz2tL>|^9{7FE> z1LK|atQe>(N)AebWWje*8$u2qx1Nm82TeNc_Jz)rGHWrHjR(C{XD!<^24qW#rPg<- z^_H7v^aS%RLy2hOY?XdGzUTPzsIv}_a)Phkomas;fD8uQl<4Qsp^%VMYtH=6O3AVK zX|WZ_7PcWn3a3qjkuYk{2|H!S3UVR4sp{b<{?_D_bIAKn;e23c=UBcljwfV$mmmq! zDuo?ip#JSS7J#1e^uW`*=`|FIvp-R!58;fqrEYN`K4??~rH2lR&|)yF{aVbs?OO`8 z>?oKB>;S~DA*gfwl&-q2cq`aeB;@F~KqRtW_r8jy4e=Sbepwve`1spEIW6x-+}a0!Vtu3HOwV=ilG>HsaFn)oC6U ze~Nz4CX|*icCNDQ)6PNv;oK`LU#QU^?;lO5p<1P{QPE8~d%hKqpj$FWA!a1zPgMaj zh1Z5(iAl44=eo|5_wzfnWzx$OtRmMD-Ea#E`TJP3I9rkR`p~*Ao~60?_E_^uf4qu-9m`p+0kxrZF|VPV%D5*IUWjoK zU)8&rz^0(swQaNFc9Qt`VyIYc$IgvnooGHY;3$umhh!}v!dcQn1gXpa;5S#(dG-ig zbvmXE&EQDR$VRP1iW)-Qq`f0?Fq7g#?kP;H>v}+=SjO-V zL+h=3Kq}=Irx$^{IigP45f5VxBAzE{DkBTJc(XnlYFxT%_WM;zK+vt?xEd%+`FeKj z8*aSsPSz;iZq7*Jos<3#+4j1P$r|yEl8uHd%qPK}ez`Rm(R_{Z7K(Wuq__Oo9OFARp9%@mC2S1%R49m@~U zOtfPMU^ma*?7M)Bn})&JM7qZ`qgB3doXqom9iIwx1P6L}-|`dSzZ7s|eanC=XQ&<^ zVr4BLjy=0wf4)u%M$t9ZCmXpOEhf*|pj|YIt`%*=Kgq&+HRS>b#<{Uxa+HYpW6OkN z=>a_tS;ln9cnPx2A>d~l10-$}t3xeCFqNV+!38;0(+EVexLKi#s^}-JV(h978ruPe zF)g3&$3^UJEwe4PvS^HF?Cb)fxFF8&Ws`EQzUaYpypT_buK+Mt=7HjbweOwE+%K_h zG0odZigkm93(pkKF{h1V*OpjJvj=!$>d6`|PyWigB5=vJ0PgK=%M1^$y#XxcNlerI zkihZYkThwtjFMSq&NPtTNi<}|0lm^g9|Nt%9Wh>wlgubmFT4MMlje?;87D&%fCGztt%?T;I}9YK)_QaxGe zcUi@DW*)Q`U8OsHu&ETDdD*J9Y}8WUvh`Q1zb+4Wg#0~5AV`00UVPzGiDj+s+N|#& z`b_+7J9%Z!gBF=VbG8x+SDz-5%%NpthY1@{D< 
zSEBYObI&QFETlWF_Za{0SHh}~_(dWgTwD0zbs6L@(?Yb8~(<^j&MZ1WOrd`!Ldkex+HM>(8 zFb&gauzU7p%(fiyqoICTUVH7|IG}h=I8Kc=yPCT&*DEMsAFI1e=PuR_ztw$+0yC8f zyLdN7G)tKD4QI#bVg;hGET-$T^F{9r-Zv-)ujd=+mH ztO-4gJHm8#pe0L2fw@v;co)51YN~cbF6OYKwtpua90#TqXs%?U%KE)%|AMHb8Rr4b z(wfNV$q=cg=i|pt1UU{c1E`EoU zx@7&g(5YRmIIh*WDU(AGPFVFlc7PsCm`Cvaa{J{~5OffwRN7vht}=U_IB6xiNqRKH z_EE9Kvh?Z2dd{Yt`F;Ipr@tO{v-fT(kC30g7yD%>bvbccJnmTU%g*@`6?^&i+Fzfr z@;MPR-CLLHZw<}sdTzqSw_0aI}j-gUbY^NXg^In$o*@vc02HoFv(GL30OKJ1B z(QW@kd4n|l_V^j+f`0=s2UgmYr~Ps}%d|$P1W)?jf(7=4jm`ei$XA^wAo|F2c0ghq zabd_)^E0=RXjf10;@@9}=!^Q|4b1nx=89VSP3sv5uv_GVE97AWUi$363v*kwo zX7Sdz)0n3KWfal1uGoG1wh4DrMhB%f%Aj%fp&2jVy9n^;g4@FLGDR{>9;s;c&Fc*`q@dEd5QRKkK_8gGZmL7ov@xV z#&)L#^2Imi;*TqiC*u(zZds*8d9KNR^h|c(&P#a0m1>J;CU>KwimVnwohud#iCr*yJGWdyd*@Wxan?5!vbGZqs=5{B(I|#PlTeVoNoo zbN-^ibI;J$M~%aOzodOpoburAN>9C$c;gjNzlrcQ0?Ov{gAs8bG8g|jPpui5l6qC= zYb11p^?Wv1@lS5UO<2cmxx&B34FyN=2lN-jy248GtFCb;_FfSo(pU96!^l~=V8;`r zl<98km5mxr$u_!Qeoq*1PgCid73Ug%+V*;Nebmg@Y!Bl&>!#^!m)%ZHsY3WB%;m`0 z^{T9LdF%9z@3QE!_V1^fQXIe{CMD(mdDASxmuf*9nAx1(RUNUE-f|j2^ zH}2XGD_^Prcrg&?-|<)R;-%`gBz}V4YcP~9momv>)dhVM;CRVZ&lMv(?CL9M zTa&PNN&h1@_6CZeQ+uu2JXgNqlG$aUDn-it*THr@|;)-Po4u zw-OmsK=ur{@SEt5HFNTdBMxukpXk-e38Nl@f=y!YO~iH6XbxXQyU+`_1bl zewJ+?32Wzt-!tKg;e*EIPS#?-nv%yBs!7HIvnP*j^-By}cV3HMZr^(|Z(M2>0;?_# z5ZL&!@DpYH*6!EY6wb3&Wa}f(Jbe~KoT|I=WTi&bL`Emn;9E4*z-mg7>a^ z<-rv1zjb(68sSl8xY#*{RtqR9ke^Hp#VHDG{!4_H0cG8bC>c_noFD@M$WWrz+h)ukqifSB|D-9pWzgBd3`yGcTdxqd3@Iug0|2) z7}v!9acWup`B)~ioGV{g)>HfUHQ!%BEJ=SS1OD;u}fP-lOA#zX4A)V#TJIM zA{v}6({;E}ZslA!(5cd})g((|US-7IR0#DHwr|tA*X@I_Ts~LaDg=TZUFwu#}+7(yQsxZ(ANLy!x3xqER3rR#{ zgs4B+1A3zfbEq0y6&H1s@2mgV)OtruR8=J{Y+N*f%BYd$E;#I{EsyjTO$RJPXcw(I zwTx9j^&!ti0fpQvnO# zU-H?vs~*u4Lr2n8pZ-$~ol#aSBCQ|A^5_l=Ro&tMgjL=0Hn{Bc+FJmd=Ra3X&3d(w z#xdM1^v`XE5NUF$mSUON)1K`Ac)}C5>I3%Nn%N|GKqfo={zU(WH=hYI(-YoGYe_me zPrc*7jyt;HRyj}clmZu2^*7h-f5V;@`tpPSOgK!AC&WNYzI*z)tKJBkA9Q#A?>PQ} zOp{7{OP2EcPH!K&TTeYi;NsKt7F!mH1Wv$<9K{EOdDr^|aqNIR)zln9;kR4E-=$yH zf0b?^r{2256eB2Uuc*> 
zBhGCTR+o+z5|5&{`y=K}mJ+viyDkeONYvMwfg63-QD@rw> z?S)^%njqd5DYqXwV-|J>(+`O`0eb1n#}qG;t^}RwO5~>Cs3#Y8h~<-l!@?!UZ!@-B zE@ro8k%-^!2U2n-|3Bp)Vq1b>4tI=8Uw|n>_ZPiL;~iEm1QsKeva4lyQ=wih0gv~Z zi&nt3{4V{07I15d6>Z}@r34M*0Y|Q@F6D=8m;6P&tqxOJG8@JOl*W;8Ap+S0Y6r77 ziKtw%4-oM~F*MfuaY`)w-uY-wz~+{Vl2n@H`#H}i25)}n)-A~C2VTM=Vm3%eVJn;&tddDZVwn2z*>$eKBHn@|t-xNXGTmI<-ju6Q!r2>=UM6*cehNPLjxQt7g zAwiEabM91E_;XFo033=-<+;<~8vtH-Rdtj?LsuiZ&T0vSn!6A}OEN7d=bHL@tU0=!0AOc97I0R>=X6H{VH z8jHAQ*%)!R#WAjj_`!jQCFmrI`Og9=0-dsm$VbiFos6_X_fISe%o#Efq=OO4Nq-&2 zs+)LCpH#CK@sAO);w5@mrxUZ7?SaU~byDx}=eit^4m4gC>K%FkxcHnMm{a0?`{DO2 zZ&rnUl*7Y|)|UW(l)z7#=j<>t=l$yZ6{r z+WMVjm7yDqWa^KHPqvuit$A6=(2KSJ>wwW;VrA83C_#X`;m+Kk2Y^_`2!8bdB5lz zfQEtG&0Lj_W3MyN(V(0h{xESaJ+l~TBVdBjn%4c0HLo!Eyg!06rs;-X-%{2I;WN{? zTZk7y#|Rh}2BtI$*lYAH?Q(~$%TcO@Nhd~DtEXkBI?ksj*!&*qB{<8REVGcz^6(^7 z@L*O)gk6Ct->&#B76H)##X&jxhZtzanMAaVKGnt`C5q4^yz#iBRk>spC!ZJ*`HC^M zYmzUljWmmOuq^4VAaupk(aRK2;KoLtvQA_$iCJ){J!H=I-d|Exi{YVO7e1WhE7V%sU8KL*|r*B@%??z+Ez%~a5 z>WiSKOO-{!td*16$|Y{F#Fd(G$JbyA(T{R`un14d&+GtfYNFU%mqF3EQk zp2Dn+GDm}v!dD=`p5qD$Ow%&-GRSc_vs5ppi(gm7ih+}Sx%-ef@Roste_0k%NTw)4 z-^{suP&hSIan5G}3Km^>crJ>O6g%?g+z@jjfp(E=zsQu(1!qW1)d zKt6LY0DNvv95$KX`gfltK#d8m&m4+U2F!8s*)I|TJ^SN z{SUdk*C<{}Gm6N~YVIpc@MJ`(0)R1Z!M^^IrBr!TRN0HQQciik2wv8~xm_xz0-dZe zt<#TFa5d$W#2NO5q-ZId@iBT&B$nfb8D|*nqKQ@4o0;wtzs{2>RN%%Cqjj;!Oi`^@ zXBCQ(9Iw{7<$04E9;93W&7|40S3@D9AtHp+LFU%O%BK7#7SI*( zmDh~-&rhRYtVvFZD2L!HzOU-o)D-7qa!P5TGQLvtarv(K(Q&FN}gi3~a+ z{Tsect?8lOVVs)7=#@Hp2`tUZfFf|iwuCWwaN+*|a6pg0Mc1`R(-hM;-oW4anXB-B z{*TXr&n|)&3BU?)%)ygg2yXJn3Puvdd?Ssz?eD{gh@SC5048Qe9U&BL>JcaXD zrbs6{*qQ8rCk(tOPx+PYR+d))C|LPRya_8-bLf1=QOssw47lN%llam5uEI~h`x^Z4 zJFdblH|-;tWI)a_n=|kk149bNczMfq%7KjG=*l6U{?2Ro%vZjTkALkoeEqpYoG%hg zcSacP?cl_T@>z&U!BO5p&9Drk)_3@9Z7G9szbgRO{#ZWM}zF0K3yr08j@C0O~VX0I>0;ZU9g&lLCOM zJZ>id%>#h4^h`1~tZo6We;z{s&?TQNy`RayPPm87y33RyUcC(!04!yN3IghHNG%es zPun=k(aRpfe`WcH4**)#!NFz*LN$`{_IJx4xTub{9N?@#!cu^xI7)!SbdH0)G5EEo zk=}SMvO8`=_0#n6b?mY&OBL2!R%~p%saiT2i-qU!Hw`?ovx;#CiXS 
zh|H|*-l}`+-se6-=lRrgt1=@aPMkQg{qn>)opsK3a?a&BmkUSFIbXNLFz*aEq#1V{ zJiucg{4BrvH~%Lz7tH(|syYk+s%JpNV4bC9EVmv#$de~7keiewOVAKeS^Bg!%v55q zg=H!2@QMHW7Vf(*$94+Dq<{k{{PWEfbnaH9bpvMp2(of3G>>uVY=;ki>M?%hJ)hzO zpM03Jm)c~_2CY^L#YaNc+6beBlhP7^TvhLvPkd|&(rrF~|d&wc4UAOFH>{`i?y{^Uf5C%a&p4OW)s z$+83$@1M{F`HkJ>53DH1y~r59xgUEeX319lW>nr2Esn+kn057?U_rYTO+Wt3aLlY* z-frXZkIFL)F7b^-uS zvQg~=sE1L2DFDDR3C3)Ax3lJ*l}=9r#7uQUq_113INuAYzqLK#qr;v=4W+q$0KmRyW&r?(sa8g2GbsSL72neW zfYXlAvpNPm4WKt|`kerP(;oqz3P@DIj0FJwwyHeoRJ&I$(?%`M>eqG|0Nni9;k;yu z)#Nmj!ps)vJJ>*M8vv~5F=a(&3jo$>HXEt!1Ar4g&<9#f`Rulfo-W_*w$UV$BmM0}c4PJ7~W29@3kRk}NEMDz@hVE2EGC z21t{Hxn_gR80_iubiVvFS3dD5>tB8jcjgkJLNeEYBtf0S`}Nau)GE2FjnitXdq{!+ zcC8HxH@*07Uj40a;(@omg1hd&i=>rtVtt)w*H^jJX>&2}vFvYm}cCu zILAWU@e6nMaByO(b20^dN5TCXz#U!Q!24w3Uy3?g`%g8z1An zpZFqApS?_)&CzT&QKz_4riv4AD!z?H!jWk|f%+E(2Eq|x?OKi=ndgVi^&p9mOznX*V=sL`E`El4^QyhA3w#TmkZ3%Wg1H@ z)ER^-KuABXqSduh)^WZcP19cQrwJF6TWmJQrSJPX5&+!U_%(}pbz3x*Bku9E>AX6) z9FpPNJ1cM6w1W-U5A9Ev?{=RXN1H6hC!KRjAaFRR&GdVdOVEFGXC!Ufr#7kdRG`zS z+vPaE12AAhAb)a@Vs`7NiGYNy7#qm{j~W_ z{>(Jx?*t?mE^|zl<1TJq#uoGP*k>p1^RQ|Cr&!NV4+iZO7(GeeQ?$)a0Kie}rfugr z0|0PLxs$c!tgxJ21OP|8aGkVokGJLd(RpGjAe`z-Zty9>0`6r)J*d1b^)+S7?^F}-aOlD z0RYy&dazGw-vNL87va*SOSBIjV($CiPWr-|FcUa# z;B7;zd7&DE{7Wd42#G>BhhDb;;MT(pUUA=z{Fw)j^Gz>3#9g;LGN2Fr`1@?B7I--&gro^VsJc4r&# zYXJaOX?F$yjxKZfnQ7W^d&it-QU0XqC)u=5TJA0YfQ;JTZ1U{X^X&w>H(_071prPe z=e-61W3sLK?f5c=Z%orShqIY*OxH=qrRm#Z5;<(1$Mo`>7L?nG1>o)hfPP}$1OTi* zs3YW5Lp2B2Z8HY|$3MTl$$ZuT;BcUB+Cu7-3h@az!)J- z49&Db1HnFZl8w(i!rJE^qxaM)?A5lnwre!d#QUQ2z?xQEgn%CXIEfHr=z8EU2@!pJ z;BuB;bQf>=zCX)@-|~9qk1cb)+u_9ORZiz^uGoSNEKZD%ysHsxGG*f>Du^>2%`)y; zULZZa$}jxwf63V|JWaMZkJ_Ayy-fhX^3G_`YIJi4UwmmwzSiX(_Z9r&-#W~~au2nF zNkg6{37I9~djgW;1CnH@EMuEDksZ7Tj@*Ts13dNUDSr7kKFojk{ZH}ua~B}VNE-=G z9nN`La)FT z4t46IFWUqaC0lG@PpemO;j=eMm_%do#1m%C#0)V5yzoBfdM*whZUZ$1#wgJEaJWNJZ z{@Mb7wX^{MOvmQ88BN|YGoT4)&+CL{E}kuE|I2Q-z`ywab{wfsSBj^-Z`)au_I)41 z@^?hr>BnCUeSZWPSpEnwjd*J>);4G?H_!{ 
zK6R4Qy$x2?(lyS5WdZmrPc=*E$Dw)-p#>byQVvOj_xo^-~IL$h3$be;VB22_#?qi{Xh{55sSE-w0#BF zeg=2(ILXR=-1UNc`N^OBi+tBRALQTu>WBEn_k50L&tD{KrX*R47V5JqWjzR_dyrP0 zcxy%jtwsiojKaYam%2Rhdr$HIe&BiTKEA-8dBtJA>rJ=t!2O3=SOC;PFGomx9(l@I z+tAI$2O|mphUBNpu&EqUB)Nxy~(ZTN(0J@(Ay)^*1<=D3uoAm9ydF!X? zc$}o&_EWpgRL}Og(oRKtHO|*%Hu$|xo2|}wk)@sH!%p&=Xa}(s5VR9P?eZ8kF6%M- ziF$3FI-D3>nxP^jX^J|0)ZJ4T=zii$T>9)|tbge-WVM5tnrwPNEZudSmw($g^4jlu3rn{h;q28lzWn_2obR={syRiXU<@jW-$l9p z;2kCK82?jPFv86Xi=6$+2|oF2e}GA{5cPOiqb7edHe8=-!yF_Vj#jHeBZVvNIez}b z8@%n64GtbCP@5p)ee(H89YXo4J6Kb!%EhIJAA_8AEOUrsCLSy-3+J0Ma1X`J(jg*3fFF(J|!%tq} zXW#P_-}JH@dDF{p*8-n011Zy*y>uvIaJ%} z0OSnfiX+;nMD=uZx*_;+f`O$DPEG=@45!|^1^-R}v)$lxmectt?quZM`arY=v>erQ z3LtQFx-kkDe*5ui5}Ke5bm+^_?M>^UgL*#-W1LaMbVLLZ(ft}cSq}6|DF2uH*^AIWY${#zZ?B(^L)44SN6wPvHt*p zK^hA+PnpLI*PVMVv~BRe-x_s@pNKzfr(L$&o;%Z}@y$K~KYd9?@UivMs4uJ0c}`ZZ zt@lQ|w@naCX(;yXQ98AlN?Tg|^oKgU?F}ss zuH?8}Ni+lF_ozq&yBG$utOs0G1mn7B=QPr}fUG@Ee(^C{N578md&k%D_6P6c-~H!5 zbdwKrEWg4wkz~EfPsm3C-wm}9{1JYvg&N)OCV}zyU7HuiWaX1N9ipB$zVhlzU zRfnTG&0OE$mz(rb{tuKSwA*k>czgG?Qd{kJWw=9 zGAh9cd2QLcL(_lNATl=1u#RnZd~}NoTX7h88Mq$U+)O7oJN8?xRBCDZl4~tZKLgeE zVURH9xrs`TygqnE#(;mDJ-I#ER&(5Dxs8^yj{G$&kI{KduB%&@JxQwZ0QTf%@5xv* zHv7@*>q+_Fe!6|IuJ7A^%BaQL*vEFh`kkWw*9)R?&6;$31XX~9u{(ahIUSC%HEqua;5KPA$hg-e1K(%eir0RU8M}zx^0_I%H`(qWvbAic z(~;?9Jz3uSWgyxMKKc-kt&?tYd175CckuNLr_rXpnJ{$V?^I9K0VIvvtl_Pksy|AL z0mY#rI8mGktSsUZp|H;TiyLUC(er-a3X_O4fnZF6>vXU!!#zLrZM^a?zk|ga4)Nsr z8mF#Y=eE4z8Z-2`1L+@%p-T_VV7Jw=J&IPqUUorR~E*8`w z;y?>1+N8x5^!d}|m!9P4(fj#(fAy`r;Z?WskN(g9kN5r2qo~P9(iCSMrp{;lGp53V zcTE_rDz)fQI7gxdi3HS46mS>;BZAW~C~MeTj9f@a&B|^V7%AIZJxC`aU75*qG> z_0eS9qH7{hXxu_V)9dqgWEch2i^r}hU5mNvdZL-gX5>V? 
zy)j@vx7YN1U3=VRkT#u9w@I_ElAr`=i(=C>F6?#mhCa^j-vb*ycUB3>iv(H;SfmUZF* zmI~1s4IoZZw2#tjC^}u__yO+!i9gSM-}y$mg>v!IWiD*2b2)E^co@>>Lr$CoJ~ha| zU~s4v5AK9|89d+CIu0aFZa#R3M?d`~9)0geFS*4Hm_XrYVt@^Ioe0SXTUiqh{CtAap@h^Q)v^XW{ExQnE#9c=GB=HPw2?iKg) zi~sYF@-P40@A1>W{2?w~>CkMoKy9f-QtrMtuch?$6Tvz`2|-LEC>)Mqt44o<+LwO> zIr_m4trz2~7XKn7Cc*VAYI7t>F!N2CD~rq@TA{hTh)EO3QW^{M%q=d^m}?@&1dBh< zt3gn8G4tvK62+Dowxt z(lI=5k?)S%O8aHI8|#vh`EF-kkJGnQubQPV^@L>)rPYb}>CdrW0k+nI&1Q|^XE)1l z->vo7pF9oI^}UDN5e=&Dq#>fO6S_Y1?>eWE0^VCa+MRa3PWr;MEY^gzQ<;q~dV8{) z007({jk0@#%5K!}TFI<`iM;Cr@kHR#xP+4>8?`x;Y4W^oX$JKhyPK_mgITk(wcd^2 zV(%vqaCDBt8;&hGs->%kQ;5}7@amT6Rc8B&_oAq{^-1SJ(1fz;`4x^M7fEq!s(%-8;W-3Oghr3l#oC+C+TNhVI zyOvM?`tOpTyhOUZf?DtUPZfWpG@wx&f(lkaEF$a3#uJdAr&vGF!J`lG zkN?)Yc*Q+8@{|Aiw|MN?%goQuQCPP%>W;0)hA_({&Iq z1OlmpfpsiU1W}>e>5wkWap2C|IePyKSh@E$mTta*g+qr(8yQID2U0{Z0rc}gN*zE2 zqYlk;T%O}PJ=zy8vv%$xS5KVb{Ikz-@u^d^Po2e-K=b%D?pQx<*Ldeq!?bn{xSQtQ zX8G+X;lKuVlE5ADu^QG_`>TBqSM6VCU_epFR${Wq^^?kNtGq_7O{n*)ZR9x>l@rNU zrBx-4t$SUxGO2u|L|`Y8>8KX%yE!!&GCu9FoVEX^jQBq8(k2}{49nX9k4LAPMmML? zp;75~b;z9gl5E!NyzS&S=`;N+lfKKF{hp9^)^7V_KmVtV_GW1^VnH|QV>8`2evE zJs2!>OcvEUdmTXums~1>0>LBk?IRfla0rw(V4nF=|f6H_l*=&|}985fmyz6<+j`nxE>>bI7Z=1<_yZvld@*bDZ&cJP` z^S*C%V~aUtc7;qrRr{uKW4T?wao2{X8$fomUi)XVy>{SXp9}_ftul16|G^oQ4cl*n z{nLJtb^w}=eRRTw`ixa*yRzMXZ894w!JfEjh7xRwHx-Zh*#Ou?Jc^o&w7$*rfUq=z zbt)JY@=lk=>t4X2zx(HD-*F>nozRjg*^iI>PhkuCUt8NyMQpf@zMVDLi(<@xQ*O$4~uWgIqUAr4fcIt${ZIY*pKj z0JtD01tbB}02*MLJ}8GVV7i$07vaoh^3F?n^(*e?=l{X?^5g&TKk?y*pQg32h>L-o z12|V95EUdiZ1Wio_y+4gADG3J?NUZCRzl#Hu+1s+#{+%pFDlMkdIm7p76tiygZtn0 zcJBS=H>2}&*q)=inxoo7026>)sE#p5izf+{F@u1CC|XzwEc65?-aViwtSrw{IOjpz zX2Y*S)Qtuvg1j^!22x8O;P4Sfa+Hl0? 
zU?liA8l=$yxeTovr2psSb3whGbkx;)HlK%4dD zT>zVAM>E+{U3>ZO0~#%ZQS#lX%(r^}+Km|{1|H3@HcMFFSvVH}n|>H*@LN3t!GmH3 z!FGEjFTIudzw@13z4b6Bo;$^r(`T@qjt9=1`Xj6nM{*9Vg~IvBx7MOXfwQ*!SgYjD z(X$2J!m(isHj0Avyhl!tg@rku{LGhm{trG&(rTfF4VeefN`#`(F?ZWBT(?s} zU45NK2R@?(rr`yan1nocJaG4M{>9(^Zf-w*1nm{!XsYvInFHgLB1`z$4|e$UXB%YA z7SJ2_ zZnV^IBZ4s=I4+8u^C!<>H`=}pYz^twK}l4i8Qg#mZRE#xvuV)-sWnn0p}(`r!1cj2 zam(M=!S|51D09%U-^k}TshdjBCFz$}?YR7=BYs`Bo8fC?`5*X9D&s+ZH_N}Ifi@hv zH|qOlSxeuq$(|XJfBEQ?`ZXZCk<(Kg^UC};DP!RIIP~fi45cOB;NYqaY$X?yPXJ?feTFDQ#7wJ*@G#qFgOW85y`5dVSZ>YQGX9qi$-O z-6uv55{U@p4!z?L$$#+<)^9$*+A}9{7cPUdAOKU>qwe-UwpN?A_i9!y!tPFD{uH4-%X)C&#%<*f=PuMH(L@c z_r!4Ttq1s)H{HuS-uPk+Io5eEc~Qttp_#&|wS<58>s_v%%1ATuQI^$)Oyl4BPEQ9K zFfttd1S)6_+6B7-t^=lnbe<+T`6-I${+JiuS@1J|^KHE7<^_7aoWul!!b#`BQdt5o z0R$ARKYk2Alz0C?tM%P|teON!$~x>^gO`8rw{gqsUd_ezHF~|C2WBOLcy&<>0*l(0 zzt(pBX^C==BdbF zURx@Pj{G{r*CfwxA=mNG59*}<)2Ck()PJlc-8k8o{dz;0wPH^w9%es)Zo4bb57 za?{RUK0nCoAdiCuNqKFvzBjD*RpEVlH3k45N0Vy8w0X=T-`UYlw&%0nr|yT%`Yd>u zHjhDB)_!aK+Ww_20-$d~JFVAlVkNs@+Mc%R6wBMeZ=>%|p3~0K1e#!7FwL@IYcBS# zbxbU5vPgQ9Plu!KZD+YNeOa@~Z+}gedq=xc?|m-J-C}Roy75E|Z=Gnbnq|10k*oBt z9@+xLiqUXY`$xn&_@@vo(njSlrl9as)KD~2%y)kS`Mt-t^ag=fRxS}Qs0KbZr226O&16lWgd zWiM&)KmYJ+S!p)0MS?M=)~@xpB_mtdW{bT|hbrlpeMGdwZ|S?Yc{zRc-D2@M z-5fKJdZY{o6U2Iq&TU-zBdo%$rX$rrhu; zeeezOS?NSwUk2-2R9ycQ$VrTQx>LBZghjhfX0jkde zF>PIhQ_%ooFj|Lo#J6e67$`<{d-+^rx*mD{YM~iJ7!%~wKk5Q~W6Q+Z>?Q3;L9ZX} zzSa=z%*vm&&Bvb5cJ*Z7xvA>Bck@Nf0odES z+>JMlGobyVsxzU!_xbyZcEr(88}-k8Qu;TmExIJcat+7vR0nqvIXz+}z@?fBS1m5{rxV6>t&(4M#H9*z?HcM<^~o#yj6I&-cFmCh|@Zd@1Vi{5wVfswQmOqa~&y;{1`?XltbdONess zUGu=E{u$dVSi0*rj=lBuT*`Cu;4iVTigob=R-Fd}osSe3_S&{T zlpWPd9f;6dZ?k^>0wM-U`n?_w83olg(jzgD&8Uv;4B#r`_Bs7!50_a2C=sNpb2nr; zH~7pL2C27EmB%QJ7_nuWfCh7t0laGjAJlwL)}Al9>tkZ@WpC)vTDu+^pbxOp`qTAX zM$~+;-G(?H*G|K=tFaTt-kQzgfMUc^d+^9~>As-vVt<-pi^oA_lahy0KeQ%0t(QG0 zlM#JA{-`Ou*8Xa(6O{iV7#Tlx`3z*;|2{!VnQGq~tiVF~Y`uR}Kl}RFh)!Kymwx%w zt2n9+N|NCU)WlWgS6h$N>M?ZGw^juPo)xNqS!KD~>!=gaE@itj+7qVscFnjqrOl$0 
z?e5*jjJ4Bd5qa26+H4&CQ%buf@tNm4DF_%bsJ?aateF{6?BInh-J?vL(@2`7_D-Hx z)z`06AAqx_(Tz2ImJ>G+BKXj|P;SOXoXr}y*{uo@5bujivjq8u`%%f^@>=P}uC(5R8i*IKg9nvd zb-nMe_5*>|c|bAm(K>pF`Ht}9KlufUlb4aXCh7{b&x+2V-5z>$fj9iQchE=^+G06) z^9{Jt1H4olt1&_x+#kljPK8Dy9BlTWxPsj{%@2Iz3wi6;-HdiRNRm)Lgy|e0mfSS? z$zSjB3%>_RV-6S13i~#F(P}45oKapSSX$3C?nqrJPjv=KQ%rFVES#s$DuetDP&M-PgD&+W)8jbR zen$GLKWWuv#J9mR>Qx(t3S(&ljUZI@hL{=cM}eIUU|;PM17+l08V3)sl4qR$ zZ-18d$G=3fxPWs-Eqnh(j3M7>bMLo*6R&*D8@ae~nOu9!tt_EpY8&ls=!AfCO5z-g zX$QLJacgH-I1E4ey|3o*yutPyV(R((fD&Fh_xc#9$QlL9B+# z)eG0!oEb6Ca;ND(iVs*K-HX^u&vXCn9bWgM7TPUpd)f(gRD{0c$AY(>a~gaZYPAt9 z>rnr$hzK?>NRAw2{sp(wvx;*TtByQ?dxi5>e}%LDcR2D}I7dPFu8Osm!WI zOYR)G^M4CxtMBS4oXr_7v#d|f-ZC-y?S3H$|I?H~lK9Vya zsy?ubfC-Dr61bE44C)4E(~6I}zK+E3#{BZlq!_8d@iPCQ5YeWuJc>HvTKU^Id&zs> z5+=w*zz0i@J~TtwLp*EkOCQKJAbOQPej*<+vbV2aE#ttS0Yn^vVsJhk{v#SbXgO9$ zIcC&9wK50f(ht7Yz%{LS=zoqrS$a`xsUt`|&q}gm!+L0~9y+Q{wPuN;sVPr=v@g@t z`lx7ir?O%kXqsBRqx;pqI%|bXiuwRY^{DHkda5umt&E^)diZXyZ1(RGe!s8DuMvOO zN-TS-C_3WXUebQoz0q`ialMm$_l#;fi)=Q@V=r`fd>KkwPvqyV7{Gr24*RpGYg^wa z`|FX$^S&GMoGj1lS)Q|}{nnRvxpl@?cjt205$6IJ4N)Je9h9J8N>{AqhRLC5->z=} zgc@0dfU}r8Z@|uFu(na($@u@2YG+$xr5_}U#b^a?X}Mzx`B}_?L$un$<^S<#=zio+ zNDdspxxC!g`7AuZX*V{=?my1E{_+npmm1DoxkS(QNScjuXXS%VOxZ?}MFCZZacgk( zDOj7sEzI!^Z+ihh`T3XelmGE^NIHkD0j;9mlC9A&obN32H~*h?4lOi!)2lLyjV_5m zpOJjs1Y{&=r&YSXU>*J$F@m!e?OY{Un&al>fEEgqgMbmtRU1HCAAm$8_}VK$4XQR3 zA33-Lb;9n`6$Oho--tPUfV{{tm1IzL#3RLCJrptER7n6xa45zpX{$xr%n;+#oB9#= z05EGfvaHS_4s2mj;QaF^DLOsO!lL&jDGq1xx*vHi!Kmu5w=RkY?)v3cH^S-rQrG#u z1OxDrbN&+&i1a5JGD0e?cl}?9{i|L~REc1LtHH!hA9dB z_WP{#sK%Tquvqo-U;M2Y4WMYPPTmuOtJ)}J62~72LtjKLU9c90Fg%8orE0_*wYDn9 ztFmX+%F@`D)v#a)V^176;<)Q))9_5xcBxl8_Vhjx^?#RAu$4@!e&PG8VuKaFspnaC zzAD4Mp{K7;_LsePYwNGktZ97v?Loma&TgW5OufY3?wb6oS^sCfU)yG0!^^7K(_mG{ zDA}gEx8Jt6Q_^mX52N~P|1?wWfzqK=yGYk@J8elj9qh}^!%Vo@TiTn6?yFgQCZvf( zFsdc?Vl>1rQUZCkgb}EUJDR~=i4cwt+iIsHiMI+|StM&K>;L3uu^;(7$)Q82E2`aCoNB}% zd5%pB-uV-Mg;zcBG9Fny!P&J-qzws2ZVAUynNHBP#u1>fmeutQSYOAT>|*9x$l(M0 
z*pGcPpMCZ!zx3h9$rhI=Y#z!pph{6VvR1-#tMmM||GUdCFSK~^Jr38grN71s{EIud z5xce8Cch4-shTwisOCX|-^G3cR z5Fo*TCONV~(M+*%oJ#&$XZ0ZGUj z4boPN#=;!Ug?W;NInt~_quCON6^Az8X3kU*lxEBgk2t=b^{L@!CFhAhAdd}R(&Zz0P5SU z7yL&W7wQA}@XaB;fgB|*l!hdz5j4oYN?+q$2I?KI)z9yiAZ&bCD;-t!7JF)QYzWxn zdOpaD&&+r$$kc<(s!Gymc$vF`hRG2Vj@CPG-Dje%lx0<5jf>hC`g*(TSkHyR7_uy3 zv6+&lcrYyB<{Ff^-YepZ0##)pDNyI=XzGEx8sP39^yT}JGy+x{;`-Eb40^6&5FiZ} z&_&Y1HB$e)fK~DR&viWz>MYJWs&Q5+93)vr(nv{Mcu=A!;!4l!n*%Hgnv=K!gEzR& zg9%`Up{a_fqi336nkbf3EG?H)0HO|Yj?x7Lil_;m3pA7~B~U718WO0$=5ksJ0|$O= zG$@I)^rV4Jn-~)!rWZX3K0hG_CJJq@kT{1X8BQ_;XlPEuc2N?F0JuhwH0o{HtNNBJ z<>XlbZ%~ezQPsV73PT6QV9+eZ78augRs`!5+*^7+7C<2ZPDzJgzd?G zpCKC4uE(aPs=i~3*&^TTanC$U+O7Ie`!4>P(Fn9_n!VcQ`nS*e;|@q`e`BrE`duAL zI*N88knK-Nqo16Nm&r=q9}PXrNqf8b?bS2WUg^J9r4EwycL3t>tGax@)}l$G@NN z{GRV*J-@=a^(&mbe3fH|50dv>i8WxgTy;#h;qcZnox*YA{03-->s`h@_c$EBo28?- z@DG3DTY2R4f8=wIT_jtW!{!zjcCRXe%az7lgD;(K@wfh;4Sx0S3Ck;>y#hZ?)&aUG zqB^qp*>k#k9eb@BJ{J{|~=P=fqi@ zTGWN3&SD@IC@y}u;^A;59jN+7V&xkXo zijt^e)$)>WekXUo_GPTDp7&O9&fy9hPQP26wctQ-Nk~*MNkVE8Op=f+E%M1<{tZrj{Ikp-Tn-?p5YUoZ zC4yyIU%N_e!1CpK?4bhTO2AzFv0axJm@MUWfA*amx#K2!Ya8V;a_^o%P%;*qEuQ$? 
z=lJLcKgxX8;9ajc$~#|m6qAFf!5D8bnuwB`>S%1$FEos8o)_!j#Hlr&JKN#(hVsa% z4Zd`~!=*x5UPx)BK8-)z8Z}wUx)nJbnp6JEw;$)$Im>z{Ly{n;09;iTv964qZGuJW zAdVZ4c>D2K$i#AfBjw+IdX-1d=Pb@;-j`)j@LdnIc*7m@thYT_tIm2qn1v%1Wpx97 z{_|IPqTOJn)eOOq;;zJ7*H-<|_r-VK+u+T|8)Svz#Lx=>a5FJ1qz+9D|MzFxeDpb3 zX$jx|;*{4Pg|$wP1VdJzQ`D5WvQv-nsfKqs0aS}4S!pqephiJmurgHCco}9U;n$uh z_~4laJ#BL9Vwdl~H{}49PT_$tbqQenF=J;bX!4&Q&v|6cASU5kZ|m@9ZWM~#QAh#~ zk`P7S2w03if*#*_X$*)J=P?cP^8LZZBfIML+bQJMQ5a>t5I*#L&b>zrx3(N>xllMI z!M`Xml3;KwH-ty73cvWoI?|kHK25@u8SPjJ$Bsdb(k+zNA9lRnGarXxV$eif{*5g52%Hw+lP7TC%9dw19FdsoIZw0HcB-!tsA z{5`9~E*CU-;;^r{A+e2`ySHYqO0)S|dh)7Xd#oVRc%HW>D^KAxj&7EI1~RB;J}W&d zyWFJvy)Dv>=)!}nj>e`29-4yoYHUcOf1-wkK7-xg;uO>UHY#x=X8Oz5$Ynox7p?3{&_xxi1;#+J zraXV4I~;~woIz1z(h}5KC}40BH^f}poK%S1hcq65q$@SW3PVQ z7jqFT;b@%@04+dR9N~$FALT25@DUbP zmdiFT6)9xvFrU(752!gtX)yiC#S zA&IGJk06C|Y<`(b7cQ`|zQNpblh@sQn76(5IIMO(H(G@1#y~hOpi8w?WdK*oZt|jK z5Pzij?4_J9Ja(Di``mfn``8uEw_$0q;p-MaWdhh^5S%*Z8^VM4EOYB&1M3Qj*7A?X z>$T#i1d{4Jg){*;EMI~&Ec6!n;8)gp==mNit(4qC;&Q(6oKHA@LUZ7(<*m2R@n>F^K|5Gmn-EnT2Myo$AO2E@57~@{ zC~v%3dF@@H-laxpRZbZ>m`ZmB7xaoqUn#m?;2Q+=zyRARd z61@#wL{G`*X!t%?IExv$gqW%0cB%6YYK@Zz=P^1;z{- zXn(Zx4rN=HM_rlYwXe~iLYl>_u-yqEK30KmQeRl9U{%X_9Ap`N4zc2+J~1V@6Ledh zqQ)T4Ux2F{uxPv)O+4on%IM#iPg13@c}@(;kwYY3c%JUBe-P7{M~y!Q7r{(12_U%6 z1~gON^w)lfANt#WmBpou%dW$h&tKr-)2FcWDYj!-J9j?RquwE>>zv;2mILE>{L}`W z^#)nfV1fM0e?qo$BQ|>lZ+`QA{5$tf{>DH1Ex!23d1P?`lbX^2JSbLkq&d$&|3Ja( z?^9m?b&Bn}0HpNGehfDZZ?*#*Ct!<$x#c-N{uRd;pX*^7&A$99JCMQl@tZ`a>W^A$ z*f7Q;eyX*dW}TH9KlKHP0>)sP8P++hbr@qjz#>B3YDMrq%`_g}Glp)b&13I3GCR>~fJ`{~_mn6ej$8%r)68BtqGrev>ferYM9{`LP zdPPCu91F*e;>2M)U6^YOHJt#~71-3!I6Ut`N+%fWt+8B?b+725o!lR9tXm2OozyVU ztlWHz-dw_^3l}R4C(p+qkO5%M3ma9}>PMR1x-5lE%@~-t3 zhvG`Buu#jugDbB^-lWD~+2@S^mE;B8LU4KRE!?cbsv$83r+_?hM zX3jUgY>C%Aw8lp-Zg9iOd}!tf;KqaNzWEEn(Nb%1MUFTHvEc}>U~-38L#Jo4BIF_z znqzwf-Mkkq_L$oO8^|jxN>4D)G=oqwrjy!Tp{^higL4j>D@Bnbc-I16T`Ikd$J}+GZ`Sp`y$qPo>FY=aF*$(4osxzdxgO z>?G^`P19W4z3$hD;cejLPfVVA^7h|5pzdp1x!t1OYr`?Gb!MLLZa;UuQ@VAMinR9s 
z_GmZ6e68OIjHX!wXjA3)HN>{0zRlDAQ+VNr=v2e-Ah2S1U+{J^Tc`VD~^M7Kus-qZ4*2g3}hrl3pfWy4v-ZF_wGMLFSU`S z1+>V0vLqo9gYEXvZkxq>ZsXg2;z#(FKmRS5#BsUZ;qx0;dH++7a(e9wM;DILxw6KU zXU~!(=`bK!BOUlIgMt(fu8|s^I@{x^vzGggf=v_5dI$UPACWCC;g)XV&2Koye_nV8 z|G$6r0p9<~XRvvLWUfVG5}Yd5=17`yZgqiw`Fk6@{B}cg-fyn`26PkPZvf+{2ahOE zRVISddEWhrHYcwpWQ!^O2%AqRGNAE&xM(;^tTp8czPDVx{Ca!V@xQtdgSC@OOt3mt zoVox?NT1rphfM&DvNU1+(p6SZoTAZep{8o37~G=Hi{k4If)g;QkV!@fq8Wp60%_`x zA(o*v!}D76-3HiRkM7zA3yX^gS!H|*{mL=`2A07AMzouRz!!(_xeJ+ZqN{7(FQL>L zFhC}KSQbiN&FI80%lXCv4XSsUy3eC9%H27@OGyp*Y zq6X^<4&HJMvW&djp*h#^pl^j8kt7N2^>yxk>l=9Rd*0Q%KnD7Iv>P`LR18n%319jW z{MmbjrcrY1`(i!Hb^P-jj}mSpyD!3!bz&U?>DSfMYi@bAE5ifl{j##Ob>*xR`X#gEnjJHB^KKMhSTKnrLTt#;0X;4XgfRbLT5w&EY$5V|{Hsj4>5>>{ZBN=36Y@dW@cR zSR4gW-Y)!AQV~u({v^FCYqS~-tU8P+s70~W%s0B#F0=y!JW*q_CY$~?s*STzg_PiKAdZQOEgW+heE5Xpv!^Xd z>isTOvWY{bu+ek8_y&06O&M#e9da977ibk=q+KXCHkDT%0&I@e=wKmOm4-2)nKt4MK)%sH^!pplu<=jCV`yfOJ4cr9t?T<$c7n(}UwCS3u}W~DgH>t^}NmMJEuugA*GB%sYrpMn8+FGO7#*H%N**RIYkJx)cauH0UwdCA(}@AVQ4f!&y{`CV zUnLm^zO!%jN^)>wRKmIm;^7~)It6p#rExNdN?g6X}F1P3T|2}Q8 zYl#OMV)Ryr_q|tcK5P5xS{lDM3{LnRB$ANpJiqXVT^_xVkTepqM#}9s9$+r>Rz}ex z$BSM2yS3EXUwr&fLVyDo{Q^$V|3xEE=J|6j1fD5B>W>9G4>-i{MeT2q=V*xDD>3Lr zP0jnihiK5oltC7PIaTnl7mn8ku{q}hZ&bjF2C&Z`RkqH03p%aoX-$Q<#FFdx)gR+^ z-GarNZesbio9K4CB_9#+^KGc%=wc0&Q!lelUU1{x_t3iW2;Fu&#LMwulyi>6r1UPW za^=ivaujO|tW$Dlv4xkbQ>BylXe`dNcPS=;=DjA^QdO6LNC01^_iPd(8)wOlj z+ili2Hdx+y^wn8`NR>ip^p)wNf(gzgsG86}4XO!F{ZV7J9`x#V9PO@; z;4K0!1luvb!<6H+66)wE`*6`ISXy-a<<}izMZ2tbawNzmf{zj8Mx+d8Q&-+n?^L`$ zQRl&Lf@LCVP-g=07LMR*m=oeN0hup{`>LA+e)_M)>A@k`Xjxnc?c7H|b~b?g_;w() zxkF3w`h3KX6&m_$t`+7Q19(VPN5V@3$~sq8eM)6DR?QI-+Ry)Wq5i7gr(#Q#!U@0q z6~(Fv?N{j;5d3~Zk~+TW1x*s$quVRO`0nK_B5YX8%MTUYzR;zY7r}tkAK~}1f<$1W zm-7BcyIiq`rKLtVb}v;>>H(_Ngjx*bF$2~1X*ZTdeXFlMUU%)z5_fbZwp%sX81b_edc{qewGDGz2>^1vkF!)%?J}`3L-^|K-2rC2xB<&*oS8qswP`|HX^^ z>a$Ps(GyQ|xzqO6X3ksGCG8HEKK}%!@D?mOFnUbf!a>K%SQu%>N1x1j;*uk6fGv7B z5%NM=f8sg1U;F}Gcn){z0@~f+!IvE6pZ?Vc`IZ0iUHsfnd?T;8dkO1uBuUV;#e2U5 
z4?hd35%M6j2*jw904z}Le9@Y9o|a&n#gff8`N%^FKleu+%v=-O&iVRR-o?-U!yn|f zV@njhE{REM^)dtf$dDLF#0F5;`6$2v`lFX>D?mw0 zRH^zz3!W0yDE|&x3W8C^sb%r5Td>U*wy;=d$%CIl&ret^g=4SL+Kdpir(5)B9X-sE z`|hE$zJaI-z>q&aZj8Zfbh-58vshb@7mmEJ6jsTdqi4MpVAmE%ma=loaf&?mfdL(OM!TiBP%q<_F z$P1$XKv#mwKDzS8`YNX%e}XLWQOQ#gOld2qF*_>v1%zNTkcwiA1YduONxj9Vio>b( z(ax>KsKsdEBdTH%qvUzc9XA+$>q zfflG1Cru07JX2~FXtZc^KH9Z9@23+-VWAiDFisH{To1%PNCeBZ>XWLdE$C^FuJu-RR$Liy z2uzu?r(FuGBmv2EVE|I+$c*rn3kB~x(W5y( zM+JGqUO=+KQxWxu-s_+Dy&#Q*B7?F$v$9{REk3&7nP zB(Aj-w>#nP65rzr)t$+IciOIh(K34waF^dD?CV+YTK9`FG-);EakyHmH5F{@kFTBNu^&2B~4w3){ZKj_JRrb==Q#FFuj2>*m!IT;x4f1! zy&fNY_IZBybusELs$#S(EP57wCNPX{6BzY;4jVn+vo2 zYptWvFnr;x z@=O2CxA5v0AH>=&*?fa1E;V`gXD!w?Koqs$eV0X1)d0Wwm4aFjhE+v;O#>^c4`As(ec-6&NXjw>mgkJ{qnB5XGtv z96PoY&XPOpk0#s73aqpc)S4P=m2=J3akT0vUVcf!p%>mw;RI_73R{p{M^RX8QTWKk z8y#F;;1G&{VFb90yeN=H#_^ZDh&;5D;DL74==)5#^yG8o-5$9@*B0c#XJ27M{hXsv zVfnUOvCa_;+wGOR7qe)R%9C&L_^=;q^ha`r}=gxECndeBeEXX$kN>t!$X;m8jio+HK z=dTo8S+%UL=3Kej;nJlZs~y2K8>B74nH;1*C=dczhX-{8An);}2Ud95O)W03b%M2< zAG|Dr3&&8)bx{BV<1u6cKu)6pnW0ECtZ8A>CN6DaO^d=bDNK``l$-_~ZD3`Nb8Etx zwH|4rWj!TW0TSCp3BVuKxE`z`rRPYq9)I;UOWfEfSnv7qzLHXPJlbfqrmN+nR6&fz zdtz|b;);URE)+$BuD0lql50vqij%~@@KA%52v$)Z&lSRV91_M4ON!GJYlXZgw0j8~ zy^Pgv%37zvg|!yv*9b@3Js6L5m6dY@{iq$=brxJaB%9N9hQhWebeT_1K-dXe;v|v{BH-6`WkI_4|J_$IE_whOaHI2t1nDUAR}q;HP#fg*GjT^dBb))*MxS1g@HEOL6eQobXRHTLYY0M`$S(h zqupR}yCdh0)Ma!f;?+J7V!t$t4*VTKf_ndG)Ovc6B$L409F22VSo`D`eUx8O zZ(?+4iF>~L8|faH=lxGU!&lCq=gis~UEA}UH}S`l)oM5f<&SzcO@qrH|1|FGWzxnR zT63sAJ$c?Etdtm`=MsMNb3MNOMJ-mcHeKr@HTs%5^0hUJwN>(yCo!!#%<>A!@fTo@ z+`+vsT;}h8`-}LY|Lv1(xE3y3;CDWMg&%l>;qIILCS0Hjrd;EpkE@8MIv*jju#5bo zU+MBk&j{&(d2FZ0fg2C-ZEt=Fnsj*aUB~#%k3Q*n^q%MYB3;I2viPH$XcB-B<8{DA ztGfZwjG@@_Rv`&T5o3@IUz-Y+jR)_95J}X{))MV#^wV~B; zY#j~3Xr<35VYC?RKNw#dXttX3Viz!iZRgA%JHmmx@1(E=;w&!l5s`CMNE3Q%9scm& z{5xLs^$&8-H@=?k+6H0>J`Md5?7}&2e9=8*hYwM7IwbS0GN6j8B1y{C=TFmJU4zAi z(%+{+FhVr+qiZe8cN~W-4Uv*-Af{6wVdeH)&}K@$zCn_BYeR`gz(d`7y`05^E36zl 
z%9$rlGG{Gff>oyTeX@1qVQk`$YkDLmmf??Aree5o;w0=R={KdB%PY0<|;XtDy4N@_SR*2x61=}WPI;^P5#9f+O(QW z7%hBm<@1V(m2gsx%Mr`RN4U;yVjn)ObtN458DpJh?738U|9s2cGHjJ$D&yn6u>8>y^az5~VodJKGMAjL^rWqb|S!WquG{u zZY|3_UGPp!1C~bt5bO8xtjz{*Gb+ybh@MsJqDW=iDe(q89$rDH;^^X>gx%+RUd&dpD z=ZVL7`kB)dMHiCPZ(tR0##?h(4Pja>NvnmPI7jEBpQRolF=9j`+j5EqwhCD@<3rDM z`P5?#-g$dQx7|jK@yA915(6q&5o~t@Iei&9dxG@H<0QA;$Tu8m@xmJ$eCFAlY@xxQ zoXz;a!@^z1n;->BkU=e4u5acVDkvTpafKyYp6BO&&+)UL>>-PbI2Bw`@T1@MDqeB_ z5oG-&FS>b=EE5Vd;G2Vt!Ir+EPTqp{LyUIWYgJO)JTJ%9jd4JTP5w^bJ zNM+I&P{Dty2%A+YrHb`sC1svfdY@Mjtl^5nq9y{GK9^`EXKMgp2|(4h<{Eh!wK1;s zKU6UodU?*lTaO`23*<#k5&)hA4`fKQLGS7Y&;0iLx#foAxNm#|g<6a=A!@V7K6<^J zm78v6@z$H@J@hCGbMsg&5WKZ)nr3v)on!s%dFB=uD2l>I8de`2*&7!Ly>6G4n~sqz zE>h$Tj$hr)mQR-^Q%PTml(6c_EOqOOmxnX#? z=lpQvTm^(VM`98#ZS;6xC4nOGU~i1nT-SYZtmX~g|Gec?FJUoFLK}F?V8r{tGomcd zH(5vxP~e=!+1Tc>&8WS`@w!tu#W^7(<%?G>pXu5V%{qFbG#C|(5ze)H9B&EVTcn8b zfMwY(1ZzNX6ju1~i5?%n=vZm_G)c(QD5&_bLybgOOrX(Bd3wFa$4^-P{5?WfJ=m(D z{3u&n$(Uy~+&B)9TN)TYt@}K6}#!yD6vI>#ysSuBUzb{->Sm!7STiciMJ4 zb)0qo*!BLli=BF(0f11|bcC|(P=|I;jyqv&%4dfvG2!#qU}l(Ik$XQW?2PyNR6Hv{ zJJL4t8)Bonu$$oDmXC~0Fnt|0Vpw~nDqA&e1p6w(i?OhZI7PZ10Hi$LsAv)`0K?OR zXb|F`$x_mr4x(zga%GK`SHGBZ58hAbgP(^+3Rlw9~^ZEFcJ^3Cpj2fXCK0xOC#AN8Zvj)FEtKT>$<}1dbeyR)h9? 
z|Cr*r3#9W4RNKY~;#4(`a^7S{=)35#1BWq&PCMZjKG)@~w>C+n6QXcRY4zcQTzXGW zN!r3-DbAjx*F8_OwaoE!12jRBjNCN&<%b;K@oK}7u4Rw z_|QWs|INEs(PR;mCfKzuciwvgfBnzB5?MbBy2dRBgagf#3q40O^Hx-46cmG1|ihnIq&Do6|P(jh)S@)iqCj3#sf@>6p9qC42V&+ zkklHivPP6zBaZCywTsp%J!eCZhxnXBk0UICSq_Jo_hKX0bfh z>E&lqL%!B#_4HX!};%rS8A)t!-03!~=p<8dIz~PDl&AeLyr;0Q(<_{n7$A1Hm6uW)l@fK{JbN<=q z{5ygq!5M=ow9LcB!7U&`@qblE))3|wnk+Y)tSn@xiH+?r%L{z+%sT(`yPx6T{FQrH zTuiVS3r-MYPg%6JLmkZ{J|*S=O;HScO#ej~0MoLQu}}FXSXN6YY%=-#P<%>c(9D!*R_gS8T@kXMP8%5 z-@KaXiW~#V?F#izoaI988jVbw%V`D;H%%79D8l5=g^v#Wu2zwoKYDF?%pNrQh-z)O zrv1>`KA1|!RAl18emB6sQM$L2&pxAQE#kl9$1m-dix7YYtI+P1U;{2}fU=}N5^x_$ zxk00qVGb=ruSeePk{h`FFT4$oEJM$F|5qn3!Y{rLPOQS*ynoJ83j(55@Q>1u9nfrI zPG7`*{s|;&fC#Q|BsU!)zwdUgUcTzB7ZHyd*zk#pW0h#;XId@v=?nBe_5~7?q2f2S z0WtuM-n%L(3Uxnj^{f8l>ZXH1(CQS^$av4=9X{|>!u;|A);fRuQ9QsEqh|WR6OJ4~ zS`8dZYrexPk2@g8q9k(}AAJ@cdMZUos!f_l5koMLu%mYdbx`D%){^0oXB+&D|FF*a zZj)poMV*CA`Fnri)!cfMBR}<7XrJMR`GS?Xq?AWLtR)eVVyR_lW(sP-`CtjFdBN&N zuiSK2A{k!T_lZiqAGjFU!xj!#_^7W{G~f_~qqH&#-^=5dHDoWaSsg{L`1X8Bf5P&2 z;LAB&@b#BFNA4VjE5hGe1mYO=Re=zQLgG=xS|xR;#v1UCI6@09FGyFGn7{p24-#7~ zqaf$b`g&UB{L{}u;W+=)vs}G&5hwUSA;E8-b>0u4HE`ntFGS~?xV-RDkK=LWBtZ+y zl@n(|pbf>@5NTNz8;)1@ih{=c91FJ`r`zpfOt2Wk`|w;ib_3a=CGrrhILgYYe?E7P z7~Z013l80J3#Qee=;b~Tg%OIvVdh#i4=j=Awmf=W1$?oH2%Ysd7oR^(nx+A?kv@RU zg|>)+LsYBixMBnqA&)ZG2rhXWsT)@2`S^)04?lScN#lXf!GT@SLOFII)yz-#q%6b=)7@5|~MrzT4DTm<7ucD9n@Bxz~NDSg4{wT6_mG8Ug@38KRGmZ!G<%o&nL^)O%-!24~ z27p!L^DOJ^+dwHrLOCD!1|uXr$tSOHrYbW_;v%$3qvC!z*(oSUR{Q zsV`GDU$X4wcKBLlurHTvUlrtM)M<5SCvf3rX(nss9WUdzxkuSPZC>C$*zsLc+JE)0 zgSUG{GaTXi0N2@$oTe&a7Bp)S%TSj4Z{irw(QffN6Df~lZqt!?`Wnqn2K#SV*^K(d zI7{Alp4XnwmvtJc5R)y9C&_y6#)k-C4D&D|y~!f&Y5Iu8^zvJkwZO4|=0 zdJ834Y+;t?v4ur)OXu<$hws0e+rIysKnuhPG@J1BS@^fV3tu@4b90`5tKMR-1OsCw z{Dw_hE!-EMh0~WIO+9Fq7bN%H#_B=?yV~{}vLfzg!w!iIkv+=|wga16TJwg- zPb~1)ex}10Pd7-Hnh1*PbolOXegQx5tuI0@KLzKWfX+D%FXSAY_s8`-d)lvG3aGF+ zCp0sQj}B3QDd^?}-LBUMi87dg6c6ylz#b9!DV(DyTy-2b1_p64IEeKYcoOWCBG^~k 
z_(m0~l(hyFj^qWf>Vt0-1-XqMR|Uz4M}XD!KNxY+!4W`*u9QFgBi=__>!zcmM-Gv9 zy$`g)g$TWcKgO8nUCuxC6wM@K{p1C%oH>IP0$+}vv*fNIS4*ecKmi)_CZw<)N&puGXXFQR(0{1xKl_I!&PfIaml+S@X^34-7JHBILCoGSZ*5f+*W#~ z>c<$OrDgqPfj^8KF+w36wRJu|k|-q39~(9CUicdUKO=sOs9UnS5a~Apf0Yi_ze}lm zUiFn;Zn@!V0^7C>(jI{@|1!h#Wy1d+no=TAtEIFe zd6yA(1D__0B2owE#ZVuMtW#_eqtj4&0Z^#skI)}W@Gc6s22lv19Tda}Hu#;4 z0ak`kvzm_7A-0Os_;LXV?j;=15W|JM!H1u-bb21dbTxxW)YrFL6x@H*@W4{e<<&07 z8kRR5vsm@an;6v`|H8+J(6uSQ`&frU5>{G1NKWbhbr1+thSa8?r2IPyM4S!nU9S6Y zuQn^%$?AD`SKdSO?!+{E5O8m47%SK^{q!a5IFG%dJ>Y8}6ft`i_G^3AX>uMXNol6b zbCQHI@OKAPZY*X;vmdx>Rt?)KZ<`#ASRCzKRJBaYHRE6*+sk!FqywMb;$uXDo$_9> zJZlhe!u$p*AS1qQTFfkIq%e1`U)!fyj8XkSV0>9q;t@~=z=&JDx>Z#9y(ox|+(b24 zilstoNs8*9soEQF+^K39g%CBte=j z{@`)p{KYIpl(Zodsx6~w`Y5bDnsX@+J+sIU{&bH&e6C4$U=gE2-gdm|<%jskKlW0V z7B+BapTVrHqD9Vuxq`!sNwpcLHg*W!nnr}>1)-4^9z4YZft}ntw-=difK({0{H(26 z{8K*)RU5`t34jcT*y>1??`f3q~RoQ5_xpeXb*<6ESV~tBEPm$y3Y4|u>^0(1xlOA4S z;qKe%SP$w%%OOyb#ISMZ9C^EqigNIO^0zJ<&jJrXkwVV<2XGGg;npv zkzhd>tqRoxiVjD&+oQF-!u$<~$@82TrPZmWd2kt*WggTGK${K6daZRJhN~AZ(YbV) zG)uit#uD+V+QF4#_RqQiWC|7|{y3{Ynppug9!dlPkNQvRs#!U%%XJTn4(dy9W z;V|U`KH{${(I^dVY_KH8k{KVYArXH>HU&~6Bt}R@NR1&&gftPdBwD$C)-G$>W9p~o$ymPaj$H^%!Q4ee`PP402NsG(b;g3&=Jt1hn+O2Y@k za;SLOmWO`_r#&Z#t52O(a3Yb&Z+%7XOHKjFGCp)pc=D2xNelwwkzf<7Dto}u1?4q2 zK(cm~R~``VKA2KCKdzO5tnlN1I+yat=Mp}0GG}F};ipm|9Cfc4uY-Pr8y{(@*sxD4m#AJ1y_POy4Z2Da3G?#5)Q6X zw9I>VWVhLav(>px`PjuVvswaQ2ejKv?(#4)xvtHYk)c-~aaUD?fOX)B@^|6T+=h*x zLksm5cZossu;DBtWyZ~4=#cXbY-&Iq)(KZ1Kf&4NA`AE4#Y=wtFYxIn&(eM9X=Gsm zodedo=zshma-ogB{pB#AZ7jTcCfFuDCwAaPldkgmN6*0^OlziuXE*ZE8mxM$n3w+1z^IW~WfhK~g zsRBjdH@?>S4P}ulb9DLs2Qq&9@f_SdX~XcP3ke^6TKM+EIk~eTa;E?()_J+L(gl9+ zvl)N;S2lR$qL3_QsB`4Kl-uu^=YRc)uj8KE7O>}^L(V;e>vmCF&}vwg7kvcGYD-?z zm|(_qaM93Aa%_$dkf4TcZpjO8y@N>2FWub&^b)t;wf}2~wnWg>I*ikS z{Ho1v9|&Fp|Hk#pa)@_IQ8;?mLg5{Csf1&HBG@7vbtN2K9M^xx6r`ar|D>R~gI0sq ztvAy{z~wj+oLVIDAhJk8@5)uyPMjfYwa~oa!jsQ)!-EB4t@k-u9@_?I9rLYuj=uPQ zE`H>*XkpRR%RwC^CZ%)k65Y#JXx(rFu9y4%qa_}VqU5$<;qXDojQ7oF1T7q9zCn89 
z5sKUg+9(jO_e!uf)KEL0PoV`1b8{>nzmbby_%hLI%UMV3=pl+ECGQm^CfdL$R{XI| zr*P@iIqdod>D-dHobs*V{kX+{3ef>vw7_!#_!`yTAqNlNIp`JvOsXk_O6^(zo|5=p zL4$4wu-RMWnZC7x$D|YzrQLH}TrHSaM~yJ1j~Fgqmd@1tJN~o zXq_fHV2xHGQzGi4a+juYN{UkC%7J;ukG^V!&;ITudfj=FEcKFd8Z4$w@aY&@P>ql7 ztkM6R849@F|D$$uaDg0$=K8oB!Mna2GVpkW1AQ}F6+hl-EpKX(b>oYjTKUm;UQF3v zgn zgsy{*I{^*gd}ArPo-dP?5n~Ow9E#lUpqxJfEREC$@o4t~m@=S=8cJ_u0SpX2^-RLK zr%tkP=n%Kwd?PRU`#;Q|{MY{n#nb1Jg*ntTpeWGa`52tOfc)9l!p(=!Ue9}mx8j|s zlfq-q;Lcw~(u}w-HLtx7H$R7-JMZ~$4j(hsRXnPITFhL6IeV4PFTEeL;V_E}Sj|i9 zGJ*k!U~{FVIse5sC4Apoa$a?ZgXJ!y0a$b?v<<}t;e(Il{6Fu5|MW$HW)__FdoV)8 z=V?_*8-|PRjKBZhs~mmj0(6KL#|8w@9)d_2fwqIK47W`B>6~q{BI+T(oIlZm{u^WUf_$G1^ol_dD~t1yiE|JAeJ@+J5O)M~>X)lemXP|*Tr#1Z*Xy~2Yy-U8Hx z-IQs?_DG-ixjI}lC(jzPc>8<3(q}A(e08f9|+c2fH)6;s0YKG zXgQQA!J1KBfL@}nwW_~I2q<3bx{7br>bm#q=$|!4F`-NsktTJ)`j!%SEWsqygE~dd zOK#2h@wcpyG*TKSK^znsyv+sh&lsVB(iF>jk@E3pEvGjejl@)aCPZ?rEDb9ai$IpB z4=Ul3;GV%za+VmOTjad?h4Z}qi>ti*xm8++4&&5dgS&+2m(&MP@^UGEhe#W-ztxpq zf(#_|KSeMa0-eMc0;2SRd{J$cFb0p=;MU8M003$gce^_MtbNajiuZpg^~=L!i^tU@ zSi4r~`(E@bmES|9Hh+ZswLz{LcEY%>-pW&&4c>Dq=bLU#xMRr&$x#Dh6fmelVJ-LF z*x+?Hq}+dl!H|1Vg)$|~D|1=O!{^}lpYqn_jYblDJ-WI=^wDM@;C5*LfDN8@8|>L% z_1zuBX{L1?*f#GC4Hq_Tetm zd$0MkUNlUY-vFjC_TFyO?yQ+c(q00sr7mq%zrOo%@KQ`7NUk1~bJ0SlAZcc>(1J@H zzuAiWxW0k~Nz>A~JFWmpDb@>4KJgr_#TGZd<5-xR>6Iu6LkotBqF-HyIaUxkVNd zk}K=vzxaMQafxJc39AKQN-=1NGHJWYVxz;q`JN@d@0(Ylu|~0;pr^t>uLg`klZ13$ zc>8PNO%Jqq&3lCZ{( z2!vkY=vg3FjxJc<^}>WtoV6qZk_jJsKIPF18TTLV((NdzQ5y3JPoJ6Nr`~J%e?Q&D znv8VLpg3I5vAo>kpZ(|^eAgQeV%INW&Yz|D(j(}V4(6x_)o=yPOelks2$V+*gY{S{ zaA?T`ts0#6qu#$*u9QC7qLPgtMk8F|kDFpWs8?;x`=cpF-HrgDwTf{8+^OyE1qthu zX?k59rK^tqYT@ug*AmUMwO|(kE-Hbd3fv%oBr*g7mc<0{L#-t{b`%aBpx4bwaM%FY zm{5XI$K|JfyNmMEqtU?br=lYUYGMvKkK7D z8#6GbM!Prl2E*cNn5=>tMEd>MMG&p3tOs|k4XBP*@SXQ}7mmOdUoFa@XL$`qfLk`zr-=bIlN#C$z9o zg45!>-g$u9_s7&I=T{S+xKQx2TM{2=-G$&9;w>+;MvuSrie>)r#6_+Y1qq4wYw5J= z*Fvx&^m299l#VlE%~&bQLU8;n+^#lk4@;3(SK?j zOR8)obZ+X9O 
z?oU>u&j(h4fVv-V2&{y08D+_RGwKB(urWqE?e_5P>Owyv~u%H+k|x!jJym2Jd@5W4_fOQ->Bn;jNy;0fk|`4c**8JLl~$ zQ0`pH>2`CP&4lN!CcN*l1ThU7tqd!R{HISX@NGZW<==lI$7M~DhIc@3bIZa)#y|VZ z_wtwCw1jP6!dyH}@1ZZEm)5~#WfV=+c|*rxb3MQ(O9bXJ?{CmYIW54!)eT3t@P4dZ zh>AN-*9P~UE?Ap6>wy?+J-`RfDQW>n3TMd+ORwGK;v-Lx zIKUVr$tW(Yv3lY(1xoG;thE$H;R9zlM=#Hz*<|USJLwj=KdLR>x>1b5sIq?Y47RWo zsE5BSFIm$-f zW#QHvNe?cQ7dh5iXf|maSRv1q!V!Y2sIPzFa3ZwVJG4)qqiKwfU>t+**5w&Nbgi|i zR={2PVxrzc1C13UpO^mWqSahoF=_xzM1x{v-WZc3-m84)`6J7{+|ldy=(Zi*ZcaPT zY4>tEc|j-7X?MG9bi1r~E$iI^XS<}{CXy=PZv+i1rd^FcMSV~pfy-^-Jr7$htP81e z70Bk4M10_xm))N5tuM%EudiZ6DPj;NrLy=V_(s)OJE^Vx2IiE3zOJ<-5xWnmY^!(+<|T>S!|F)kGsRoLUp! z|3sVST$A}`Is|9-wZoX-gUjb>*Q0yBe!VUh%-02zJEYMo?BT0BTZvCE$<3eG?=7=^ zX^j|wuS2^2YcQJjv|hEf%1+ANE1qir;NCX>$iZNm5keWBL9ZFjps6R#Vy4YBW8rNx zJFS1&7w!4gMSauLwI1d7xctr4X*Zb}UfK_zi1fgPF^RWWxx9+ss`$+$X^Jc_^lgM< zUVVT|;e6!aa@QB42&-`Ek!N`RsS~t2Imh4lGVb{wei(D}L3C{$NfHk}E;Nv41D-gI z`=$5de)`>*-~J5AryoNvT!AD-dpR6hMDD!}UF$(7_fd;;fv!RxI&bEU%H^Raus{DE zPzR1XdWfUAY?rt^cdyFIo zk#M!|)itX+I`@(0ju8Y@`g@EICSc}UH1EEJys$p_N60^dkk&wFb&a*Bo+E2C{G*8= zot%y5PNS#?|H|h=&=OS&n{(*CyK$`yS42x$|4EFYbLt$uwKh(q7J)aE9fF>?H1)@H z^Pcw)nm~5jG1`RyeL#Z0C3i|s9bIcF1d49XlOOp6?Tc3+O(~oWfk2?$%V{1y#Qe?2 z=yp5U!jdg5VV0MCJ)Mts?9>MnDVzfgo%J<37cbGs((-*#6+|N1U)BJs(oCxY;)bU5 z0BjL3Tn)!bBeJ8xiZ7~hRPz|ocDV5)aOgsLT3stMF{t(9iH6|h!SEI9{1H(NL6F1> zY9V)~47^b<)Pq@p|FXJTUc)K#4fx`jg3mnN!=&DFGJ=}1FE^4dKk%|SjwC(0y&PMj zW>wv5$Qs_Baq~V_za5_}=l&R}rF_)4j|XW-1lAD-77hduF@Sa>*Wc>jWt3lKq8Ic^ z^es75js7wsz-R^-8(iQFVV2=ouqyLe#+TMp{^*>N8pB{FE_@VA`NBXHHNoejaik{U zV^4MY;+o^&O6%HLQA|vmkH^=Fhp&;0_%;dcmMp*4M%$%py{)R~Yi&3+&9d=if(dWx zJSVy|84WC=(*C>sRtJ3#N$qd{*V)kyGMtv5o$x>U*)g)4hPGJW?W0jFOb-b)+SNF}vgUa(?ur#Ov9)gbyDtV4Oa~{z<{2lAO zTfN1FaOAAUvV{EO_q@RM3hzi>q&oH`Kp~v{>{mz@GH$))7{|ZiH7u+g;F16BKhycb z6PVUKWEoB@lFoVmgU_6!c1veu%-+=ZC$P&LH!$C8JW`cBk@XYi6 zSn;PH!xWBWegVaz(b7Zk$H#COG31?`?|XT|U;KtHZes%}nn>z`qkZYKzNvPTUbe;v zY7NEZb$;}%OMLWE06;`Z=bBvXX8gU6M++btTX{zfEALg zKzs1`={D9uR-TcJs?lesQOv%?) 
zdH#JL;o!|T;%+_W{V1k_IKeq+G#6O8^A1iu@+6JC#lnFlv^7s&__0L@7K#!g5u;o= zbC&Mq%Pgi@*=Et#98k0NtF@mMa#Y*5Qn5Hoc-`(W3VE;K#kZvV`Fm3SkB8Q=W)8#zAhEXK)(UX! z>oS8IjZN^tGP{^gD)~j@avd0Slwe^wf{cuMnMI#`Reih06a4iU(0$DHJzASH`v#NP zb`Gic@F5-skUx(w0~%fiDGf1fNR!_^(cv3!F)XA$0=CimgQn8ckVR|)V}wf`!*4&) zCU0iUH%-+asC08Qf7_)k)@j=)XIrezez`c3_S^b#JnfM{_Bq9M?Dn7Tr%BuX`p}W< zgLYSz*RGFLYu}ybF)8WT2lkD=*2IzauMQ%O8Q6BS2K7mEn=`=hW1A1qvj+jUOC$K) zH`+3qW?Fyk`vbGgds5o70(WCD9E}`LR;6((`RNxo6Ft$FU~d;t{p<4CmaEWbwH3=L z_ibaxHNm7H&Y@S=Lc~YPGgPv?;5RSv{9SE_p|QYjf$R4C_9#t5p$^1AnnF)G{i#Q3 zCQWX?=@7?X@gkQ0`CsRefBT!9{oPMO+mf_eA)o-TH0SqRML~>qNn#*(n6n$uYC@KU zI_Gd@6@B&;eDP^``~>D|8`GLY=2CAd7B;H_sQ8GBh@mJFZdmT}V{b`VZsz3e28jsR z=yypfFf7U~_&Nh`#f5;{gt=CizwuzkyT6>HHbWwKq#^7;0JYRYnx>!$zxHUC4?J7& zwp*09+@A5`LkY`s1uH2Oc|toCE_EAx=~9L*3Xq~aZn&lyNz>45c`U#nosgP>^^W7IQyq|mBuf!oj1@o?i8IKn($tW-2UxrxKs5PHs$9$vv^8NK!ftvhanm1T;&ham#D z4pAk)e3|vHJWKn`Wfl~2RZO8J2+|W}XwTJkINn zhC8c1`g8<3OB%|7G7ie&>or)$~r(NqUOZBpA z?`eDGZg&kj+#X}g-;?F_-mT#yjjeB3^KEDEGIL(`fU2^$-v-yZ-wskgKg0FfOMrMc zWiu}C&B`2_-*(dX6w)J+})VHHtXFZkNRx!*bdWPA2iT<6OMs9kwe+^ zGtq&)pG1a=-xHFQZT zHGqI2;ekaoGbIo-bO2I-b`RIiy#-U2`5meayCLVGk)j)p=RW-i>8tMHj$=nyId+)W z{N!Kcwal+Nd!Lq%yUHn^0h{K{jN-+EHGc^;PLg}DT9LQj=;S9yND%c?X; zGY^y`B1mF_Gk8Ppl)NY0bJHRI+Mm0P?|DPUjoB6Qb`DoIXg~fOdU>5B%}^Xp3??2u z6v2q0oeQhI+D3IiztjoK^U8r%51Tu%1tN~CZOiG41teKG(x#N?T)F9!zD&!h9B&<_ zMGaJo=ws|20niH2KmdZYW@M^;WvQWz#}O^`;!*54R}6L)-#Y%P4L&f%1;268g-eM-g~1c*{l9k>rlzspn~Qat#d?SA@BwUMu)P8ewsj`RM}Q)?Tz%#QrdMFn6rY_2-;-Jb zNiHJdCD=xFwA?y>>^2T(5p}CIh(7{Md92*4p6{Q&tbqAD4evm_u1VlmgP%cl4h_Il z8JGK_4bK5+L6rL~Vdy)#|UFH583}1ieJZCQF-VbaD;u8CV1Te~LrYVEOlzm8*y75{a zlXDOPi}el^zqwwCwBOlVN6$l z3#8(K-$A}=tcVO25L!EA-88~woAUc73f^!mA&HQCZQ6}_l|P+t`fic(J74LrW||zI z&jL=UVLYvkg;OrTwti%r>y1^aQ3*Fwy^Vhat#VdQMb6b}+$kFIF8kx@sah6AbCu-^$OJbyOHp_k_a0d_nhxc&eJ)ePfNamU#io=%HAsT_9%WH6P6@LU1kHnZ1(`Z1` zZwob8E6RX(qzJ}<;xHgNFTcakShDo4Drr&|YaOx~Au_hTu<_Ihl6H^8BoHiajL#Kk1e2zqhISWLH)529I#O5F`@o!I 
zxhWLA;K)pstLt!K)mvGbBw8c&NxT9SYL=M3Y7Q+558~Je8Y)9$NDV+my%m!!3b5Yy zWVB8V+xt@HO+Y$QsO__!^aq~x7-u0H=f2k*RvUI9s3$xFo7 zuag%n-gzt8!DU>Ylcq@kGzH_JbK(reMmJcm`M?{#qTaf$YYWl?2QbTv6emy9xalZ4 zK5&D2lOzvZ6^_+Y=g>}<_0t#W=3NT)(S;@P;Ev=4ts{qh;E$`TG!7l0P{CS@4~px3 z%o$Y@laRODtUZ5%hJqwMs14y5YY8%G0D=SPDnWN5;ODe{+&Aj9lAY z{ovt&TSLE1Uu?)Q){Q)yTuU4H%(9G+pR+uC!SLE632mz*>mKJU=agoe@W&@DA9}vW z!ooa>N%}z5ozkA{U#|ToY4>SR$-cUluKSGkqj0C=X(!!bHeG3ZGOWrTe*RBGH{WfV zrTliUEjH1e3C6#4{H+11EvhG{Z4M4&wRgXR5&M;|J)p0RI1|$KI#^%NE2-V$djwE3 zvdr+k6k#RHXeD7x0#{f0|Fies@wP12eINK;Roy$BaL>*6zMLN(GC&d_0g@s}fK(t& zMN1ZEG`6gfKC&fyWLx@pWLq<`{jn|CA6xQ_9Au3xnhG{8QzAi16bTSPAOIo*5AnSR zyc}+hC+xkuyVm?s)p7TZXP3ht7=ISBIRZZS%w_jgkziV z%2iC%^JMVmyZ~fg5v-8YrNGHa0yST`E*A$j`w-bP%z=o+9`MEV@av-n{zg5X8^x}-i zPg_(I@8)BI!CEA)gMUq9t7`XV3LWcs{A>WErgeER81u6MQ|HeU^!RFCXL+Wa_h@n$ zYcR%;Iv6G?M|xd8@SeN*v%l|te%BM0E*Hs$d!)O&3}3v!=(S5&Cs-4efJ88iRn_mM z>-h84ZtOU2?2LlfrEhH2_D7YoMWr8$v<*rpn4`@dl$JJ=m^_Tg#c^ z$~}{vLYg>O-|zsS#h(}G-R1Itu~-2erEI_S8nGDm&Rt}0Zx6*_RKj*?` z*+L^qSt;dq;=*G|3qVlSgTMY9Pg6Yi(TB_Nq~6)F7%$?KD6)L`%RBrZpSupmpG$~3 zGO;1A&*V>Wbi_u<(u{9iQR05Lh>%;|@rvR-=JK}Z{8B=Ms*b4F<%)~=hhN#@2Y#Yg zoPAor~-h$ORUmKKO!xiz^QD5_L`m}nb7xG;$L&`T-bcg$kN zk%hT0pYy9L9uXzWB7XJhAr}+juH$j>ewb48_vm)685io5W%-;h-z-+2F~LwUk>kAN+MQsTQUnQMZe`bQQUD9Zqg|Pe6dR zH8^n;Uc87(uTh481sNsC^*wZyVb%>%uS=E<{p?;lkj;CSm`r%(sn^)Ma+5c|{Xx#2 z*dQ|z5C8rj310p6&oX@N8gwG>5g0|@V06x(OQqhxEBAs*tzFGqIrvj1 zQ6eL(bt+q)i?g3v^tS+znlhA7ngJu6+_e6?n{Si1gdicgcOFnN?6tr%Ey{4{fw!%V z9`AkI3I1ib_8@G*(}))@47swM(mgJPm?ZyC+5%%#AzA19 zGxxeP!3(fJ9IAOr$R=}20kUcn{L(rI=!NVZb#id0^1Qy4*PqR&nPQyxa&zIih~iQu zOI?T>TLHEMk6PKf+ypfWnFMeuzV{4?7)TS+aPF5O-) zcXKca3a|}wAh0~6E_0#IqRR!GkU3LCjL!FP7TYq_AtvIPD~@0L=73JOj~It@8BQ`( zr1pb~hPl9sVXYsrVGVg8AvE-965gW^UABaX1H?NBaEocW&^nJdyF9d&CD_ z5gTwc--l}^LnOncej9n|AsGq`9)X4T-h{DP!S;&J0_Jn&*4xrMa~c#c)TRn8fi7Qm z0RF1)wdT7?n&nSe&`Z4)xp-?wMryN|dDaf??Nh)i#5Uq1=TrW`HQ_BMg`F%v=Ae89 zDSk|>#SvdR=lIY|3BC0`ab&oCC(a!#t=6u)jW*Fj4H@TizU=#H-vP_!7K#1!ISOXe 
z;c54{9^Ee9>SN~i+jqB~Zt;BAw3Qa@_$dHjrQoHtX`jLvn}L@pYf&ENPe1;G*l)+L zLU|S`V>QZJU3ea%S#c`O%jO-@oZCS2fq|_{|3d8!da|5<9OpEDf6pp={CTf@kKznC zpVMU-##pYLyUFid|2hvoem4(4a)veAq4(%LeBZzSN4f9!{s6E1=4Uzgv9Gc7;zbxa zq}Rdpy2UwIHQ=XWFXkvnGYV|!ujLJ`RE3>EZcq~r)vsBdjOm$<&$!+rOhF8h5it2# zVr4(H^=q+ROmdJcL?`wSL4+)IBs(b>%iX88c>lZa=4ak}f)Bi9gX5bi+1^Fcn|mZz zt}=M>RkF)hiB++2rvNl0Hzq3pm?}tYkk~+~ymD!eYrBaDn1UytghTPO4EnL~_*qag z4{ln6x`^jq+r^C>QDjPVL<)$dAt@=1ZIAyUSpXJX2)t5sM$T)+h|WPg=e)<1jZoby z&FtVkgN*kad7G>31X+}4Z~17igmdi*r@nB$no~#SO1�raDEfL(~+iQvjcue-GeX z=Fnaj?m5XQO^7lD>w{HT@e=Uhs@I3iWtb5NBZv%w!HJNHdS^5NQ4bsyvzZ29C{soX zYmYp@#kCIEXarp!_$7)X*d8#vbdAmf_jq8b2x8%7p;?0M_KCMPvG<-ujvgT!B^Y&R z>d$Z!V;Nq#%HZ4u`u#qmYd6`sc!eWJws2_v- zg{QE6{sL_860NNlfQ&~*Evo+P$kcg|ON}c=HcpD?PW_1T!9}Eg79=1cYJ`!bdS#1k2c<@mO!^mayVRNHH$9g?=0?y_DS*|Db4W=cx z0Y-Ba?IHxEG5O39_*LT&u|z?ayVh_u>+nB3>3GMP9vzaR9Uyev%8l6# z^>6R;;vU?4rsJJr$7VUHS2CHo*0u74H(urYlm>m*Q;V%CjNi)zI>*v*KGE)H{n{A2 z`zU2B|AZf=4?FE!oe!YhhjEC_;^}so;)hXIx1MeR2v`GM+V0Os3wi+7X?8ucg)o!f zmB?|tLt24})m1o=rnmRjPTJK4?tW>KK7Nm?G+8*Sp0KrI$lNY!!Vfh%u(j;|+T6_D z%9G|#X4IwSg$6??zm@x1<+i7d4KhsO@->XykIRCk*#S9w9F1+U1+Mm^kKjG%UcC&X z1eayRn`=nq4ea9VzC55uhPTSH3~MdfuH%`nyv&6cu5kCmXSnNuGxRpQ9Q(eEH#gtGRZk;9 zaH_-|3mqTO!*pn}(70S1)S*G5bZ?rH9sbdxkmLogOfbe$3i zAxJt8(&Ke5E7C|7zzJhQ`%CLm0d>kSN$A{j59Z8ql5dWuT5QQqo!_c2lAlHCJ+tyiy)KVtA>g+L z=kt>H3{4y)SqzoEFc?MvrN}5N^F18&zkK3vT^o&zPGcpA0l#PDO z$6p@su~$-l@54&6n-NLavX7DnC-Fm9!G+*P`S>tC*h>-6qc|I|;@evcta7}%Fz+ae zhM-Ui<+kZ!o^FCnRjSS*zW+@Q@CA|k`K=1#`A?I3vts->&3_50%mhLX66RaMDSj9W zKGS8}if!U$G0j*}6RAiW0AB}@0TJN~7ai9|F~|ClWVsA;oeV$LspI@^hKoAH*7{&9 zRK~p0m~$&E9h6l(rQEhRZ*`5HPm`-%bIyfx-ZrZB#RAK>dF-{@-A=7tlziO^o^E&B z3t9^Ny9Hp)R9dd?_L=6e8Q-^$r&2z*zW&9>!W{ER7}w>Tq?4Pb6Rw*i`vd?F&A{IW zt)>l&DcQlx?J!OThnVhkCB9Rsp{DyE7pvF#))MnW@8`@FY_&XALU}*m(&Zcy8H_6K z+!cxgz6jA?PaT7eHP{(JCn~|OP>mRiyKoJ&GbGze>1}Kf#S!T+gDA=?8)Haah8V#D z*DvpIq{?X3{okRT`R&wQ;e<3f|akj#T<335P4kiTX|K{QOfT53q+a2#vSq~ zxFt{~qz*FmKzFVqC6E)giQ51!<=}4)I`~JZLPvzNTM;TA%oC#lC{)l8!B-8VR7tYH 
zL*BKHpbz<6>PLNn2`ku?T zDJwPtnmG)Pb!)gfiuvEZ<@n*d`}Ei=ycNd`_WZkq9{@t-f_e|Ywsn8=Mwt9u_|)8^ zGS|!e%*tF)#V;UI*WtWhoA6*|`mN0?`F$IXPN=4moM)~gibdK{FlJ0N~Lz^U!z-HQ}{9F$3~1rxo=)iS|l9uHD_I< z2V+Z2UhlQlH;2FuxlG4tg?8G6V)nD9zSTDBtoo+6Wm!2MjBeiwTE!SW*gjs4r~T;c z(p38;X@P93=VY$-&q_1)Y{FEtF97h?(tN#hTa@Q8(}a${U5Y!NUlUCaabGkpGK*3uNy?Rak^F{R~OfMq$1AXIK3E(;YI#A&ex z%hX{qR|1te#N@&jU<6}r2`nK6OrfEU0OQrHaB+Z%aQB=dwL+9SG86m@Gsw53g27}S z7{iF)ZcEt2XmY&7mu-xoQCt9^C3yfNOEYY*NB5Bj*!koaaHA2@_h)%V){tGiLNXj- zqu@;m1baY1>LgqzR5jHDShGQ`%f^YVF;!2pB7WiEh} zLOK{=&z|C*pZ-ZkQ4E6&t6J!sae^8l-5s!V?gFumFxHZx6`aF!7y>ni?X01Xa=c2E zEPz;%5uzMe3S(77h$6%FLBgrN^2WRSq?+QypO2`Lp8-e!dtqm&?2gK9+TI~E1cjLZ z0D2HiFenjHfRXt)s9lr)<(gW~7&!naC>fB107062G-?HOH02g#lW*e()_Pq&_1cI} zyaYe2Ou4g*PbVsbWW|Ef8T%@6KHJ@0)IMv+w*d zvrn2!-qQ!zp_1VpIBZricOMD>@OHn1_E-NMs5xV_x?iGhcVB*wO1dNZVue3~E=~Ka zf0CTri*2BXx$$T$(OQq;xeK`Sm*IhvFmh;`qHA5`>n@a30y@td!)d2)l(XHIbH^l^?I zU1M!6CVKb;@BFuZmiyoTRzCZ;ewj~`jQ{wr|L+YrT+tOXZXrFcFvt+BiiynbnZVeDA z@uQ?FNS67nqvE~8B&b}5BksjSYkk!Ct?d3-MMbcFYojQl29rUOror=5E9&YCOPwQ0 zGUEI1Bi-nu!x5(2^`34vMz35W+1`btTaae{tTrh{m1Y@cy$|%rk`#;cfT?;BUO2B5YQ6F2#_}8&gadIS4pDf8_6roF6IDLA9g&)@}9foj~rr zJ82Cfjq7~&D(ck611;NBFR0<8C}rL%*jH;)K9}VM2QSflGS81EK*MQ_3Zu-Gfo;Nz zv<@^jS}WCy6)tjQ#pO*P-Qxcol8h0%z6vmaVJVWz)^W{mZ&q!eIoUWa5|C1L@eeL*2v{ zbz6?-CG~dUZ0#BHyR{#p=P0`xG$CGJgL60GwTp27DQ{$C4%fAqyHBB!3EKp#z!*Lt z#NH5|dky>U$4G`ljvPPA^?nC8a3O*(H3JT+Tf4LbfJ6X?SiwX+aE_bjwz+ZcCNHkP z!jYrvoVxb}XYW7FTBpy}WB2nD|LMQZM|=M(7ysE8vHdLkU2YHn5VV8J%9lr@v|W`(GF!{g?t7)T$meCP4*6a3gqZ zW60n6bC2+6_EAQPT`amWkTN7%UA04{|z6OODKe&j*LrKty<)Nglv>eUf@drI%P4M9Mv z?F_DJHEUTyzvDrnG$A{G0fs3QG7stU{hXn!9N5d-oQ_h4M~>3_eLqF^+0V0c{t{6D z@d~5Sf=Gl!7Ggo-4yN0IjdeJ>L3DHzcj+dBuY8NOEGfk3oI|t>aWKM~k_u6ya zE(07$>d%F%_A7!j24f6r{5f7-gg71Hj-4Rg*zlftnxCoWw^W<39XO66(wAN%xo{bY zEg}x%aW@aXS%ZxX)){OZ!~JJS*Vd8D*DX>;qID?4;fUze3GA5@jK2LUy-iT(Fj0(b zZjUbwQ^bVlAUa=RoabYpoKJmaPEClE9F(%cQ}ttPkynYVtU;lD5fXx0<3q$jWX6Pi!O}zX|A>E*@{fS_NKHKEVbw3(D71h+@;%a 
zSnbi34>*ElP(TC}F>oB<)?dI^3^8O-N+9687EZYC*S6b#GP0)4?e?{5rlI z9W?kifBCgj(+c#0(wM7|P`>I!6x-|hpakcygdhk4S@0~m`xI`YkJ%Z4&Cm7A|MTaS zxtA|tt`A5y6YOT6{)r=OzkCTHD%HOR2sPU!s&i`o%63< z;PJOVjxp=3Z?5sKKlcau=yzUa@YDtDTAv)os?Gww`f?zuQ;acOA9eT-|LGoYIkv`Q zZ;Ww!BQGwc1|oXzo?v+5NS&kGg_ka`^OyhW9v62lQD1Ow4ue2I#iDM&pZon@Xc2e4_-wP#+Z^JsZ4xTySU1< zfW$F&n4xJfpnH#cL5Quz*qAIbY(|FN4}Xfm*Pg~6KTg)~^H)B~jo2eYt#3$j6w2#)>fpQH1;eh8QO{!rB-sE9KeE>Ud1OZ?z{44!_0UY3Gb zj4?z>#^~kO=sfZOgNd;wkV4_#5qhSBDrz{Zrr5P>m$ZsB1^nCBt3VD z^s)O%)%iK4=)SNOJIlyKk#KgQ2GA>0$IeTyB0IZ8Tbn+>MV?1;{Sch<kxS$6N0abhOJTwM5nwKCDjiv+k7G!i_z{ zzj%7Ut}~qInxKOIM+X0Uh1Z?@ID;HiGbn>$%KIONyZ+@qYki098hq%YsF+(Uju=Iq zA(703qG>`Tj^B75KKO0ubt1o=*NLw$L>N-%<&x{GiU&VM%X2DCn73jPwxft&e0s=_ zJYcwI9kN6#?}5q?Yk;7Vqh6KQNp<=ur?tvA)!^i(#k^jCh)!{chh(LkADZ)ZMFp#s zRMt~kzEiZEEj;d+LzSi&nbnddpG%5&g%m()kuKDcZ)5hy3^dv~0Rya4`QFK|RDueK z@p`S!dn-4h7zdcrdku?VOv!Wczm)aV>%Ws%T%}Gn_2J-X(n5V)e{KOYw~Xeqb#9X# zHIZ(;QPtX(TdV@BljUL7-Bs|mY#GykESX227uNZ?SY1Tl>+hS@QpJYaXZZyHc)Kx@ z$qhNogT&KnCfM2STm#<0}U%a{I}|5ylAa)yuG(Kri-2_b5fq97oO^MW1;MiKEJnSZ^j8 zYe`?b3NKzn?>$MP8JlO0vGdBM;58ULFmhn0#$ufxg24(v1}gqAeq#h{{aIyKUb@a# zcD~B@z2~iTq{GpB&+_0O{c*ne*M89lHL)Qa)|FrSXt)vIyx{l~|6rH@`43y( z{MHy9CTJ!VenJ_Vf6eH24Nsq2e%Irmm7r3cI&BBd=nt=MBw+2Z&U>=MbFBup8?{8-2)j zxp+bOMue!GVG-Z z)+NY^lVlR3>4;1`_$LTXJWwXFkPUXw>sMIs#cZ599)Oh4GeJHgV2DVrUuXE*RV2wM zqx|?F5vYU+%NdC(-G?6Hdi+Urm>|}VZ7UY{$~iKZd9Sk6E0&T2;Ca1Drq1_2KmbXT zbLTO;Bf6)KAy!Zfv4iy0i?}Q;w(5ceP+m*0C-h@}*9nj$?7i?3Hp2(^sSPG6G!qEk zcL9*mQvU^iR%17I{NeX(5{dU(^p3%PJF;lX-d;+kme?APzvfkGs3~>yyOz(tlJGB{ z+F`A?Mr17-+~TY_tOSoc&5s@k1*tzzGfOhwa9Vl$S%ctkMu=1}n)>L+Rd6hTWoiU< zDZL)t+*3aIb%%*8R=^sFLOs6iKEka-gABa9B?R`(&sX%)$v6^QzH!Cy8&8k;lkbR7 zQtxdx2EA#{Py>K#8O@bfPC435F-CplcR!XIyNZkL%3fzn@GJM`tge%aR}KL=Q&+HA z%Fk07fjxYdkyXZ;nSamEOKjFgJ`SoLe@!fCD)={zB-ERufmKT=pG!S(?X(O4%Eihlx2Cpoqah@w*enH@GAFT!I-TvPllDZYjUCEZ-)aA>KsZs<^eb%cH=Ff!!fWH z>5N-D`1UQE+aTOk^yyd`Z3WI#NpsP8h^c7~%%5X+s@p%ta=OJGu~c>YfKfgl7$ z4Iyf82?~_VW3#ZAA|FQuB 
zXrg4p6citzLY4fKIML}*-kUI=YKp1AKo+9NN{D3Zw^V9HSNxdG z)Npg)5EP@{6V8wo!7YsO+j+A9kmuV|b5B`gluK!k|K;Pa^2*hOd+v`=0=NcX3?>JP zHH=rhzKA;VQ%;A8VGuxkL-6t<2bL)Xt%Bd{?ZMXL7)VV4`WU6+A?C|3^X5MR_|pMR z4o=pFpb$I9cDKjRKa=sEd*O-WO6u~11Euhu)oMP~2BOwBi`GWB?*kzO*U>UcdvR!8 zC@0MQV}M$ytGQlq2r1T|8yUupXnr1IUbh41gP~|X7iq3Y;}m+M*fuYr$t5gWJzBs6 zv^9oX_#Uf6DYn;_lbC||^HxizKdtauP3zktMk-Z@UM;{)3A`>2{w+hdY8}_QW}MSZ zT8_sy)Nal?wEeT__SwZPP5WJ2AEveWBIeZc<=q;CY&xIQ6?w7#ShOAOb>B)IXxF*r z`eVGDWr3{&0>oBKhh?nRXd$Mg{K2VwHy^fUHD30IiuT9T?VR>jo$a7$rLsDtZM;=< zc&64S7&S4~a-@yF+eJ<9hYD5g4lzQ!zK&eqMqa%P#)3GI%!5OZ-|qv0B>7fA$lW*; z!5BmO^ef0~S4l@HrW14Q?o(mgYHl2BMJ8HXAQud^$W3z~ChYH09B~|RdrWeH9!E^)Xt1vP$4rSoajsgF4994>6T)XW7iB*Ta9%~0dO zWZqL%oF6mR;mCZTjC6$F+$QUFaYxqB)Cd2_RB`IiEb}0HmJz9eS1#bLT_alWd$7uS z5JQX!+km~NqZ5eM`^1;8l3lz)mMK}9lBFqWn&HxvEX`1tkq(E*<~q?m_mZRo?=9vO z>BQKp+qetYNR1^)9cl5Gl4TiL=7XQOG(%J8!BW8N?2)~4kxr+JbYq+-lm%_>2qZ^X2k%&dVUn{5I?C`c1M6m+7zd!BitRmw`fZ;7HX$mQftIN0?`V zXBaLA1xsDb;SocwU%6f=x%OuYY3A7I_xWd^+vb-)bCu1_b-GbhLn8&_z4)95k3}_n z`ZFz6OTG+XgZHFGJrF3)pZ6yrs7#1LZ1G^E-^DE;4x=v282LcY0qZ*Om+N`;-BtiX z0fgJImKSzA{KKb}k#xh%6~=dUnyJi@>T>Vg92Coeyj-@WjPo&WD95V=SqQ#={8oKO zmStssXuX{3^qpT8=WXWikAgwUcai!aExtaj=ci6p?mZ5vhr<6~}b2 zr!rp_XBm39(G>F}m8%e5#(CYC2=D1Q_;={nnvM6?*n??wpk`U~{nbW`E$R->y7$)6 zVOPJ`54~QrYeqYC>|*8gJ)Lc#33IK?cs`3(=5fASb-10D`>m_*&?@@A5SJAdbx7#W znAR8P@-Vk+{<&;ID{eR7H;XD>1^JdM!P_YyL(&B**M(AB9;GR>sEcFNF zSE6M)mti_Fym|%q_2)^40|sfrk-Lx6Ki2mEOJU46s0s*H1Xp}3>{)Lrsu+Lnmnz2g zcB;733akq3 zCFtG`96yfh_mN@Z7e0B}szp5}a|~a66`Lt>zvqF!JfMp45D)<>P=Gkb4pa1%^98V0 z`^z#i=fS&74e=unkc8kqstS=IN>b#-bD-)0Kj(vMxLi(IS*|+g&!$As%Qw->SLk%t zP#d9Q{I=*g#_kSquU+&x>dj8gACx4E&#uRl59um7W$7@CM<$JVgM zc+k%ne-4{b@71VU0ju-C7Fw1Y#->AXB4h!)&BtsQo_;>9{U+rQQ=a)L;fEBDZAqz@ zQRi9EuvMb4B%gxgEbei?aH|fR{-Y0{Lid{s5vtUB4-=8`x$3 zJ3R4pdW~1zOZqslRMY&}Hu$S{d2FGeSb$_$~ce z@G7nC`mHsp^xVu}wTrBOYp-T${EanWp^_K6Jw|h`rB<=lZ!6LcrU++Qxy_SL zTCP^j$0Qn4$9-3DcrNNY+nMt-MzC< zDy*P#BDdX`sMkebJcnG{37`XzW=OAtdBgpFjx100FMKT2`mL;^uRM!hy218f2U+WI 
z^!`&|8pc}9M$f-DKg;q9&n*bRo(FS845LBDD=%GUl)>6vr!hx2&@3spEmjYl$`!5= zO)(dNsuD*^*JNns3P&>!1O`K0A&Q}E18~!5Pdk+l#3-4Aqp|YP2?JTi-u09;1C>xu zvoI|$lu8853h+nXyx;}nZ{_E?Ij}Kncb&m80vGX%UrG4RxeVz=9t;Zw`Q6C^k?`J! z5{^U}N#f5B>~t-!UJ$)8Y>Yq+VxL!~+JIVlzcje_j2fb{%-i z2!D!X;$6q`bSX&Yqb|$#9&UGlz59N|b})Mhk}4+rNjOi>s9<+SBrm^0)a_zA9mIrl z4UP8=663E;er}jRBtrbkYmlbFD-Q2PnJT0XO+2q|n9#ZJE?jR7m!$rTLh&Jb>`Uii z=#W&=B*P`%+s~zbd$db4?7xS|gA*Z-^qS!VOl0EE@pPJnzW}-g4@(bbN&z%x z)xU5?Vw!o~u>wcdW3G(W_#6Lho4@csKF4c2aO%t^osRK{nt}XmOD96&EGa&+cBZZX zPpkE)^W)wB<@amiz&XgAE1o;&y(06is4gEkB8)I3AsVeCfZ8E|O{TuBR&y^rDYuA* zK2m{hWVo1i`Gs$0Tuoz)STdC$6D>SoJxHBF>bFskqe5+i+oO+4*scx1^V0>u(gSp9 z=D!=0`@My^W2{KYrwbInJvvbzIKycG>4WE^Qzca;&0Gj7U!DV6n|!3+1~m?69I04b z0M~QxN=K&p`|RV6$-Ht4t?j7faD0^#Ht` zbgNr(YwLSm^wSC&acd`}gQ)FLz;`RBJK{yLBFc@N!X^U52)4h5T)7VCE|Z`2hZC|v zih1Gzd*< zBaplAAvH=s5W$49@ey`y91-2zCB1No?s^}w;-k%)#xJj-lX84Jiel`m7wPN{!ufQ5 z`?C`lqL%vYn`!2d6UXV^a~DZ64EfccgLvUG(as2r32jwe07ab1$ijDmT*(~67hb}Q zQf#kNda(g&1S^8Rb^*7&3kDC)75pg9sE@WAf@nA+WWyoD=U<@PiP26>?#5R4GV|A# zl#W#*)Z=oNa2BP8bM#y#i^GWDdYk)BTBAfZC5ltldLC4N`LgAI`K=*;>i_!+|Ha>X zfvbt(^xa$ZdT{|#c>vANDJJtgbRr=#f;ED*A*h2;dM;A3?$09hTYMvR7|k%wZ}Alc zs}7?Yj&W41a~_N@#*Gq*BQ_Qz5mvl6qBZ_(La~As<0DfWQ=98*>f@BPZii341|NM% zSnKt%YKbt2m>(TlfcM08C2tD2;uws|4>4iRQvj!IklS%|F=DRhmaXM{*h&C_@m zE?3k!Ha$;S>CCkVT4tq`H-66K`^3m@#aeFBIu3fpJYV?-!NqE%y?80pa~zZojKQ6C zod3ImR@dCupB?FGF$o>K`W#dZ92*TnC-n;Y}XYX>vVslT}BzTATP&x_&^~t%33E_^HKth z({)G7W;x~lqspl@hucd@hTiX_dJ312$JNkacuT#@YNb}q-BNu1*slz;7PEf;ePkvh zO5vY=d53R2yMuPSq23CY3hIRSJvQL+gFWv$!C9yvmmD$YhAKJbNipv*XKd!%3b7U!?tq3v)ER+6ebg8({R zvTN7ry>x-icRbE;I3%+l+I^>~fs?t8iM!mIc#gAR!;+2u=gUOq?n?T@j$ zvxDDiAAHY5aaOQS>7*%%-r~BkRi) zLu!3LSXGbL#X{%h%Ur*7jZSwRu_j>aQuc3t;gr!{f=#-J`Z6^K&?F1z>t!BPGvPeF z0Ae`@XvX$1TAI+Da!>4}E5SMFY@W|7kJI;?9$pbSfMP2!>W}G3z;l!&$k}5CV6mFh~ z8Kw~ud$7=`_&^&@NK?z6IuD476u+J1DkI`YY%db7CSCr{7x#Fmx51;E26Tvt1S=M! 
z4mAd2z?m2!EA=n`A*MOLRY5!$dM$zLi6RoCQ6A_b1dtKP#IoZ$BswaGco>>v^Ju8j ziZV(Zm-h@u`Vj+{VIzW$J1r-%dw8SFSipNK^K`$xhL18Jsyz7S#PBCHMVsp?{T z)5hdin>#0EoOAKB@-DXzeRu2Q zr2bx=9_$xW@-)wsJr)JH%(`N_qh273bikcA7*^jEw+Y0oRyWUtzho7+TWt>m57TLh z32Q>54qrQ0GZ{}VIX6)JhqXf7?_z93_Vn{`lYLm)W~+_~ozdvX=>_E5S3)=dlLT6i#lY{PYusekWsR=+DWEyN0j4YWduY zdk`BU^et!4K6!(J?O#4!Bze-6L%8HRf)(ur_Z!q&mmZe4lqQ5qd=-#-=@PGfO z$i}7*P7(l34FI5uMZ|`)?M86rI$XPkJ$(vi@?qWtav|0eEC8#@mfFo;k8f?mpZl$Z(~%Ey z6N%8VLL`E<&_Qr6;j>p@y&IENbWw^QfJ%i_fqrcHN8gF~?e93c7?Kc?*oYKZ{<>sb zPi}H4>9M(0g2-j`-STX}ybT>Iys|C)#Sib&OH&3SM6n5H=2=uB#vgg`%{`&Nme1TR z6Ow|8kY&nNC*ou0yL{)@2RI->=o+wItH8j9QFasI`H^8`gAn<+{=z4AF~gLdM2RB7;B_by3t5KKl;`$@wXIGbjDD;|v%cqZNc|@wZM5Ow z)*Jhw!@mYSJajPj)dZcmO{&ryTgx-Dd;?nLJaZ6Ed)5Hxzego4Esx2CYdt)*pUOL-tB}Px7xJh(_L3Rd6QVZVWrA)LVEu z`3gElu{cs^0_GU}Ez}^!pO4q;DEDlpRV%?!)i@qOw&1nEigA9Xj5t@vMPadUv-&rzth`SO0=aVf(_O~*6;xW>*$4Li!=-wWZBors+=K=^`Wh#zv zP#FajDS01+?W+b%WH6ns51`TMkoJ3|Mu{U~^y(`NZ`>dqrao$NF+BuWA`CrOw!lA= z{M^N=PJ5^~KC;fnT_;JeUF60~&!I``quv$}bV+$a>cD|Sg zzoPXn>E3|rPd~-z<}S%7@zGav`53Pe&Omp)&&FM+y*Fd%k7&J5a_t&BPkjST!ufM9 z-}+w8H)8zeXo-Ziqeti*+k{9kIVhTQB|@j)!Ckw;@THeXM@cxw(FLwG?>dU0F>&PE zzScp-d-jgu$qI37IKN|g{sxTp5=LnrNme`xsS>=%!Ve00F$P2pNQU5uB15+qbMi=! 
zD6&W-6|cGS?7_UBOMu>cVX3uzd(b1eAmlFUcbc0xI# z3`Sy(YcQJQn2#tJOB5MS9O<&wwLu2r zBSE(;b8Apr@%k;jxIu_evN+;fd&=G(B#A4IazMbEu-zPCW3$J4zvJ6!D)MQPHs;() zc}3kdz;FC&nhmm_HCr(qx6r|?F%QoESdQ0KEiiATO?4;OZ|yRfSEmjQz+6le{;TRC z;j2Nb=4q<@=TyH{_2KQ&g?hhFYIA-4H-})x8j~-m#`D&1O*dl4N818CRVZp_auT%< z@alDKlT_XIx=kZ#(aCe3tz=EQ)h~oaXc_c<^&A|;gjJ=PVTl&!5kAAy6Zg#+k5EOzk|O0ap*@rpnwXl+dJovq_XS-J5e; z`2^<4uhMz&K6;No&fvsRvaP-cU-D?cLXqF#fhWXA%M}49VH;@N1GSh`iEdmcd*LPY z`Zdzs0q(_D{Z`;^M0f23-Qy>UGXwLMmBX*HzQ~{D;Ft4Xy!ou*frq+m91$B!|Kte| zD3^Fi*s5L$G!WV#p{Glcc^IQ^+YH3J>Xun7LmZSYOue<8gYM~@PYo`7zy+Tfvaa@yE@Z?+T zS~k0V+*TjwoX;217Uy`1!R2S_U}IP;O5t3Pm5s%g{^_zH5Y{%%zyRsT-tuXm93PKaI% zc~oKldUoNsTOUhV#2?8}n+NiDV+$WGtzHoww#Cw6r&Y;nb~)`fk4||uEARe!S|zPk zkMC5PuRjkr&Cr_#)N1uIxQ+8$Th(I?oP)nbgi{wg9h)WWx3L=(~qD>+ z<^I{aAw2dARe`%m{XDqJ!{N>ziA&kI=QzE4jx%`q3OQU>O+PM~x-er2+w{C0S!BrV zKyQaDov50%0worGUe+WzSc#&k;o&omcN`y~Lr0pL5-1?V?N!BMc=BS#h3gp|1A|m6 z$0sV)s&d{6Z%n5oZp6kL9-?#CNwU!u$Si;Fw{P;9?+l68yzyK;Z%f2)@qYMB#_xV4 zWt7H<11BjRWBA0g9e(Sn9ipxwiVbnckU1^SstaX!`#T408isb@z4S2&;2#vmzea}s z$zybnY%siZh2&G8Vfe{Uv+>Y_#1GuhKrF5k`?Ke)Me|@8HiRM&A3+nBA-z8C@&)$3 z^=)LBlI{&iZ`{P)+(WVvxQyP`I-S#JJ+CeexJ=c+uY((9Cs#En9OoZE$*T8SnNLCe zUT;%a>Kd@F;6)`h->3TYAr(z)xcKDdXQ>B7#7eGC^?uN@FUqn@5QK1UUY^Epsg|IZ zK9?5#*+d+w1BIfGMYG~}2Fb@$0IL*or#;G`8B*r0wUGjcQ58}R9*o8UWe}Wv?B)7Z z#$yPj8CzgYF383dGAhS$0ptcgq|#@8dh+`3!CV6p&ch647eWp5R&9({Tc-rzI!ptm z?48_0vML7`2Cdktf`MYZr&=NX#+(|;tj&MMdj~qLIfEB62nCae(k+94RJEtfQ=MT| zg)~oKP1!%b;^2K+@mwSqa<$yBwLg3cfP;(>typZSTZAE2ITN8MquR3CL|zI=5mEy1 z7a{?dNy?)IOk?bo+_%4TYfksgRAC5Z;m3nq398$Li#|XqTdwO(sI_mx`e3p3O=C>2 zj1FM2y5JgXF*&TJ&egVi;mU70-FACxF4D1Y%kz9deRBA_dS$d8SsA>a2#^!f8hrf3d06b7_IV~gw!$v*lddeb-v=0Wrdp7la3T-V4tkOIY_b40uky6S>%~nxuR5?z@ML#~&q2uMwM+FF&jNosV5Y zBJ2AybGh*RHZ~QkCj2XJQXV)7qmc*GtVy|^t?_?;Zp7{odK-qg8|9WXrtsja8uSz( zlDA*A;u%;m&fC0w0>EK9F}<^=iH{v)ZEr|+;VL`d{5IKeL{ZOhx+9L;qQr=G*>w3y2Cpj#TslA0XyK9ZW81!{n6O^<~F6t3>wD3e$F z!3%0^E(+Cp@Q)ggMIH5~h4S*UEB#W8!xBWRPe&5A>o)2ywoTWk-m(y($#zA#3!Jm4 
zqmpGZCzZxms|tNrJ+H0`XBL29<=BD3yROJk0kx}M?oyUfSC=q;3$HuI_$p=F5JjQH z<83UHl!GGYxgx1C{i_z+91yjBsnP4f1ir&;pUGW*+o`S%hm^_J4ShGtcfZPiC&=v9 z4)EHTmCDT(QT5$3et(&==Ade$8b4Y^<=qw0A$tEU7jX#=TImjvh+a2zTbEu3Fj34y zcl2^|WsD!z72uqFg)^6$xzq@qBWvtk-Nrrp8oY7|?mr2G6hzRW!#?@|`p7-#H=hU7 z8>`QWVtO6)g^Td1Z;_oj!S(JMXYM`Ck*hbk_U%^@>j4W@+MWKYt{qT#mML0Xqv}0O zws+XOxl8=eT^xPnZZ35Unq^=uV=frg{;$4q1-ij8F&B#u)DX59=5}Ki06;1pMy*sn zRDmoL)_N&F^FWFM!vQ3z3p}`jzg{dndo$wm=Nv!w_K4K^^lgTzKc~%~ffcEcCPO;M zw%B^><2VvThupXk^H+Z90xw=niFy{z(xSbnmt~s3+fTzEeH&!X6kA)3zzf$bpL)Sz zx(2lwaWD2Dq65F(wqnRjGg@&4@{qb-O07dmsGz_$EJXbn?Zwz5eWFuG{n?HI%u$!? zZaC*LY+cL&DFws|CNc$>QgBt3qvl{s%fwItgR0+~U0iy%RndqhaMB7zH!7=la_L1` zCaHpT%`LTFC+omM6-+94mg<+hYjTjg0zy>uyIMx=TEdtd11&W5N!GTydAnSvf@?&VqQ`1*BYAz!^HBvbT9$y3?bg6lEh zIrn<3ws34%2Cz=^%=v4A@`nb>N1=>-G}KtA)!TO6tasu77U~ZxFNVf28lMH)8dw>u zO^@HA{BVC_pp$)5bH3gW9n?ChF_xIl=PK5%#aYCMa_u*p=9j^p*}mDd>UJHFF1%jF z53F`>*?<03m$NdO%FtqZSN;UGoMP=u%eDTgtil6urM-2SCWZO)?zdCR`r1mqlW4jO z4h2Ye5Ojxi)9OXBe*rY@tHp|(k)c}nyyebifUKh9>t>1OXA5N+-F{5x$Qpyo*O4cm zfxC_eFE!)60J}ZRyWU3n{5fQl5nSSYnPE$-iox_cWS{*8(ZhE$+Sp{rb~yR)y$r73 zU~qA}h<*u`Eq7WmT7_aurd;1fx-o8jo$?Gg|BPYEjf>Z?Z@8BeZ+@6>pV&e#ZbKA# zLy=&V7RGbsO;j-ydy!yN{CR)**@k%(UK21cC&U3J(VE9jU9M$R+5o9!@WkDQAH8=# zI)uG}Ax%LdZ_J7M;0tTLnE&y2ELTS<4;+t4(-f!qd4sv<9$|~FAULcVMrn%aMI3+Y zW7saZO{tM71}J}M-sAyU9)L|w}VzixQ( zaz-2}-EKq_qW_wz#}-l;q`4IL_=WlAw3$rpd@;)))TEgjpIr!3$jS1T(^Y&{<*FY;PRpeul z@?1hGuE@7tmVk~@qVRd`6@k|9rLwJ>%Qlx?MJMwNl1oiIFRoH=6@(fSxl(%VbuYj?X|=jaHxrJ^E6*&{YTimeDbW1&sljQLwOMHtm5qc+O-xrT1hr&UpH>`$+EVg4K9FG=gq=PKmZ z(z5-vSibgl?N3)$N4JxYeb+mu+8h>HFHU6x4@(2zPzSvrPz1T98BLHl)lVW?tyjFDs|}& zqh{k)CD&;~dSSJ17+gy;RI^OWWBIJo+vt*96|!$U2XB2Cx#uLBI4}_|Nsx!{LEiQ- z`k8Maw(G$J0gCgB99WAPCS)J_ES%ipdSvLHIL_HOJjAPmr%7%Mys?%WV_I1fh%Z8o zBE;y|zm15)y#I&El0i7Q%6O0>inw{41o|9-v2ZUEB{GhLJj$dNU|??4NwJ66N1K$c@^g<1CvC=!9m^mQGF75H zl4YixJ}Uh3bC&=8s|jzo*K(peWaK0isb$1XRUGOa2SrFyhe0{{mPawiH%JE8iH+m$ z{>F9w=5Joc*lvI}0+^{_$uQP18ajUbA;(X>5nSfTG(y$Uj~w5)*yDftogr#1E)sgZ 
zF2;%nE=Bw~VpV`A2b5|?dVoBt#;o|eV}M32s;0jXLe$c#7f}xAXkKEA$Gt)x5Trl` zs@0jgccN6E)y7EKp7QQE!Xeeh6BttOAT_bZ(ik$XxdUE$X1qm@=O!+u4f5)RLE9N)Nb6 zUQ%U)Oz~MB{#k4nl?JZX(zNkOyt*Zxnhq8>$J9&5o>fbiQn0ml7H6Xt+ga<6bDhh_ zeo=h!**G7t(ZK2utu3>w3ZW&Kj*izyZH!j0!Z^?8jH)6{;G+r&)}Lx!ra4$vlckg* zDHU%jN#(z3sZ0KJoL4Qs6%*H3^^E}*zSPnb&1vDJA+y;nYfCc^AbJaJf+>w?if#gy zThLNAz`_|;L+!jSBKOH@XUky@LC#V8Z4%wS%h6IWWVxp+^ZM4cI^hz?QrIahq1i~&GJ#1BY@%U<QC^wN2bpFP22Kly{O9z&WjeQqyXZ-A~9a3%!iRUZg7u;g6W z66_!r@#o)#D6TCeP#bJhH1dGP4vkbDVtZBlPY%NjkVm965gDa|8aLzkHcd5`m@g zgv@)$U~rk`cpv`tcf;u;8KX>5b+{}AVt)RM!i$%HNa^$~oqkN_6q5sgMSs`N|0_iz z#V!9Ww_cTY!@GSD-~SaJtf78#utKV`DDPLXj2aXnRN7WPFVAL_5G1y| zH@13`YCWcupQ)y4lu-qGD?BvYRFz@*y)3JGhNRSZsgya z^|qo*S>;Wtf3M5Bs-M+D~>#b!Rr&_%gg{aA4Oy{bytLRrn#zMW#Rc;$iYqp}$fi^xx@DqMZb0&!R|!iS<5h%}l$y`r}neuIQb*#SJ*GvDs zUfMAi-2ouuA~g3R`5-#EXl-8={4o0KpCL{hcC!aPfe_H-rI#*p{^mAsfB(BU{*Fg+qtpW}zLDc1()!nq z$(-GVRp%z>CPe*guFEK)wM=4m1vO#r2V07>IBG^}y20R)Z# zB#pEckf)B)NR6LKWmVfIX1x6NzltVT@0*gRiTSE+Fs7atDZ$PfZ^7@<%;HOGJWXv! zqr7qXRFju|QbTq`ifG1d{nFb1RXuL>HO7QMza?;4<)>M9A-5)YJn64Re&f2(a=-c# zsoblBi`7SUT@}X5t%As7vX*Asr|G}cSsRl>kTv6D0;+uh?Xs9OmL~B%F1Oj^uPO6# zimHOi)j6mbfwc#vyeqX=6W8TNQ%2QX6Zk0^V-Z>`^+8iRH~U|99&jt?E`D)77HYeW z|J_MT_S?7kVt`nv0#{nJ4XtA)y5q;DEVM*caazM4;pMmv;%D1A^XzP4F1VS8C|=h?BYp zVct{&IK>^`M4osEI%{OV{dtmq@Xs+%K1+mPkF8;jbQtdL@TKoOM|AcSZ~PGJoV6kfTUVXe~bSvs8<=PFypbDydM>ee;_IU%5tUhS8XFjU5cVcih9R%h$=5qzduo~fH%D3@Kr`;LPJExfDqc-N}pq28lvPxy@ z*L@VcI8C$F7q;WSd)NN^>zMP@R?3Exwf&}BKQOAi(4VCnt^awXul0##TdOs zG-i!d?k~fBKxOh`n-Og_Q%uLw-RzSNhNNG68hz}3X3fwYwSh_^GAM?Z4d)5@U>jx&VkNR*eV$T*h6G-^0ekXK{mF zqIkr)3&NlM`5S!ZJ1KU}l4Yq6E~0`D{Ned>D)el^pL%D)n@$Ya8AezGE=$?!8b126 z<(IxZ3Q=^G{#plPf|IA15>6;UoD#Swz`u&VDuk`14bnpJ!om&d2mE%LER$Y$EBA{{$f>zO-qJ0Gj*ia51 z;oZ;c)w!Z6imG0Xz5W^pzSZTL7f^5Q*rzo3rqG75#3tvOz^Tutm7mkHYo*n$u_p4@oQ&qlvv8sr`KC5i=1Ej5s;as3<@g>= zrYQ=t6y0ihubTfuMsts&*)(a)%s-ZHH7(e;D^`Q*n&5z|T0RQ!t?0bNt042xYA1+W zB8!!B@?d*~5?%_W5 zCFJgt(2c^Oc%XX;dipr>-Z$a?`DZZBd5@PGu;9y75v+E|KJhhnOoaY{pCCQ9$@jKacs`H!<&h1NJArpNl)! 
zIC|_TZ}`9dZa({kr(q{SLiF0Yp_-eA-lK2bWMt~WKoKvE!s`d`S(If$-zeSSRVXM< z8uq9tMtqdt-+licf9Qb`gJA|v+2|U+dA-Nq{&vhidCGEg2cpSqLgj7#{c~3l#ADu&bknHBJcgDuHrRU{mTKf;=ejbQv(a? zCVkGAzYcwpr>Ov0QUGrOsB@a+!CwG2TQFs?Wyjsb1!>cdO&V<1S)bgQY+ z+y#c_7)`Xn#>6=;U!|trcRU@wrp%t7znkdb`sdcnALIOw*E??;7m-DMCDKygl&0Ux zG0|9YvTVQ1xdr%pgX+$v!!=c}isrFP?p%6(_SwGau#07G{>BqZ)$-8tMWmU-#)fFU zi!}y$@iq9=S1`svmXQbS$VMslt&d>e_Gkzo;jLy5k%d12)PWTuv7{gUBHRD?L+o9; z%JyJDeEKLy-t+*rZy-tY9ebXqV0CL`kSFegyN=;h(Kv>!4NSj}d*wRmFMf*c|KuO= z>i_mpF1-9I_r2#Goch`K;6{VcrgC6v^}AELvE#0FAKvK{Ms9$M53ELnlcGJ4ANCYvKATOtIt^S;Odp`7VlW{^+4@Din^`r@3yyx8_+ z)RjNaJ7+Z$hD5c_aau$oS|pchwz2+gr=n$f86i`NyEK4M?Wv-bvO-(3!TB7Eo zl`dg#<$8EObuF~UTR@omuWVeS4_K!26@S!8jy1XWj*Afw$(qYM#Rb@DPuJIcQl_cK>MZ_^*bCk%~3WEoAeSzOEWj z`GM7aAo4r>`cu-K#OndlL=jYth?Yc!r9B3-gsqK^-bRmPdl#1}?(<)Vd(U8=xF5GS z1o2y8oe|<6c_;4bO?crvL@}kISuv}ff*6d6ai91qd!vNpr+nX=V&yfWW?Dkg3p(GydW`_V~cP$_v*lAA7aW-+kKhiE{!(*p7gDugD}*PIdSA z3vb%vkzxx*nZC&)-sPy&Kug;z`Ms`$q5Kpk$M>_z0~Hz!9#`<`t%7bhM3(M(A(-4s!Y!Z1s_mL&e}yB|EZC z_kBOW@RvW1T;D^ZmSw5{A<8frxKDnKWM>=wLqAD&-$^=~UDn?Epa%gj-UK4(Sk#C= zU(j0OH$FgiZI|pLUqp-rYkaVe$UxKwRnp)7I$wD1uW|NA-bLhu6uhTf$%+486>A)$ zRQMa8%($-`^M@Z5G}~i0aimxvWvi>aIPCEkKHTGMeJpUI5yn> z#xty+I>KmYht7uOqfZ(B{Lf$D8!wEoJ&V(f0#wMm)|p^`8q!4g)9;D-**9EgZv-l_ z?{k-8*LwWdAMf(DS5rED!CF{b@0Ea%X-bX)Sj_L$&IYXadht}p=KT8Ge7XwfXi^=7 z%E6*0gkAk;+#Tnxwx4kPjWl^X>O4mI@{`8=b5+BboI*vwhc;xRZE@91B=go(rjyn$ zEoGw%lw8fT=(GH3eIx87_U2n(F51tzEVXt{-hA)=s?IGcujbWkS{Qbkz}BR-UMmn* zxtMz`RQZ&tM#e0eO_u9i{Zj>aD)UvX&65ggxnJ+K#q?~VWAR~K%fPKSz7C?k{N34{ zSEu#+#6B4ZAo47?k5;Qdhr{OHZz(IF%CxacP;NZO@^s4putPz&MppBrT@=t;>SyQp$zp*5*$;5mo_>Fdn~QNd<(an~lX+aOW7_N3&j6T`w}L&)OuK?Q zS2YnWtn5oxyt;{V|Knl5yd2z1;2;3NWu~UvwVJM5j_uQS4(e&+I|U-6`8^-%*56YF zRWfxbj_r##8QmP9qXgdcFy?1|6ek2A$RkAEwHW!v^9+9FUm%%6Y>MrprF|BF#c<}A zONQv9_YnWw2e6Oah0VaKGJNSG$;+4h0P1z1-$A=Ew%4KWl)*d-<%w>sk|Qp*eR8wtNmIlXKXbz?&mhnZyQf#LTTJ? 
z);jY^7r`Fs)@Gl@r^ zejdKZ#}yOq%rC$Nu)^BIS#qea!RPItX?ikW`BU>%fYSNeT}eA_4wzP6%Y(MGbI;FH z(@)7ib3U(M&SN{t*hTBt)^euuKf{Y>89-{0@zu;bKM(u2Z}ww%FQP;3JLLAOZ&T`< zJO3PW%K~N2#28xMQ}d3k`N#33=lhY>GB`|y=dr36nHL*nO-G!SeCnNIr9K-vt9uEZ^L|iOs4s|d7c;9vUN^c>MctPzbuxlc&= zWHvu7-nNAoWph1Rh2Abg6DHS5X`7R>);~3O&C&oQsTKgBhg_6&kaFYv60P+N=(Wq}aD;Zd$Yu}veR@awY${y;M;~GEiLXGfhu9D?)&gQz29MdVqbzTGVUDrlmRq|kxL2$|-x7la5mN_l*1 z$Y1;65kG#n}A9~CC$$* zEcnQOCseF4B*Toy?uNhp`;|9r4jE*I$O@#wR+RAM)gC|he;8i5*&*s=^wtbpM>lXL zoQWnm_!qA8wo4fqHwVo`#BYVJXxn(C)3*7!^u*Fe>XI7BL#3GJ92>NnVuL~x9Z+hX z8P$ypt!gsWK&+!N``LCf`CT=~{4xzBYJ8vmPM?hMw6|+=rFF;3=~9_)R82vgDsx*~ zDvys%0XL+PHosN=x1hl{+n`^#f2&H-Hb8VqoojYoOI__}c~WD!4c3}$q%6L+pZ;u? zE>Fr^?d|!1kL3;IHdk=SZS6h1>;;Vd+WLM`kZQ6{Pa1ENj})4+o|X3d z^Bd0l1!QK=7p)KXCENMhGM5fd;aQqrmg%2a<<1yK^Uk&VAKS~$A=7P*1J}8Q^ET5i zD*v`LbFObH7xT7r{&C!vX8&zl+?LkH*q*`KH98&AGhbiVYmz2&%0=kE_V#@E9(ff zMlWkK=%~+@<72WvnpKg!)cu*I>1iGTwgUuOUjIp4%Revdl-?NANP9`}dA)Q_&QX({ zV$O46RYUf!n^8}F(MY!N=b0(k4JTYXHWK|fpR+7mh|P2nhKZ$pz(VTJVT`$;ew64D zx2X43kI<~-r`E6@Kf01jq9a(OWX7~l)x(FfaatLEFrpA{S5U3& zTfa1#7c)n@y%7$_mlJJemCAYQC-mWEj8@t3p)*UkO>H*B~Ppe3L4#c~whttDK}FcO6}tQmMk1ccK5Fm-yeF9ZWFf!#HNF*O;07 zFu4oqt}~3c7q(;t*BNLZUn_|r*WOJN1B>`u3Ny-vluD7yj)LFW?j55|&q(IhV8a|X zOkD1A-A|2uZhe)Fh12)-_6uI@Pd2z%{;hibdCq^$KXFwq>d55L8UZH~Rr>si9%Hh; z-&ZM*f@k=E<$aE_U-6kC_Zl2&+j{5urqSNdZrxdl{G%!06m9f`kJE)F;OO4(_4tf5 zBryHGa@Zm7-#}0oCi&^~M)by<)w{G}t~+7xO=ed&<%3|Oc^zhcmV6u(6dlu4y1lQn zLL?Nr;Cig=>k;X2Pp^=rvA15hG@q#%b}Kk^e8M0s3-av@t*q|FUk3TrJD8-ZRZ<2&^3b* zO+Q(g-V;%nzP!{X2qLs)1ll^7Ok(MgQR)9BP1pB_fSLog_fPkiZ(@tf?fI=A=8pR0 zG}mVJ!NDVSc2T!aPfe(>^t@%fBl9_1N_RtXTyhe2LGPX75i=*s+)itZ~-7973 zUhyBy?54$Z+=3z=VQz|}$&VppDZEx$jR1>qB3}CLgqRI~w&hkAO%{j3myI2tE$pOW zvRZVqld0hE6+)3-_OuUY$nyF#CDVQq8Ry0;9F>9{`8=8+S7dK|iy!NpwdC}Z&eHs2 zbPeiCZj>RIo*mB_--S;xrZpD0@KdAaFy$BeV=}td7c~JVUm9Phz6-YHGAFdvGY>>_ z(9lz|cV2RsbLdHgPPC=_zQHT*9bS0sRD31!6BMO}8*<3W{L8)*{zWBS{jo*#cgH^) zjPYq5v#u0_so4qM)NxL}4cy~vuU_>z#46w5OfCBzM8D)fP52gkKMGxUETPYhe<+YZ 
zcWIB^`(upYg?t|CnBwax`S@L2J8h2u&J%i_i?bv6uSlItpCfS!HjuSxNtWE7z`n0^ zOE{#2k=Ys6RZG{Ac;`Y8bk00|yjcT(5)6}<2HCuwjBSD2pcL{j7hoFM#_;ym!`mk# z*BTX}yPg$_KWcY>M+80%OL?!We*bf?raE6z#h>JaQ?3q+TTrOX)*AuXfF=zAt@dMD zX${p5#cWCT z?q1QFbRBnuw{5RaGtV#VWUGCDj^gmE{`fadrL1xABEE_BblSVql8sePwls|=T}Ml{ zbQR`E@O|z~vdU6g6DIT52ueFPczYsotQ@8vW!i8XAuP~@gm^>bmR`aaoVc`#<$6y?p*Kw-NRV{ z?W4P{yH{}s)dIp&C|21viI=k$>psHGnog9)km`kt zFt_ucLBZSK<&|=HEv9KoKa?+euI1fGFvP##&{2J$E8?L9yLqW3vstN^xX300o-cYN z^WgQ!q?wfdl}wNa6Pu%NpXb}kLGXoBFBG{*`7tu`KQ@5Wk3pe?>VZhO5$*J-cNsZ$ z10JDtzQ@_C_JsD$77DH=y@d_#8i8PihAADzUaQZ>%6i+8+h5G*+7_BB>LwW_G!jM32TyZ{{GhBCKJy*MArEvl zEld#uZI9tb$`=w34ig8-Q#EETj&5c?X86!mT)gIY+3-T< z8Sa+nIJ4Y2ao5dEFEEa{@z!-L6sTE%1TIXcG6f|kmO70aGlmwGZ7l6?ARLL0B{j7Lcux zaICchuf5$#`ly((1@6deJ=bCGbqiF4xa&4=RGulp+ikNY07oEfgyqV?YlL}iVf zAji1|*B}$4t64r&DvR*AN5a4Ovni>9AKlixk2F~b`5>8@BGGb2UpjLMnn0E+YMxHt z0-)4(P?X-UjRF$jGf6=_g~Bd#$le77ZZ@vNUd)5{d?o_&XHo4h@7ViN!V_5)G`&(- zz@`d=ry0J?U%HhO(Uvrfm4J*lltn6mqsMA_XPQK5?9%3r!g6)sD$ut3J27>C08Abu zHFdyS4Y4G8<<-+Wobq0Ef=ua}f3Ps z^Lj^n98#jZIh7FPH*g*Nq6snggu!-InH-pT`FKZ6xG=q3{^;O8;2^1rhGR5k6ha~6y_Ee;AKzh|01NL>Jlm>s&L%}sWwPrh>=GF;~}_t=pU zo&M9?kejZtRPbsE>B)Z#T0MGkdi2FGzO87FNDCt;buc>};MwK9^Pi1CsMFl$IELDpu= zqz!j8iUPnJkT2&CP>qcZu)e9?0*!eq%lSUNo`Fk(YNkLfm8opxv$)Ss2Mc;Q{A&vZ zFJ+E+>*z$UKQAETcuPWvFFH`Ax4*Mf@&o@od16ksh(Y^t)7OB+rfIfU-w@GtA#S;= zBhpU>)+PHfeL-UIK#6~z6KrSmVTN({+mfFk|1z-A+UXxw}Ih!*E}bX)G>gW(FzLku~Jc^w4| zEgMDuX)ZlRSJ~~j1i)cEpdS~hv_z$4*~;%fjhKxAu({Wq7>+so35H;01C|JtR-?^emNFyJxw^-P1%dug zC)BDOxs*U|_&l@7mV#H`mKgJ;cZq}tE~Bfj9`Zt6Y|h7#Lj6tOpN9{W+*NcDA~ zIJpsVk)cqJ<{6>e>iE;mOl=?e={M!b-kxw|hUr+l%3k*bCGRIyv*1>6ZYvWaF0=X7B`urTRj#*9YTA;uYhqKII;YHed5)xgwvu zJ`oYt*+q>L1DQ@1BG|meZaz*^*#f3)|H1Qkg`7skY-zc`OAH4QAgaiG7#p)p1e|!# z<(RA;k-AE8jdW&RU8{b`sP~kuA_QEjQm74Pe|oo z%ZLg=nHQ;q&A+4SBQJyW>H-$Lr1aSzbbk7Lf!#J-xfuf$nKswwQ@tTw^L22LU4P2o zmQr=b&zpg16Hm!XgmjF9 zFG&Xn3$%J%Doe^HP*5%9b3YM7jnpCnB5iA^=W~*vky3^Yrmr**u}x={$b2e!%>8f!BSK6Mjz~-r1dwQxs}VOeJ%7g4AlJi z@jg>=^5#T8GP5$7LYoiMplsCVokaGwQ 
z=WnV~)Yl{z7+GeWS%MT<>tsj1Pgjz1ce%bI1*ACji2#8oienYo*nbGrMPk$=yv7IPzv$i4+kj-6YSqFk<)1mg>+V0- z&s?_4dciiaPB>Mig+gP*a+v*awLS=rf zPsvQjlIG=S7M0HdU_9N`*xvgi$8WrD?Ip5Ple`fovPs*dDwA&U^4A%ONkJFWe(KU! z^7oj9t~4Oj7grj0w?WJiDCT*x`~(NYeytetCNo**fjGm4KY`ZPuxZeQ{kZZvaN`~I{L{!P3$~U^_tS0+tu#HJ zeTLW7gfy3a;}ab zwM|QSi1;ys-!-a1KmE9eT#~EyBK;XW$RCMsE@0$W2gm>JbWL`c)Ar%b$M)HG*1ICL zRF6O{8275Oo1St?`TWA>{u$BYOz{m2`{m!8YNFk*xSS}SjjN&h*`5#u=r$^LR-rb| zbc@B@DpRDMYT?)O<&V-j?jt{ZgT+`Hb|7DYB7Of{yl&F&UHOowV2pd*w6rN{L z?sM4e{+){dGxM8so0sJ7k$gp8mH{yxcRuTlX|q}tr^E}1AL4l~Fh)ko>c7u@h^vNw zgcSeUdz0%<^LNpqvpwx#!8rb+9GdTwE{0v{!V-z46WKkT_yx}tyfuGTOeLpWX9Mq zK3%dnFfgw>``7l&`l?Lsd$_^x^M*``Y8%JvTOQld?S|Mze^m2x7YER+wBlHaa?dYd zeH(%LR4L4^;}W3k@-H+u-GesYrP{wu5L)2q{J#*B%G43{QS@|T91{bL!?=Axi~pA^#qg)g&vI zwv6200kS$chxwm75nUzZRYXPy4NzRz!Q4(>99bTTtVJ3NJ5&9#Iq-eOz06L$lLGsx zCpBX&E}~JsPy(PN4O$B*iIG8X1)iAwSZVQ0_w|VW*27W<4-lGahenK``3=8Egt>_A z&wwP*la3pk#?jBB{~N?Bcc#$3g@oNibGGoQB>L`mY~wCe2yzuxXNFP%_v z5AAVT?e29}{*(8K(5oFBpPBH%WGW7idnm<>Da%1dio~xXQwg++L*s8b4{xrR!`hfZRRDGFUZ0He@2t0_691M!*wn)Ufz@C??K9|4 z;jyUL{B-Fm%8RfkzelotLKGkTD>>{oW@)bifXX5L0nRbUHxs?RCB%C_Wt~~p-&%hT zc+DFah`9?7dwSs?j3@p3OTR*4M)TYi%9@1)5ZJXUtLfkEzl($aJ8!>_3x>e=ne}8e zo?WdSUZst)K4==T$~?v`r?dJezdZ!H3>mBZcjOfvZ(}<>G6{hy*ms;wY-1tPEdPzW z*Z6DLKM?}pnj9f~?)^p>j3Sf`wYTH&0kJaX`#Mg2!|N9Gty7*kKEq$}SWCH*;J-2* zs3rj3CX+g4i!%jC>Tk{szZbtJLyRZI_-0I`8&fV9s>m(6G69MD6|b5jS7Xzn{;Qp2 zz6Aej)#D*@!#_nUl7GDLu9CZ?Zp)5Ar!S#ksRh6P2`l zHjgD2Kp5jG;orNbsI1PQV7@G9Pdcm-F_5_Na*Gc)Wbt)nn zsrTpkZnH?V_xd{8Pj?LT>ZmS z(5+4T5v1Y)Ez|@|6xnblDfiZ65q%3yV>{e~~a4vv%#?;Nsa!SkI_JbIDz# zMMqqCLpJ2tl`zEaYd_)CV+cEH9nQ7>V;1cGL{(&QkgMpr)B|U?$s?9TNyjXh!>=3} zBkbbXv?$|~GLB^Zcd^@w?n(T0U0=%`#4>$YWN@v(5JiHZD9g5uKmDC3gE@wCo$oQa zCbAP$zNs%##b`JDa5aNz>bFM1o`bG#mu(^^Iu9J2ct0!Ii)H$)t*s=zHX`(V!}~5f zDPBBuPi4d=1RJd}&)!~2C`Oz7I7e_(ylH-YW*3EqLA~XDrl$8*4h)dFAvnI|jM$gz z^g((xgfOg@?CR@;IZYBBLa4j7-apo;CYQ-C#1%YbI(XW_TXxj=g7Kl7q|`zqBfnvn z?v@9Obw;K4KdHkzKQ^FSy>q%_l`Kv2(8{{T;mg)QgyG|br&iV+Knt`h%Zj*>Uq`on 
z&)l2;R-4I8Vie-;7B>o_L}s zCSB3zKIigD#7C4i%n7Fb?`D>7nsSQj&Z|tiOS}1T_R=8aBNK^_if6`6 z*yhBf&fx6-paQ(01!+^YA#s_d*O=)bcI+^lfx6(4ldOp3t9!+tLD1C= z$|m;al$?id9(BR9(cv-)$5K~~J-vos~_XI?3Z2lySmKaBtGmS@{HIx(1XYaWo@KPV0?gznabj4#~36-|Zg9g><1b-0uT zEcm6y-~RI(HQrr0>=>G>wo_2x^a?7FPwkQ%seXkwE~5JK=L5gir;|qZ&5we@{jf&u zjs@Z-qyWly#E|C+MnF}&t9uyEZ{e?=)OyK6ny^IE&&cc`!fPrnXT-= zn=}inn!^^3N#T%wVEr+peC)Qx$C95g3kJY7pk*)Cx%DeX1^7sUcRp&mdyIc(;_yzS zpccR7=<{cb1^qeX6^VI9!(+fxzSkLMkZ&Xe9dxab;?;JvjLQ7E<480gu4)NuS5Co| zqYiEF5bBs`AH@HxghY(PZ{Kbg%)I-I_n5ec8)ty`*$lpJ-9EavWy6PB@~6>Bu$!Y4 z6r>FQp~7u9HM?;|TsQSFaO*CsS&qp zH)uzG`mkh^`-0Zn>PMbA+xY?%e)Ap0!Ss>Grs|WdOl9<=L1g4p+Hv?jNJv+2t%A$# z-bmz>aZDppI8SQE-M@Q-(&<4}p@*!jeJN*9{LAJS4}9(-dc{Z>)uL?6hA zIgv)88}$cPbVR)JQd-l-cn=ZM3zMD&*Qh=6Ey`OsqJH*Y)(Jb%Z(7cah8Sq2Xy27RHLZ zE*r)PB`2qUI=_FkWg_w1BF1$HzE-T{XjMn*-`Sms@BCFtUYNgh<_I>UDtqHCEGiMX z``$By)&f$Gvu88|JX>Y$WI?%F$|FC$1=2g|fI8TGXjolfemvJN^WPIWyzU&pVu2$> z%BwuCbpYkvwBh{Z%N>3g?jyKflOcb!%%5|e#s@pcZ?9w1>M^CrG&tav62{lu< zL)W&@8*cZkFfuxeZjF+sE@J#ISwgSa;uJ9sE%gMQqL{R{yyWxJ0P6aC#2n=-E-v@@5DNfM(kRz;ojYg4L`1{NUHGh z4!4}LFDK8t2U?0{C7?Wj?UwfvI>3=2?D+P57e86f`%+;S=85&U6aX`c^ZShYQ}#)5 z$fzv82NNKpfA{-B(kWRC{0Zo8^3Kc=sEi0hH7F94&ymQW@k_BWkf@OiayZIk@-GSLv#PTP6 zfF~p%a{|J;We=8Lp#o?ImY(#B7U+$z&CG~bTl3Eid7YF0#{vB0a}5hOuKR|6ME*Lg zk%G}coz{c_GUvBDEY;3bYwt>UfOE9N$#MFwU7AS#`mpn4KBE57w|K(k7)_h@KWkA+ zu|Ws2b+r7ydU__|Dj6N+)j|Q_agOi3l=QP?J4d9W%UtR>3GYDUqikc}tgx8+PQ}AY zslTCzK1Y~N=pPyK?PC|A&^0>b?DzvlSu_#dOV&T*+ltel%e;d)Is>PN4Y?8=MI{ z!g4fneReqWjm7gpDX7}7HI8dT$9y7GNQg0we2*ry^P5lJFTs$-8UEuu{9U>H2EeZ> znEqd|Enu6x&5ff06@Xnlpz=%{^hzu}>LWvq-ph!mzo%c`9gd2d;sJVk&vH%3fehFv z(8s$&6t8=+W{XSuelc{bS5P zU^gk;-*x=DO$ptbpH#oQ+q$Rfz#doS%+3WrFY%s_g?^)HI~`D@moXD4RbamjQg%y)S67m#9ee zO0RTlHrcZ1=1#%JcbC)s%yq+EIe8xi{*hB4_kNsAgTs3|c6G~6@NBbj9&S(R@GP1O z%(pesLBOfR9~U`H)ANnGYg62TclfVPA1HL@yjd~RD}aXH|9CqGuZhQcevZIpwKOB|Ywe?z-$#&W!A37ot+ zVy}C$LdTnp3cwhll&7wR*&U;1;CLzmpyiXzI0>K^DwUu3@7no#`PF}|%KI?9Zmqd- zX{p`h3zh0?To04sFh-2`zqC@|=mpf3uf+iCM*)7rM&iHo(t$8TuN+c8C_dmBmne(Z 
zz0~&6JLOCc*E9=UoMS!U`UP&NdX%E)+d?0B5PZRb|GSa4k@$;mW&bhgYNxryG_8Ce zE)M6|2-o;%WON65TEELFBmW;+qP%B|1dsHTyl>$_-g-UIc*wYF4USTjw6^lTne86anT1EF6Pb<$j zDak64O!x0nz%SPyDG*w+(KqNJjB;O&M`AUu{UiJ36oU>t8wHyL)hq^D}5 zX%tdz16#GKf3V_osSYa}QRbN_zX~b*T5# zCMuC#Ij~t{<2uQa-{z5EE#qqkuWxquBRLNLb0Ti z#Yk_L$173|GTSdbzol(wiah;K+)|=gBf6oiUY^>sO{?O7Z`ZcCXg?s}n-6Y9@l%&5 z4AHn+8Kr^Teg-X#n%K3$54~1C?+gmOt;Gn3oce%24`f~=5J<|H*erY(HNLWH@wLJMKW%7)~xN#dyBGm;q{= z=_;<xTc_qp|*Th0z6^B*+br{1g^e$+-``tKtpW|5`R-J%b?NNIl>yRBsc|^6ynv!X5`9 z^|7>5(p^udFXg6X6sKX2_dbI{HT~g`H#|Xn8M|o8nFAS$-XCdKFCiDC$|A2?28hPG zi_X{nocO407<$+6`;;vC_sQL7etz|9HM>L)JJsRU5HsQVU&o=7ra~!8fXXT}2RiM7 z2z}@ka91I2KRj7`agf=j@|ZRES2ekoL;9J3TbDIjT`9Cq7B#^;+*yIzj`9ZaY$9?Io=0 zDLX})cOL$G@3I9bDozo5le&LzMF#h^Pp#UZXanKc)8_qt8zh;6f;{e`4unRBfHcK7 zz^gd(8l)MFLLu(J2p7Z|2u(J1tysQ+N$7)#XSz5E>Dn}k`b@fK@wjcx&<|CTl#D2E z2IiYq?H>;CX|%0S8XC~en1(u03g^*WmMT%;(yd z$SN*{sb!K9>C&>17kVjW+=1--ckRhtcUJ`%I0O@c;ev=yR~9Nwucq(q?}v+mY7tYJ zibaQaUn@W_sC`>0eCD>%_iYUd?kb5daXF>@;p&g0^Lpe`yy8`cgts?b%#87;`vdVlw*#+RhT|n6)6rIR zs&v!c>TL*{v3@q^LzjQ*ag@uuL*jVK?dX=;+G^B9kwOJAZ>tlFk1VeewH{5h1juK8 zz#yEDJ~ZfTTJXH%G0BeVDy;tg2C7?NI!8lhrqy#o;=7O8{!@`O{)Wo4B}Z4_iHvE$ z4%^f5vb5c5iXrUdt})QTd!rAv1!o`MXD@t8eg>OG2V@_O*!sf>cfRhy1Sz zffG6^afaWclTqo8tx+S9`$C2!&niyyHFUz8Y_?& z5n@i`X(_Hp{&%;qqhbPrA{`n`{=obp_&TAFX|2*!>3Rl%nZWp!fI2kv6XBYgC2w2k zNJ~bxltI?k`j)wy9Qq3&ApZ(|nwnUNpL8Fg} z0#kW`zmG@K73aTGz7G67I}dr>k`3nvKHMCW25#TP{OlM|7jM98vgzssie4t}(T6SZ zhuDZ#CU%^EF6=+9KU%mT2x~iP!s`y{qB-%(wUC{%Be7KAea=C=2 zH}0JCZHJoMKcBk-^N4+pm^}MnCcwAS=Au5Nss*@7!{PWAY@RgLvD@}N;J_}1p8hT7 z-o126K-4tOtXC;7bhq13VTEcOH@&!_ab5+fR2 zc?TAnDykU)Uk}UtT(e~id>uEMEye_ z9ln)&x8zd{*y?rt6hGpuxr)+Xl#P8W-5fbQCgoERw!RSFynCQ4QMFPt@(?HKF7jy% zl+HzMrbn-+g+vDxP94HSRJTg`yjoL&gL&H8OPR2n`qLy}oF^HR%`INa7Ja;aifC`T z=$0~HW$j4UCv1XfX&L-(Ua3?G6$MeP1@9KphF~&k7KCC(z7`C4$#pqm{?xZ^T0FC( z%F*)K)(X+lG~m;{WW3Po8`MzYBfRU&e0bNcq>g~27YRZ>=_7WW$QCJk5$%J$kF?|! 
zr%($5tfFez)5IDC7$tyvk;^;OE<}1J8KR5qPgg|ufF@U0(+AFCKD-ItdoFt^eb`yB zLSL3oXvt6*@ny!*dD6pY?TbXDBa&d`f^6a3h~>SaQ<)$Y^z2(mOW<}XHd-^q^O?ZV zR(4~LG=7NFFNT1rTZ7?Pr=(Nj-%-H2qOQC0&$ld6V0nHptyLf?V16DI3=i{mqy)T| zQ`>3%4i3!)I-r5ya%=xcrAuL34>%*~fyXRG)eOMw30cKx>a*~}!ZM)fweqV%GCXDL zf0j0ofv!%=$$3a~{hoVuZ7D4e^1T7C9cAu9$DXX9NrJKODo`ZUd6;u0)cxaU310WB zS!qF_g?!%`mhPpxl^0{Av7EiIk@q5#sg-5qUkj{3<&AYaH@73+)y^(q(dYR7JCWgY zyyG^hq{N`}j0TQ5?FUDB{X8E*%nrF8U!Sp1FQ~c*%}f(}XHP$LngH@jSFeM4|4qiTwF02bmm?!1SP#+3OS?q4e-;ahI7UgRB$yh zf8b`-V69OD*{sgo+-Me}=ixK{*t6QHTquS=Mjx8o(w@Q`xo;LKnJFS%HBfW6yUsjh z48QcLCuHc)F(BiiBcR;UO!NtZAA(O}Fl+j82#>oxj*v?QtwIRL2R^M{?tM9M<0s~r zn$Km|wY+!Zg_H;^8mf-;2sNDchibQh1B$)aV+f~$xH>gzaqa+`IyP*KpSd4AjCmWd z11%z4&)~)lV3EN;ej^)gikm*aa`}xa?Z}25F`kN)gFd`@hqn0a92Z}$l1ig9}=s zeFN~iB`v6Mi*})_xrjDA<-3c|_+jJQD@FJqt5Iuy=as4u$#$Iwcnds;&QaPj?YT^*+tz;X+Jw z=HbfW2gs)(!I}IP%=`Za_-#jKc zL~ZzCBTo-kSRa7>hfJ6~I3~_I+*MPOv^gzypMUVw#Gu3$^l7a4=eP zp{nRGeeY~P=EQncA99Y`Oeolv`MO5@a8ZCzL;!dw0vCANXbVOAS4Xmz&*)*ya_sMv zM?ty-^ez^QpM5x6>yOyG&8VMy2HWy0H)T?#MKpeEzH-j()<^3sXp{SiXI>Uig;Mh{SQ$U6A` z1~27rz!*JxE4_9!UexT5=HM52MWE~0gD(<^0f!B#skD7lzXlU@-IcO0V{EW3u+tf} zgYnK9^;ilnFRp=gBFs%&-`(ltj$WYM>mG4wvy~e{$F92l{wj-L&RtUFr=7;eFR|~4 zfUyQUtk=;yHV9A|bcQCC@;s#E~%xu;>28VqA{iCqPkq+)DnZ za=>;aK?giCLF@+bgTKphs-nzhEvLXx;-cAnOo&Gri;b>Sq5u!pTVmS53P(qj>O?A)@Zt zokRe~EiLe_%#8(TIK2N%-_teHF{nHO92C;AWgUqylscy+SjtIbBecCoe&tYS{exZK zKF~5fWU5|_sOv7~ueg*RBQQ$pE^epxz+#tZNb1ZQ6?5i5O$MAO=~DnF_|G$dBJSt% zyuhQFQH={(iQ4k)@aHf~vC1>!DnXLHF8R64CNFDwV%u9%tJ`}^5%*QIqMCZ@M!;>?cWPUe)lI_P9DLA(ED7t`< zl1WPmL8PK|ZLEbw+Ju&H-AyZWGDmZkmkkDlv+cxOI;HvQr>wQ-)`V;=Ga+k3zwR+% zY|Em&%3_(FvIw&LYfo@|L6^S&!ZTLT2x}Gk!(b6HUZ!o~;=Iu`L15GRdT8O>;7=S!=1)@c11o;*v+;fn|Tc)6`FM1rSWDC0xX zdMBqxlk@>qpA^vZYR#jP;95nQwQ_-gRzV>iaVt=}e_UCJ`~AGA-jk+K!=t-yXgjZe zD{0xFm%;gKsNqZ}X0H9?-e;^@)0ct8vaae(JZ(V%8kvX69$gm-$dg)(Ari3=nrMy1 zaM6dH=Md6TUYp_?d$p8V4MwsDPTVSdIKD`uBzdpDvxo~79*#+e@a(}uFF8VBcS4;k zAcUlLn`ViM&M>v%WrZVp^<|3cWG(jJS4^%A+ukl1mq(xpzMYwTgCr?in%AI{{W*== 
zz5c2oIwmot_ny|!*&?W&%=~)cBs=mrr(W+ddHa={t-}404N_uk+My#k@V?eB&So|) zW52u>u`0z)1X0hN-17biV<-LN*dyVy&K`z&4|TfceTga2~b*b?DYQ^S2{2PI6d z@VXsRQ|JJrGP0!t&FwJ{+RB|eVnCNbbCGhJ80ft%zY=(ysQSS!p(QWBYXPt|Ih{y- zs$EkmHbOjmKmxoRiUw7F$v?`cJ47?Q;;+Rx9IrMlwxZ=O%ht`^{LW34k_qGFTy!osV4lB(h=1S#-)mU;X8_T`b+y%tG5=trry8zKDL=V8HX1GFg z`<6*qPel1@m{`lX=_1O~_@MD>=Qt=Bq^5LfTms&&GDT*C=tB_Nlev}QoMqf91XfYW zCz{jTF$L-+u}O#xaut2aNsrLr;zI1I01|t5r3t-No4&+xHyXHJ!peGWs*2hMm&vVl zNjS>$76#vvA6P|a*r2W4gGDB=ygaZpXcKD!K`+7OVC5pMp%S z(OoOQn$N-A(+>|a!{%H@vZnE$%d_NAMoph=pAm#9{_AQ4F#rup1fHo(_kl9mok~T5 zNk$7?!EIj4Rx;@gJEN*=NUMS($US6&&$nXCB%2T`L$61+9t#Wy2&R7I^&2#TS9KSM z?|>D!h`Vd}i9UQvd5x=^yk;j*Pp=Be6*~BH-nJKbC?DD5#vi z-oPr|#HXh#-53l|b!j$q%}(z3f&T-eKwQ7%2hh9}2>_$TefppGIXN+%DH~{FAksbhv)gdnUXO)@SZz*fJN1W%wU^)7= zG#T4O1(Q)!kFz!5SJ{yh;$qPjkY5iC$ zn8aO}cCeB%9n{!~D2wu&mD+NA!mUnFcI?b@C&MQ*#>tJeqkgzsA>x7lt|O)0=nGF9v`5hDIr-9}0Q*I2|+p@D;uK`j69> z|MpMOM_>CgiOYqao}TDwd?Gmt8kQf@7$sdU7rMK<&{YgHzImW`-~KcD__w}A-~IL9 zr@uaam;TKke@B1y2PJ*+ejuVJdiN;kag;O&Nf$Az4+iI4VMc^Ig%Vl5@J}}Q~fnx?UdiLTN3jh z^BH~X+s*KW)>GJa}c^Q=9W8Ea@82yO?fRmi`d8Txrg;z$Md7raUh$A)txJ}T) z{5Zj9vX>Fk<+%j_c?}zDgTnDTq(?BVBr?D5%aWfR0A#k)ZvVrP?U!%gz?KdGPHEj6 zRC^|8HBTA<45mL@0MKi;seu}v3|_xTXX2=NmJ!gt-&EB&%3Urc=*ox+@;ojN)$L@yLc#Puh;zY<7svg49POp*oqgh#soU};%4 z!CYrD4o6B~R$WAvpT|6#iRz(*u$q{nv;B**F3e1~D6 zK@wfWKzCBm{dEfF{kf07N&m+`{$J=TPv57va-^q`=xMI|K$2*jeyHSLNP2OZWc}$I zqF?-$q@Vwtk$&xuo~9oQxd^(DD}A{tV_hEPpxmv9G4XUwC_K5iEaB(wolPR05Gs|jSA98 zvzvl?@d8G68ENZ8tPThO?msUO04(8m`+N?k-1GpTF2_d+JNL`a^P}PeZuJ6yX+hR9 z0l+w)BjhW@1_0gH?4PSPZ2R_5DBh$#PEWfmroGke5NjCKa9C87bK3~JEfp@Hw@Ukn z(6yaG(fQgjk>x>YX_@KxHY%fozD3qe&GuSqX_b3EE_K$=jj)sw;L^S_ywh)9jOMQ> zODSoTlJ4GnL0@=rPw(Emqc@+up|`K!()cuzxC)}jM|%9F-=J^(@^8~0fB8%F=l<4D z(ieaFuh4s6`2nJrcl7k|NaN!;b^kEg!PC2uo-Ts!Uk-G+JkpzoCwdf@>BJvRKg=;c zj&vs^eRw5$D@Xdx?-TvP?-Tux|8%53{P>Yb5)F43x?Chl8fmy*=+y@=>D7lXN!$%I zlAv+wdBixi``soX)A#s#`vpL_Nx&w!g- zrYg*vW!tEbf=n?-_wCW;>&V@^WA%%-1=o#<TT^VNDTm<*y-Ys+TXT@ 
z%T^>9ZH~DRS2XLEL}E4FqRSbVFRFg~+{hqZs^E%nS$jF7-psMn+nTThmDF!RElsrs z{O0RR&`+PEMI zbbo(G?>=}%Z$5iX@7}zn$9GRO+})8Bg2q4kQ~Klo_y0hD@<06&eetjV2z}w}KSCdT z?Jv=bkKQLC1U)@I(D*O~_Ac^5kB=96D_+oh!|O@aarz;Xmsd%5S4rP{Bk7m_nCO4} zr%&{Y-<0&dPsQ|nzn51U$C1Wyq`SKdz4+h-y?Xxz-Q8X2Nf1dX*5?@#O;wijL@Abk zg(R`G#oTUFk~IRj#Rv!8ib7^B)C<<2V3=6D)z*PrqgR5!+h;w`Q9bLn5T|3RGjGpV zvwikPC*(5sD3{nO^tT?iUPDTkx0iQ78*7GXv5x?|M;kEmPRTRtX&F#8?1AfN^UBs6 zsgC`Yupyp|;{zKi+teJ2N!M09_-(EIm-M)^d)`=^qA@tczV*FPls%|CvkH}53Th3M|R z3ypH5r}2rd*MVNW|B7C`cTd;53y~1?C?%1&=!g53z~35=TY`Oq_IGWs^^YZhryU10 z+g|AXmTLbA`PM9@2Y7JUwQ#^0LYk0QP)p}jFRT5$Ih8-##tnGneQzv|TpRc0IuA40 z90V~ZkiwB^pGA8eTkYF%A$BW%?RgjN2}w1@C;kF_!9Gk)I_NvJ=Z=-cxz!^v*~Aq6_sG0=A;TD zF%n6kerm$pOrMYA6a*XY@92a3E4_ICCB6In9liPFa~hwXh^_-&?yn>tpXkjW z{0Y7JXWypp{?ETk-}*~mrZ0c}N9iwq{YUBjANV4@H>Z1iGti%YD(LsW^F+V+hmZ8D zfA~m$`uRZPa3L`eU0(_s8rFrbU4uh7Q&NlYEs5 z`iz2<$+)FHmHl2*w5_OR{i$)Y1VdI?0Bbcy?xwqrURGbKXi@8(Qg4#&MSS74r_C(+ z!HkQ~NECGf_sDx0H$;a^ZV|M43EPKpbw9G5)Uwyt*@JREShD^Ih@kb+TdaxKHUI_9 z4{1yto%=ldxzlz##ZOMh?ep^L<(iCPjTxe)_<5d)kqBhvV^nnmlcrBE;{hQ zRYryF0hyAkmSw!8WOZb!IpNvjE&is7X}9<`u2a{XzP98wK5{#)mR}1FmF3)oEw^yx zDp@Q6N%aRph=@kz*MpZdBdwFvYx1H@vOzY<6!pPopL zk97IyJ^IpL`*Hf0zW!zU!9V$3`n7L-i+<Q?H+gIkX82E}$IRu4UWlt0Aw<>H9-8Kc(h z#rwDV>M_Lnas2dAGGtsG+;eBf7+>46fbT6BX=2+x_$#}1`N6fs@?m%kJIZ5i%lvw| z%Zl@JW7^!BPxWacWtFP?KE2;T<^?d*=OOO=Z1wA-%V3lhxUt8v0kQumSaysL5O3!k z_`U%*fF9-7MDEf2b5PFO9aM%LC-B?BcF{Ms`Ric9+)QSBnVyuKV{e84i;u7+ne)8= z_y4_Ve^w*_C}$12_PMDa^uo4I)IX`o7SvW@Z z*MsMe#&LbK1PA3fuFn*dQqsfQ2YPt>NDpt`(fBwLjnnD5K?stM5A^u%fyB!fL{E~& zhmq*+Lf6ZMNJ^4oBpL)=?(XQt%X_+ibw}cEARz{lmg zUwV1_a?=8UsX@?q1Jg>foL~a|3|!W55?k^Xrn(_N)M# zE}y+A73#|rJFOW2Oxtfn1(SBkLgOku0O(Jnr(Yk9u;z*=*P66(05D-s!3|nH&j4V# z0D7Dbw-^B2^84*Tzv!|n_FP*r07%mC@s;(}^tQ#kN4%~{q|{U7U`8TyWtndd+Q@`R zQcK%Fni2|bDssJhX??R2*X1<>Vx_R?7~O_i$Uymgzxho^mzzBFR6nBuC8vlzac$e_ zbfUvCnRTXD76>7&Q>eX+CD%MDEo%aaE#ENzI)*5O}FD$ks$Vh3&o-gi8-Mp)9=iTtw%A5xyv*ye@b?2-y#Oo-vS9Ry)uqJKW 
z_Y8iBjd}K2ZnUcncf#sK^}91+ZeJ$;iggnNEkEQ{YwvP>tz?P$uH{7I(YM>FJGP%&5N<5?9Ng79lj}P!QL|F|^ocb7PCmmlXJ6Gs z*<-3;W_zD>JL+%Uh76Q}(|1fZiBffO2Xzpp-ORi7uCW zx_fy~@^PF_27dmAo*o|7A0r_l=*7zmU0+P;hRcO$AR2|FQA(mg&>)7X-fN_83-}RD zx~RY4t9`9^em`i>o6;kDYtYa;R@6KZb|}_=GJ=1;47Qv{j1$|O$kXG~DU&g&<(_I`nLeD%H(xS?kgYD$;K(A&EA}&K3xqlq;wr88@>X@?uk$9#)6?(gp6b zar8fCZLVRjc9}KWp;nu1a!Uk#IWmrzH!{jxe0OgQQ10HfAIFSRXEoVR;G6duYXETB z&s6I}HNxoIgUowNryje9)$$ZB*Ksiol-rm+EW|Ue##{Eeov0Zk$~Q`ha30-fWy|m5 z)XQ0nt8cbYpc&1Yk&<>#4;04hWuch^Y(h$sVhI~glfio0k0jA3=97QZT_-V2pAiws zQO?irn}d$?N-U(H;clSI-Aj7$-U}KZMtXSrhU7?e|LTsecXvdBrjvg23CBUqztFqH z@`hRAIlj03qVE#$TYjWt`_*6CvaCP+vDJCYBbTq0Od8N#JRZPx6vW#qSLV?X6NE#Z}dfH8f(OUuaI z`zUQgO+&3W=J;{l2IIb{A=MoxvG8QaohFXeH$J%Nat_nw9=^@hNK~e3z{`cdyf$<2 zW)J0l1vm$(zjhkpZl@)Q+j&^rD$~`35Wq4YI_)FLaJZRx6LTB|Wc3mCYl1!KOH`&$ z&NL{9mapZ^G}E^phrZ4hB-K>Tfq8Uz&Vh&1_PDA z-u4T^hV}U0S}}h}gu_1IXuN+$GqevgMCYf9Uk+X;k0%*%k8Oik#3jG$E>1i%d-8)0et>c zWFgLO*h;xt(_}gkyGr=vSWo#Gf4v+r5w!_%u0gT-j}Ge3Gm&QE^@Qy5)nVncL1+hPr&EJ+mA?kn z2O>`vsAf^OPW#!uL|HBR;MV$?=r--wPmMLX$5k+$6q_hyLrO!I>ZhFy zPW7@1PH4Yku4h<4y$w6JVNW{Wl;|U2ygnk@>e0W9I&JzlRnNFnfc$3?KD$$Zht$Dc zXuLhdoisI5UzaP!A-s$MuI=CW*`)%rRxVO*oauN-^5Yd}buevpYe`cz1 z8SCENnKXXI^y+B4s0I*Zx{Vaulv;GGvBvvl8o{I_T+)T*s!f|CS2*c?Taz{3wLVNj ztUFd8V1q~%l!Mf!rpd5*oaN@8$Ymhd% zV!EI?Hq!~iEr>V$kjMOXSo&KJ@@>BgtoHM)yV#xY)7xlCr+vmF6fd-1vc)GuWkgYI z_m)WFc=xV3c6lY{GX3;GpTWO<9o=Q5v`00+>ac0Is~wFOmM<2p%9M!jQw0QiPaG7W zx+AZzDIuKm2y23gwKYq6hlpu?sbkBnV?E`{i)oLP=qfj8S+ZYJ*X-Ge3LSiq`fL41 z&Xjs6O>sml&<0D7?S8W*ZMr|C?=I@CC%O+(8_#6psA9BUi;X24(Gfmuo13=FZ-Pf; zSuwjvm2+Lvnu(^94-{EH^_%a40#8>oR8k6(bkI@aG}@_pjDwn7Kks;quS?Z-_Kgy2 z0`r<9Tdp2Q(8Vg=S^-<|ue}D=_^(hCw9`i`e0Bb$<~wdGiS0SE>qR*S>jv5$3^;E( z=V$Z@InK}3qjkaAu5Hg1)SN1@mbfV(lypk~PZ)tZvF1|hv_=Eh4~3c_VlmQ&jc@LY z3H~+j^P>WRwJKV#0j-;%j@VD*wneT#z0*|cv@YOO#H{*s;FR2%v$&NHeHA% zIH%t{TGg~g!FEBkCGa+^&kdBz&jw4m3f?5BbEi;B70XWy>w!D1LB;xK3pCnvoo-ld zmRI0Tl-ijAQNoo*Xn%kY>)fe-SiT)|2dF!LxmnOB$^5*x38xjc>+N6Ik8pX5mG@jt 
z#>Np7wXt5dN3rdr{nwd(w~%EUC@W49f%~MK45WEJaDt@mX%KfPT+Oz2)I%S&sqfmZ z^J&ZU9|5tp*S6DHe1W39|B4$Ct2+S?Py7gT7l6X00I2{#S z)33ipZ2Mu4^)+oL+nwp$i;vGDoUuY%ywR4V(Dc&#nzyHgAwR$MN?70R1M&7v3z4h6 zss~V$yQ)qfx7v*KJ#LR5JAJp0!U-d}`RX2TA7zQ1OO9M$+LV3V_$yVnTeHzlP+g1M zCbsQ)s&mc&woO-_{_}wd5;SbtO0b;FAnKtR>uMmBf75cO0zO6d;iJ$+0qQ!@j6-#sueChLE>^iPD)aLNoR^*@8xd6zv;(0reD|H0*cGm zc$e2=80K#c47mXkE6e_n66RFy0mY%DrUF416!7QL*+=j ztzCAye7(I>UW)FgxW^vSZvZ# zgJhz*S0dMc<8_RC*zl3~>lYF7cL`aZ`sol;HqfCL(mdKVLnyXn=&+;I1Tb$t55?X`DPgLMnl$$!1-)`{C=H5ORub_AvehUDwkk_I*n>Nt1#8t+C`F;3lCr8tRle8}l}<8TAoF zlrmYQ7gkkzHgf%^grHNfi&u3$cJnZ6wzC;C>SsCqx65b>5&9cj8AAQLd@Q#F+n(E@ zsWWJ=jxnK>F$Mnw5lJaXtc4A<{Kl1>m0ALclM^9awIFqJTMo0!HNP9{WnY_lgAjTh z?V`=-?B3G-+VaLh9j7UN&a~>m5TUh9%s#3MdT{7;I7>gT`Ia$OtA5G=TYflBRlZ;! z^Xt3Swd{trvF~?K8C8M+0ih+zE2g(s&=K>5e}3Al>srfFsZ(cGK(ew9I{mE!ZZ=)Z zj8$HsESt#Y)jU{F-cS%QbZ93ycPx9|MUles%(!+pebU&xb#2^NALribP*V1+b^MGN zIYT%}KGO244K>O$cjWz3U)xNx-S+vQ2Fw^Lejt?vYB|A}29LCe>Cmsla^H#;=dcO9XG^3-d>l3vwIOJqtjK2ddj zPIKR`o(0CaD|_=c?Pb2&DBlF>5~JrjTZV(i$*m60xO(+x-*vgwUgvPGzCM?>DrIc! zc^wx>M;3XudL*Wgr}?h!)v;4vk;xyg_Sh0Y;=zi_J7uwA~6UE9(JW(H&!BK`OOPY(j_2jBh{#Am1Vhc?! zzY!wk<}WOV1DE@)yWi%JwEpt1`nh>eGhF|Dsx`5s=?sGe>gz3LHY39iy#vo@aX`&x0pvu5OmVUMbdxkp>at%Ou(|&IO=JC;W+FFJk&@`ai89g*fl0y-?5x<+Q-zVrzqdls zS&emKY)X(^E;smPjr9qu{V(Ud)MZgBX{O-cq_>=p#;tb{Z4o)!X*{JIwJg6i*~8}? 
z3yco_DSemkT*T{h7GmmQjX4)DrBu%2B$uGV8G+&wH%e%`$w<-kwpOv{N~RqZjurIH z?ODA3V`hx>>#LaF%4M|_G*@AkIqnmr_ITNLEYr`&JywTD zKel(ECf1^l?ytP|2x?!XdYxEL1vNUUSv6|Rn1^F^bjyfr83b(ED_c`V-zvW~>nXsp zD~f}YWBm$nS`l%imeKkvdv1kQAup9Z+l24gO|oId@jK`FcYeNqWQI!2m{;uuH3=nPMd4r)y7A# zdNt<6qvu$Ao%--WHU^@WOH#(}>ZZ^u8Q=C1jie;4+s7Hwx!=1hKr!>`(oUI;C>(#w zm5_iW*~kd$e~q~Uj1w;zU;K55RD*mjW%Si!_toGaQj@YC@KZk5V{@C)ir5IeMYpQM zxX~QZ(^Qlb7V3gd)jAS;lay?z+KpcM)YE~|jw+&QIYgOV$j@B#Jc>B$bmG?f=xud# z&e?%gXwejY`)X``p*y|m-|WUYqoI}Z3~KaidVAjfnZ)huGV51>%`IJxQx4JnUaN)OQ&>WY>Rm-v@UJE==}P22 z`7Y8{K{LR&d%X-iZ2&SC4)boU*=D;1$*pZFX-gdYJFN{Uxkw*fAEPcIr{@{E4@9AA zmueXEbaA+3KEm=UE`Ry17oK;8avwdsaxkWT&V8Qx>t8Pc?@lM8;`Y`@J-6GCCFR{F zd9zw_LDA`a*(E1YloFBjVfM+SuCaOfTQF(HAHVM7EcxFp%aylnga&TluQ=AAU*C6v z`?@ohR+sJ;pzahhu&)w14W zeH@mFn*!FJ|(+NP|o$pjEd4=a%N$Q8`%2gomR7g+cyVpHWV(r!uUOO(joaS3N zR(i8xqSUd)3RZE8@8XM;ktffT6B{wR)aAa~7V;Cu=l+%1cLU43$K>rJug)!ktZ5vr zm!N$+@35LsPh&saiu`siTdyPgS1kXqI@z8-J*ZNC#XWkg*cwypKKJMDhJcxK_j|&4 zpSqA%xwRj+NHnB{9?z++jMAeY_?;h4?rDYO(7nDzZ}deatV)&^i+i24#>YIjP?u{f zqL0>0C&vDJ87uBz0j`8?Gj=|X?mlMg6;d-S=5{tcXCdLavz=kLnPW#*dg;>fIt@`u zP$ae!Dv1OFYD*dZuh=M`dsN!C(qW5ng_($APHXU3sA*{|1XiMGQt;TUNPY>S2M4pK3-`)h-~w9zSJ zqy5yd-k9sBjyc$uzF>bD9T2LG0#yXO>2-VQaJ`6U240mSzx7_8vTJ%(4`DZu&atED&?GG+1c<8IE#nRgS4@wmtbKbF zvnzskqZ~4K@Efp&&Bm6|QSVjbOhJ=gY^0h6 zw8mM?Jg#c8UA?L~hfKi{$$F$m`wIs$dgd&8=EEL(S|Z`Oc7Bu?V9*o()b<>tmFUEolcU5jACe%6Z9m~(qk{(ez{2M6J%9rpw#eXpGTje>G!4c&kvuACFfxwak)z zU2dkd-jmyj=j$%j^u9oUG-=O?Zn~!QZ~(h;`LXSr9@r}rWN979zZQ`a!aP)AJEBpY z(oJh_7~}KHmO((Gf?sp0dtUYs^Kj6M9ws&)_B`~sm8z*z;8YpMyFGD-$Ed6xt}y2T zuN>RS61o1vEuHy^q+p+#s7X)QhkK^_`wq{vB7VIH^qWPXq}eu`Mb&zoBOR=+5L0h! 
zFetxxE;;^1Ti>Le9YilL9@SbO42>?Lh)#?(z1w*{->zKM3s*U@Y+DwW7$MmI@VL2YV0OM`1F%?@%@+AZ)Q%p`&g)#I0e;Sa-?0A zlQwlcHBNwcgj^n2d3nP zITk}6YS(ox8XbBYlAe&KF_PDf<4k?w~~9 zjA56TIDYr>$=&7Bw!RzlHif)8Nb5`OgT0w*XA)fXYB9E@@kWZxw1%ps+uO#WS%1`@ zbbFZAL#Ve$EFVeZzZf}((UUGSnRg;UESK&=Pw7kj^Q=QJcdXMc$Xt5JlshqAAyGPq=?k+g4xg~)cq0UIFXd}@I z+wW3x9t)Wy|D)}7l9zW2PP*soV&AzcC^3F@sbklzck=r{ zquhUNUQHEm$g%xb9@Au_>_kjoovH2&2x5OnhgXO4V{9Yd2HLIx{L)&Ew&NYjI-IUv zLg>HnUAqDqWu2~VJ14I_Z=^ToYSj38W4EUOm9@2as4$|^M_JHro8Xy?TIiT(C*?FO zZm(W}v85#)Tjff;aT0H;^>Gd;EqQmHTgGTJ0)W02vF}REBd*%JCHQB_mr~W#{=yxC zvr{7gqCVKh_PP{?Oh*yC{Z77a{SM^kBEiHWJ&K(NA=J7O4 zx!WI=pYg*qq(0{F1TC5c0U4znz5t1eF|!@tiyK-uF#QTMznVP1OU*L#TdUv+z=9U5w0uj0 z=*jq=JGJINw%iscld5z68W7c(IBO#JZ&?d^S}l{?#-H2f- zp+@zsq+<`zvz=D{Nz8o8Hwc1&{cwhs&9lJR?RG4&Ehv(5@k={Hk17Z{c%vTIUT#%8 zZ1Z50Vy_4*kvY?YcrBWur{*ro z(PJdPVG{#@$vfS=Vj?)YYwDADQUpYDgeAGljMJu&As-_P|Ykv8^-Plb&~Hq*V=a%V}~m26N#(D({@NA@+qK%d6(Xg7()(0+-|vCU#vmCbX&-^qDEF&sf3C*j~Yyq+v&kRM&~W8 z4CPv~k3LD!<1}B6w-}t&20eyZ+-7%;cWg_B>bJk1eu5*_x8d5GNVMgel#st`H**th zz7DvBHG({~+Od-_>1}ksh@E0)Zp7RcE$8<{deLL0_%mCK(5!3z+9&mW7;(KnijP!5 z`eG?8-oLPu-bJE1;j^Yi%G$lv$6s2eR<%?n|{?D{W_oKU}6el591h z3FFHt6TZtCi_JG!iOOnOr21h-8?v*OA5Sgj&XlH>G@sTisvtMg(;Ic>(zxc0FTabm zSsSC4&t30*>5h7+b72#Rtx72UbUilkSK^Fl%KF0}Q-|9y6A^VFO}g7l*AF}8BB(-J5?eO<8& z=F8)DUO3(AfA&UcS5O##3*zgyl5IOM=<;)%5G7^6>j(yU|$fBnJu}z9_d{_KW%~}n*`o{ zhj_WZi*`39DFVUQC?J~G4u|g84lcVDsrB=>&8@at@W$`Wm`u6*tvTt}JVn~nenTQf z&hajx*nLuu8w^^77R};fJ8XkQ_I2D(eAaf@jk{jfkXn5ccNVc72|nhoiyayl?zDJu z$BnZi)`a{r^V_R8cPBWtTc(}TK|pJn#c(KJu#6JR>i}&okvh(T9w(lZgtAj-OrMty|VglYmgb_YS9l3%E?+FdWoRa^JH0)*Lf>X z4}b2Q;NOkWeU;8MN0+y{{25Xfnp(Oylq7lx!=x5j(>*U!`+{qlSpO+TUbxe5IZ}O- zP-+e9+qYgF8|QAh+MTc{>08MxQRkmU-7W079LUYT-k#(4qA?n5`>V2W_ljIQh>=_~ zJ6mMpJYI@e8^~dsm-`!yHAs0b?v|J6NH20I{v67}IO32eC$I<>lNgixOpPo^Hi^vl z+bZ#a_!bn>!;D%)nP)F;kq);8wSOp}gzV#f%%I+!cDgf3$1N<$BF{96&!lS%arMfK zYV0D_6=XVRvs$DaVKPre;$CT;8Q8>62p9jM7#bgQQ^THYpA zKR(lu{avi=evCGOBLcR)!q8moUhtAv?kxB3 z5NZ|7nWS@n4kvJdkFsBR%t!E`5_AGp{<(cE-6$_ZHM^G 
zeH4d2z5{LVxYW;2Vw59Xr&RqK)Jw*O5UU#5_iY)vN}}}`byxQx zyHTV(TGd+qR9+-il&G}hahi)K=$cYBVV6dIP^qEpXhIO!Crr=;y6#d5yG zSE~V^OUEp5o~+r`tyGY~Ze5k;hk(bvtq${{DlCmnEaAqM^p)cEDk-qkQ(ntRk4;~* zAFDrMx~pCFdFFb~A&KPLlsdLp@WIE`%w(s2T^n%M<2<%!*EClD()H!b%iHnu((k`H zRIZGev2fh9=NSj6SwYC1@ zHp}4?-m9Sq!E$Y|erhc$AgLL1)fh35+{ZF@Id(lHE(51Q_n-2n{6p?G_ame&!NFAa z6z}nzvo3L`9zqLOX%1}D3f|>&-QvX1e;fI2vZS@f$m+k{IPk(d?aA5j8WPN4kSbUb zqk^SQ-+3ZY36)K0pWbY?H+>e$=;ije!iOK?O7Z4B{%~{RLAbhSPiwO86RBU2#_Iz( z)N}R}9yMB&Fa;}pQ!3h6QWfhR<|@NctAW&CN##XK(B6_I;RN83OQT~EX=WY03}7&k zW{Zb7s1<^Ly*jCJq&3VDBk{IJf6uk+S+{dzcWR$kd)J(SZlxuUsmE=sa8cM!i5=Ap zQ703RR5ZkadPNi+r)Am(u-6{ia~@5@I8N3^=O{&`HNNL5pe5Ze`jr%{Snse|b>HYt zr1~P}=sDxuj2<}LGw3OMWYXSxRMJdVU5}pR=(L=7OCL?=NI7&}>2_?Cr+7O*qqdtk zZ-x8R{6MWZU)8$-HD~e({uNUn6I7l8GtUT^%GBU|Mu^$hhO*OYF7G-K;88b+h*HLLeF5CauBY0ceZml zsq;w;9klwvUx}$t@b+2)t_NS8odb;^RP-UL9oHVoB_DiG!F<&pVpY!vUa?{T2#8;K90VT!=p-cNBY_b{lZ(jy&er#6k5oPZY)2i-=5r5G)l_>k&GMbuiV} zp2SM_ILdVz%ucloBw&-y0HAcdZfDBp`zlgc+oMnarJ8Jx-Ypy+UYIadldOSW%#8b= zCeKZ&L+0h4(o-T$xzgH;ss+1;&bI_5(`I(*``k`{U2Xq@_9;m=uv=dD5+AVFB%U59 zz@fuo!3z+kbk{U-8xFK5N&2I<*6-6L3hRQ5_B0<$u>L3X&sp@+9x2G}594c?{gI+< z{y6OOTZ;8HsH2M^m1%=!lI!x}7QkQLxr%BP>R(FL58VbEla;6r?qYM=t8`oM+qFmH zyU<;)BIaOgE|*}&(OX_(SAli=Ew$DV?6`#7&VV?oR-+_roT4GUI)x$>P8uP!@GTUaYF1$bM zIEJ=o?h44MU{ue;7CLi^K?eQyW%O8*#%u1*z?QENXWW?N&_<_P!N1PNLbkuuzCJzL zWN+PCgxtfbNfiA$K~=i6!yIDvT&erBlXG)@ZhyNINv^#D?5hA3 z8?k`7WNBBZYFjmK&)`y?)7tmSS;**w{+6B3mggj96BPhVd)9^hlj`kLtbmS8|I`t7 z_v$SKZO@VAob(Glhe9sTa1e9%nF2+yTdVsTnFX|X*!Eez0$7exy?)H>AKzD%0uCJ= zn=RF&^+GepejVm`a*lS_!Lw5NGcc;){xZ??gxBl!?W0x?@HJ1UUthy!|Lu-Q-K6DJ zJ`P!@9&)Ev-jBs*cBhY=wpC*=>>nA>j`H;p<}24&RrX|TdNIqL-f8Vv=8NCGG)f*j zyUCJKm)hkNmme*#oCd4_V2wjs)sdyj$#LIqWALjawJCRF6f4}7eiw?b9Lkc$-w-`H z?H+pDU--DakyaZ?oZdZ4|KB>SelVSR=Kwx-w7BacJnwFmdVNvzA!vc z7Nh9sG0^%XOp`BE6zsTT`X{H_zB=JUUV!>pYm4fk8Ln)1lr5za35!%OeU?Yh zwdh&5T;c~qs#({=FTKW@vTCPzN=OY=wt!k#OPNHqb$Y9wCIA

O669<>Kqs^DVd7 z#uVD07L+Qgw-8yW`{Kt@@s#@km2FpKM!UIAQtl2%u(LOzO1UEKG*G$QTs4WS*pYL? zzU&U$m)k(X`!-6JP+vImMC}1M^WuDcaQ6YTb7CvWqRhk5`dbKlqQ#d;E7R+@=Y-|I zknljE*+Ku-GQL zTh^rOpE>D~+l&qVww@LSwc$F27fEoKVd`6hjB5aN+p z_HexZF@$Pro0QTKJDH3O3FA4gfR_1sDj@-uQn@>aR7r{DmY)1eId7Vr_%Tq!etKFb zW7f6!wvgH%cKW91K@|QeE+cPl*)!!^pw0}~<-E$E#NJ~kBp5_bFDi6F2&Hh_UssI9 zOtYqw>j&%rU`6sucLu0n{+N>>U3)Cf9<(rTZ3+o=w% zt*w`OyXjv;ogy3KTCklSPohej^gL1?KwfUFHEEACZDCXj1p#{qIOmnQzuMn8YDcU7 zY;n`ZzUAIN^#lO*Q7s&w$I$yc(IP0e3&wh>(_+;Jcd&w)8bmZp?>1L@ap|6d6}q{$ zIVFXAVV*nkrCMg%R=+;Qy4ucqZ6$5-sC|Ijb+fUAA~jkkEySKfLjL|2qDV0NhDmtv zlog%~!B5&PvBA&M)onjN=af@BuaU+&hxKT1e4T1;_FMh-po^XYQ!(eKW9iWX+tb>o zIdiKY>wEWvBLGb1h`x2*1M9fNH`WQ+7N{2kQl7wM>XdUHhNEZXLMcVKVY_4w6P*r_XuUP_+kb%1IB&UxZEnxOa1EV(M2tnVI{4e3~}7 zq)thU^MajplAr459@$z{%ey3CMtIwFTcI}3Rn}BPxGo%T&S`gwGTYe}1eC{*7b2en zko)NSBkRX6+iwqrt+j^n_hCjF+0}H+u}N~}`cHjk1*I@_m1eRMW2_Y=#`4kgflmBQ zTEG5BuW3XytC-u)_HV`bZ*RxfzDRpTR&}rW@?igEux7MdwrSfdlhsa3g=-EwrBO?L z5!{8?z2dePrvdWk<2pBeI338;-Bq|64XD0WmXY0hzLm1}AO^opxTqRy^{b$++Jls~c*5(y*Ael8UI{;+7{W?$#j`L^`f5uWxaam$C@-^R^6 zG0$a^<`aPZxSuM0YL!F3XNU?(W!#u+v)$?7IhDZ&fON4nD!}c%WmB9%*DXvEAV5NJ z3mSrJaEAcFEw~QuFu21&f&_=)!Gp`-GB7y72Onf$&;)0Y;K5&>x9ZgW)cFag?*6v= zy1Kh+SNHC&?!DKlo-JZjfKb#Pu;2#rf0aF27y7C5H!-TMJk!HZ4zD!l>nDjA$|((@@2GS^ER{Yhb7clt4UtNCyt52QNO@i!RIQnWA~Cs17vGDZlS3c(eAlz9GEJ(J z%5@~uF`{uOm(#SHxH(+0Td# zsi^axgyZ#_w^U|BUZRN3^RD)eR=ZbHcgxWD3q|esxtNAVPl0G{^o16Nkwp5F4?B_rZ9Si={7pr6a7Kzc!ROuz95g zvKX&*_<-i;Wkmz%N{s1SQ!{t?2qMb= zE#7Vo5NuS{)*qIB%`@BvrTeHB57MC?X4yNiX)CH5C&}(RJJ2NJ25??JPw-_RR&H@0 zi1D7WRh0`rF%vrfP{Y*bm>yQ)UkN7f+?~b+P+N?UX4T?p$joB*nEWYi4`v$L3G0$E zQe*KT*vhHuQp(a3?y3uT_ztMt3yXNn&ckFvcQBh{DXdi(Ru8m8ic;q^V#kddGq0N! 
z>S|dZbbI-JyVzJ9k0EI4eo3wHu-KBGN{y?KSe>IMJ}@0qs-wMU&-bpdZ##+Q>rt8B zoDi7{c9)j|RmAZ7J+$>x*CaMC9^t6+#3EXdfgBDA|4x*3mPT7{eu}9VSvy&}(_vFx zkR-*_T+%OAHpE5DBTn6Wg)=mj)$rU0I`hth!WT%-ua9456sOyDBJzrKtR%+kc@SZx zpl|CpITuro!Nw2@QJL!umg+Ew$j9AZ%L_5}TiQ~M)IabO5JnWgE-iRPdb>Mvcx4jm zx+DUA1FFoX?iMPad-~wQ5zoX7LEP4zOfkA+NWjHy-Ry!Ao5BSlqink4=BwlYKg(Yt z!XCuc&Pm}HvS{;uTUFuF@y46iA;hinnYNfu`*;{9q@j}nY%x;G^Q;l) zL`@eFC@Eui386$gEE?bT;*w91){Rm{^SO$gjqHqm1l#un2QkD)6;p7s4Q=Afcgg}K zW)r51Z8I=UtClC$;Iuo@X1s^A72J&F*v3$4kqHNE`u<{9A(4pLw;CrNa$b_b34Xv! zAoDC4Zi~SKUV1H>7)RS+`ge8}+ErC%hK9KG&Ac+A==AsLA@mKdvhRz=lq>u#d2QxD z{CxXDs!2rLjiFV=<--g41^;BG?Qe7`?*m38+hp6d9z~NXBjXpYJw!gvSw9-4M$LmA zSV|&GN{DU@Gn`cWC17w?A+A5-eKms{`}mADaf;y5N#|!8(*l;?re)sjt;>s5{OAdk zB3f1*^{;7%87r+QV1#3G=i8{bqo`I)Cr&r4^aor8bJD7Bu*)_od%im<5yA$bG`!Ix z_lvK~NBGeEmxj+iFM-Z?NV;MdF?O6!W_AkSO84-O=sJ567|m$<2v_DP6|}Os-^$j^ z7A7~895OX66HH81egTzjvD!<-u$WN^v^Bx9ry6Swom)xbO?(#)5pIQS#&O0*hpoW) zrs|Sh0*F3C+|@L%79km|ecMyDEXlsSW^+nQ~eGM}z7yH)MENrW|2LJSjYCWg9$Pca5R(Ox}oAwu50^#PWZ|x2i z*bgFV+k?vkmZ6+hD))-Jv}KCi^6xAl*I8nb?eJVDT=R*tYBrljA%yTm!g7Guum=1G z!xxjDw|q-9Z+trf z$Rl#^fDt0>3%gzMrk~=UyWTGaQb;4<%0@cT^ED}35mB~VlyT0UYs8*X3I2@w$~x>nq5M?t#>K%DE6(ds#o_S6{v)}< z=Xa@d#^P{2*zP9Y^UDsznSef-8LC;BSWESBi)-qh6M(_5wbPCUYzyIMoTWJX2t2}Wvw zgG7fZgCT<@4bGfq0uT16P0#9-q3Qiu%yZs>C`aVfeE}9Q|q0Mvnd%q&Aq}|>15U)X?D_3<~1R7 z?2V|-RI?wa$+6T1i&yQn3#7llh)gse z=PenQqw*1tQx2!>J>$C*qjAN_A+zacK=qEC^Ckz9w8kUx*+McKKh6o$*;d{KMA}Nl z)EdpmZ*^r$`E^FrY(a8+TUS|T!0S@K?o+$&#u6; z;%v@Gs-Krmp8fJMs!X(LBRx9t_`zs|GlBh(7LTBS6Jjv$5n4p~*ZXRe;D|l15Y-F( z^)dO2>CV9o9|F2F7$DuF*L&T!BsFx7x2U5y3J$}r_N`9+OPWRC$MJ)&x@z`R>(qU8 zG`1h-P}k@lb|ZUT@^W0mZ#ADsMl*aF`(IQC<1pUy^s3&YAr^yQ5nE}fuB>+B6dp3 zjAM6QT3i2k{&C_S)crxN+q7BJb(%%7+J!8eDCJ*#o%Lzxmm{TKR)ohpFwAzFK2J|v zVH^u;_iR61GhxH-@#d9zvKIlx3ysq$BGl1v!&4Y0lBA5fs*F%P^B_S5Z!qvf2j;Ft zoQXmZ1&J$TdRWPlI$0%&{i^uI;(rM*&c{f8OB+(~*pwp82)=zjs(&&rdAc z&JrDL(dzc=+laoKxh90qG`o1-UNzyr&(mxCw;2kQmhw)z$fa*8NegJWN5$ybzA`|< 
zb+mt%6ZDf~36K0F!m-cjiF+exfAv=P6vj@2uFB8?BqH$@oyWfE2fMYAP=?oyQw(gs z89Xp&vS4tH{o55kl9hYpre3tmqa%qy&VIkUD9TQ3ag#WwzDm2IoSS?d|0)F8^g`

xwpah&L+L*ti8{@Y+pYnH$v@GLCwpA1TvI99I%=zOr9!bL}+0i?Wl3W;$T!DRo7u((Am7nh_?uVel5cdmkT zqIZPwK?q(~o*7?OF~?W`K#`qo%QoH+8<5{iECDC^=}Jt+rS(M4?%$eZ1*x;;!@}^J zqjH;aTj=Q53rm}Y(4a*b*vvdbn)K(o^-FKMyfgnPKMk?e8S_x1aj3e8p_x!Lp;l>i zVTl1DnV4Jd=aRaY--SY`p)0JGDkG^hv4Ms`xyBlnmLWx|6Wn~1ZPhJr7bA1R&}{<< zk|eW&D8*gwe0J<#V&&l1VPZzypUKSbi4ml77CmPakxrXka;ZO4?3Ivv(V%KSo};<( zg)?@geb+6WM#ckU1y|zvySq*N_$ouGzrWrC%&)eYeTua2N$IkVm77E9gF2NhQCnJx31PlB9H@1UmCzN?O+pcfT76LHmUt^D}m}Gd{rZ>u*53 zhyIIM-q7jSk9IM=x*#!)2L6yWZKAl8}O*bjmC^;V_5Oc@?s9+^W z9&+gVHGaOYh|s+;SYlMzOq+DJwT?5*WwbHpd649p-lEuJ{rwWsX{6SnCfqM~QYb&I z1bClFb$#K=EahvA^Uk%OOe}|20neD!DE{KJ*~iYl;7=W&pq}>~g>Y`tPyKf|Zu!j9 ze@;H#jeVheU)b9&MN|e*?m8ztJF6BJ6CWaUvB20bd($l7j+be)8V+zF<6Jd}-aZ5F zaUB-UoD!t#EBxvBYF5!%Sc^!W0irso4xbjSCKk*B(wgO2=DwXjp*8#2^(4^4&H?zcrj>#6W zIgJQus=lM6N$_p0{8UjqNKIhjL0|SE`g`LbiI9zMKW{jUkz5OW$GD!Hg%GSqrbo<* zMjdJ(^~wIY5H@XzZ*YJh@k*u-nA%6FZYYUaqN=xkE~R9gLvo(_PFx>k_kQ?rwl@v~ zIV|oFp&b|WwtWV%!qj(@TC9OYB|c^`j)z97F8!f8H*q@Edfafq=rk5@QJz(<(WUwi zv@VV8zkCB$3DEf?a};S)D((*^hMc>xhRoR6$+;_d=mj$oTXNS*N;*mV3N??V>WwNj z=8f5ZI;klr(21{m^O1N*>5ZXA6wU2oGre)Z#B)g&1ir-X=Sq`&`4jKWc<>#a<8zwk z`D;0~nVI;V9G=om5|k8%yLYLsokRCgC9M$FE}I$4lZt`nnRlgH3qKTK7@b6P8IY4I zDfn;bR=64Hxn0?YGR^@R*{Ea%+qa<_|bN`eaa%`=a}&%zq) zFOqMaE(?i<+TNlQpNUp_Yb^iZI;K6iCv>6c@Y1^WW#=)gscrJQ2Va}X^X;T=yfSo# zZ_NM9k#ui?mcS=EqHmAG8F@NGMhCEzk5cZ=Zc7^l0HK5zS1g7Td+N$G{-8WDNFk09 z_AZAHcgLP@TQu;vDYf`k(e^q{@Z5BjOJ4lN_jgy{SSWvq$VcvjNB|#Uq+14mRz^-m z&lr?9q)hi4W+C3{3}r9Kg}|-ImTH^dhP9!m^R)m7gN{q`PW{^KiAQncO#?2=v|9bO zKc6`@MzM(6z~&b7A&ovj&}pYux5S%88r+4GuIFgFSIA#~NH~9-> zS{0ueQ2*i!YfP^9nURKNQ16~-)YzTs*mFm<(5e_J=!ZGZC9eB&mZR*Jg<&ph} zBS%excS>G8+#>=Ubq1PC#f$t{3x%1ff~P|I*V&{;=NF+NPU)|g6c5we(#-^;G$nr; zQ?^^pRMHF~)-)8mvkr+vZi`=whm;h{nm`gb5m{8wdHw|14;R}#ro<$L5SLqGiH`TTbp3Awp$|77dn zOhNW>A&HgR%-`VTQZGEAui3ln9M&NPy4haVwx4wL^H=xV%3!kC4QIL*EnnYM@ow9A z<~4P`(fPMaXC8gp$@>H2)1G@20rn_3Am?~c+W~G2bvE9-^4aS$ykeya-9MvsPtIb3 zm>9o%qW{C(!rg*io%;Pn0Lg4%IY!jw^{PTugurSVfi!b!$ 
zOW(d}OwvQZP|KssgVHV31b2-LTjnjPgNt_5A?StnR-#!fuhy8;}Pg0;j#zOZZakX{J;XP;V>F}$u z)Snpw)2m`N(xuDqmTz?f32dJkmeNTaYnA-d+JhOL`Ll;C|J=?nDOJnGkAe&;-ayX16{>O~|!47AwwsOp`ywf7j;%giSKaZFw+Go+!KNUjy zvRe#Bo}*O%<#{mQ_N4)IZPrDF$=kIxi}L?Kv60aHhWi7h`KX_M_}thSVbm;NdiYwDlvS!$?Ruar zQ5-zk3y>5(#rc&E>Q60Cm`))qYLhy=ek(nHbCb(GHB^`J1NvuJ&-Kq4&ZGdJwBuJ1 z+{K8qKH1xEu79(;($N*f0=JmJzvw+E83jU+$1)5;@o^@J)Q?J~Ij3i@HvB9((J?fb zcb|Q<@UMPQYsh?K2ObFH6{B;obZMb#Te;gv-xOs;K<$$I>T7IW6I={vp%2-FCcIL7 zW>@nx6gdOX3N4?k=UTVMIPb}|D0X#S=bg+H1&_g|8xmzi)Us_8>(47XPe%+gd8f+M9 zhS>b-XY=Lu+t0}=3iB25TZ!Dbq`L#n z(-nKo&M=`kpz-6vYhTu^)qOTISI~1s{SQ!pa6^NZ$4n{(x_=Gogh(3~_m0vM`WWRE zYvRlgQ?}_+dl`#WI(%;6LDkn6HQPht9*e)>I}Ln}3crd`J*F0eR#cZ@>Ww<4{E)#- zsD4~@4z+myRYkzA;OE${8m(L?BWDv8z9Bq?^_%YLyTEQq85hQiijc&Mxr%oC;`v7o z!8y4{9L-Y7ne@sohkYVld&b(~oAw^waiBKa(ZF?CxdHVKF1mFOy;HO!s4 zw^&H(uM-sG6^Wt~X$?eCe7KYCtCOTLlEf%@*f=E(zO zCLXKGKvu}!8G8u3j0=RIk+{Lx$Ifd@pR65loadFL7Yxo18)H-+k6M9da`BVv6}de{ zAW6&-77n@SZP)F}m_e+EV{%^bUYwK)CcD}|CD=tC!FyYQmptr*Lo?!##8A-hI@Z9h zjqU7T^)QF5=nh2A#>?x-BNH*!ly82{OTweyx92)M;Cv_j|KxTSD_0RE2djr>h7%QMc^M*caF3pchS?=gmW$;NP!+ z6U_I2w#zKfV)R3AIhk9)cNbX-w&e~YD7-RU2!D{7(d{y!)1TcgQ{SrFr&p7)O$F*^ z$J0afpGJ92L4%wXg!O^*7gB{)t{Uh;b;^Hn#&k!sq%`eMm^{V=;HO6S^1d-NNWbwa zMQUw#KSx0^yOywN`?1FyZCTdrIl_fJrO$8kmod)(&$9UjPKb)hOoER@9>`Y;g1vbT z4hPpH=GgfQn&fjcd4n1e73v&w2EhhfXPCT8BuRi@z!T!+9U7Wr?E%l96A6Rr zMlEBVh6$#*$@{m(fUzWQ3I~tT_2x)Vkl7^y&TTc$4Ww-&(M>5=T6t2Rixwe8xeDuK zwquN4=F28=9)zUCs|7ZzMzY@Lt1kb=(eI8Csq!o=s3 zr{I)~42K>*H(ENZ2zcGaPCvf?gQ?q#&z3Ycl%;bdc4^vy@jGk7DvBF_GcDb>_G~5^ zd_!^#1)A@JuE}jFQ)%yl%EIl;@Z*h@lddf@Cd^Fn1eL52vbPqreA1`%?aeSvN!uW( ze+rS5LsjARgy*uh%H3A|Aml2bRE}KIvty0pVys~~r!N0^!)9|I|46FnAssv|isZ!z zfAVnpwuhb%6**1vZ;mlNf-yM38)rLcS0O5$vmW$J53SLY9CSGixEOX{TdkdpoC2VM zNnBv`q);J%ql2z>CPK3-#&oB~JZ=n*ClKxH(GbX~^s(QMVd7tOTWX{>LFCI))gC!- zHU^H7N8jP{5gqw;1;vFlrF3IJj6YcRw&k!y|P{lfGnz+z4 zyn^1^xO?M%5T4Dn6TksKWosErf3Rm?3A%P1zvdAnDKg9J65$+P;%~39Th#s2m_N>& z0b1w!`WZ0?v+6lDvC_*9$cxO8@RftO{^n#51A1kxbn#A%#Hbu+iYQMz-zz|`Tebn~%cR99 
zIVmK&N$MA}=STr)cI6r!6E;m*f8BW8f2yJ6SCOo_XEL%%rfw8x*DtJ)l;apIx(i_a z&@Gp?#ZjX3u5I={o7nG?NYHaN9?F@9s$i<{JCGmbpI();$`Y|&TH(byfSxBE7-63;;CnO}LgzMx8|GBzoB=HkCyLWrp*qgZ8)!e#Q!Yk`R3Dt4Iunq%lUSe z#p2N@$iLuds*SV#aAW6d(?@*~Pfr!w&Wqi3U(@Alk5dM(pH{tG$S&AI1;sEHildr4 zmZfAisac)px$R~u(UDy`tvtoi&*l%^rWXE|^G{J{;W=S~Sq!~SS9MuKjWd(=1#fg9 zcTD0%R93g^sOPSA+;kynKRN8({h!2mGHG@7C!iLjH)Wa|x@~voI@{jMNTuKAhslol zs@Tsf_sEe4Yu6oYL*li;+f=GP+;LWTViHA{ft4YXBN;?d9xuT{Y6pJzR%#FYPSN*6 z(uU%<8dzRD$*0SqQ9SEPDGf|#HqN5kRmV5*u8guo+btyT+N5KY9QB1OzOR_dULfYr z%pcj`-w(6KHG{b;)TDoik}R0@Z@#9n^#*X@kD5oXGgYk~mb!KJ2$Bx{l^5B&q#)1d z9awybO(Rg!`s)GKn@vAy%982eshQ=t-HfweBZF0pZSCG!^p~ z3reccn$jIpV86)Kgu1`Ujt-b@)#_i}N9PmI2S@l-)#VbiV~@Qe zn60oi=e^W}Zr-FYmC0ux*fhALFaq>PzdHuXA7oq`Q+op3P`|8HGM4Zj{u-AjFgn+r zC znL36O3|r9DK{9`i)stFWnO*1bj$5@_maf^-cHHt89Yoq<3!NTlUFDZy8X$o=K}N;J z8q{=`gd5!+`K}iAmB&Cf?MQmDr_@)SKtBD-4#Bgsc&H6gh}N%k`Yz}8aIE$ya?y7F zu@qdRfYRs#2|~Mi4#OJn-b0bSTAzyqqa_iWZ#LJbm_Xxc|CEi(aTVZzj=$+Whsc<3 zPV;ZFS*|NjrQ|~(bDR6%v73PCH9tVLms<)PPQ8*A0+GYgVGX9YzUaRDtYH*a2GnW`F8F{#@>CtWh#0!d}SA*&!MloKCd_ zsu1vJ{N!vvbmdZ_-q48+9XD+0WXKQR&^l7alH+*2VXq!FZS{nC2)N*E-TIwaEdw<4 zK6BAUzVf9SERMy_f^H!h>tE=m*DLYjGc*Mu{I9dvCtX@W1c<8>K@0A2A+BrB45|1| zD&y$i2GQDi{59;b%mC!7rx&qjw;6_7O0iq`zl@{NuR6hOoI#e8qjKfL|MAsn7b%Z&O{? 
z?gA5o-^2!8aEmBtoerv2)>oxU>Hzf?W)$-Q#>x=f#@rXwj<@(nj3YF3gh-VTRoP4u zYMWN;r<8MxdNz~zey07c*XpAUZrOe>r34MaI2 zDk#EFG|L~yBsG>yZqAz$nGvE5CQsL2X_c|AZ&cPk9T&fyJH3TJGaPdYX#41ovE?JxJmyy36SU8>F9ze*FQ znE6-W5v|!u@;@6`Fr%~YNwRTgxgtt+Dt3?IL57KNg~<+NtqAD}x1Qnf=HeL*O(Pwr zIN%`1u^)xJ8-qoo11CqwobNVyC8`(3*5T_SALr5 zJ}slTkgpnlvqGKD+9v-rFEx*x16JPDGhU%!XMA2OE8d>_^jF^e0&do`qbF6KVwves zGtm2{*^YXL`)AZk3&f=$*24p5$J;C?#1Oe$vTqoCNyFM5yLdn*yaaG5hUo;2iHh;Q$7~8&pbBLJ*)Lvz5Dk-~T+0H(Q4@;TcdcSP{3-!@ zSUZt0rctkk_?BL|Z#>cMq}b!r0=88@H7U2$IZ>xKKX|DR1RoPJEjiVVc`{8^=rBel ztKJm##agGrrt>tyGpMqv)A}E7`KOe9{M}E!$uQ^-8nIg zNBOK(LYDSQo!4|~@3+9VJeIard*n+sH{TA1^#scv|2nbzhh~}zkG2*B3$5A6(9-We30HP@UW3B1;K+ozrtcAsQK5mxoTD^<61UmR1G9a|AF6TyCc08AaxVzP|F8GVQx_-ytf|@{Fmow22 zAY_od*fW1Uw%m2{>oV^sls3F_cNMjO>g+OaN)+9s8tDVsS`(K_U^xlNk6x6z#|>Z+ zEiSoj_NzasZf(;-$KH+ec#*d(n30e^evBDM3);FJo7$nc-c&9WSy(ro+OQ?z%y_qS z&Uy8t@eYD>5Sez9`e~F-Yx|TFdxiyY`R-ClX36K>ID2e_@jTMoBSoI(A_b`0uIHr7 ztRLU$pLsERI=W?>SJ}NrMl4TgCUF42%tz<&5P~0ceP=bk9@}v=NOxUp*mIeg$}!&~ z0Y3BCG`f3jgI~FL>C*^m^_MG^cq<2USXqqN@|AL;K>jg;@tjd?nCAy;IeWM0QVcG$ z_NT7~7{S2cWct)$j}?xlM~|nJ*DE=c&A9 zX{8tCzQOZC;n}Eqgckslx6Un%4@~PfRD?aw&^WhX4%eggwOjqn~W&!-jnE7?VgXu^f3oeXgP5k|;j~5;%F2q}B5ikjlV0G@*98vbj$irzI6R zXgmv1K=bp&_Ru(2g}9eB%{_5?rYUl-%^VuP6-neA@*^mQ)!z|jSeiq`rV)k48RcxJ6{FqI$E=EA2D_`55XN0iMsuF61`d?2Z*ejlqoKogQ^@P4moe zBWm3cig{9V-%#M{+T^rPJq(L%d&j*pYULGQugOHKs!?TQQH%Lso?lGLVs-eJ-ocIF z!#}1OC7ltc>fNQ?%P4`gpDcB61iW|UU>U)iBkuZ`!>htur^=x*WI;=WF%A-d+QjowxZ=Ey9R0E zkSx{J_rS!zxHiyeybU;W!SyXbwr43xC^=rO{1e=#(Y`daYmDv{HESRJu=Q^+LPgaN zsEsPR4Igc5zTJi_@WTgFJ!>Gf0)i`$J+;ZA^rA`GZ}_q=Z|x0hP6ku<$(*DVx1+nu z3-6i3=Pqf9b4!lqGyRJ#NMxXp2kc3nnFqLOI*q{h`F#f)(Ygt_=FCvPou}-ACnLkw|qD`FUBB55~OAyJa zj#uFCUF*jvdbsH9tbbPxj4Goc{mQxq-KZjEB|L0Ck~*yuu2>@Ps`Dq z?#K6%qXzHg&GkbUu4d;sWKO#h3siKYt4@lM&Sn(!yg9EJlBCe|;nrafgU_!}jQj>GW!@U~(y9Gb zNe3to_^oMa|0WX}>}NJD*p2V-FnI2fW9CHkzS_!pS|7^>v={)Y-DneoqnII+%puwo z-Lajx9G0yH|Jr?jm9CjRH9QTL z&m|rpy{|e;zL7tsE11Y_SKiyZlAM{3M!tn~rNYwWq?L}^7U$VYYEyZH?$;y<)aA+> z)xpL>vrc=!`jk*>p@3& 
zU&vHto1Pmu_Cvg`apYm28TqkqL9`sun^Ut*ukrKJM@=~Ab$W@uBmp2=@lN4!D}-}k zqZI?#zr3QQ&P)RVjER4I`L-X@yMT@&ikzlxqA0A)6eZ_K%lIUt7^_jn&9%Qy_+s4q zr#~Si*B)x1yZXs!L{sed6{QN#vylmmj9y#LiS=IHr%Ns1>P31>C1% z!#QN`r`i^xpe`Gdi$VTIaocM3bc8D5ruG0Ez`~|h>=rs{#FS9XOe>suX{E3g=B*<1 ztwXc0!H;8)!)a9qF|b-TiOIuq*}ftnc%jKWJr328FDv#YPSCWsDdQMq~GyOx`r7Xn7NJs{*E??d~fuuyWFo#Ot#O=Naf+2 zuW!No<1F}d^1b8A%j$9B>;+N3xgc7_me(dqXQFkWpW_?fa>24AqKwnLQi`Jjb9%20 zQE!W1`bH_9ENz+8I&fv6$Wx4`^x(*$KKPa6h*q-0D7dk%ipO>(+QglmYQP@4O7;y*1Rl9-2odd9 zAGpt^Z@y+3W&(vI6iTFovbFYQLg%&JmGe2Ba$LZ$@?;Ncu-1&Gn%I-*S>hAxsIu;w z>!CnP^1gZ1rCpQ`j^=T%>PYFwC_&mxqTleYN+=e9+X?M^lw!i|B|ym5`0>@d)Q8+9 zEl*_XCBBLn3y3Zb-po7R_WePg784AtwTWGsRk(>C_NUR=`7XCL5Z4s2>r;DKQ8Ah| zUI+;W&D)v`4h1JHH@Iw7U79cFyf3`?25-w_x$F|GrMfts^We{(Q-GZxw=U)`Z?CNI zK${&d2%j?F9JN6Y+Afmh*YOl;VkO)VYkAt}>MEP#LS%g}1WMyT)q?hOa%jP9?c#Q)=l!)ftcCZhN*t zfm#FhPLOSQo!;%NeytoWa0>omP0eKIkHxfpW=5PSl+sFyOP|%lzmv1jgS&?}gYzK{ zJa!l%NTzI4^BJEZsyOHV?EoxQns+fuB-wUU^S-j#uOuioVDu|y^}X{(aBev6sMe^C zYdi`Bs@D7bsd+hhDR+sQEb(jp*@b7o0 zHy??3t*Mg(ThcVZlEr$uyN(W3L}5r4@aGa&vakCz2fPEivlK>0I|E9QK)mLK+$k}c z?Ci4b9Pv#}7_iG|DANS-teVGG8pZC-HDgNYQ*%Be8p(Rw4ssr}Z&;yvr<8DmaJ^6T zU87>mgu--*&&JZbGKS};+od5evfjt5zMf*>2 zldsu0dNUnLs4jT)N!WcwqGJ5028O%-dMiKbvwfy<_cH#4;Zl-+}NvP@}$LZ3wjR6(35HFF8k&?XP7y`8d;-&F5+-uw@6LSE8?` zND}x5@#ggz7`2wPZ6hrXn-vcP!(E^#0@{sp4QvU^g@pR+nTTz$>D7-3+I+7ZwjAz5 zmaWH7z_rL1catj71tW%{u^xmHBY2NnECB`w>$WzboW zqi);=%SB!SYqP%cB%JGbfA@0uWV1Jqv>la2Mg8^ot6uvUvGQcCg8!VLrEf*cgkc9B z7vpxiu_NA-Rf1h?G_cXoB$B#^e!Tc%URZ`J@E=>DU7IB8{lt^;tq2gkc=^BTY0=T5 zv7{ybQ{x7Ht-$^th=x}F^7)tlg_l2Ip*{Z(cI%@oYNMD8K ztX9|5#Fzmw13xm&21HM3?5mL_DwgNB_jEu!I?0YukO!Y#gL%_?8wc#_3qn*N#Z1V% zTkJG4U7f{f_|ijy$m(_8@a3}?5WuJGSOM9+fPu@$yUeA*gqB#t@{qeFxWOmq%{_9& zHNkyzFsZ?@_^A&l(?a_p03!aZDQ&>@wk~*U`)(GZXvnTTXw4P@La?cBkx-`a4I$-u zXknQ_?VneDJX!hl%NFF=Dy$kR6Scks-S1z|-8THQUaDE{WcjTA@WX55Y`P0qWnzvI zG2*pfo8rGw9u#C(u>7OrA>(l+M1wy)>%+df`2Db$x?cO&Oh`r34h7K}bU3?7n{>4N zYekgmOf3NI!(6nozn^_2Q5rAn_Tqpic}hx0Q;^_bxOk1nzh^zE5RJx_CXsI4x`G_p 
z>!2a)wqQnJ(cSi-({bLPeRQ;|u8%15KF$QCU_%?hj_N1HKRi~~Em^}eer&XKs+2%~ z|C@+=P_W@v*y+3C z7?IfZPp&ZB^;wID>BbrKbT+%rShQ~Ewcu>**n2c`wla0P&Z~ijD|1=le1Kl}vmQ(z zO%}8TN7b8A(N(Sk8;La@fAF)9u6f(fdf?2;_-IW#?(Q{c>lVG3xaepbuzi6Aw*)lv zjKHkjAjl~`S{yrgZ1s?I)z!|a=m)i^Le$73LOpfKf^_%{RDHMqXp8YV4-DQ|4)x7{{sj3=|y+`fyBax zttZLpnkrKbG|5PBtqk8-E3MBrdn*5HUiV;sM8p3N{z8fQ?BjnRnhQBP+S7vgKc&?I z9}WHg05r7qZ_@v5{9l9pcaos}cVYb38K6BKCH~)V20tFDyEX()j_U=@|Hs)Y$*F&; JkueYb{{T4VczFN- literal 0 HcmV?d00001 diff --git a/packages/sdk/AGENTS.md b/packages/sdk/AGENTS.md index ac15b6e..7bfe91e 100644 --- a/packages/sdk/AGENTS.md +++ b/packages/sdk/AGENTS.md @@ -43,6 +43,7 @@ - Nonce cache must validate the top-level input shape before reading fields so invalid JS callers receive structured `AppError`s instead of runtime `TypeError`s. - Registry config parsing must validate `REGISTRY_SIGNING_KEYS` as JSON before runtime use so keyset endpoints fail fast with `CONFIG_VALIDATION_FAILED` on malformed key documents. - Registry config parsing must validate `PROXY_URL` as an absolute URL so invite onboarding responses can safely publish proxy routing hints. +- Registry config parsing must support strict runtime checks (`requireRuntimeKeys`) so app runtimes can fail fast on missing required env in non-test environments. - Registry keyset validation must reject duplicate `kid` values and malformed `x` key material (non-base64url or non-32-byte Ed25519) so verifier behavior cannot become order-dependent. - Use `RuntimeEnvironment` + `shouldExposeVerboseErrors` from `runtime-environment` for environment-based error-detail behavior; do not duplicate ad-hoc `NODE_ENV`/string checks. - Keep `agent-auth-client` runtime-portable (no Node-only filesystem APIs); delegate persistence/locking to callers. 
diff --git a/packages/sdk/src/config.test.ts b/packages/sdk/src/config.test.ts index dbefe7c..b62bc67 100644 --- a/packages/sdk/src/config.test.ts +++ b/packages/sdk/src/config.test.ts @@ -234,4 +234,53 @@ describe("config helpers", () => { expect((error as AppError).code).toBe("CONFIG_VALIDATION_FAILED"); } }); + + it("fails when requireRuntimeKeys is enabled and required runtime keys are missing", () => { + expect(() => + parseRegistryConfig( + { + ENVIRONMENT: "development", + }, + { requireRuntimeKeys: true }, + ), + ).toThrow(AppError); + }); + + it("passes requireRuntimeKeys validation when all required runtime keys are provided", () => { + const config = parseRegistryConfig( + { + ENVIRONMENT: "development", + PROXY_URL: "https://dev.proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: + "VGVzdFNpZ25pbmdLZXlGb3JEZXZlbG9wbWVudF9PcGVyYXRpb25zMTIz", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, + { requireRuntimeKeys: true }, + ); + + expect(config.ENVIRONMENT).toBe("development"); + expect(config.PROXY_URL).toBe("https://dev.proxy.clawdentity.com"); + }); + + it("skips requireRuntimeKeys validation in test environment", () => { + const config = parseRegistryConfig( + { + ENVIRONMENT: "test", + }, + { requireRuntimeKeys: true }, + ); + + expect(config.ENVIRONMENT).toBe("test"); + }); }); diff --git a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index 4c86c81..fc8caa6 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -100,20 +100,77 @@ export const registryConfigSchema = z.object({ export type RegistryConfig = z.infer; -export function parseRegistryConfig(env: unknown): RegistryConfig { - const parsed = registryConfigSchema.safeParse(env); - if 
(parsed.success) { - return parsed.data; - } +type ParseRegistryConfigOptions = { + requireRuntimeKeys?: boolean; +}; +const REQUIRED_REGISTRY_RUNTIME_KEYS = [ + "PROXY_URL", + "REGISTRY_ISSUER_URL", + "EVENT_BUS_BACKEND", + "BOOTSTRAP_SECRET", + "REGISTRY_SIGNING_KEY", + "REGISTRY_SIGNING_KEYS", +] as const; + +function throwRegistryConfigValidationError(details: { + fieldErrors: Record; + formErrors: string[]; +}): never { throw new AppError({ code: "CONFIG_VALIDATION_FAILED", message: "Registry configuration is invalid", status: 500, expose: true, - details: { - fieldErrors: parsed.error.flatten().fieldErrors, - formErrors: parsed.error.flatten().formErrors, - }, + details, + }); +} + +function assertRequiredRegistryRuntimeKeys(input: RegistryConfig): void { + if (input.ENVIRONMENT === "test") { + return; + } + + const fieldErrors: Record = {}; + for (const key of REQUIRED_REGISTRY_RUNTIME_KEYS) { + const value = input[key]; + if (typeof value === "string" && value.trim().length > 0) { + continue; + } + + if ( + value !== undefined && + value !== null && + !(typeof value === "string" && value.trim().length === 0) + ) { + continue; + } + + fieldErrors[key] = [`${key} is required`]; + } + + if (Object.keys(fieldErrors).length > 0) { + throwRegistryConfigValidationError({ + fieldErrors, + formErrors: [], + }); + } +} + +export function parseRegistryConfig( + env: unknown, + options: ParseRegistryConfigOptions = {}, +): RegistryConfig { + const parsed = registryConfigSchema.safeParse(env); + if (parsed.success) { + if (options.requireRuntimeKeys === true) { + assertRequiredRegistryRuntimeKeys(parsed.data); + } + return parsed.data; + } + + throwRegistryConfigValidationError({ + fieldErrors: parsed.error.flatten().fieldErrors, + formErrors: parsed.error.flatten().formErrors, }); } diff --git a/sparkling-sauteeing-brook.md b/sparkling-sauteeing-brook.md deleted file mode 100644 index 0d16dce..0000000 --- a/sparkling-sauteeing-brook.md +++ /dev/null @@ -1,364 +0,0 @@ 
-# Clawdentity HLD (High-Level Design) - -## Context - -Clawdentity is an identity/revocation layer for AI agents, starting with OpenClaw Gateway integration. It answers: "Who is this agent, who owns it, and is it revoked?" via AIT tokens (JWT/EdDSA), proof-of-possession signing, and a signed CRL. The design must be $0 to start and scale when needed. - ---- - -## 1. Architecture Overview - -``` -Caller Agent (uses SDK) - | - | Authorization: Claw + X-Claw-Proof/Nonce/Timestamp - v -Clawdentity Proxy (sidecar, same host as OpenClaw) - | verifies AIT sig -> checks CRL -> verifies PoP -> nonce check -> allowlist -> rate limit - | - | x-openclaw-token: (internal only) - v -OpenClaw Gateway (/hooks/agent -> 202) - -CLI (clawdentity agent create/revoke/inspect/share) - | - | Bearer - v -Registry API (Cloudflare Workers + D1) - | issues AITs, publishes keys + CRL, agent CRUD -``` - ---- - -## 2. Technology Stack - -| Layer | Choice | Why | -|-------|--------|-----| -| Language | TypeScript (strict) | Monorepo consistency | -| HTTP Framework | **Hono** | Runs on Workers, Node, Bun, Deno - same API everywhere. Portable. 
| -| Registry Runtime | **Cloudflare Workers** | $0 free tier (100K req/day), edge, TLS included | -| Registry DB | **Cloudflare D1** (SQLite) | Co-located with Worker, zero latency, 5M reads/day free | -| ORM | **Drizzle** | Multi-driver (D1/Turso/Postgres), type-safe, no binary | -| Proxy Runtime | **Node.js 20+** or Bun | Sidecar on OpenClaw host | -| CLI Framework | **Commander.js** | Mature, subcommands, lightweight | -| Crypto | **@noble/ed25519** | Pure JS, audited, works in all runtimes including Workers | -| JWT/JWS | **jose** | Battle-tested, full JWS/JWT support, works on Workers/Node/Bun | -| Validation | **Zod** | Hono integration, good TS inference | -| IDs | **ULID** | Time-sortable, collision-resistant, no coordination | -| Build | **tsup** (esbuild) | Fast, CJS+ESM, minimal config | -| Test | **Vitest** | Fast, native TS, workspace-aware | -| Lint/Format | **Biome** | Single tool, fast, zero config | -| Package Manager | **pnpm** workspaces | Disk-efficient, workspace protocol | - ---- - -## 3. Monorepo Structure - -``` -clawdentity/ - pnpm-workspace.yaml - tsconfig.base.json - biome.json - packages/ - protocol/ -- @clawdentity/protocol (shared types, encoders, schemas) [zero runtime deps except ulid, zod] - sdk/ -- @clawdentity/sdk (crypto, jwt, http signing, nonce/CRL cache) [depends on protocol + @noble/ed25519] - apps/ - registry/ -- @clawdentity/registry (Hono on Workers + D1) - proxy/ -- @clawdentity/proxy (Hono on Node/Bun, sidecar) - cli/ -- clawdentity (Commander.js, bin: "clawdentity") -``` - -**Build order:** protocol -> sdk -> (registry | proxy | cli) in parallel - ---- - -## 4. Component Design - -### 4.1 packages/protocol -Pure types + encoders. No runtime-specific code. 
-- `base64url.ts` - encode/decode (TextEncoder, no Buffer) -- `ulid.ts` - thin wrapper -- `did.ts` - `did:claw:human:`, `did:claw:agent:` -- `ait.ts` - AIT claims schema (Zod) + name validation -- `crl.ts` - CRL claims schema (Zod) -- `http-signing.ts` - canonical string: `CLAW-PROOF-V1\n\n\n\n\n` -- `errors.ts` - shared error codes enum - -### 4.2 packages/sdk -Signing, verification, caching. Works in Workers/Node/Bun/Deno. -- `crypto/ed25519.ts` - generateKeypair, sign, verify (wraps @noble/ed25519) -- `jwt/jws.ts` - encodeJWS, decodeJWS (wraps jose for EdDSA JWS compact) -- `jwt/ait-jwt.ts` - signAIT, verifyAIT with kid lookup -- `jwt/crl-jwt.ts` - signCRL, verifyCRL -- Dependencies: `jose` (for JWT/JWS), `@noble/ed25519` (for PoP signing) -- `http/sign.ts` - signRequest(): produces all X-Claw-* headers -- `http/verify.ts` - verifyRequest(): validates headers + proof -- `security/nonce-cache.ts` - in-memory TTL Map keyed by agentDID:nonce -- `crl/cache.ts` - CRL fetch + TTL cache + staleness tracking + isRevoked() -- `keys/registry-keys.ts` - fetch + cache /.well-known/claw-keys.json - -### 4.3 apps/registry (Cloudflare Workers + D1) -Hono app with route groups: -- `GET /health` -- `GET /.well-known/claw-keys.json` (public, cached 1h) -- `POST /v1/agents` (PAT auth) - register agent, return signed AIT -- `GET /v1/agents` (PAT auth) - list own agents -- `DELETE /v1/agents/:id` (PAT auth) - revoke -- `POST /v1/agents/:id/reissue` (PAT auth) - revoke old + issue new -- `PATCH /v1/agents/:id` (PAT auth) - set gateway_hint -- `GET /v1/crl` (public, cached 60s) - signed CRL JWT -- `GET /v1/resolve/:id` (public, rate-limited) - agent profile -- `POST /v1/bootstrap` (one-time, BOOTSTRAP_SECRET) - create first human + PAT - -Registry signing key stored as Worker secret. PATs stored as SHA-256 hashes. 
- -### 4.4 apps/proxy (sidecar) -Hono app on Node/Bun: -- Verification pipeline: AIT sig -> CRL check -> PoP verify -> timestamp skew -> nonce replay -> allowlist -> rate limit -> forward -- Allowlist: JSON file on disk, hot-reloadable (SIGHUP or admin endpoint) -- Rate limit (inbound): in-memory per agent DID (default 60 req/min) -- Rate limit (outbound): per-agent caps (maxPerHour, maxPerDay) to prevent local agent going rogue -- Human approval: per-contact `approvalRequired` flag → queue + notify human → approve/deny -- Identity injection: structured `_clawdentity` field in webhook JSON (not text in message body) -- Forwarding: adds `x-openclaw-token` header, proxies to `127.0.0.1:/hooks/agent` -- Hook token: reads directly from `~/.openclaw/openclaw.json` → `hooks.token` (same machine, zero manual config) -- Pairing: time-limited codes for first-contact approval - -### 4.5 apps/cli -Commander.js binary (`clawdentity`): -- `clawdentity agent create ` - keypair gen + register + save to `~/.clawdentity/agents//` -- `clawdentity agent inspect ` - decode AIT offline -- `clawdentity agent revoke ` - revoke via registry -- `clawdentity verify ` - verify AIT + CRL offline -- `clawdentity share ` - print contact card (DID + verify URL + endpoint). Supports `--json` for machine-readable output. - -Contact card format (JSON): -```json -{ - "version": "1", - "did": "did:claw:agent:01HABC...", - "ownerDid": "did:claw:human:01HXYZ...", - "name": "my-agent", - "verifyUrl": "https://registry.workers.dev/v1/resolve/01HABC", - "endpoint": "https://proxy.example.com/hooks/agent", - "registryUrl": "https://registry.workers.dev" -} -``` -The `endpoint` field serves as both inbound and callback URL — bidirectional by default. - -Local storage: `~/.clawdentity/config.json` (registryUrl, apiKey) + `agents//` (private.key 0600, public.key, ait.jwt, meta.json) - ---- - -## 5. 
Database Design (D1 / SQLite) - -**humans** - id (ULID PK), did (UNIQUE), display_name, created_at, updated_at - -**agents** - id (ULID PK), did (UNIQUE), owner_id (FK humans), name, framework, public_key, current_jti, status ('active'|'revoked'), expires_at, gateway_hint, created_at, updated_at - -**revocations** - id (ULID PK), jti (UNIQUE), agent_id (FK agents), reason, revoked_at - -**api_keys** - id (ULID PK), human_id (FK humans), key_hash (SHA-256), key_prefix (first 8 chars), name, status, created_at, last_used_at - -Indexes: `agents(owner_id, status)`, `api_keys(key_hash)`, `revocations(agent_id)` - ---- - -## 6. Security Architecture - -- **Registry key** (Ed25519): signs AITs + CRLs. Stored as Worker secret. Published via /.well-known/claw-keys.json. -- **Agent key** (Ed25519): private key never leaves agent's machine. Public key in AIT `cnf` claim. -- **PAT format**: `clw_pat_<32 bytes base64url>` - scannable prefix, stored as SHA-256 hash, constant-time comparison. -- **Replay protection**: timestamp skew (300s) + nonce cache (5min TTL) + body hash binding. -- **Revocation**: JTI added to CRL, propagates within CRL cache window (300s). Fail-open/fail-closed configurable. -- **Key rotation**: new kid added to JWKS, old marked retired (still valid for verification). Agent reissue revokes old JTI. - ---- - -## 7. 
Deployment & Cost - -### Phase 1: $0/month (now) -| Component | Where | Free Tier | -|-----------|-------|-----------| -| Registry | Cloudflare Workers | 100K req/day | -| Database | Cloudflare D1 | 5M reads/day, 100K writes/day, 5GB | -| Proxy | Same machine as OpenClaw | N/A | -| CLI/SDK | npm packages | N/A | -| Domain | *.workers.dev | Free | -| TLS | Cloudflare | Automatic | -| CI | GitHub Actions | Free for public repos | - -### Phase 2: ~$6/month (growth) -- Cloudflare Workers paid: $5/month (10M req/month) -- Custom domain: ~$10/year -- D1 included with Workers paid - -### Phase 3: $50-200/month (scale) -- Multi-region D1 replicas (automatic on paid) or Turso ($29/mo) via Drizzle driver swap -- CRL cached at Cloudflare CDN edge (Cache-Control headers, free) -- Multi-proxy: shared nonce cache via Upstash Redis ($0-10/mo) -- **No application code changes needed** - Hono + Drizzle are portable - -### Migration path -``` -Phase 1: Hono + D1 driver -> Cloudflare Workers (free) -Phase 2: Same -> Cloudflare Workers (paid) -Phase 3: Hono + Turso/Pg driver -> Fly.io or Railway (if leaving CF) -``` - ---- - -## 8. Gap Analysis: Agent-to-Agent Communication - -After studying the OpenClaw codebase, here are the critical UX gaps that Clawdentity should address: - -### Gap 1: Communication is ONE-WAY only -**Current:** External agent → Proxy → OpenClaw `/hooks/agent` (inbound only) -**Missing:** OpenClaw agent → Proxy → External agent (outbound) - -The proxy currently only verifies inbound requests. For real agent-to-agent communication, it needs to also **sign outbound requests** — acting as a bidirectional communication gateway, not just an inbound verifier. - -**Fix:** Add outbound relay to proxy. When the local agent wants to send to an external agent, proxy signs the request with the local agent's PoP headers and forwards to the remote endpoint. - -### Gap 2: No Contact Book -**Current:** OpenClaw has no concept of "known external agents." 
You can't add, list, or manage contacts. -**Missing:** `openclawdentity contact add `, `openclawdentity contact list`, contact storage - -Clawdentity's `clawdentity share` produces contact cards, but there's nowhere to import them. The proxy needs a **contacts store** alongside the allowlist — containing DID, name, owner, endpoint URL, trust level. - -**Fix (Phase 2):** Add proxy-owned `contacts.json` (decoupled from OpenClaw config). CLI commands: `clawdentity contact add`, `clawdentity contact list`, `clawdentity contact remove`. - -### Gap 3: No Agent Messages in Inbox -**Current:** When an external agent hits `/hooks/agent`, it processes silently. The human operator never sees the conversation. -**Missing:** Notification flow — agent messages should appear in the operator's preferred channel (WhatsApp, Telegram, etc.) - -**Fix:** Proxy's identity injection (T31) should include routing metadata. OpenClaw's `/hooks/agent` already supports `deliver: true, channel: "last"` which can push responses to the operator's active channel. The proxy should set these fields based on config. - -### Gap 4: No First-Contact Approval UX -**Current:** Clawdentity has pairing codes (T36) and allowlists, but there's no human-friendly approval flow. -**Missing:** "New agent 'weather-bot' (owned by Alice) wants to talk. Approve?" delivered via WhatsApp/Telegram. - -**Fix:** When proxy receives a request from an unknown (verified but not allowlisted) agent, instead of returning 403, optionally queue it and notify the operator via OpenClaw's delivery mechanism. Operator approves via channel reply. - -### Gap 5: No Bidirectional Conversation Threading -**Current:** `/hooks/agent` uses `sessionKey` for multi-turn, but only in one direction. Agent B can't "reply back" to Agent A. -**Missing:** Conversation ID that both sides share, callback URL for responses. - -**Fix:** Add `X-Claw-Conversation-Id` header and `X-Claw-Reply-To` (callback endpoint) to the protocol. 
Both agents use the same conversation ID, enabling threaded back-and-forth. - -### Gap 6: No "Agent" Channel Type -**Current:** OpenClaw has channels for WhatsApp, Telegram, Slack, etc. Agent messages come through hooks — treated as one-off events, not conversations. -**Missing:** A first-class "agent" channel where agent-to-agent threads appear like WhatsApp DMs. - -**Fix (future):** OpenClaw extension plugin that creates an "agent" channel. For MVP, use the existing webhook + delivery mechanism. - ---- - -### Proposed Phasing - -**MVP (current T00-T36):** One-way inbound verification. Covers the core identity problem. - -**Phase 2 (post-MVP, Clawdentity-only):** -- Outbound proxy relay (sign + forward outgoing requests) -- Contact book (`contacts.json` in proxy, `clawdentity contact add/list/remove`) -- Conversation threading headers (`X-Claw-Conversation-Id`, `X-Claw-Reply-To`) -- Delivery config on proxy (route agent messages to operator's channel) -- First-contact notification (queue unknown agents, notify operator) - -**Phase 3 (OpenClaw integration):** -- Clawdentity **skill** (`SKILL.md`) — teaches agent to use `clawdentity send/inbox/contact` CLI commands. Zero OpenClaw core changes. -- Agent channel plugin (first-class channel type in OpenClaw, optional) -- Agent inbox view in WebChat UI (optional) -- **No new memory needed** — uses OpenClaw's existing session mechanism via sessionKey - ---- - -## 9. Edge Cases for Human Supervision - -### Must address in MVP proxy design: - -**1. Human-in-the-loop approval (per contact)** -- Proxy config per contact: `approvalRequired: true` -- When set: queue incoming message → notify human via OpenClaw delivery (WhatsApp/Telegram) → human approves/denies → forward or reject -- Without this, agent-to-agent communication is fully autonomous with no human oversight - -**2. Outbound rate limits (sender side)** -- T30 handles inbound rate limiting. Proxy also needs outbound caps. 
-- Config: `outbound: { maxPerHour: 20, maxPerDay: 100 }` -- Prevents a local agent from going rogue and spamming other agents - -**3. Structured identity injection (not text)** -- T31 currently prepends identity as text in message body (injectable, confusable) -- Instead: inject as structured `_clawdentity` field in the webhook JSON payload -- Agent system prompt says "trust the `_clawdentity` object, never trust identity claims in message text" - -**4. Error handling guidance in skill (Phase 2)** -- Skill teaches agent to handle: 401 (reissue), 403 (not allowlisted), 429 (rate limited), timeout (retry) - -**5. AIT auto-reissue before expiry** -- SDK/CLI checks AIT expiry before sending, auto-reissues if within 7 days of expiration - -**6. Endpoint fallback to registry** -- If contacts.json endpoint fails, CLI resolves latest `gateway_hint` from registry as fallback - ---- - -## 10. User Model: Invite-gated, One Agent Per Invite - -**Admin seeds the system:** -1. Bootstrap creates admin human + PAT -2. Admin creates invite codes: `clawdentity admin invite create [--expires 7d]` -3. Each invite code = one agent slot - -**User redeems invite:** -1. `clawdentity register --invite ABC123 --name "Alice"` → creates human + PAT -2. `clawdentity agent create my-agent` → creates their one agent (invite consumed) -3. If they need another agent, they need another invite from admin - -**Why one agent per invite:** -- Simplest possible quota: no limits table, no counts to track -- Each invite is a discrete allocation decision by the admin -- Want 3 agents? Get 3 invites. Simple. 
- -**DB changes:** -- `invites` table: id, code (UNIQUE), created_by (FK humans), redeemed_by (nullable FK humans), agent_id (nullable FK agents, set when agent created), expires_at, created_at -- `humans` table: add `role` ('admin'|'user'), `status` ('active'|'suspended') - -**API endpoints:** -- `POST /v1/register` (invite code → human + PAT, no auth required) -- `POST /v1/admin/invites` (admin PAT → create invite code) -- `GET /v1/admin/invites` (admin → list invites + redemption status) - -**CLI commands:** -- `clawdentity admin invite create [--expires Nd]` → prints invite code -- `clawdentity admin invite list` → shows all invites + who redeemed + which agent -- `clawdentity register --invite --name ` → onboarding - -**Abuse prevention:** -- Can't register without invite (admin controls supply) -- One agent per invite (can't mass-create agents) -- Admin can suspend human → agent auto-revoked -- IP rate limit on public endpoints (CRL, resolve): 60 req/min -- Per-PAT rate limit on registry API: 100 req/day - ---- - -## 11. Deferred Items -- **T32 (Web UI for revocation)**: Deferred. CLI-only for MVP. Add web UI when non-technical operators need it. -- **T36 (Pairing code flow)**: Optional for MVP. Implement after core flow works. -- **Outbound relay**: Phase 2. Proxy signs outbound requests for local agent → remote agent. -- **Contact book**: Phase 2. `clawdentity contact add/list/remove` + `contacts.json` storage. -- **Conversation threading**: Phase 2. `X-Claw-Conversation-Id` + `X-Claw-Reply-To` headers. -- **Agent channel plugin**: Phase 3. First-class OpenClaw channel for agent-to-agent conversations. -- **Clawdentity skill for OpenClaw**: Phase 2. SKILL.md that teaches agent to use `clawdentity send/inbox/contact` CLI commands for agent-to-agent communication. No OpenClaw core changes needed. -- **Bidirectional memory**: No new storage. Uses OpenClaw's existing session mechanism via `sessionKey: "agent::"`. - ---- - -## 9. Verification Plan -1. 
**Unit tests**: Vitest for protocol encoders, SDK crypto, JWT sign/verify, nonce cache, CRL cache -2. **Integration tests**: Miniflare (local Workers emulator) for registry API, Hono test client for proxy -3. **E2E test**: CLI creates agent -> SDK signs request -> Proxy verifies + forwards -> OpenClaw returns 202 -4. **Revocation test**: Revoke agent -> CRL refresh -> Proxy rejects within 300s -5. **Replay test**: Replay captured request -> Proxy rejects (nonce) -6. **CI**: GitHub Actions: lint (Biome) -> typecheck (tsc) -> test (Vitest) -> build (tsup) From 8256b3ee48755c51be2d4c1b888a9f538524a450 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 21:19:46 +0530 Subject: [PATCH 095/190] test(cli): reset env across pair command output specs --- apps/cli/src/commands/pair.test.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts index 36145ba..042bb37 100644 --- a/apps/cli/src/commands/pair.test.ts +++ b/apps/cli/src/commands/pair.test.ts @@ -782,6 +782,15 @@ const runPairCommand = async ( }; describe("pair command output", () => { + beforeEach(() => { + vi.clearAllMocks(); + process.env = resetClawdentityEnv(previousEnv); + }); + + afterEach(() => { + process.env = previousEnv; + }); + it("prints pairing ticket from pair start", async () => { const fixture = await createPairFixture(); const command = createPairCommand({ From 987e37d03009f3653f8c6f1d05b522114ab61ade Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 21:46:38 +0530 Subject: [PATCH 096/190] fix(cli): make postinstall no-op when dist bundle is missing --- apps/cli/AGENTS.md | 1 + apps/cli/postinstall.mjs | 23 ++++++++++++++++------- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index bb5af3b..f5a8c44 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -8,6 +8,7 @@ - Keep `src/index.ts` as a pure program builder (`createProgram()`); no 
side effects on import. - Keep `src/bin.ts` as a thin runtime entry only (`parseAsync` + top-level error handling). - Keep `src/postinstall.ts` as a no-op compatibility shim; skill installation is command-driven via `clawdentity skill install`. +- Keep `postinstall.mjs` fail-safe for source checkouts and CI: if `dist/postinstall.js` is absent, it must no-op and never fail `pnpm install`. - Keep package identity clear: workspace package name is `clawdentity`. - Keep runtime version parity: source `CLI_VERSION` from the package metadata (`package.json`) at runtime, never from a hardcoded literal in `src/index.ts`. - Implement command groups under `src/commands/*` and register them from `createProgram()`. diff --git a/apps/cli/postinstall.mjs b/apps/cli/postinstall.mjs index 7ca79f1..31034bb 100644 --- a/apps/cli/postinstall.mjs +++ b/apps/cli/postinstall.mjs @@ -1,15 +1,24 @@ +import { existsSync } from "node:fs"; import { dirname, join } from "node:path"; import { fileURLToPath, pathToFileURL } from "node:url"; const packageRoot = dirname(fileURLToPath(import.meta.url)); const bundledPostinstallPath = join(packageRoot, "dist", "postinstall.js"); -try { - await import(pathToFileURL(bundledPostinstallPath).href); -} catch (error) { - if (!(error && typeof error === "object" && error.code === "ENOENT")) { - const message = error instanceof Error ? error.message : String(error); - process.stderr.write(`[clawdentity] postinstall failed: ${message}\n`); - process.exitCode = 1; +if (existsSync(bundledPostinstallPath)) { + try { + await import(pathToFileURL(bundledPostinstallPath).href); + } catch (error) { + if ( + !( + error && + typeof error === "object" && + (error.code === "ENOENT" || error.code === "ERR_MODULE_NOT_FOUND") + ) + ) { + const message = error instanceof Error ? 
error.message : String(error); + process.stderr.write(`[clawdentity] postinstall failed: ${message}\n`); + process.exitCode = 1; + } } } From 3016a71361f96fe7ba2ee7893ff5bb0355ed778d Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 21:48:55 +0530 Subject: [PATCH 097/190] ci(deploy): use proxy internal service id/secret in dev deploy --- .github/AGENTS.md | 5 ++++- .github/workflows/deploy-develop.yml | 12 +++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index b40e5fb..c0145c7 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -21,6 +21,9 @@ - Deploy both workers in the same workflow: - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy - proxy (`apps/proxy`, env `dev`) after registry health passes +- Sync proxy internal-service credentials from GitHub secrets before proxy deploy: + - `REGISTRY_INTERNAL_SERVICE_ID` + - `REGISTRY_INTERNAL_SERVICE_SECRET` - Verify registry health at `https://dev.registry.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. - Health verification should use bounded retries (for example 3 minutes with 10-second polling) and `Cache-Control: no-cache` requests to tolerate short edge propagation delays after deploy. - When using Python `urllib` for health checks, always set explicit request headers (`Accept: application/json` and a custom `User-Agent` such as `Clawdentity-CI/1.0`) because Cloudflare may return `403`/`1010` for the default `Python-urllib/*` user agent. @@ -35,7 +38,7 @@ - Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. ## Secrets and Permissions -- Required deploy secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`. +- Required deploy secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`, `REGISTRY_INTERNAL_SERVICE_ID`, `REGISTRY_INTERNAL_SERVICE_SECRET`. 
- Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. - Optional deploy secret: `PROXY_HEALTH_URL` (only needed if proxy workers.dev URL cannot be resolved in CI output). - Required publish secret: `NPM_TOKEN`. diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 0bc545d..70ca6c6 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -23,7 +23,8 @@ jobs: CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} APP_VERSION: ${{ github.sha }} PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} - REGISTRY_INTERNAL_SERVICE_TOKEN: ${{ secrets.REGISTRY_INTERNAL_SERVICE_TOKEN }} + REGISTRY_INTERNAL_SERVICE_ID: ${{ secrets.REGISTRY_INTERNAL_SERVICE_ID }} + REGISTRY_INTERNAL_SERVICE_SECRET: ${{ secrets.REGISTRY_INTERNAL_SERVICE_SECRET }} steps: - name: Checkout uses: actions/checkout@v4 @@ -44,12 +45,13 @@ jobs: run: | test -n "${CLOUDFLARE_API_TOKEN}" test -n "${CLOUDFLARE_ACCOUNT_ID}" - test -n "${REGISTRY_INTERNAL_SERVICE_TOKEN}" + test -n "${REGISTRY_INTERNAL_SERVICE_ID}" + test -n "${REGISTRY_INTERNAL_SERVICE_SECRET}" - - name: Sync internal service auth secret (registry + proxy) + - name: Sync proxy internal service credentials run: | - printf "%s" "${REGISTRY_INTERNAL_SERVICE_TOKEN}" | pnpm exec wrangler --cwd apps/registry secret put REGISTRY_INTERNAL_SERVICE_TOKEN --env dev - printf "%s" "${REGISTRY_INTERNAL_SERVICE_TOKEN}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_TOKEN --env dev + printf "%s" "${REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev + printf "%s" "${REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev - name: Install dependencies run: pnpm install --frozen-lockfile From 8887be250b77a69884852f922d55ed3c694584e3 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 
22:42:47 +0530 Subject: [PATCH 098/190] chore: harden deploy workflow and worker runtime config --- .github/AGENTS.md | 7 +- .github/workflows/deploy-develop.yml | 53 +- AGENTS.md | 2 +- apps/cli/AGENTS.md | 19 +- apps/cli/README.md | 97 + apps/cli/src/commands/config.ts | 29 +- apps/cli/src/commands/openclaw.test.ts | 29 +- apps/cli/src/commands/openclaw.ts | 22 +- apps/cli/src/config/AGENTS.md | 6 + apps/cli/src/config/manager.test.ts | 154 +- apps/cli/src/config/manager.ts | 304 +- apps/cli/src/test-env.ts | 1 + apps/proxy/AGENTS.md | 2 + apps/proxy/tsconfig.json | 3 +- apps/proxy/worker-configuration.d.ts | 10939 ++++++++++++++++++++++ apps/proxy/wrangler.jsonc | 8 + apps/registry/AGENTS.md | 7 +- apps/registry/tsconfig.json | 3 +- apps/registry/worker-configuration.d.ts | 10932 +++++++++++++++++++++ apps/registry/wrangler.jsonc | 18 +- biome.json | 3 +- package.json | 2 +- pnpm-lock.yaml | 83 +- tsconfig.base.json | 2 +- 24 files changed, 22579 insertions(+), 146 deletions(-) create mode 100644 apps/cli/README.md create mode 100644 apps/proxy/worker-configuration.d.ts create mode 100644 apps/registry/worker-configuration.d.ts diff --git a/.github/AGENTS.md b/.github/AGENTS.md index c0145c7..a1aca7e 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -21,10 +21,15 @@ - Deploy both workers in the same workflow: - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy - proxy (`apps/proxy`, env `dev`) after registry health passes -- Sync proxy internal-service credentials from GitHub secrets before proxy deploy: +- Install dependencies before any `pnpm exec wrangler ...` command so Wrangler is available on clean runners. +- Regenerate Worker type bindings in CI (`wrangler types --env dev`) and fail on git diff drift for `worker-configuration.d.ts` to prevent stale runtime binding types from shipping. 
+- Sync proxy internal-service credentials from GitHub secrets after dependency install and before proxy deploy: - `REGISTRY_INTERNAL_SERVICE_ID` - `REGISTRY_INTERNAL_SERVICE_SECRET` +- Add a Wrangler preflight dry-run for both workers before mutating remote state (migrations/deploy): + - `wrangler deploy --env dev --dry-run --var APP_VERSION:` - Verify registry health at `https://dev.registry.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. +- Add Wrangler deployment existence checks for both services after each deploy (`wrangler deployments list --env dev --json`) before endpoint health probes. - Health verification should use bounded retries (for example 3 minutes with 10-second polling) and `Cache-Control: no-cache` requests to tolerate short edge propagation delays after deploy. - When using Python `urllib` for health checks, always set explicit request headers (`Accept: application/json` and a custom `User-Agent` such as `Clawdentity-CI/1.0`) because Cloudflare may return `403`/`1010` for the default `Python-urllib/*` user agent. - Use workflow concurrency groups to prevent overlapping deploys for the same environment. 
diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 70ca6c6..99e92a8 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -48,14 +48,20 @@ jobs: test -n "${REGISTRY_INTERNAL_SERVICE_ID}" test -n "${REGISTRY_INTERNAL_SERVICE_SECRET}" + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Verify Worker type bindings are up to date + run: | + pnpm exec wrangler --cwd apps/registry types --env dev + pnpm exec wrangler --cwd apps/proxy types --env dev + git diff --exit-code -- apps/registry/worker-configuration.d.ts apps/proxy/worker-configuration.d.ts + - name: Sync proxy internal service credentials run: | printf "%s" "${REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev printf "%s" "${REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev - - name: Install dependencies - run: pnpm install --frozen-lockfile - - name: Lint run: pnpm lint @@ -68,6 +74,11 @@ jobs: - name: Run tests run: pnpm -r test + - name: Wrangler deploy preflight (dry-run) + run: | + pnpm exec wrangler --cwd apps/registry deploy --env dev --dry-run --var APP_VERSION:${APP_VERSION} + pnpm exec wrangler --cwd apps/proxy deploy --env dev --dry-run --var APP_VERSION:${APP_VERSION} + - name: Capture pre-deploy rollback artifacts run: | mkdir -p artifacts @@ -85,6 +96,24 @@ jobs: pnpm exec wrangler --cwd apps/registry d1 migrations apply clawdentity-db-dev --remote --env dev pnpm exec wrangler --cwd apps/registry deploy --env dev --var APP_VERSION:${APP_VERSION} + - name: Verify registry deployment exists in Wrangler + run: | + mkdir -p artifacts + pnpm exec wrangler --cwd apps/registry deployments list --env dev --json > artifacts/registry-deployments-current.json + python3 - <<'PY' + import json + + path = "artifacts/registry-deployments-current.json" + 
with open(path, "r", encoding="utf-8") as f: + payload = json.load(f) + + if not isinstance(payload, list) or len(payload) == 0: + raise SystemExit(f"wrangler returned no registry deployments in {path}") + + print("registry wrangler deployment check passed") + print(payload[0]) + PY + - name: Verify registry health endpoint run: | python3 - <<'PY' @@ -155,6 +184,24 @@ jobs: echo "PROXY_HEALTH_URL=${PROXY_HEALTH_URL}" >> "${GITHUB_ENV}" echo "Resolved proxy health URL: ${PROXY_HEALTH_URL}" + - name: Verify proxy deployment exists in Wrangler + run: | + mkdir -p artifacts + pnpm exec wrangler --cwd apps/proxy deployments list --env dev --json > artifacts/proxy-deployments-current.json + python3 - <<'PY' + import json + + path = "artifacts/proxy-deployments-current.json" + with open(path, "r", encoding="utf-8") as f: + payload = json.load(f) + + if not isinstance(payload, list) or len(payload) == 0: + raise SystemExit(f"wrangler returned no proxy deployments in {path}") + + print("proxy wrangler deployment check passed") + print(payload[0]) + PY + - name: Verify proxy health endpoint run: | python3 - <<'PY' diff --git a/AGENTS.md b/AGENTS.md index 89d2de5..34425a9 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -69,7 +69,7 @@ ## Biome Configuration - `biome.json` at repo root covers all `packages/**` and `apps/**`. - Excluded from Biome: `**/dist`, `**/drizzle/meta`, `**/.wrangler`. -- Generated files from tools (drizzle-kit, wrangler) should be excluded rather than reformatted. +- Generated files from tools (drizzle-kit, wrangler) should be excluded rather than reformatted, including Worker runtime type outputs (`**/worker-configuration.d.ts`). ## CI Pipeline - `.github/workflows/ci.yml` runs on push and pull_request. 
diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md index f5a8c44..6c248ca 100644 --- a/apps/cli/AGENTS.md +++ b/apps/cli/AGENTS.md @@ -31,12 +31,17 @@ - Keep release automation in `.github/workflows/publish-cli.yml` manual-only with explicit semver input and npm provenance. ## Config and Secrets -- Local CLI config lives at `~/.clawdentity/config.json`. -- CLI verification caches live under `~/.clawdentity/cache/` and must never include private keys or PATs. -- Agent identities live at `~/.clawdentity/agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. -- OpenClaw setup runtime hint lives at `~/.clawdentity/openclaw-relay.json` and stores `openclawBaseUrl` for proxy fallback. +- Local CLI state is registry-scoped and lives under: + - `~/.clawdentity/states/prod/` + - `~/.clawdentity/states/dev/` + - `~/.clawdentity/states/local/` +- Active state routing hint is stored at `~/.clawdentity/router.json`; keep it machine-local and non-sensitive. +- Local CLI config lives at `~/.clawdentity/states//config.json`. +- CLI verification caches live under `~/.clawdentity/states//cache/` and must never include private keys or PATs. +- Agent identities live at `~/.clawdentity/states//agents//` and must include `secret.key`, `public.key`, `identity.json`, and `ait.jwt`. +- OpenClaw setup runtime hint lives at `~/.clawdentity/states//openclaw-relay.json` and stores `openclawBaseUrl` for proxy fallback. - Connector runtime defaults to local outbound handoff endpoint `http://127.0.0.1:19400/v1/outbound`; keep transform and CLI defaults aligned. -- Reject `.` and `..` as agent names before any filesystem operation to prevent directory traversal outside `~/.clawdentity/agents/`. +- Reject `.` and `..` as agent names before any filesystem operation to prevent directory traversal outside `~/.clawdentity/states//agents/`. - Resolve values with explicit precedence: environment variables > config file > built-in defaults. 
- Keep API tokens masked in human-facing output (`show`, success logs, debug prints). - Write config and identity artifacts with restrictive permissions (`0600`) and never commit secrets or generated local config. @@ -49,12 +54,12 @@ - Cover invalid input and failure paths, not only happy paths. ## Agent Inspection -- `agent inspect ` reads `~/.clawdentity/agents//ait.jwt`, decodes it with `decodeAIT`, and prints DID, Owner, Expires, Key ID, Public Key, and Framework so operators can audit metadata offline. +- `agent inspect ` reads `~/.clawdentity/states//agents//ait.jwt`, decodes it with `decodeAIT`, and prints DID, Owner, Expires, Key ID, Public Key, and Framework so operators can audit metadata offline. - Surface user-friendly errors when the JWT is missing or cannot be decoded, mentioning `ait.jwt` explicitly and defaulting to the normalized agent name when validating input. - Tests for new inspection behavior must mock `node:fs/promises.readFile` and `@clawdentity/sdk.decodeAIT`, assert the visible output, and confirm missing-file handling covers `ENOENT`. ## Agent Revocation -- `agent revoke ` accepts local agent name only, then resolves `~/.clawdentity/agents//identity.json` to load the DID and derive the registry ULID path parameter. +- `agent revoke ` accepts local agent name only, then resolves `~/.clawdentity/states//agents//identity.json` to load the DID and derive the registry ULID path parameter. - Keep revoke flow name-first and filesystem-backed; do not require operators to pass raw ULIDs for locally managed identities. - Use registry `DELETE /v1/agents/:id` with PAT auth, and print human-readable confirmation that includes agent name + DID. - Keep error messaging explicit for missing/malformed `identity.json`, invalid DID data, missing API key, and registry/network failures. 
diff --git a/apps/cli/README.md b/apps/cli/README.md new file mode 100644 index 0000000..a6dd814 --- /dev/null +++ b/apps/cli/README.md @@ -0,0 +1,97 @@ +# clawdentity + +CLI for Clawdentity — verified identity for AI agents. + +[![npm version](https://img.shields.io/npm/v/clawdentity.svg)](https://www.npmjs.com/package/clawdentity) +[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/vrknetha/clawdentity/blob/main/LICENSE) +![Node 22+](https://img.shields.io/badge/node-%3E%3D22-brightgreen.svg) + +## Install + +```bash +npm install -g clawdentity +``` + +## Quick Start + +Have an invite code (`clw_inv_...`) ready, then prompt your OpenClaw agent: + +> Set up Clawdentity relay + +The agent runs the full onboarding sequence — install, identity creation, relay configuration, and readiness checks. + +

+Manual CLI setup + +```bash +# Initialize config +clawdentity config init + +# Redeem an invite (sets API key) +clawdentity invite redeem --display-name "Your Name" + +# Create an agent identity +clawdentity agent create --framework openclaw + +# Configure the relay +clawdentity openclaw setup + +# Install the skill artifact +clawdentity skill install + +# Verify everything works +clawdentity openclaw doctor +``` + +
+ +## Commands + +| Command | Description | +|---------|-------------| +| `config init` | Initialize local config | +| `config set ` | Set a config value | +| `config get ` | Get a config value | +| `config show` | Show all resolved config | +| `invite redeem ` | Redeem invite, store API key | +| `invite create` | Create invite (admin) | +| `agent create ` | Generate + register agent identity | +| `agent inspect ` | Show agent AIT metadata | +| `agent auth refresh ` | Refresh registry auth credentials | +| `agent revoke ` | Revoke agent identity | +| `api-key create` | Create personal API key | +| `api-key list` | List personal API keys | +| `api-key revoke ` | Revoke API key | +| `openclaw setup ` | Configure OpenClaw relay | +| `openclaw doctor` | Validate relay health | +| `openclaw relay test` | Test peer relay delivery | +| `pair start ` | Initiate QR pairing | +| `pair confirm ` | Confirm peer pairing | +| `pair status ` | Poll pairing status | +| `skill install` | Install skill artifacts | +| `connector start ` | Start connector runtime | +| `connector service install ` | Auto-start service at login | +| `connector service uninstall ` | Remove auto-start service | +| `verify ` | Verify AIT against registry | +| `admin bootstrap` | Bootstrap first admin | + +## Configuration + +Config files are stored in `~/.clawdentity/`. + +| Key | Environment Variable | Description | +|-----|---------------------|-------------| +| `registryUrl` | `CLAWDENTITY_REGISTRY_URL` | Identity registry URL | +| `proxyUrl` | `CLAWDENTITY_PROXY_URL` | Verification proxy URL | +| `apiKey` | `CLAWDENTITY_API_KEY` | API key (set by `invite redeem`) | +| `humanName` | `CLAWDENTITY_HUMAN_NAME` | Display name for invites | + +Environment variables override values in the config file. 
+ +## Requirements + +- Node >= 22 + +## License + +[MIT](https://github.com/vrknetha/clawdentity/blob/main/LICENSE) diff --git a/apps/cli/src/commands/config.ts b/apps/cli/src/commands/config.ts index 05707a8..fe990e6 100644 --- a/apps/cli/src/commands/config.ts +++ b/apps/cli/src/commands/config.ts @@ -93,11 +93,22 @@ export const createConfigCommand = ( .option("--registry-url ", "Initialize config with registry URL") .action( withErrorHandling("config init", async (options: ConfigInitOptions) => { - const configFilePath = getConfigFilePath(); + const config = await readConfig(); + const requestedRegistryUrl = + options.registryUrl ?? + getEnvRegistryUrlOverride() ?? + config.registryUrl; + const normalizedRegistryUrl = + normalizeRegistryUrl(requestedRegistryUrl); + const requestedConfigFilePath = getConfigFilePath({ + registryUrlHint: normalizedRegistryUrl, + }); try { - await access(configFilePath); - writeStdoutLine(`Config already exists at ${configFilePath}`); + await access(requestedConfigFilePath); + writeStdoutLine( + `Config already exists at ${requestedConfigFilePath}`, + ); return; } catch (error) { if (!isNotFoundError(error)) { @@ -105,16 +116,12 @@ export const createConfigCommand = ( } } - const config = await readConfig(); - const requestedRegistryUrl = - options.registryUrl ?? - getEnvRegistryUrlOverride() ?? 
- config.registryUrl; - const normalizedRegistryUrl = - normalizeRegistryUrl(requestedRegistryUrl); const metadata = await fetchRegistryMetadata(normalizedRegistryUrl, { fetchImpl: dependencies.fetchImpl, }); + const targetConfigFilePath = getConfigFilePath({ + registryUrlHint: metadata.registryUrl, + }); await writeConfig({ ...config, @@ -122,7 +129,7 @@ export const createConfigCommand = ( proxyUrl: metadata.proxyUrl, }); - writeStdoutLine(`Initialized config at ${configFilePath}`); + writeStdoutLine(`Initialized config at ${targetConfigFilePath}`); writeStdoutLine( JSON.stringify( maskApiKey({ diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 7eebab9..3450407 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -66,8 +66,12 @@ function createSandbox(): OpenclawSandbox { }; } +function resolveCliStateDir(homeDir: string): string { + return join(homeDir, ".clawdentity", "states", "prod"); +} + function seedLocalAgentCredentials(homeDir: string, agentName: string): void { - const agentDir = join(homeDir, ".clawdentity", "agents", agentName); + const agentDir = join(resolveCliStateDir(homeDir), "agents", agentName); mkdirSync(agentDir, { recursive: true }); writeFileSync(join(agentDir, "secret.key"), "secret-key-value", "utf8"); writeFileSync(join(agentDir, "ait.jwt"), "mock.ait.jwt", "utf8"); @@ -80,7 +84,7 @@ function seedPeersConfig( { did: string; proxyUrl: string; agentName?: string; humanName?: string } >, ): void { - const peersPath = join(homeDir, ".clawdentity", "peers.json"); + const peersPath = join(resolveCliStateDir(homeDir), "peers.json"); mkdirSync(dirname(peersPath), { recursive: true }); writeFileSync(peersPath, `${JSON.stringify({ peers }, null, 2)}\n`, "utf8"); } @@ -192,7 +196,7 @@ describe("openclaw command helpers", () => { const peers = JSON.parse( readFileSync( - join(sandbox.homeDir, ".clawdentity", "peers.json"), + 
join(resolveCliStateDir(sandbox.homeDir), "peers.json"), "utf8", ), ) as { @@ -209,7 +213,7 @@ describe("openclaw command helpers", () => { expect(peers.peers).toEqual({}); const selectedAgent = readFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-agent-name"), + join(resolveCliStateDir(sandbox.homeDir), "openclaw-agent-name"), "utf8", ).trim(); expect(selectedAgent).toBe("alpha"); @@ -224,7 +228,7 @@ describe("openclaw command helpers", () => { ); const relayRuntimeConfig = JSON.parse( readFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), "utf8", ), ) as { @@ -244,7 +248,7 @@ describe("openclaw command helpers", () => { const connectorAssignments = JSON.parse( readFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-connectors.json"), + join(resolveCliStateDir(sandbox.homeDir), "openclaw-connectors.json"), "utf8", ), ) as { @@ -642,7 +646,7 @@ describe("openclaw command helpers", () => { expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19001"); const relayRuntimeConfig = JSON.parse( readFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), "utf8", ), ) as { @@ -677,7 +681,7 @@ describe("openclaw command helpers", () => { expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19555"); const relayRuntimeConfig = JSON.parse( readFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), "utf8", ), ) as { @@ -1011,7 +1015,7 @@ describe("openclaw command helpers", () => { const relayRuntimeConfig = JSON.parse( readFileSync( - join(sandbox.homeDir, ".clawdentity", "openclaw-relay.json"), + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), "utf8", ), ) as { @@ -1036,7 +1040,7 @@ describe("openclaw command helpers", () => { const peers = JSON.parse( readFileSync( - 
join(sandbox.homeDir, ".clawdentity", "peers.json"), + join(resolveCliStateDir(sandbox.homeDir), "peers.json"), "utf8", ), ) as { @@ -1567,7 +1571,10 @@ describe("openclaw command helpers", () => { transformSource: sandbox.transformSourcePath, }); - const configPath = join(sandbox.homeDir, ".clawdentity", "config.json"); + const configPath = join( + resolveCliStateDir(sandbox.homeDir), + "config.json", + ); mkdirSync(dirname(configPath), { recursive: true }); writeFileSync( configPath, diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index f32fcbf..814bf17 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -12,7 +12,7 @@ import { } from "@clawdentity/protocol"; import { AppError, createLogger, nowIso } from "@clawdentity/sdk"; import { Command } from "commander"; -import { resolveConfig } from "../config/manager.js"; +import { getConfigDir, resolveConfig } from "../config/manager.js"; import { writeStdoutLine } from "../io.js"; import { assertValidAgentName } from "./agent-name.js"; import { installConnectorServiceForAgent } from "./connector.js"; @@ -20,7 +20,6 @@ import { withErrorHandling } from "./helpers.js"; const logger = createLogger({ service: "cli", module: "openclaw" }); -const CLAWDENTITY_DIR_NAME = ".clawdentity"; const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; const SECRET_KEY_FILE_NAME = "secret.key"; @@ -565,11 +564,11 @@ function resolveOpenclawDir(openclawDir: string | undefined, homeDir: string) { } function resolveAgentDirectory(homeDir: string, agentName: string): string { - return join(homeDir, CLAWDENTITY_DIR_NAME, AGENTS_DIR_NAME, agentName); + return join(getConfigDir({ homeDir }), AGENTS_DIR_NAME, agentName); } function resolvePeersPath(homeDir: string): string { - return join(homeDir, CLAWDENTITY_DIR_NAME, PEERS_FILE_NAME); + return join(getConfigDir({ homeDir }), PEERS_FILE_NAME); } function resolveOpenclawConfigPath( @@ -609,15 +608,15 @@ 
function resolveTransformTargetPath(openclawDir: string): string { } function resolveOpenclawAgentNamePath(homeDir: string): string { - return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_AGENT_FILE_NAME); + return join(getConfigDir({ homeDir }), OPENCLAW_AGENT_FILE_NAME); } function resolveRelayRuntimeConfigPath(homeDir: string): string { - return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_RELAY_RUNTIME_FILE_NAME); + return join(getConfigDir({ homeDir }), OPENCLAW_RELAY_RUNTIME_FILE_NAME); } function resolveConnectorAssignmentsPath(homeDir: string): string { - return join(homeDir, CLAWDENTITY_DIR_NAME, OPENCLAW_CONNECTORS_FILE_NAME); + return join(getConfigDir({ homeDir }), OPENCLAW_CONNECTORS_FILE_NAME); } function resolveTransformRuntimePath(openclawDir: string): string { @@ -1399,7 +1398,7 @@ async function monitorConnectorStabilityWindow(input: { } function resolveConnectorRunDir(homeDir: string): string { - return join(homeDir, CLAWDENTITY_DIR_NAME, CONNECTOR_RUN_DIR_NAME); + return join(getConfigDir({ homeDir }), CONNECTOR_RUN_DIR_NAME); } function resolveConnectorPidPath(homeDir: string, agentName: string): string { @@ -2227,7 +2226,7 @@ export async function runOpenclawDoctor( status: "fail", message: "unable to resolve CLI config", remediationHint: - "Fix ~/.clawdentity/config.json or rerun: clawdentity config init", + "Run: clawdentity config init (or fix your CLI state config file)", }), ); } @@ -2364,8 +2363,7 @@ export async function runOpenclawDoctor( label: "Peers map", status: "fail", message: `invalid peers config at ${peersPath}`, - remediationHint: - "Fix JSON in ~/.clawdentity/peers.json or rerun openclaw setup", + remediationHint: `Fix JSON in ${peersPath} or rerun openclaw setup`, details: { peersPath }, }), ); @@ -3734,7 +3732,7 @@ export const createOpenclawCommand = (): Command => { .description( "Send a relay probe to a configured peer (auto-selects when one peer exists)", ) - .option("--peer ", "Peer alias in 
~/.clawdentity/peers.json") + .option("--peer ", "Peer alias in local peers map") .option( "--openclaw-base-url ", "Base URL for local OpenClaw hook API (default OPENCLAW_BASE_URL or relay runtime config)", diff --git a/apps/cli/src/config/AGENTS.md b/apps/cli/src/config/AGENTS.md index 8f504be..071c8ed 100644 --- a/apps/cli/src/config/AGENTS.md +++ b/apps/cli/src/config/AGENTS.md @@ -5,6 +5,12 @@ ## Config Rules - `manager.ts` must keep precedence stable: file config defaults first, then explicit env overrides. +- State directory selection must be derived from effective registry URL and route to: + - `~/.clawdentity/states/prod` + - `~/.clawdentity/states/dev` + - `~/.clawdentity/states/local` +- Keep routing metadata in `~/.clawdentity/router.json` (`lastRegistryUrl`, `lastState`) so CLI can reopen the last active state when env override is absent. +- Keep one-time migration from legacy flat `~/.clawdentity/*` into `~/.clawdentity/states/prod/*` non-destructive (copy-only, never overwrite existing state targets). - Keep human profile config in `manager.ts` (`humanName`) with env override support (`CLAWDENTITY_HUMAN_NAME`) and deterministic precedence. - `registry-metadata.ts` should be the only module that fetches registry metadata for config bootstrap flows. - Avoid hidden host coupling in config tests; do not depend on shell-exported `CLAWDENTITY_*` values. 
diff --git a/apps/cli/src/config/manager.test.ts b/apps/cli/src/config/manager.test.ts index 9287510..730708d 100644 --- a/apps/cli/src/config/manager.test.ts +++ b/apps/cli/src/config/manager.test.ts @@ -1,4 +1,13 @@ -import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { readFileSync } from "node:fs"; +import { + chmod, + cp, + mkdir, + readdir, + readFile, + stat, + writeFile, +} from "node:fs/promises"; import { homedir } from "node:os"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; @@ -6,10 +15,17 @@ vi.mock("node:os", () => ({ homedir: vi.fn(() => "/mock-home"), })); +vi.mock("node:fs", () => ({ + readFileSync: vi.fn(), +})); + vi.mock("node:fs/promises", () => ({ chmod: vi.fn(), + cp: vi.fn(), mkdir: vi.fn(), + readdir: vi.fn(), readFile: vi.fn(), + stat: vi.fn(), writeFile: vi.fn(), })); @@ -29,10 +45,14 @@ import { writeConfig, } from "./manager.js"; +const mockedReadFileSync = vi.mocked(readFileSync); const mockedReadFile = vi.mocked(readFile); const mockedWriteFile = vi.mocked(writeFile); const mockedMkdir = vi.mocked(mkdir); const mockedChmod = vi.mocked(chmod); +const mockedCp = vi.mocked(cp); +const mockedReaddir = vi.mocked(readdir); +const mockedStat = vi.mocked(stat); const mockedHomedir = vi.mocked(homedir); const buildErrnoError = (code: string): NodeJS.ErrnoException => { @@ -47,6 +67,11 @@ describe("config manager", () => { beforeEach(() => { vi.clearAllMocks(); mockedHomedir.mockReturnValue("/mock-home"); + mockedReadFileSync.mockImplementation(() => { + throw buildErrnoError("ENOENT"); + }); + mockedReaddir.mockResolvedValue([]); + mockedStat.mockRejectedValue(buildErrnoError("ENOENT")); process.env = resetClawdentityEnv(previousEnv); }); @@ -84,40 +109,59 @@ describe("config manager", () => { it("writes config and secures file permissions", async () => { await writeConfig({ - registryUrl: "http://localhost:8787", + registryUrl: "https://registry.clawdentity.com", apiKey: "token", }); - 
expect(mockedMkdir).toHaveBeenCalledWith("/mock-home/.clawdentity", { - recursive: true, - }); + expect(mockedMkdir).toHaveBeenCalledWith( + "/mock-home/.clawdentity/states/prod", + { + recursive: true, + }, + ); expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/config.json", - '{\n "registryUrl": "http://localhost:8787",\n "apiKey": "token"\n}\n', + "/mock-home/.clawdentity/states/prod/config.json", + '{\n "registryUrl": "https://registry.clawdentity.com",\n "apiKey": "token"\n}\n', "utf-8", ); expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/config.json", + "/mock-home/.clawdentity/states/prod/config.json", 0o600, ); }); - it("applies env override over file config", async () => { - mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); - process.env.CLAWDENTITY_REGISTRY_URL = "http://env:8787"; + it("routes writes to dev state when registryUrl points to dev", async () => { + await writeConfig({ + registryUrl: "https://dev.registry.clawdentity.com", + apiKey: "token", + }); - await expect(resolveConfig()).resolves.toEqual({ - registryUrl: "http://env:8787", + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/states/dev/config.json", + '{\n "registryUrl": "https://dev.registry.clawdentity.com",\n "apiKey": "token"\n}\n', + "utf-8", + ); + }); + + it("routes writes to local state when registryUrl points to local host", async () => { + await writeConfig({ + registryUrl: "http://127.0.0.1:8788", + apiKey: "token", }); + + expect(mockedWriteFile).toHaveBeenCalledWith( + "/mock-home/.clawdentity/states/local/config.json", + '{\n "registryUrl": "http://127.0.0.1:8788",\n "apiKey": "token"\n}\n', + "utf-8", + ); }); - it("applies proxy env override over file config", async () => { - mockedReadFile.mockResolvedValueOnce('{"proxyUrl":"http://file:8787"}'); - process.env.CLAWDENTITY_PROXY_URL = "http://env:8787"; + it("applies env override over file config", async () => { + 
mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + process.env.CLAWDENTITY_REGISTRY_URL = "http://env:8787"; await expect(resolveConfig()).resolves.toEqual({ - registryUrl: DEFAULT_REGISTRY_URL, - proxyUrl: "http://env:8787", + registryUrl: "http://env:8787", }); }); @@ -169,24 +213,65 @@ describe("config manager", () => { ); }); - it("reads, merges, and writes when setting values", async () => { - mockedReadFile.mockResolvedValueOnce('{"registryUrl":"http://file:8787"}'); + it("moves config to state mapped by updated registryUrl", async () => { + mockedReadFile.mockResolvedValueOnce( + '{"registryUrl":"https://registry.clawdentity.com","apiKey":"token"}', + ); - await setConfigValue("proxyUrl", "http://proxy:8787"); + await setConfigValue("registryUrl", "https://dev.registry.clawdentity.com"); expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/config.json", - '{\n "registryUrl": "http://file:8787",\n "proxyUrl": "http://proxy:8787"\n}\n', + "/mock-home/.clawdentity/states/dev/config.json", + '{\n "registryUrl": "https://dev.registry.clawdentity.com",\n "apiKey": "token"\n}\n', "utf-8", ); }); - it("exposes config location helpers", () => { - expect(getConfigDir()).toBe("/mock-home/.clawdentity"); - expect(getConfigFilePath()).toBe("/mock-home/.clawdentity/config.json"); - expect(getCacheDir()).toBe("/mock-home/.clawdentity/cache"); + it("uses prod state by default", () => { + expect(getConfigDir()).toBe("/mock-home/.clawdentity/states/prod"); + expect(getConfigFilePath()).toBe( + "/mock-home/.clawdentity/states/prod/config.json", + ); + expect(getCacheDir()).toBe("/mock-home/.clawdentity/states/prod/cache"); expect(getCacheFilePath("registry-keys.json")).toBe( - "/mock-home/.clawdentity/cache/registry-keys.json", + "/mock-home/.clawdentity/states/prod/cache/registry-keys.json", + ); + }); + + it("selects dev state from env registry URL", () => { + process.env.CLAWDENTITY_REGISTRY_URL = + 
"https://dev.registry.clawdentity.com"; + + expect(getConfigDir()).toBe("/mock-home/.clawdentity/states/dev"); + }); + + it("selects local state from env registry URL", () => { + process.env.CLAWDENTITY_REGISTRY_URL = "http://host.docker.internal:8788"; + + expect(getConfigDir()).toBe("/mock-home/.clawdentity/states/local"); + }); + + it("selects state from router hint when env is unset", () => { + mockedReadFileSync.mockReturnValueOnce( + '{"lastRegistryUrl":"https://dev.registry.clawdentity.com","lastState":"dev"}\n', + ); + + expect(getConfigDir()).toBe("/mock-home/.clawdentity/states/dev"); + }); + + it("migrates legacy root entries to prod state", async () => { + mockedReaddir.mockResolvedValueOnce([{ name: "agents" }] as never); + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + await readConfig(); + + expect(mockedCp).toHaveBeenCalledWith( + "/mock-home/.clawdentity/agents", + "/mock-home/.clawdentity/states/prod/agents", + { + recursive: true, + errorOnExist: false, + }, ); }); @@ -207,16 +292,19 @@ describe("config manager", () => { it("writes cache file and secures file permissions", async () => { await writeCacheFile("registry-keys.json", '{\n "ok": true\n}\n'); - expect(mockedMkdir).toHaveBeenCalledWith("/mock-home/.clawdentity/cache", { - recursive: true, - }); + expect(mockedMkdir).toHaveBeenCalledWith( + "/mock-home/.clawdentity/states/prod/cache", + { + recursive: true, + }, + ); expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/cache/registry-keys.json", + "/mock-home/.clawdentity/states/prod/cache/registry-keys.json", '{\n "ok": true\n}\n', "utf-8", ); expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/cache/registry-keys.json", + "/mock-home/.clawdentity/states/prod/cache/registry-keys.json", 0o600, ); }); diff --git a/apps/cli/src/config/manager.ts b/apps/cli/src/config/manager.ts index 9e45f6f..640b18c 100644 --- a/apps/cli/src/config/manager.ts +++ b/apps/cli/src/config/manager.ts @@ 
-1,8 +1,19 @@ -import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { readFileSync } from "node:fs"; +import { + chmod, + cp, + mkdir, + readdir, + readFile, + stat, + writeFile, +} from "node:fs/promises"; import { homedir } from "node:os"; import { dirname, join } from "node:path"; export const DEFAULT_REGISTRY_URL = "https://registry.clawdentity.com"; +const DEFAULT_DEV_REGISTRY_URL = "https://dev.registry.clawdentity.com"; +const DEFAULT_LOCAL_REGISTRY_URL = "http://127.0.0.1:8788"; export interface CliConfig { registryUrl: string; @@ -12,8 +23,22 @@ export interface CliConfig { } export type CliConfigKey = keyof CliConfig; +export type CliStateKind = "prod" | "dev" | "local"; -const CONFIG_DIR = ".clawdentity"; +export type ConfigPathOptions = { + homeDir?: string; + registryUrlHint?: string; +}; + +type CliStateRouter = { + lastRegistryUrl?: string; + lastState?: CliStateKind; + migratedLegacyState?: boolean; +}; + +const CONFIG_ROOT_DIR = ".clawdentity"; +const CONFIG_STATES_DIR = "states"; +const CONFIG_ROUTER_FILE = "router.json"; const CONFIG_FILE = "config.json"; const CACHE_DIR = "cache"; const FILE_MODE = 0o600; @@ -33,10 +58,32 @@ const DEFAULT_CONFIG: CliConfig = { registryUrl: DEFAULT_REGISTRY_URL, }; +const STATE_KIND_BY_REGISTRY_HOST: Record = { + "registry.clawdentity.com": "prod", + "dev.registry.clawdentity.com": "dev", +}; + +const LOCAL_REGISTRY_HOSTS = new Set([ + "localhost", + "127.0.0.1", + "host.docker.internal", +]); + +const LEGACY_ROOT_ENTRIES = new Set([CONFIG_STATES_DIR, CONFIG_ROUTER_FILE]); + const isConfigObject = (value: unknown): value is Record => { return typeof value === "object" && value !== null; }; +const parseNonEmptyString = (value: unknown): string | undefined => { + if (typeof value !== "string") { + return undefined; + } + + const trimmed = value.trim(); + return trimmed.length > 0 ? 
trimmed : undefined; +}; + const normalizeConfig = (raw: unknown): CliConfig => { if (!isConfigObject(raw)) { return { ...DEFAULT_CONFIG }; @@ -65,15 +112,147 @@ const normalizeConfig = (raw: unknown): CliConfig => { return config; }; -export const getConfigDir = (): string => join(homedir(), CONFIG_DIR); +const isCliStateKind = (value: unknown): value is CliStateKind => { + return value === "prod" || value === "dev" || value === "local"; +}; + +const resolveHomeDir = (options?: ConfigPathOptions): string => { + const configured = parseNonEmptyString(options?.homeDir); + return configured ?? homedir(); +}; + +const getConfigRootDir = (options?: ConfigPathOptions): string => { + return join(resolveHomeDir(options), CONFIG_ROOT_DIR); +}; + +const getConfigStatesDir = (options?: ConfigPathOptions): string => { + return join(getConfigRootDir(options), CONFIG_STATES_DIR); +}; + +const getRouterFilePath = (options?: ConfigPathOptions): string => { + return join(getConfigRootDir(options), CONFIG_ROUTER_FILE); +}; + +const readRouterSync = (options?: ConfigPathOptions): CliStateRouter => { + try { + const raw = readFileSync(getRouterFilePath(options), "utf-8"); + const parsed = JSON.parse(raw); + if (!isConfigObject(parsed)) { + return {}; + } + + const lastRegistryUrl = parseNonEmptyString(parsed.lastRegistryUrl); + const lastState = isCliStateKind(parsed.lastState) + ? 
parsed.lastState + : undefined; + const migratedLegacyState = parsed.migratedLegacyState === true; + + return { + lastRegistryUrl, + lastState, + migratedLegacyState, + }; + } catch { + return {}; + } +}; + +const getRegistryUrlOverrideFromEnv = (): string | undefined => { + const envCandidates = [ + process.env.CLAWDENTITY_REGISTRY_URL, + process.env.CLAWDENTITY_REGISTRY, + ]; + + return envCandidates.find((value): value is string => { + return typeof value === "string" && value.trim().length > 0; + }); +}; + +const resolveStateKindFromRegistryUrl = (registryUrl: string): CliStateKind => { + try { + const host = new URL(registryUrl).hostname.trim().toLowerCase(); + const mapped = STATE_KIND_BY_REGISTRY_HOST[host]; + if (mapped) { + return mapped; + } + + if (LOCAL_REGISTRY_HOSTS.has(host)) { + return "local"; + } + + return "prod"; + } catch { + return "prod"; + } +}; + +const defaultRegistryUrlForState = (state: CliStateKind): string => { + switch (state) { + case "dev": + return DEFAULT_DEV_REGISTRY_URL; + case "local": + return DEFAULT_LOCAL_REGISTRY_URL; + default: + return DEFAULT_REGISTRY_URL; + } +}; -export const getConfigFilePath = (): string => - join(getConfigDir(), CONFIG_FILE); +const resolveStateSelection = ( + options?: ConfigPathOptions, +): { + stateKind: CliStateKind; + registryUrl: string; +} => { + const hintedRegistryUrl = parseNonEmptyString(options?.registryUrlHint); + if (hintedRegistryUrl) { + return { + stateKind: resolveStateKindFromRegistryUrl(hintedRegistryUrl), + registryUrl: hintedRegistryUrl, + }; + } + + const envRegistryUrl = getRegistryUrlOverrideFromEnv(); + if (envRegistryUrl) { + return { + stateKind: resolveStateKindFromRegistryUrl(envRegistryUrl), + registryUrl: envRegistryUrl, + }; + } + + const router = readRouterSync(options); + if (router.lastRegistryUrl) { + return { + stateKind: resolveStateKindFromRegistryUrl(router.lastRegistryUrl), + registryUrl: router.lastRegistryUrl, + }; + } + + if (router.lastState) { + return 
{ + stateKind: router.lastState, + registryUrl: defaultRegistryUrlForState(router.lastState), + }; + } + + return { + stateKind: "prod", + registryUrl: DEFAULT_REGISTRY_URL, + }; +}; -export const getCacheDir = (): string => join(getConfigDir(), CACHE_DIR); +const pathExists = async (path: string): Promise => { + try { + await stat(path); + return true; + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return false; + } -export const getCacheFilePath = (fileName: string): string => - join(getCacheDir(), fileName); + throw error; + } +}; const writeSecureFile = async (filePath: string, value: string) => { const targetDirectory = dirname(filePath); @@ -82,7 +261,100 @@ const writeSecureFile = async (filePath: string, value: string) => { await chmod(filePath, FILE_MODE); }; +const writeRouter = async ( + router: CliStateRouter, + options?: ConfigPathOptions, +): Promise => { + const payload: CliStateRouter = {}; + if (router.lastRegistryUrl) { + payload.lastRegistryUrl = router.lastRegistryUrl; + } + if (router.lastState) { + payload.lastState = router.lastState; + } + if (router.migratedLegacyState === true) { + payload.migratedLegacyState = true; + } + + await writeSecureFile( + getRouterFilePath(options), + `${JSON.stringify(payload)}\n`, + ); +}; + +const ensureStateLayoutMigrated = async ( + options?: ConfigPathOptions, +): Promise => { + const router = readRouterSync(options); + if (router.migratedLegacyState === true) { + return; + } + + let entries: { name: string }[]; + try { + entries = await readdir(getConfigRootDir(options), { + withFileTypes: true, + }); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + throw error; + } + + const legacyEntries = entries.filter((entry) => { + return !LEGACY_ROOT_ENTRIES.has(entry.name); + }); + + if (legacyEntries.length > 0) { + const prodStateDir = join(getConfigStatesDir(options), 
"prod"); + await mkdir(prodStateDir, { recursive: true }); + + for (const entry of legacyEntries) { + const sourcePath = join(getConfigRootDir(options), entry.name); + const targetPath = join(prodStateDir, entry.name); + if (await pathExists(targetPath)) { + continue; + } + + await cp(sourcePath, targetPath, { + recursive: true, + errorOnExist: false, + }); + } + } + + await writeRouter( + { + lastRegistryUrl: router.lastRegistryUrl ?? DEFAULT_REGISTRY_URL, + lastState: router.lastState ?? "prod", + migratedLegacyState: true, + }, + options, + ); +}; + +export const getConfigDir = (options?: ConfigPathOptions): string => { + const selection = resolveStateSelection(options); + return join(getConfigStatesDir(options), selection.stateKind); +}; + +export const getConfigFilePath = (options?: ConfigPathOptions): string => + join(getConfigDir(options), CONFIG_FILE); + +export const getCacheDir = (options?: ConfigPathOptions): string => + join(getConfigDir(options), CACHE_DIR); + +export const getCacheFilePath = ( + fileName: string, + options?: ConfigPathOptions, +): string => join(getCacheDir(options), fileName); + export const readConfig = async (): Promise => { + await ensureStateLayoutMigrated(); + try { const configContents = await readFile(getConfigFilePath(), "utf-8"); return normalizeConfig(JSON.parse(configContents)); @@ -117,10 +389,21 @@ export const resolveConfig = async (): Promise => { }; export const writeConfig = async (config: CliConfig): Promise => { + await ensureStateLayoutMigrated(); + + const selection = resolveStateSelection({ + registryUrlHint: config.registryUrl, + }); await writeSecureFile( - getConfigFilePath(), + getConfigFilePath({ registryUrlHint: config.registryUrl }), `${JSON.stringify(config, null, 2)}\n`, ); + const currentRouter = readRouterSync(); + await writeRouter({ + lastRegistryUrl: selection.registryUrl, + lastState: selection.stateKind, + migratedLegacyState: currentRouter.migratedLegacyState === true, + }); }; export const 
getConfigValue = async ( @@ -145,6 +428,8 @@ export const setConfigValue = async ( export const readCacheFile = async ( fileName: string, ): Promise => { + await ensureStateLayoutMigrated(); + try { return await readFile(getCacheFilePath(fileName), "utf-8"); } catch (error) { @@ -161,5 +446,6 @@ export const writeCacheFile = async ( fileName: string, value: string, ): Promise => { + await ensureStateLayoutMigrated(); await writeSecureFile(getCacheFilePath(fileName), value); }; diff --git a/apps/cli/src/test-env.ts b/apps/cli/src/test-env.ts index 4420c06..95eb8e2 100644 --- a/apps/cli/src/test-env.ts +++ b/apps/cli/src/test-env.ts @@ -3,6 +3,7 @@ const CLAWDENTITY_ENV_OVERRIDE_KEYS = [ "CLAWDENTITY_REGISTRY", "CLAWDENTITY_PROXY_URL", "CLAWDENTITY_API_KEY", + "CLAWDENTITY_HUMAN_NAME", ] as const; export function resetClawdentityEnv( diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 0f72af2..5acbfec 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -9,6 +9,8 @@ - Keep Cloudflare Worker deployment config in `wrangler.jsonc` with explicit `local`, `dev`, and `production` environments. - Duplicate Durable Object `bindings` and `migrations` inside each Wrangler env block; env sections do not inherit top-level DO config. - Keep deploy traceability explicit by passing `APP_VERSION` (or fallback `PROXY_VERSION`) via Worker bindings; `/health` must surface the resolved version. +- Keep Wrangler observability logging enabled (`observability.enabled=true`, `logs.enabled=true`, `invocation_logs=true`) so relay/auth failures are visible in Cloudflare logs. +- Keep `worker-configuration.d.ts` committed and regenerate with `wrangler types --env dev` after `wrangler.jsonc` or binding changes. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). 
- Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. diff --git a/apps/proxy/tsconfig.json b/apps/proxy/tsconfig.json index c6ba935..9228e85 100644 --- a/apps/proxy/tsconfig.json +++ b/apps/proxy/tsconfig.json @@ -5,5 +5,6 @@ "types": ["@cloudflare/workers-types", "node"], "outDir": "./dist" }, - "include": ["src"] + "include": ["src"], + "exclude": ["worker-configuration.d.ts"] } diff --git a/apps/proxy/worker-configuration.d.ts b/apps/proxy/worker-configuration.d.ts new file mode 100644 index 0000000..a5cf30f --- /dev/null +++ b/apps/proxy/worker-configuration.d.ts @@ -0,0 +1,10939 @@ +/* eslint-disable */ +// Generated by Wrangler by running `wrangler --cwd apps/proxy types --env dev` (hash: dc5827228fb6484fcb5b0f01c54ab10d) +// Runtime types generated with workerd@1.20260219.0 2025-09-01 nodejs_compat +declare namespace Cloudflare { + interface GlobalProps { + mainModule: typeof import("./src/worker"); + durableNamespaces: "AgentRelaySession" | "ProxyTrustState"; + } + interface Env { + ENVIRONMENT: string; + APP_VERSION: string; + REGISTRY_URL: string; + OPENCLAW_BASE_URL: string; + REGISTRY_INTERNAL_SERVICE_ID: string; + REGISTRY_INTERNAL_SERVICE_SECRET: string; + INJECT_IDENTITY_INTO_MESSAGE: string; + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: string; + RELAY_QUEUE_TTL_SECONDS: string; + RELAY_RETRY_INITIAL_MS: string; + RELAY_RETRY_MAX_MS: string; + RELAY_RETRY_MAX_ATTEMPTS: string; + RELAY_RETRY_JITTER_RATIO: string; + AGENT_RELAY_SESSION: DurableObjectNamespace; + PROXY_TRUST_STATE: DurableObjectNamespace; + } +} +interface Env extends Cloudflare.Env {} +type StringifyValues> = { + [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string; +}; +declare namespace NodeJS { + interface ProcessEnv extends StringifyValues> {} +} + +// Begin runtime types +/*! 
***************************************************************************** +Copyright (c) Cloudflare. All rights reserved. +Copyright (c) Microsoft Corporation. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +/* eslint-disable */ +// noinspection JSUnusedGlobalSymbols +declare var onmessage: never; +/** + * The **`DOMException`** interface represents an abnormal event (called an **exception**) that occurs as a result of calling a method or accessing a property of a web API. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException) + */ +declare class DOMException extends Error { + constructor(message?: string, name?: string); + /** + * The **`message`** read-only property of the a message or description associated with the given error name. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/message) + */ + readonly message: string; + /** + * The **`name`** read-only property of the one of the strings associated with an error name. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/name) + */ + readonly name: string; + /** + * The **`code`** read-only property of the DOMException interface returns one of the legacy error code constants, or `0` if none match. 
+ * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/code) + */ + readonly code: number; + static readonly INDEX_SIZE_ERR: number; + static readonly DOMSTRING_SIZE_ERR: number; + static readonly HIERARCHY_REQUEST_ERR: number; + static readonly WRONG_DOCUMENT_ERR: number; + static readonly INVALID_CHARACTER_ERR: number; + static readonly NO_DATA_ALLOWED_ERR: number; + static readonly NO_MODIFICATION_ALLOWED_ERR: number; + static readonly NOT_FOUND_ERR: number; + static readonly NOT_SUPPORTED_ERR: number; + static readonly INUSE_ATTRIBUTE_ERR: number; + static readonly INVALID_STATE_ERR: number; + static readonly SYNTAX_ERR: number; + static readonly INVALID_MODIFICATION_ERR: number; + static readonly NAMESPACE_ERR: number; + static readonly INVALID_ACCESS_ERR: number; + static readonly VALIDATION_ERR: number; + static readonly TYPE_MISMATCH_ERR: number; + static readonly SECURITY_ERR: number; + static readonly NETWORK_ERR: number; + static readonly ABORT_ERR: number; + static readonly URL_MISMATCH_ERR: number; + static readonly QUOTA_EXCEEDED_ERR: number; + static readonly TIMEOUT_ERR: number; + static readonly INVALID_NODE_TYPE_ERR: number; + static readonly DATA_CLONE_ERR: number; + get stack(): any; + set stack(value: any); +} +type WorkerGlobalScopeEventMap = { + fetch: FetchEvent; + scheduled: ScheduledEvent; + queue: QueueEvent; + unhandledrejection: PromiseRejectionEvent; + rejectionhandled: PromiseRejectionEvent; +}; +declare abstract class WorkerGlobalScope extends EventTarget { + EventTarget: typeof EventTarget; +} +/* The **`console`** object provides access to the debugging console (e.g., the Web console in Firefox). * + * The **`console`** object provides access to the debugging console (e.g., the Web console in Firefox). 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console) + */ +interface Console { + "assert"(condition?: boolean, ...data: any[]): void; + /** + * The **`console.clear()`** static method clears the console if possible. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/clear_static) + */ + clear(): void; + /** + * The **`console.count()`** static method logs the number of times that this particular call to `count()` has been called. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/count_static) + */ + count(label?: string): void; + /** + * The **`console.countReset()`** static method resets counter used with console/count_static. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/countReset_static) + */ + countReset(label?: string): void; + /** + * The **`console.debug()`** static method outputs a message to the console at the 'debug' log level. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/debug_static) + */ + debug(...data: any[]): void; + /** + * The **`console.dir()`** static method displays a list of the properties of the specified JavaScript object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dir_static) + */ + dir(item?: any, options?: any): void; + /** + * The **`console.dirxml()`** static method displays an interactive tree of the descendant elements of the specified XML/HTML element. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dirxml_static) + */ + dirxml(...data: any[]): void; + /** + * The **`console.error()`** static method outputs a message to the console at the 'error' log level. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/error_static) + */ + error(...data: any[]): void; + /** + * The **`console.group()`** static method creates a new inline group in the Web console log, causing any subsequent console messages to be indented by an additional level, until console/groupEnd_static is called. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/group_static) + */ + group(...data: any[]): void; + /** + * The **`console.groupCollapsed()`** static method creates a new inline group in the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupCollapsed_static) + */ + groupCollapsed(...data: any[]): void; + /** + * The **`console.groupEnd()`** static method exits the current inline group in the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupEnd_static) + */ + groupEnd(): void; + /** + * The **`console.info()`** static method outputs a message to the console at the 'info' log level. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/info_static) + */ + info(...data: any[]): void; + /** + * The **`console.log()`** static method outputs a message to the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/log_static) + */ + log(...data: any[]): void; + /** + * The **`console.table()`** static method displays tabular data as a table. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/table_static) + */ + table(tabularData?: any, properties?: string[]): void; + /** + * The **`console.time()`** static method starts a timer you can use to track how long an operation takes. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/time_static) + */ + time(label?: string): void; + /** + * The **`console.timeEnd()`** static method stops a timer that was previously started by calling console/time_static. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timeEnd_static) + */ + timeEnd(label?: string): void; + /** + * The **`console.timeLog()`** static method logs the current value of a timer that was previously started by calling console/time_static. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timeLog_static) + */ + timeLog(label?: string, ...data: any[]): void; + timeStamp(label?: string): void; + /** + * The **`console.trace()`** static method outputs a stack trace to the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/trace_static) + */ + trace(...data: any[]): void; + /** + * The **`console.warn()`** static method outputs a warning message to the console at the 'warning' log level. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/warn_static) + */ + warn(...data: any[]): void; +} +declare const console: Console; +type BufferSource = ArrayBufferView | ArrayBuffer; +type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; +declare namespace WebAssembly { + class CompileError extends Error { + constructor(message?: string); + } + class RuntimeError extends Error { + constructor(message?: string); + } + type ValueType = "anyfunc" | "externref" | "f32" | "f64" | "i32" | "i64" | "v128"; + interface GlobalDescriptor { + value: ValueType; + mutable?: boolean; + } + class Global { + constructor(descriptor: GlobalDescriptor, value?: any); + value: any; + valueOf(): any; + } + type ImportValue = ExportValue | number; + type ModuleImports = Record; + type Imports = Record; + type ExportValue = Function | Global | Memory | Table; + type Exports = Record; + class Instance { + constructor(module: Module, imports?: Imports); + readonly exports: Exports; + } + interface MemoryDescriptor { + initial: number; + maximum?: number; + shared?: 
boolean; + } + class Memory { + constructor(descriptor: MemoryDescriptor); + readonly buffer: ArrayBuffer; + grow(delta: number): number; + } + type ImportExportKind = "function" | "global" | "memory" | "table"; + interface ModuleExportDescriptor { + kind: ImportExportKind; + name: string; + } + interface ModuleImportDescriptor { + kind: ImportExportKind; + module: string; + name: string; + } + abstract class Module { + static customSections(module: Module, sectionName: string): ArrayBuffer[]; + static exports(module: Module): ModuleExportDescriptor[]; + static imports(module: Module): ModuleImportDescriptor[]; + } + type TableKind = "anyfunc" | "externref"; + interface TableDescriptor { + element: TableKind; + initial: number; + maximum?: number; + } + class Table { + constructor(descriptor: TableDescriptor, value?: any); + readonly length: number; + get(index: number): any; + grow(delta: number, value?: any): number; + set(index: number, value?: any): void; + } + function instantiate(module: Module, imports?: Imports): Promise; + function validate(bytes: BufferSource): boolean; +} +/** + * The **`ServiceWorkerGlobalScope`** interface of the Service Worker API represents the global execution context of a service worker. + * Available only in secure contexts. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ServiceWorkerGlobalScope) + */ +interface ServiceWorkerGlobalScope extends WorkerGlobalScope { + DOMException: typeof DOMException; + WorkerGlobalScope: typeof WorkerGlobalScope; + btoa(data: string): string; + atob(data: string): string; + setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; + setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; + clearTimeout(timeoutId: number | null): void; + setInterval(callback: (...args: any[]) => void, msDelay?: number): number; + setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; + clearInterval(timeoutId: number | null): void; + queueMicrotask(task: Function): void; + structuredClone(value: T, options?: StructuredSerializeOptions): T; + reportError(error: any): void; + fetch(input: RequestInfo | URL, init?: RequestInit): Promise; + self: ServiceWorkerGlobalScope; + crypto: Crypto; + caches: CacheStorage; + scheduler: Scheduler; + performance: Performance; + Cloudflare: Cloudflare; + readonly origin: string; + Event: typeof Event; + ExtendableEvent: typeof ExtendableEvent; + CustomEvent: typeof CustomEvent; + PromiseRejectionEvent: typeof PromiseRejectionEvent; + FetchEvent: typeof FetchEvent; + TailEvent: typeof TailEvent; + TraceEvent: typeof TailEvent; + ScheduledEvent: typeof ScheduledEvent; + MessageEvent: typeof MessageEvent; + CloseEvent: typeof CloseEvent; + ReadableStreamDefaultReader: typeof ReadableStreamDefaultReader; + ReadableStreamBYOBReader: typeof ReadableStreamBYOBReader; + ReadableStream: typeof ReadableStream; + WritableStream: typeof WritableStream; + WritableStreamDefaultWriter: typeof WritableStreamDefaultWriter; + TransformStream: typeof TransformStream; + ByteLengthQueuingStrategy: typeof ByteLengthQueuingStrategy; + CountQueuingStrategy: typeof CountQueuingStrategy; + ErrorEvent: typeof ErrorEvent; + MessageChannel: typeof MessageChannel; + 
MessagePort: typeof MessagePort; + EventSource: typeof EventSource; + ReadableStreamBYOBRequest: typeof ReadableStreamBYOBRequest; + ReadableStreamDefaultController: typeof ReadableStreamDefaultController; + ReadableByteStreamController: typeof ReadableByteStreamController; + WritableStreamDefaultController: typeof WritableStreamDefaultController; + TransformStreamDefaultController: typeof TransformStreamDefaultController; + CompressionStream: typeof CompressionStream; + DecompressionStream: typeof DecompressionStream; + TextEncoderStream: typeof TextEncoderStream; + TextDecoderStream: typeof TextDecoderStream; + Headers: typeof Headers; + Body: typeof Body; + Request: typeof Request; + Response: typeof Response; + WebSocket: typeof WebSocket; + WebSocketPair: typeof WebSocketPair; + WebSocketRequestResponsePair: typeof WebSocketRequestResponsePair; + AbortController: typeof AbortController; + AbortSignal: typeof AbortSignal; + TextDecoder: typeof TextDecoder; + TextEncoder: typeof TextEncoder; + navigator: Navigator; + Navigator: typeof Navigator; + URL: typeof URL; + URLSearchParams: typeof URLSearchParams; + URLPattern: typeof URLPattern; + Blob: typeof Blob; + File: typeof File; + FormData: typeof FormData; + Crypto: typeof Crypto; + SubtleCrypto: typeof SubtleCrypto; + CryptoKey: typeof CryptoKey; + CacheStorage: typeof CacheStorage; + Cache: typeof Cache; + FixedLengthStream: typeof FixedLengthStream; + IdentityTransformStream: typeof IdentityTransformStream; + HTMLRewriter: typeof HTMLRewriter; +} +declare function addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; +declare function removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; +/** + * The **`dispatchEvent()`** method of the EventTarget sends an Event to the object, (synchronously) invoking the affected event listeners in the 
appropriate order. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) + */ +declare function dispatchEvent(event: WorkerGlobalScopeEventMap[keyof WorkerGlobalScopeEventMap]): boolean; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/btoa) */ +declare function btoa(data: string): string; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/atob) */ +declare function atob(data: string): string; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setTimeout) */ +declare function setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setTimeout) */ +declare function setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/clearTimeout) */ +declare function clearTimeout(timeoutId: number | null): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setInterval) */ +declare function setInterval(callback: (...args: any[]) => void, msDelay?: number): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setInterval) */ +declare function setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/clearInterval) */ +declare function clearInterval(timeoutId: number | null): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/queueMicrotask) */ +declare function queueMicrotask(task: Function): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/structuredClone) */ +declare function structuredClone(value: T, options?: StructuredSerializeOptions): T; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/reportError) */ +declare function reportError(error: any): void; +/* [MDN 
Reference](https://developer.mozilla.org/docs/Web/API/Window/fetch) */ +declare function fetch(input: RequestInfo | URL, init?: RequestInit): Promise; +declare const self: ServiceWorkerGlobalScope; +/** +* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. +* The Workers runtime implements the full surface of this API, but with some differences in +* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) +* compared to those implemented in most browsers. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) +*/ +declare const crypto: Crypto; +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare const caches: CacheStorage; +declare const scheduler: Scheduler; +/** +* The Workers runtime supports a subset of the Performance API, used to measure timing and performance, +* as well as timing of subrequests and other operations. 
+* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) +*/ +declare const performance: Performance; +declare const Cloudflare: Cloudflare; +declare const origin: string; +declare const navigator: Navigator; +interface TestController { +} +interface ExecutionContext { + waitUntil(promise: Promise): void; + passThroughOnException(): void; + readonly props: Props; +} +type ExportedHandlerFetchHandler = (request: Request>, env: Env, ctx: ExecutionContext) => Response | Promise; +type ExportedHandlerTailHandler = (events: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTraceHandler = (traces: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTailStreamHandler = (event: TailStream.TailEvent, env: Env, ctx: ExecutionContext) => TailStream.TailEventHandlerType | Promise; +type ExportedHandlerScheduledHandler = (controller: ScheduledController, env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerQueueHandler = (batch: MessageBatch, env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTestHandler = (controller: TestController, env: Env, ctx: ExecutionContext) => void | Promise; +interface ExportedHandler { + fetch?: ExportedHandlerFetchHandler; + tail?: ExportedHandlerTailHandler; + trace?: ExportedHandlerTraceHandler; + tailStream?: ExportedHandlerTailStreamHandler; + scheduled?: ExportedHandlerScheduledHandler; + test?: ExportedHandlerTestHandler; + email?: EmailExportedHandler; + queue?: ExportedHandlerQueueHandler; +} +interface StructuredSerializeOptions { + transfer?: any[]; +} +declare abstract class Navigator { + sendBeacon(url: string, body?: BodyInit): boolean; + readonly userAgent: string; + readonly hardwareConcurrency: number; + readonly language: string; + readonly languages: string[]; +} +interface AlarmInvocationInfo { + readonly isRetry: boolean; + readonly retryCount: number; +} +interface 
Cloudflare { + readonly compatibilityFlags: Record; +} +interface DurableObject { + fetch(request: Request): Response | Promise; + alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; + webSocketMessage?(ws: WebSocket, message: string | ArrayBuffer): void | Promise; + webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: boolean): void | Promise; + webSocketError?(ws: WebSocket, error: unknown): void | Promise; +} +type DurableObjectStub = Fetcher & { + readonly id: DurableObjectId; + readonly name?: string; +}; +interface DurableObjectId { + toString(): string; + equals(other: DurableObjectId): boolean; + readonly name?: string; +} +declare abstract class DurableObjectNamespace { + newUniqueId(options?: DurableObjectNamespaceNewUniqueIdOptions): DurableObjectId; + idFromName(name: string): DurableObjectId; + idFromString(id: string): DurableObjectId; + get(id: DurableObjectId, options?: DurableObjectNamespaceGetDurableObjectOptions): DurableObjectStub; + getByName(name: string, options?: DurableObjectNamespaceGetDurableObjectOptions): DurableObjectStub; + jurisdiction(jurisdiction: DurableObjectJurisdiction): DurableObjectNamespace; +} +type DurableObjectJurisdiction = "eu" | "fedramp" | "fedramp-high"; +interface DurableObjectNamespaceNewUniqueIdOptions { + jurisdiction?: DurableObjectJurisdiction; +} +type DurableObjectLocationHint = "wnam" | "enam" | "sam" | "weur" | "eeur" | "apac" | "oc" | "afr" | "me"; +type DurableObjectRoutingMode = "primary-only"; +interface DurableObjectNamespaceGetDurableObjectOptions { + locationHint?: DurableObjectLocationHint; + routingMode?: DurableObjectRoutingMode; +} +interface DurableObjectClass<_T extends Rpc.DurableObjectBranded | undefined = undefined> { +} +interface DurableObjectState { + waitUntil(promise: Promise): void; + readonly props: Props; + readonly id: DurableObjectId; + readonly storage: DurableObjectStorage; + container?: Container; + blockConcurrencyWhile(callback: () => Promise): Promise; 
+ acceptWebSocket(ws: WebSocket, tags?: string[]): void; + getWebSockets(tag?: string): WebSocket[]; + setWebSocketAutoResponse(maybeReqResp?: WebSocketRequestResponsePair): void; + getWebSocketAutoResponse(): WebSocketRequestResponsePair | null; + getWebSocketAutoResponseTimestamp(ws: WebSocket): Date | null; + setHibernatableWebSocketEventTimeout(timeoutMs?: number): void; + getHibernatableWebSocketEventTimeout(): number | null; + getTags(ws: WebSocket): string[]; + abort(reason?: string): void; +} +interface DurableObjectTransaction { + get(key: string, options?: DurableObjectGetOptions): Promise; + get(keys: string[], options?: DurableObjectGetOptions): Promise>; + list(options?: DurableObjectListOptions): Promise>; + put(key: string, value: T, options?: DurableObjectPutOptions): Promise; + put(entries: Record, options?: DurableObjectPutOptions): Promise; + delete(key: string, options?: DurableObjectPutOptions): Promise; + delete(keys: string[], options?: DurableObjectPutOptions): Promise; + rollback(): void; + getAlarm(options?: DurableObjectGetAlarmOptions): Promise; + setAlarm(scheduledTime: number | Date, options?: DurableObjectSetAlarmOptions): Promise; + deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; +} +interface DurableObjectStorage { + get(key: string, options?: DurableObjectGetOptions): Promise; + get(keys: string[], options?: DurableObjectGetOptions): Promise>; + list(options?: DurableObjectListOptions): Promise>; + put(key: string, value: T, options?: DurableObjectPutOptions): Promise; + put(entries: Record, options?: DurableObjectPutOptions): Promise; + delete(key: string, options?: DurableObjectPutOptions): Promise; + delete(keys: string[], options?: DurableObjectPutOptions): Promise; + deleteAll(options?: DurableObjectPutOptions): Promise; + transaction(closure: (txn: DurableObjectTransaction) => Promise): Promise; + getAlarm(options?: DurableObjectGetAlarmOptions): Promise; + setAlarm(scheduledTime: number | Date, options?: 
DurableObjectSetAlarmOptions): Promise; + deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; + sync(): Promise; + sql: SqlStorage; + kv: SyncKvStorage; + transactionSync(closure: () => T): T; + getCurrentBookmark(): Promise; + getBookmarkForTime(timestamp: number | Date): Promise; + onNextSessionRestoreBookmark(bookmark: string): Promise; +} +interface DurableObjectListOptions { + start?: string; + startAfter?: string; + end?: string; + prefix?: string; + reverse?: boolean; + limit?: number; + allowConcurrency?: boolean; + noCache?: boolean; +} +interface DurableObjectGetOptions { + allowConcurrency?: boolean; + noCache?: boolean; +} +interface DurableObjectGetAlarmOptions { + allowConcurrency?: boolean; +} +interface DurableObjectPutOptions { + allowConcurrency?: boolean; + allowUnconfirmed?: boolean; + noCache?: boolean; +} +interface DurableObjectSetAlarmOptions { + allowConcurrency?: boolean; + allowUnconfirmed?: boolean; +} +declare class WebSocketRequestResponsePair { + constructor(request: string, response: string); + get request(): string; + get response(): string; +} +interface AnalyticsEngineDataset { + writeDataPoint(event?: AnalyticsEngineDataPoint): void; +} +interface AnalyticsEngineDataPoint { + indexes?: ((ArrayBuffer | string) | null)[]; + doubles?: number[]; + blobs?: ((ArrayBuffer | string) | null)[]; +} +/** + * The **`Event`** interface represents an event which takes place on an `EventTarget`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event) + */ +declare class Event { + constructor(type: string, init?: EventInit); + /** + * The **`type`** read-only property of the Event interface returns a string containing the event's type. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/type) + */ + get type(): string; + /** + * The **`eventPhase`** read-only property of the being evaluated. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/eventPhase) + */ + get eventPhase(): number; + /** + * The read-only **`composed`** property of the or not the event will propagate across the shadow DOM boundary into the standard DOM. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composed) + */ + get composed(): boolean; + /** + * The **`bubbles`** read-only property of the Event interface indicates whether the event bubbles up through the DOM tree or not. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/bubbles) + */ + get bubbles(): boolean; + /** + * The **`cancelable`** read-only property of the Event interface indicates whether the event can be canceled, and therefore prevented as if the event never happened. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelable) + */ + get cancelable(): boolean; + /** + * The **`defaultPrevented`** read-only property of the Event interface returns a boolean value indicating whether or not the call to Event.preventDefault() canceled the event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/defaultPrevented) + */ + get defaultPrevented(): boolean; + /** + * The Event property **`returnValue`** indicates whether the default action for this event has been prevented or not. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/returnValue) + */ + get returnValue(): boolean; + /** + * The **`currentTarget`** read-only property of the Event interface identifies the element to which the event handler has been attached. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/currentTarget) + */ + get currentTarget(): EventTarget | undefined; + /** + * The read-only **`target`** property of the dispatched. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/target) + */ + get target(): EventTarget | undefined; + /** + * The deprecated **`Event.srcElement`** is an alias for the Event.target property. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/srcElement) + */ + get srcElement(): EventTarget | undefined; + /** + * The **`timeStamp`** read-only property of the Event interface returns the time (in milliseconds) at which the event was created. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/timeStamp) + */ + get timeStamp(): number; + /** + * The **`isTrusted`** read-only property of the when the event was generated by the user agent (including via user actions and programmatic methods such as HTMLElement.focus()), and `false` when the event was dispatched via The only exception is the `click` event, which initializes the `isTrusted` property to `false` in user agents. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/isTrusted) + */ + get isTrusted(): boolean; + /** + * The **`cancelBubble`** property of the Event interface is deprecated. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) + */ + get cancelBubble(): boolean; + /** + * The **`cancelBubble`** property of the Event interface is deprecated. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) + */ + set cancelBubble(value: boolean); + /** + * The **`stopImmediatePropagation()`** method of the If several listeners are attached to the same element for the same event type, they are called in the order in which they were added. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopImmediatePropagation) + */ + stopImmediatePropagation(): void; + /** + * The **`preventDefault()`** method of the Event interface tells the user agent that if the event does not get explicitly handled, its default action should not be taken as it normally would be. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/preventDefault) + */ + preventDefault(): void; + /** + * The **`stopPropagation()`** method of the Event interface prevents further propagation of the current event in the capturing and bubbling phases. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopPropagation) + */ + stopPropagation(): void; + /** + * The **`composedPath()`** method of the Event interface returns the event's path which is an array of the objects on which listeners will be invoked. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composedPath) + */ + composedPath(): EventTarget[]; + static readonly NONE: number; + static readonly CAPTURING_PHASE: number; + static readonly AT_TARGET: number; + static readonly BUBBLING_PHASE: number; +} +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} +type EventListener = (event: EventType) => void; +interface EventListenerObject { + handleEvent(event: EventType): void; +} +type EventListenerOrEventListenerObject = EventListener | EventListenerObject; +/** + * The **`EventTarget`** interface is implemented by objects that can receive events and may have listeners for them. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget) + */ +declare class EventTarget = Record> { + constructor(); + /** + * The **`addEventListener()`** method of the EventTarget interface sets up a function that will be called whenever the specified event is delivered to the target. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/addEventListener) + */ + addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; + /** + * The **`removeEventListener()`** method of the EventTarget interface removes an event listener previously registered with EventTarget.addEventListener() from the target. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/removeEventListener) + */ + removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; + /** + * The **`dispatchEvent()`** method of the EventTarget sends an Event to the object, (synchronously) invoking the affected event listeners in the appropriate order. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) + */ + dispatchEvent(event: EventMap[keyof EventMap]): boolean; +} +interface EventTargetEventListenerOptions { + capture?: boolean; +} +interface EventTargetAddEventListenerOptions { + capture?: boolean; + passive?: boolean; + once?: boolean; + signal?: AbortSignal; +} +interface EventTargetHandlerObject { + handleEvent: (event: Event) => any | undefined; +} +/** + * The **`AbortController`** interface represents a controller object that allows you to abort one or more Web requests as and when desired. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController) + */ +declare class AbortController { + constructor(); + /** + * The **`signal`** read-only property of the AbortController interface returns an AbortSignal object instance, which can be used to communicate with/abort an asynchronous operation as desired. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/signal) + */ + get signal(): AbortSignal; + /** + * The **`abort()`** method of the AbortController interface aborts an asynchronous operation before it has completed. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/abort) + */ + abort(reason?: any): void; +} +/** + * The **`AbortSignal`** interface represents a signal object that allows you to communicate with an asynchronous operation (such as a fetch request) and abort it if required via an AbortController object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal) + */ +declare abstract class AbortSignal extends EventTarget { + /** + * The **`AbortSignal.abort()`** static method returns an AbortSignal that is already set as aborted (and which does not trigger an AbortSignal/abort_event event). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_static) + */ + static abort(reason?: any): AbortSignal; + /** + * The **`AbortSignal.timeout()`** static method returns an AbortSignal that will automatically abort after a specified time. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/timeout_static) + */ + static timeout(delay: number): AbortSignal; + /** + * The **`AbortSignal.any()`** static method takes an iterable of abort signals and returns an AbortSignal. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/any_static) + */ + static any(signals: AbortSignal[]): AbortSignal; + /** + * The **`aborted`** read-only property returns a value that indicates whether the asynchronous operations the signal is communicating with are aborted (`true`) or not (`false`). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/aborted) + */ + get aborted(): boolean; + /** + * The **`reason`** read-only property returns a JavaScript value that indicates the abort reason. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/reason) + */ + get reason(): any; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ + get onabort(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ + set onabort(value: any | null); + /** + * The **`throwIfAborted()`** method throws the signal's abort AbortSignal.reason if the signal has been aborted; otherwise it does nothing. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/throwIfAborted) + */ + throwIfAborted(): void; +} +interface Scheduler { + wait(delay: number, maybeOptions?: SchedulerWaitOptions): Promise; +} +interface SchedulerWaitOptions { + signal?: AbortSignal; +} +/** + * The **`ExtendableEvent`** interface extends the lifetime of the `install` and `activate` events dispatched on the global scope as part of the service worker lifecycle. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent) + */ +declare abstract class ExtendableEvent extends Event { + /** + * The **`ExtendableEvent.waitUntil()`** method tells the event dispatcher that work is ongoing. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent/waitUntil) + */ + waitUntil(promise: Promise): void; +} +/** + * The **`CustomEvent`** interface represents events initialized by an application for any purpose. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent) + */ +declare class CustomEvent extends Event { + constructor(type: string, init?: CustomEventCustomEventInit); + /** + * The read-only **`detail`** property of the CustomEvent interface returns any data passed when initializing the event. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent/detail) + */ + get detail(): T; +} +interface CustomEventCustomEventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; + detail?: any; +} +/** + * The **`Blob`** interface represents a blob, which is a file-like object of immutable, raw data; they can be read as text or binary data, or converted into a ReadableStream so its methods can be used for processing the data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob) + */ +declare class Blob { + constructor(type?: ((ArrayBuffer | ArrayBufferView) | string | Blob)[], options?: BlobOptions); + /** + * The **`size`** read-only property of the Blob interface returns the size of the Blob or File in bytes. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) + */ + get size(): number; + /** + * The **`type`** read-only property of the Blob interface returns the MIME type of the file. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) + */ + get type(): string; + /** + * The **`slice()`** method of the Blob interface creates and returns a new `Blob` object which contains data from a subset of the blob on which it's called. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * The **`arrayBuffer()`** method of the Blob interface returns a Promise that resolves with the contents of the blob as binary data contained in an ArrayBuffer. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/arrayBuffer) + */ + arrayBuffer(): Promise; + /** + * The **`bytes()`** method of the Blob interface returns a Promise that resolves with a Uint8Array containing the contents of the blob as an array of bytes. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/bytes) + */ + bytes(): Promise; + /** + * The **`text()`** method of the string containing the contents of the blob, interpreted as UTF-8. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) + */ + text(): Promise; + /** + * The **`stream()`** method of the Blob interface returns a ReadableStream which upon reading returns the data contained within the `Blob`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/stream) + */ + stream(): ReadableStream; +} +interface BlobOptions { + type?: string; +} +/** + * The **`File`** interface provides information about files and allows JavaScript in a web page to access their content. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File) + */ +declare class File extends Blob { + constructor(bits: ((ArrayBuffer | ArrayBufferView) | string | Blob)[] | undefined, name: string, options?: FileOptions); + /** + * The **`name`** read-only property of the File interface returns the name of the file represented by a File object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) + */ + get name(): string; + /** + * The **`lastModified`** read-only property of the File interface provides the last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) + */ + get lastModified(): number; +} +interface FileOptions { + type?: string; + lastModified?: number; +} +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare abstract class CacheStorage { + /** + * The **`open()`** method of the the Cache object matching the `cacheName`. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CacheStorage/open) + */ + open(cacheName: string): Promise; + readonly default: Cache; +} +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare abstract class Cache { + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#delete) */ + delete(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#match) */ + match(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#put) */ + put(request: RequestInfo | URL, response: Response): Promise; +} +interface CacheQueryOptions { + ignoreMethod?: boolean; +} +/** +* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. +* The Workers runtime implements the full surface of this API, but with some differences in +* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) +* compared to those implemented in most browsers. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) +*/ +declare abstract class Crypto { + /** + * The **`Crypto.subtle`** read-only property returns a cryptographic operations. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/subtle) + */ + get subtle(): SubtleCrypto; + /** + * The **`Crypto.getRandomValues()`** method lets you get cryptographically strong random values. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/getRandomValues) + */ + getRandomValues(buffer: T): T; + /** + * The **`randomUUID()`** method of the Crypto interface is used to generate a v4 UUID using a cryptographically secure random number generator. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/randomUUID) + */ + randomUUID(): string; + DigestStream: typeof DigestStream; +} +/** + * The **`SubtleCrypto`** interface of the Web Crypto API provides a number of low-level cryptographic functions. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto) + */ +declare abstract class SubtleCrypto { + /** + * The **`encrypt()`** method of the SubtleCrypto interface encrypts data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/encrypt) + */ + encrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, plainText: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`decrypt()`** method of the SubtleCrypto interface decrypts some encrypted data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/decrypt) + */ + decrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, cipherText: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`sign()`** method of the SubtleCrypto interface generates a digital signature. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/sign) + */ + sign(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, data: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`verify()`** method of the SubtleCrypto interface verifies a digital signature. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/verify) + */ + verify(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, signature: ArrayBuffer | ArrayBufferView, data: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`digest()`** method of the SubtleCrypto interface generates a _digest_ of the given data, using the specified hash function. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/digest) + */ + digest(algorithm: string | SubtleCryptoHashAlgorithm, data: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`generateKey()`** method of the SubtleCrypto interface is used to generate a new key (for symmetric algorithms) or key pair (for public-key algorithms). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey) + */ + generateKey(algorithm: string | SubtleCryptoGenerateKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /** + * The **`deriveKey()`** method of the SubtleCrypto interface can be used to derive a secret key from a master key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey) + */ + deriveKey(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, derivedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /** + * The **`deriveBits()`** method of the key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveBits) + */ + deriveBits(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, length?: number | null): Promise; + /** + * The **`importKey()`** method of the SubtleCrypto interface imports a key: that is, it takes as input a key in an external, portable format and gives you a CryptoKey object that you can use in the Web Crypto API. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey) + */ + importKey(format: string, keyData: (ArrayBuffer | ArrayBufferView) | JsonWebKey, algorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /** + * The **`exportKey()`** method of the SubtleCrypto interface exports a key: that is, it takes as input a CryptoKey object and gives you the key in an external, portable format. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/exportKey) + */ + exportKey(format: string, key: CryptoKey): Promise; + /** + * The **`wrapKey()`** method of the SubtleCrypto interface 'wraps' a key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/wrapKey) + */ + wrapKey(format: string, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: string | SubtleCryptoEncryptAlgorithm): Promise; + /** + * The **`unwrapKey()`** method of the SubtleCrypto interface 'unwraps' a key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey) + */ + unwrapKey(format: string, wrappedKey: ArrayBuffer | ArrayBufferView, unwrappingKey: CryptoKey, unwrapAlgorithm: string | SubtleCryptoEncryptAlgorithm, unwrappedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + timingSafeEqual(a: ArrayBuffer | ArrayBufferView, b: ArrayBuffer | ArrayBufferView): boolean; +} +/** + * The **`CryptoKey`** interface of the Web Crypto API represents a cryptographic key obtained from one of the SubtleCrypto methods SubtleCrypto.generateKey, SubtleCrypto.deriveKey, SubtleCrypto.importKey, or SubtleCrypto.unwrapKey. + * Available only in secure contexts. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey) + */ +declare abstract class CryptoKey { + /** + * The read-only **`type`** property of the CryptoKey interface indicates which kind of key is represented by the object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/type) + */ + readonly type: string; + /** + * The read-only **`extractable`** property of the CryptoKey interface indicates whether or not the key may be extracted using `SubtleCrypto.exportKey()` or `SubtleCrypto.wrapKey()`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/extractable) + */ + readonly extractable: boolean; + /** + * The read-only **`algorithm`** property of the CryptoKey interface returns an object describing the algorithm for which this key can be used, and any associated extra parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/algorithm) + */ + readonly algorithm: CryptoKeyKeyAlgorithm | CryptoKeyAesKeyAlgorithm | CryptoKeyHmacKeyAlgorithm | CryptoKeyRsaKeyAlgorithm | CryptoKeyEllipticKeyAlgorithm | CryptoKeyArbitraryKeyAlgorithm; + /** + * The read-only **`usages`** property of the CryptoKey interface indicates what can be done with the key. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/usages) + */ + readonly usages: string[]; +} +interface CryptoKeyPair { + publicKey: CryptoKey; + privateKey: CryptoKey; +} +interface JsonWebKey { + kty: string; + use?: string; + key_ops?: string[]; + alg?: string; + ext?: boolean; + crv?: string; + x?: string; + y?: string; + d?: string; + n?: string; + e?: string; + p?: string; + q?: string; + dp?: string; + dq?: string; + qi?: string; + oth?: RsaOtherPrimesInfo[]; + k?: string; +} +interface RsaOtherPrimesInfo { + r?: string; + d?: string; + t?: string; +} +interface SubtleCryptoDeriveKeyAlgorithm { + name: string; + salt?: (ArrayBuffer | ArrayBufferView); + iterations?: number; + hash?: (string | SubtleCryptoHashAlgorithm); + $public?: CryptoKey; + info?: (ArrayBuffer | ArrayBufferView); +} +interface SubtleCryptoEncryptAlgorithm { + name: string; + iv?: (ArrayBuffer | ArrayBufferView); + additionalData?: (ArrayBuffer | ArrayBufferView); + tagLength?: number; + counter?: (ArrayBuffer | ArrayBufferView); + length?: number; + label?: (ArrayBuffer | ArrayBufferView); +} +interface SubtleCryptoGenerateKeyAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + modulusLength?: number; + publicExponent?: (ArrayBuffer | ArrayBufferView); + length?: number; + namedCurve?: string; +} +interface SubtleCryptoHashAlgorithm { + name: string; +} +interface SubtleCryptoImportKeyAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + length?: number; + namedCurve?: string; + compressed?: boolean; +} +interface SubtleCryptoSignAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + dataLength?: number; + saltLength?: number; +} +interface CryptoKeyKeyAlgorithm { + name: string; +} +interface CryptoKeyAesKeyAlgorithm { + name: string; + length: number; +} +interface CryptoKeyHmacKeyAlgorithm { + name: string; + hash: CryptoKeyKeyAlgorithm; + length: number; +} +interface 
CryptoKeyRsaKeyAlgorithm { + name: string; + modulusLength: number; + publicExponent: ArrayBuffer | ArrayBufferView; + hash?: CryptoKeyKeyAlgorithm; +} +interface CryptoKeyEllipticKeyAlgorithm { + name: string; + namedCurve: string; +} +interface CryptoKeyArbitraryKeyAlgorithm { + name: string; + hash?: CryptoKeyKeyAlgorithm; + namedCurve?: string; + length?: number; +} +declare class DigestStream extends WritableStream { + constructor(algorithm: string | SubtleCryptoHashAlgorithm); + readonly digest: Promise; + get bytesWritten(): number | bigint; +} +/** + * The **`TextDecoder`** interface represents a decoder for a specific text encoding, such as `UTF-8`, `ISO-8859-2`, `KOI8-R`, `GBK`, etc. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder) + */ +declare class TextDecoder { + constructor(label?: string, options?: TextDecoderConstructorOptions); + /** + * The **`TextDecoder.decode()`** method returns a string containing text decoded from the buffer passed as a parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder/decode) + */ + decode(input?: (ArrayBuffer | ArrayBufferView), options?: TextDecoderDecodeOptions): string; + get encoding(): string; + get fatal(): boolean; + get ignoreBOM(): boolean; +} +/** + * The **`TextEncoder`** interface takes a stream of code points as input and emits a stream of UTF-8 bytes. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder) + */ +declare class TextEncoder { + constructor(); + /** + * The **`TextEncoder.encode()`** method takes a string as input, and returns a Global_Objects/Uint8Array containing the text given in parameters encoded with the specific method for that TextEncoder object. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encode) + */ + encode(input?: string): Uint8Array; + /** + * The **`TextEncoder.encodeInto()`** method takes a string to encode and a destination Uint8Array to put resulting UTF-8 encoded text into, and returns a dictionary object indicating the progress of the encoding. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encodeInto) + */ + encodeInto(input: string, buffer: Uint8Array): TextEncoderEncodeIntoResult; + get encoding(): string; +} +interface TextDecoderConstructorOptions { + fatal: boolean; + ignoreBOM: boolean; +} +interface TextDecoderDecodeOptions { + stream: boolean; +} +interface TextEncoderEncodeIntoResult { + read: number; + written: number; +} +/** + * The **`ErrorEvent`** interface represents events providing information related to errors in scripts or in files. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent) + */ +declare class ErrorEvent extends Event { + constructor(type: string, init?: ErrorEventErrorEventInit); + /** + * The **`filename`** read-only property of the ErrorEvent interface returns a string containing the name of the script file in which the error occurred. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/filename) + */ + get filename(): string; + /** + * The **`message`** read-only property of the ErrorEvent interface returns a string containing a human-readable error message describing the problem. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/message) + */ + get message(): string; + /** + * The **`lineno`** read-only property of the ErrorEvent interface returns an integer containing the line number of the script file on which the error occurred. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/lineno) + */ + get lineno(): number; + /** + * The **`colno`** read-only property of the ErrorEvent interface returns an integer containing the column number of the script file on which the error occurred. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/colno) + */ + get colno(): number; + /** + * The **`error`** read-only property of the ErrorEvent interface returns a JavaScript value, such as an Error or DOMException, representing the error associated with this event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/error) + */ + get error(): any; +} +interface ErrorEventErrorEventInit { + message?: string; + filename?: string; + lineno?: number; + colno?: number; + error?: any; +} +/** + * The **`MessageEvent`** interface represents a message received by a target object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent) + */ +declare class MessageEvent extends Event { + constructor(type: string, initializer: MessageEventInit); + /** + * The **`data`** read-only property of the The data sent by the message emitter; this can be any data type, depending on what originated this event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/data) + */ + readonly data: any; + /** + * The **`origin`** read-only property of the origin of the message emitter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/origin) + */ + readonly origin: string | null; + /** + * The **`lastEventId`** read-only property of the unique ID for the event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/lastEventId) + */ + readonly lastEventId: string; + /** + * The **`source`** read-only property of the a WindowProxy, MessagePort, or a `MessageEventSource` (which can be a WindowProxy, message emitter. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/source) + */ + readonly source: MessagePort | null; + /** + * The **`ports`** read-only property of the containing all MessagePort objects sent with the message, in order. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/ports) + */ + readonly ports: MessagePort[]; +} +interface MessageEventInit { + data: ArrayBuffer | string; +} +/** + * The **`PromiseRejectionEvent`** interface represents events which are sent to the global script context when JavaScript Promises are rejected. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent) + */ +declare abstract class PromiseRejectionEvent extends Event { + /** + * The PromiseRejectionEvent interface's **`promise`** read-only property indicates the JavaScript rejected. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/promise) + */ + readonly promise: Promise; + /** + * The PromiseRejectionEvent **`reason`** read-only property is any JavaScript value or Object which provides the reason passed into Promise.reject(). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/reason) + */ + readonly reason: any; +} +/** + * The **`FormData`** interface provides a way to construct a set of key/value pairs representing form fields and their values, which can be sent using the Window/fetch, XMLHttpRequest.send() or navigator.sendBeacon() methods. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData) + */ +declare class FormData { + constructor(); + /** + * The **`append()`** method of the FormData interface appends a new value onto an existing key inside a `FormData` object, or adds the key if it does not already exist. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) + */ + append(name: string, value: string | Blob): void; + /** + * The **`append()`** method of the FormData interface appends a new value onto an existing key inside a `FormData` object, or adds the key if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) + */ + append(name: string, value: string): void; + /** + * The **`append()`** method of the FormData interface appends a new value onto an existing key inside a `FormData` object, or adds the key if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) + */ + append(name: string, value: Blob, filename?: string): void; + /** + * The **`delete()`** method of the FormData interface deletes a key and its value(s) from a `FormData` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/delete) + */ + delete(name: string): void; + /** + * The **`get()`** method of the FormData interface returns the first value associated with a given key from within a `FormData` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/get) + */ + get(name: string): (File | string) | null; + /** + * The **`getAll()`** method of the FormData interface returns all the values associated with a given key from within a `FormData` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/getAll) + */ + getAll(name: string): (File | string)[]; + /** + * The **`has()`** method of the FormData interface returns whether a `FormData` object contains a certain key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/has) + */ + has(name: string): boolean; + /** + * The **`set()`** method of the FormData interface sets a new value for an existing key inside a `FormData` object, or adds the key/value if it does not already exist. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) + */ + set(name: string, value: string | Blob): void; + /** + * The **`set()`** method of the FormData interface sets a new value for an existing key inside a `FormData` object, or adds the key/value if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) + */ + set(name: string, value: string): void; + /** + * The **`set()`** method of the FormData interface sets a new value for an existing key inside a `FormData` object, or adds the key/value if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) + */ + set(name: string, value: Blob, filename?: string): void; + /* Returns an array of key, value pairs for every entry in the list. */ + entries(): IterableIterator<[ + key: string, + value: File | string + ]>; + /* Returns a list of keys in the list. */ + keys(): IterableIterator; + /* Returns a list of values in the list. 
*/ + values(): IterableIterator<(File | string)>; + forEach(callback: (this: This, value: File | string, key: string, parent: FormData) => void, thisArg?: This): void; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: File | string + ]>; +} +interface ContentOptions { + html?: boolean; +} +declare class HTMLRewriter { + constructor(); + on(selector: string, handlers: HTMLRewriterElementContentHandlers): HTMLRewriter; + onDocument(handlers: HTMLRewriterDocumentContentHandlers): HTMLRewriter; + transform(response: Response): Response; +} +interface HTMLRewriterElementContentHandlers { + element?(element: Element): void | Promise; + comments?(comment: Comment): void | Promise; + text?(element: Text): void | Promise; +} +interface HTMLRewriterDocumentContentHandlers { + doctype?(doctype: Doctype): void | Promise; + comments?(comment: Comment): void | Promise; + text?(text: Text): void | Promise; + end?(end: DocumentEnd): void | Promise; +} +interface Doctype { + readonly name: string | null; + readonly publicId: string | null; + readonly systemId: string | null; +} +interface Element { + tagName: string; + readonly attributes: IterableIterator; + readonly removed: boolean; + readonly namespaceURI: string; + getAttribute(name: string): string | null; + hasAttribute(name: string): boolean; + setAttribute(name: string, value: string): Element; + removeAttribute(name: string): Element; + before(content: string | ReadableStream | Response, options?: ContentOptions): Element; + after(content: string | ReadableStream | Response, options?: ContentOptions): Element; + prepend(content: string | ReadableStream | Response, options?: ContentOptions): Element; + append(content: string | ReadableStream | Response, options?: ContentOptions): Element; + replace(content: string | ReadableStream | Response, options?: ContentOptions): Element; + remove(): Element; + removeAndKeepContent(): Element; + setInnerContent(content: string | ReadableStream | Response, options?: 
ContentOptions): Element; + onEndTag(handler: (tag: EndTag) => void | Promise): void; +} +interface EndTag { + name: string; + before(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; + after(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; + remove(): EndTag; +} +interface Comment { + text: string; + readonly removed: boolean; + before(content: string, options?: ContentOptions): Comment; + after(content: string, options?: ContentOptions): Comment; + replace(content: string, options?: ContentOptions): Comment; + remove(): Comment; +} +interface Text { + readonly text: string; + readonly lastInTextNode: boolean; + readonly removed: boolean; + before(content: string | ReadableStream | Response, options?: ContentOptions): Text; + after(content: string | ReadableStream | Response, options?: ContentOptions): Text; + replace(content: string | ReadableStream | Response, options?: ContentOptions): Text; + remove(): Text; +} +interface DocumentEnd { + append(content: string, options?: ContentOptions): DocumentEnd; +} +/** + * This is the event type for `fetch` events dispatched on the ServiceWorkerGlobalScope. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent) + */ +declare abstract class FetchEvent extends ExtendableEvent { + /** + * The **`request`** read-only property of the the event handler. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/request) + */ + readonly request: Request; + /** + * The **`respondWith()`** method of allows you to provide a promise for a Response yourself. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/respondWith) + */ + respondWith(promise: Response | Promise): void; + passThroughOnException(): void; +} +type HeadersInit = Headers | Iterable> | Record; +/** + * The **`Headers`** interface of the Fetch API allows you to perform various actions on HTTP request and response headers. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers) + */ +declare class Headers { + constructor(init?: HeadersInit); + /** + * The **`get()`** method of the Headers interface returns a byte string of all the values of a header within a `Headers` object with a given name. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/get) + */ + get(name: string): string | null; + getAll(name: string): string[]; + /** + * The **`getSetCookie()`** method of the Headers interface returns an array containing the values of all Set-Cookie headers associated with a response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/getSetCookie) + */ + getSetCookie(): string[]; + /** + * The **`has()`** method of the Headers interface returns a boolean stating whether a `Headers` object contains a certain header. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/has) + */ + has(name: string): boolean; + /** + * The **`set()`** method of the Headers interface sets a new value for an existing header inside a `Headers` object, or adds the header if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/set) + */ + set(name: string, value: string): void; + /** + * The **`append()`** method of the Headers interface appends a new value onto an existing header inside a `Headers` object, or adds the header if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/append) + */ + append(name: string, value: string): void; + /** + * The **`delete()`** method of the Headers interface deletes a header from the current `Headers` object. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/delete) + */ + delete(name: string): void; + forEach(callback: (this: This, value: string, key: string, parent: Headers) => void, thisArg?: This): void; + /* Returns an iterator allowing to go through all key/value pairs contained in this object. */ + entries(): IterableIterator<[ + key: string, + value: string + ]>; + /* Returns an iterator allowing to go through all keys of the key/value pairs contained in this object. */ + keys(): IterableIterator; + /* Returns an iterator allowing to go through all values of the key/value pairs contained in this object. */ + values(): IterableIterator; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: string + ]>; +} +type BodyInit = ReadableStream | string | ArrayBuffer | ArrayBufferView | Blob | URLSearchParams | FormData; +declare abstract class Body { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/body) */ + get body(): ReadableStream | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bodyUsed) */ + get bodyUsed(): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/arrayBuffer) */ + arrayBuffer(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bytes) */ + bytes(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/text) */ + text(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/json) */ + json(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/formData) */ + formData(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/blob) */ + blob(): Promise; +} +/** + * The **`Response`** interface of the Fetch API represents the response to a request. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) + */ +declare var Response: { + prototype: Response; + new (body?: BodyInit | null, init?: ResponseInit): Response; + error(): Response; + redirect(url: string, status?: number): Response; + json(any: any, maybeInit?: (ResponseInit | Response)): Response; +}; +/** + * The **`Response`** interface of the Fetch API represents the response to a request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) + */ +interface Response extends Body { + /** + * The **`clone()`** method of the Response interface creates a clone of a response object, identical in every way, but stored in a different variable. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/clone) + */ + clone(): Response; + /** + * The **`status`** read-only property of the Response interface contains the HTTP status codes of the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/status) + */ + status: number; + /** + * The **`statusText`** read-only property of the Response interface contains the status message corresponding to the HTTP status code in Response.status. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/statusText) + */ + statusText: string; + /** + * The **`headers`** read-only property of the with the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/headers) + */ + headers: Headers; + /** + * The **`ok`** read-only property of the Response interface contains a Boolean stating whether the response was successful (status in the range 200-299) or not. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/ok) + */ + ok: boolean; + /** + * The **`redirected`** read-only property of the Response interface indicates whether or not the response is the result of a request you made which was redirected. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/redirected) + */ + redirected: boolean; + /** + * The **`url`** read-only property of the Response interface contains the URL of the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/url) + */ + url: string; + webSocket: WebSocket | null; + cf: any | undefined; + /** + * The **`type`** read-only property of the Response interface contains the type of the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/type) + */ + type: "default" | "error"; +} +interface ResponseInit { + status?: number; + statusText?: string; + headers?: HeadersInit; + cf?: any; + webSocket?: (WebSocket | null); + encodeBody?: "automatic" | "manual"; +} +type RequestInfo> = Request | string; +/** + * The **`Request`** interface of the Fetch API represents a resource request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) + */ +declare var Request: { + prototype: Request; + new >(input: RequestInfo | URL, init?: RequestInit): Request; +}; +/** + * The **`Request`** interface of the Fetch API represents a resource request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) + */ +interface Request> extends Body { + /** + * The **`clone()`** method of the Request interface creates a copy of the current `Request` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/clone) + */ + clone(): Request; + /** + * The **`method`** read-only property of the `POST`, etc.) A String indicating the method of the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/method) + */ + method: string; + /** + * The **`url`** read-only property of the Request interface contains the URL of the request. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/url) + */ + url: string; + /** + * The **`headers`** read-only property of the with the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/headers) + */ + headers: Headers; + /** + * The **`redirect`** read-only property of the Request interface contains the mode for how redirects are handled. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/redirect) + */ + redirect: string; + fetcher: Fetcher | null; + /** + * The read-only **`signal`** property of the Request interface returns the AbortSignal associated with the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/signal) + */ + signal: AbortSignal; + cf?: Cf; + /** + * The **`integrity`** read-only property of the Request interface contains the subresource integrity value of the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/integrity) + */ + integrity: string; + /** + * The **`keepalive`** read-only property of the Request interface contains the request's `keepalive` setting (`true` or `false`), which indicates whether the browser will keep the associated request alive if the page that initiated it is unloaded before the request is complete. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/keepalive) + */ + keepalive: boolean; + /** + * The **`cache`** read-only property of the Request interface contains the cache mode of the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/cache) + */ + cache?: "no-store" | "no-cache"; +} +interface RequestInit { + /* A string to set request's method. */ + method?: string; + /* A Headers object, an object literal, or an array of two-item arrays to set request's headers. */ + headers?: HeadersInit; + /* A BodyInit object or null to set request's body. 
*/ + body?: BodyInit | null; + /* A string indicating whether request follows redirects, results in an error upon encountering a redirect, or returns the redirect (in an opaque fashion). Sets request's redirect. */ + redirect?: string; + fetcher?: (Fetcher | null); + cf?: Cf; + /* A string indicating how the request will interact with the browser's cache to set request's cache. */ + cache?: "no-store" | "no-cache"; + /* A cryptographic hash of the resource to be fetched by request. Sets request's integrity. */ + integrity?: string; + /* An AbortSignal to set request's signal. */ + signal?: (AbortSignal | null); + encodeResponseBody?: "automatic" | "manual"; +} +type Service Rpc.WorkerEntrypointBranded) | Rpc.WorkerEntrypointBranded | ExportedHandler | undefined = undefined> = T extends new (...args: any[]) => Rpc.WorkerEntrypointBranded ? Fetcher> : T extends Rpc.WorkerEntrypointBranded ? Fetcher : T extends Exclude ? never : Fetcher; +type Fetcher = (T extends Rpc.EntrypointBranded ? Rpc.Provider : unknown) & { + fetch(input: RequestInfo | URL, init?: RequestInit): Promise; + connect(address: SocketAddress | string, options?: SocketOptions): Socket; +}; +interface KVNamespaceListKey { + name: Key; + expiration?: number; + metadata?: Metadata; +} +type KVNamespaceListResult = { + list_complete: false; + keys: KVNamespaceListKey[]; + cursor: string; + cacheStatus: string | null; +} | { + list_complete: true; + keys: KVNamespaceListKey[]; + cacheStatus: string | null; +}; +interface KVNamespace { + get(key: Key, options?: Partial>): Promise; + get(key: Key, type: "text"): Promise; + get(key: Key, type: "json"): Promise; + get(key: Key, type: "arrayBuffer"): Promise; + get(key: Key, type: "stream"): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"text">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"json">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"arrayBuffer">): Promise; + get(key: Key, options?: 
KVNamespaceGetOptions<"stream">): Promise; + get(key: Array, type: "text"): Promise>; + get(key: Array, type: "json"): Promise>; + get(key: Array, options?: Partial>): Promise>; + get(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>; + get(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>; + list(options?: KVNamespaceListOptions): Promise>; + put(key: Key, value: string | ArrayBuffer | ArrayBufferView | ReadableStream, options?: KVNamespacePutOptions): Promise; + getWithMetadata(key: Key, options?: Partial>): Promise>; + getWithMetadata(key: Key, type: "text"): Promise>; + getWithMetadata(key: Key, type: "json"): Promise>; + getWithMetadata(key: Key, type: "arrayBuffer"): Promise>; + getWithMetadata(key: Key, type: "stream"): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"text">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"json">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"arrayBuffer">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"stream">): Promise>; + getWithMetadata(key: Array, type: "text"): Promise>>; + getWithMetadata(key: Array, type: "json"): Promise>>; + getWithMetadata(key: Array, options?: Partial>): Promise>>; + getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>>; + getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>>; + delete(key: Key): Promise; +} +interface KVNamespaceListOptions { + limit?: number; + prefix?: (string | null); + cursor?: (string | null); +} +interface KVNamespaceGetOptions { + type: Type; + cacheTtl?: number; +} +interface KVNamespacePutOptions { + expiration?: number; + expirationTtl?: number; + metadata?: (any | null); +} +interface KVNamespaceGetWithMetadataResult { + value: Value | null; + metadata: Metadata | null; + cacheStatus: string | null; +} +type QueueContentType = "text" | "bytes" | "json" | "v8"; +interface Queue { + send(message: 
Body, options?: QueueSendOptions): Promise; + sendBatch(messages: Iterable>, options?: QueueSendBatchOptions): Promise; +} +interface QueueSendOptions { + contentType?: QueueContentType; + delaySeconds?: number; +} +interface QueueSendBatchOptions { + delaySeconds?: number; +} +interface MessageSendRequest { + body: Body; + contentType?: QueueContentType; + delaySeconds?: number; +} +interface QueueRetryOptions { + delaySeconds?: number; +} +interface Message { + readonly id: string; + readonly timestamp: Date; + readonly body: Body; + readonly attempts: number; + retry(options?: QueueRetryOptions): void; + ack(): void; +} +interface QueueEvent extends ExtendableEvent { + readonly messages: readonly Message[]; + readonly queue: string; + retryAll(options?: QueueRetryOptions): void; + ackAll(): void; +} +interface MessageBatch { + readonly messages: readonly Message[]; + readonly queue: string; + retryAll(options?: QueueRetryOptions): void; + ackAll(): void; +} +interface R2Error extends Error { + readonly name: string; + readonly code: number; + readonly message: string; + readonly action: string; + readonly stack: any; +} +interface R2ListOptions { + limit?: number; + prefix?: string; + cursor?: string; + delimiter?: string; + startAfter?: string; + include?: ("httpMetadata" | "customMetadata")[]; +} +declare abstract class R2Bucket { + head(key: string): Promise; + get(key: string, options: R2GetOptions & { + onlyIf: R2Conditional | Headers; + }): Promise; + get(key: string, options?: R2GetOptions): Promise; + put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions & { + onlyIf: R2Conditional | Headers; + }): Promise; + put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions): Promise; + createMultipartUpload(key: string, options?: R2MultipartOptions): Promise; + resumeMultipartUpload(key: string, uploadId: string): R2MultipartUpload; + 
delete(keys: string | string[]): Promise; + list(options?: R2ListOptions): Promise; +} +interface R2MultipartUpload { + readonly key: string; + readonly uploadId: string; + uploadPart(partNumber: number, value: ReadableStream | (ArrayBuffer | ArrayBufferView) | string | Blob, options?: R2UploadPartOptions): Promise; + abort(): Promise; + complete(uploadedParts: R2UploadedPart[]): Promise; +} +interface R2UploadedPart { + partNumber: number; + etag: string; +} +declare abstract class R2Object { + readonly key: string; + readonly version: string; + readonly size: number; + readonly etag: string; + readonly httpEtag: string; + readonly checksums: R2Checksums; + readonly uploaded: Date; + readonly httpMetadata?: R2HTTPMetadata; + readonly customMetadata?: Record; + readonly range?: R2Range; + readonly storageClass: string; + readonly ssecKeyMd5?: string; + writeHttpMetadata(headers: Headers): void; +} +interface R2ObjectBody extends R2Object { + get body(): ReadableStream; + get bodyUsed(): boolean; + arrayBuffer(): Promise; + bytes(): Promise; + text(): Promise; + json(): Promise; + blob(): Promise; +} +type R2Range = { + offset: number; + length?: number; +} | { + offset?: number; + length: number; +} | { + suffix: number; +}; +interface R2Conditional { + etagMatches?: string; + etagDoesNotMatch?: string; + uploadedBefore?: Date; + uploadedAfter?: Date; + secondsGranularity?: boolean; +} +interface R2GetOptions { + onlyIf?: (R2Conditional | Headers); + range?: (R2Range | Headers); + ssecKey?: (ArrayBuffer | string); +} +interface R2PutOptions { + onlyIf?: (R2Conditional | Headers); + httpMetadata?: (R2HTTPMetadata | Headers); + customMetadata?: Record; + md5?: ((ArrayBuffer | ArrayBufferView) | string); + sha1?: ((ArrayBuffer | ArrayBufferView) | string); + sha256?: ((ArrayBuffer | ArrayBufferView) | string); + sha384?: ((ArrayBuffer | ArrayBufferView) | string); + sha512?: ((ArrayBuffer | ArrayBufferView) | string); + storageClass?: string; + ssecKey?: (ArrayBuffer 
| string); +} +interface R2MultipartOptions { + httpMetadata?: (R2HTTPMetadata | Headers); + customMetadata?: Record; + storageClass?: string; + ssecKey?: (ArrayBuffer | string); +} +interface R2Checksums { + readonly md5?: ArrayBuffer; + readonly sha1?: ArrayBuffer; + readonly sha256?: ArrayBuffer; + readonly sha384?: ArrayBuffer; + readonly sha512?: ArrayBuffer; + toJSON(): R2StringChecksums; +} +interface R2StringChecksums { + md5?: string; + sha1?: string; + sha256?: string; + sha384?: string; + sha512?: string; +} +interface R2HTTPMetadata { + contentType?: string; + contentLanguage?: string; + contentDisposition?: string; + contentEncoding?: string; + cacheControl?: string; + cacheExpiry?: Date; +} +type R2Objects = { + objects: R2Object[]; + delimitedPrefixes: string[]; +} & ({ + truncated: true; + cursor: string; +} | { + truncated: false; +}); +interface R2UploadPartOptions { + ssecKey?: (ArrayBuffer | string); +} +declare abstract class ScheduledEvent extends ExtendableEvent { + readonly scheduledTime: number; + readonly cron: string; + noRetry(): void; +} +interface ScheduledController { + readonly scheduledTime: number; + readonly cron: string; + noRetry(): void; +} +interface QueuingStrategy { + highWaterMark?: (number | bigint); + size?: (chunk: T) => number | bigint; +} +interface UnderlyingSink { + type?: string; + start?: (controller: WritableStreamDefaultController) => void | Promise; + write?: (chunk: W, controller: WritableStreamDefaultController) => void | Promise; + abort?: (reason: any) => void | Promise; + close?: () => void | Promise; +} +interface UnderlyingByteSource { + type: "bytes"; + autoAllocateChunkSize?: number; + start?: (controller: ReadableByteStreamController) => void | Promise; + pull?: (controller: ReadableByteStreamController) => void | Promise; + cancel?: (reason: any) => void | Promise; +} +interface UnderlyingSource { + type?: "" | undefined; + start?: (controller: ReadableStreamDefaultController) => void | Promise; + 
pull?: (controller: ReadableStreamDefaultController) => void | Promise; + cancel?: (reason: any) => void | Promise; + expectedLength?: (number | bigint); +} +interface Transformer { + readableType?: string; + writableType?: string; + start?: (controller: TransformStreamDefaultController) => void | Promise; + transform?: (chunk: I, controller: TransformStreamDefaultController) => void | Promise; + flush?: (controller: TransformStreamDefaultController) => void | Promise; + cancel?: (reason: any) => void | Promise; + expectedLength?: number; +} +interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. The way in which the piping process behaves under various error conditions can be customized with a number of passed options. It returns a promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate as follows: + * + * An error in this source readable stream will abort destination, unless preventAbort is truthy. The returned promise will be rejected with the source's error, or with any error that occurs during aborting the destination. + * + * An error in destination will cancel this source readable stream, unless preventCancel is truthy. The returned promise will be rejected with the destination's error, or with any error that occurs during canceling the source. + * + * When this source readable stream closes, destination will be closed, unless preventClose is truthy. The returned promise will be fulfilled once this process completes, unless an error is encountered while closing the destination, in which case it will be rejected with that error. 
+ * + * If destination starts out closed or closing, this source readable stream will be canceled, unless preventCancel is true. The returned promise will be rejected with an error indicating piping to a closed stream failed, or with any error that occurs during canceling the source. + * + * The signal option can be set to an AbortSignal to allow aborting an ongoing pipe operation via the corresponding AbortController. In this case, this source readable stream will be canceled, and destination aborted, unless the respective options preventCancel or preventAbort are set. + */ + preventClose?: boolean; + signal?: AbortSignal; +} +type ReadableStreamReadResult = { + done: false; + value: R; +} | { + done: true; + value?: undefined; +}; +/** + * The `ReadableStream` interface of the Streams API represents a readable stream of byte data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) + */ +interface ReadableStream { + /** + * The **`locked`** read-only property of the ReadableStream interface returns whether or not the readable stream is locked to a reader. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/locked) + */ + get locked(): boolean; + /** + * The **`cancel()`** method of the ReadableStream interface returns a Promise that resolves when the stream is canceled. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/cancel) + */ + cancel(reason?: any): Promise; + /** + * The **`getReader()`** method of the ReadableStream interface creates a reader and locks the stream to it. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) + */ + getReader(): ReadableStreamDefaultReader; + /** + * The **`getReader()`** method of the ReadableStream interface creates a reader and locks the stream to it. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) + */ + getReader(options: ReadableStreamGetReaderOptions): ReadableStreamBYOBReader; + /** + * The **`pipeThrough()`** method of the ReadableStream interface provides a chainable way of piping the current stream through a transform stream or any other writable/readable pair. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeThrough) + */ + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + /** + * The **`pipeTo()`** method of the ReadableStream interface pipes the current `ReadableStream` to a given WritableStream and returns a Promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeTo) + */ + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + /** + * The **`tee()`** method of the two-element array containing the two resulting branches as new ReadableStream instances. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/tee) + */ + tee(): [ + ReadableStream, + ReadableStream + ]; + values(options?: ReadableStreamValuesOptions): AsyncIterableIterator; + [Symbol.asyncIterator](options?: ReadableStreamValuesOptions): AsyncIterableIterator; +} +/** + * The `ReadableStream` interface of the Streams API represents a readable stream of byte data. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) + */ +declare const ReadableStream: { + prototype: ReadableStream; + new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new (underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; +}; +/** + * The **`ReadableStreamDefaultReader`** interface of the Streams API represents a default reader that can be used to read stream data supplied from a network (such as a fetch request). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader) + */ +declare class ReadableStreamDefaultReader { + constructor(stream: ReadableStream); + get closed(): Promise; + cancel(reason?: any): Promise; + /** + * The **`read()`** method of the ReadableStreamDefaultReader interface returns a Promise providing access to the next chunk in the stream's internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/read) + */ + read(): Promise>; + /** + * The **`releaseLock()`** method of the ReadableStreamDefaultReader interface releases the reader's lock on the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/releaseLock) + */ + releaseLock(): void; +} +/** + * The `ReadableStreamBYOBReader` interface of the Streams API defines a reader for a ReadableStream that supports zero-copy reading from an underlying byte source. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader) + */ +declare class ReadableStreamBYOBReader { + constructor(stream: ReadableStream); + get closed(): Promise; + cancel(reason?: any): Promise; + /** + * The **`read()`** method of the ReadableStreamBYOBReader interface is used to read data into a view on a user-supplied buffer from an associated readable byte stream. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/read) + */ + read(view: T): Promise>; + /** + * The **`releaseLock()`** method of the ReadableStreamBYOBReader interface releases the reader's lock on the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/releaseLock) + */ + releaseLock(): void; + readAtLeast(minElements: number, view: T): Promise>; +} +interface ReadableStreamBYOBReaderReadableStreamBYOBReaderReadOptions { + min?: number; +} +interface ReadableStreamGetReaderOptions { + /** + * Creates a ReadableStreamBYOBReader and locks the stream to the new reader. + * + * This call behaves the same way as the no-argument variant, except that it only works on readable byte streams, i.e. streams which were constructed specifically with the ability to handle "bring your own buffer" reading. The returned BYOB reader provides the ability to directly read individual chunks from the stream via its read() method, into developer-supplied buffers, allowing more precise control over allocation. + */ + mode: "byob"; +} +/** + * The **`ReadableStreamBYOBRequest`** interface of the Streams API represents a 'pull request' for data from an underlying source that will made as a zero-copy transfer to a consumer (bypassing the stream's internal queues). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest) + */ +declare abstract class ReadableStreamBYOBRequest { + /** + * The **`view`** getter property of the ReadableStreamBYOBRequest interface returns the current view. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/view) + */ + get view(): Uint8Array | null; + /** + * The **`respond()`** method of the ReadableStreamBYOBRequest interface is used to signal to the associated readable byte stream that the specified number of bytes were written into the ReadableStreamBYOBRequest.view. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respond) + */ + respond(bytesWritten: number): void; + /** + * The **`respondWithNewView()`** method of the ReadableStreamBYOBRequest interface specifies a new view that the consumer of the associated readable byte stream should write to instead of ReadableStreamBYOBRequest.view. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respondWithNewView) + */ + respondWithNewView(view: ArrayBuffer | ArrayBufferView): void; + get atLeast(): number | null; +} +/** + * The **`ReadableStreamDefaultController`** interface of the Streams API represents a controller allowing control of a ReadableStream's state and internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController) + */ +declare abstract class ReadableStreamDefaultController { + /** + * The **`desiredSize`** read-only property of the required to fill the stream's internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`close()`** method of the ReadableStreamDefaultController interface closes the associated stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/close) + */ + close(): void; + /** + * The **`enqueue()`** method of the ```js-nolint enqueue(chunk) ``` - `chunk` - : The chunk to enqueue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/enqueue) + */ + enqueue(chunk?: R): void; + /** + * The **`error()`** method of the with the associated stream to error. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/error) + */ + error(reason: any): void; +} +/** + * The **`ReadableByteStreamController`** interface of the Streams API represents a controller for a readable byte stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController) + */ +declare abstract class ReadableByteStreamController { + /** + * The **`byobRequest`** read-only property of the ReadableByteStreamController interface returns the current BYOB request, or `null` if there are no pending requests. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/byobRequest) + */ + get byobRequest(): ReadableStreamBYOBRequest | null; + /** + * The **`desiredSize`** read-only property of the ReadableByteStreamController interface returns the number of bytes required to fill the stream's internal queue to its 'desired size'. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`close()`** method of the ReadableByteStreamController interface closes the associated stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/close) + */ + close(): void; + /** + * The **`enqueue()`** method of the ReadableByteStreamController interface enqueues a given chunk on the associated readable byte stream (the chunk is copied into the stream's internal queues). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/enqueue) + */ + enqueue(chunk: ArrayBuffer | ArrayBufferView): void; + /** + * The **`error()`** method of the ReadableByteStreamController interface causes any future interactions with the associated stream to error with the specified reason. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/error) + */ + error(reason: any): void; +} +/** + * The **`WritableStreamDefaultController`** interface of the Streams API represents a controller allowing control of a WritableStream's state. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController) + */ +declare abstract class WritableStreamDefaultController { + /** + * The read-only **`signal`** property of the WritableStreamDefaultController interface returns the AbortSignal associated with the controller. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/signal) + */ + get signal(): AbortSignal; + /** + * The **`error()`** method of the with the associated stream to error. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/error) + */ + error(reason?: any): void; +} +/** + * The **`TransformStreamDefaultController`** interface of the Streams API provides methods to manipulate the associated ReadableStream and WritableStream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController) + */ +declare abstract class TransformStreamDefaultController { + /** + * The **`desiredSize`** read-only property of the TransformStreamDefaultController interface returns the desired size to fill the queue of the associated ReadableStream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`enqueue()`** method of the TransformStreamDefaultController interface enqueues the given chunk in the readable side of the stream. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/enqueue) + */ + enqueue(chunk?: O): void; + /** + * The **`error()`** method of the TransformStreamDefaultController interface errors both sides of the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/error) + */ + error(reason: any): void; + /** + * The **`terminate()`** method of the TransformStreamDefaultController interface closes the readable side and errors the writable side of the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/terminate) + */ + terminate(): void; +} +interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream through a transform stream (or any other { writable, readable } pair). It simply pipes the stream into the writable side of the supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. + */ + writable: WritableStream; +} +/** + * The **`WritableStream`** interface of the Streams API provides a standard abstraction for writing streaming data to a destination, known as a sink. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream) + */ +declare class WritableStream { + constructor(underlyingSink?: UnderlyingSink, queuingStrategy?: QueuingStrategy); + /** + * The **`locked`** read-only property of the WritableStream interface returns a boolean indicating whether the `WritableStream` is locked to a writer. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/locked) + */ + get locked(): boolean; + /** + * The **`abort()`** method of the WritableStream interface aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be immediately moved to an error state, with any queued writes discarded. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/abort) + */ + abort(reason?: any): Promise; + /** + * The **`close()`** method of the WritableStream interface closes the associated stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/close) + */ + close(): Promise; + /** + * The **`getWriter()`** method of the WritableStream interface returns a new instance of WritableStreamDefaultWriter and locks the stream to that instance. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/getWriter) + */ + getWriter(): WritableStreamDefaultWriter; +} +/** + * The **`WritableStreamDefaultWriter`** interface of the Streams API is the object returned by WritableStream.getWriter() and once created locks the writer to the `WritableStream` ensuring that no other streams can write to the underlying sink. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter) + */ +declare class WritableStreamDefaultWriter { + constructor(stream: WritableStream); + /** + * The **`closed`** read-only property of the the stream errors or the writer's lock is released. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/closed) + */ + get closed(): Promise; + /** + * The **`ready`** read-only property of the that resolves when the desired size of the stream's internal queue transitions from non-positive to positive, signaling that it is no longer applying backpressure. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/ready) + */ + get ready(): Promise; + /** + * The **`desiredSize`** read-only property of the to fill the stream's internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`abort()`** method of the the producer can no longer successfully write to the stream and it is to be immediately moved to an error state, with any queued writes discarded. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/abort) + */ + abort(reason?: any): Promise; + /** + * The **`close()`** method of the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/close) + */ + close(): Promise; + /** + * The **`write()`** method of the operation. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/write) + */ + write(chunk?: W): Promise; + /** + * The **`releaseLock()`** method of the corresponding stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/releaseLock) + */ + releaseLock(): void; +} +/** + * The **`TransformStream`** interface of the Streams API represents a concrete implementation of the pipe chain _transform stream_ concept. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream) + */ +declare class TransformStream { + constructor(transformer?: Transformer, writableStrategy?: QueuingStrategy, readableStrategy?: QueuingStrategy); + /** + * The **`readable`** read-only property of the TransformStream interface returns the ReadableStream instance controlled by this `TransformStream`. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/readable) + */ + get readable(): ReadableStream; + /** + * The **`writable`** read-only property of the TransformStream interface returns the WritableStream instance controlled by this `TransformStream`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/writable) + */ + get writable(): WritableStream; +} +declare class FixedLengthStream extends IdentityTransformStream { + constructor(expectedLength: number | bigint, queuingStrategy?: IdentityTransformStreamQueuingStrategy); +} +declare class IdentityTransformStream extends TransformStream { + constructor(queuingStrategy?: IdentityTransformStreamQueuingStrategy); +} +interface IdentityTransformStreamQueuingStrategy { + highWaterMark?: (number | bigint); +} +interface ReadableStreamValuesOptions { + preventCancel?: boolean; +} +/** + * The **`CompressionStream`** interface of the Compression Streams API is an API for compressing a stream of data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CompressionStream) + */ +declare class CompressionStream extends TransformStream { + constructor(format: "gzip" | "deflate" | "deflate-raw"); +} +/** + * The **`DecompressionStream`** interface of the Compression Streams API is an API for decompressing a stream of data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DecompressionStream) + */ +declare class DecompressionStream extends TransformStream { + constructor(format: "gzip" | "deflate" | "deflate-raw"); +} +/** + * The **`TextEncoderStream`** interface of the Encoding API converts a stream of strings into bytes in the UTF-8 encoding. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoderStream) + */ +declare class TextEncoderStream extends TransformStream { + constructor(); + get encoding(): string; +} +/** + * The **`TextDecoderStream`** interface of the Encoding API converts a stream of text in a binary encoding, such as UTF-8 etc., to a stream of strings. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoderStream) + */ +declare class TextDecoderStream extends TransformStream { + constructor(label?: string, options?: TextDecoderStreamTextDecoderStreamInit); + get encoding(): string; + get fatal(): boolean; + get ignoreBOM(): boolean; +} +interface TextDecoderStreamTextDecoderStreamInit { + fatal?: boolean; + ignoreBOM?: boolean; +} +/** + * The **`ByteLengthQueuingStrategy`** interface of the Streams API provides a built-in byte length queuing strategy that can be used when constructing streams. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy) + */ +declare class ByteLengthQueuingStrategy implements QueuingStrategy { + constructor(init: QueuingStrategyInit); + /** + * The read-only **`ByteLengthQueuingStrategy.highWaterMark`** property returns the total number of bytes that can be contained in the internal queue before backpressure is applied. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/highWaterMark) + */ + get highWaterMark(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/size) */ + get size(): (chunk?: any) => number; +} +/** + * The **`CountQueuingStrategy`** interface of the Streams API provides a built-in chunk counting queuing strategy that can be used when constructing streams. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy) + */ +declare class CountQueuingStrategy implements QueuingStrategy { + constructor(init: QueuingStrategyInit); + /** + * The read-only **`CountQueuingStrategy.highWaterMark`** property returns the total number of chunks that can be contained in the internal queue before backpressure is applied. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/highWaterMark) + */ + get highWaterMark(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/size) */ + get size(): (chunk?: any) => number; +} +interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water mark. + * + * Note that the provided high water mark will not be validated ahead of time. Instead, if it is negative, NaN, or not a number, the resulting ByteLengthQueuingStrategy will cause the corresponding stream constructor to throw. 
+ */ + highWaterMark: number; +} +interface ScriptVersion { + id?: string; + tag?: string; + message?: string; +} +declare abstract class TailEvent extends ExtendableEvent { + readonly events: TraceItem[]; + readonly traces: TraceItem[]; +} +interface TraceItem { + readonly event: (TraceItemFetchEventInfo | TraceItemJsRpcEventInfo | TraceItemScheduledEventInfo | TraceItemAlarmEventInfo | TraceItemQueueEventInfo | TraceItemEmailEventInfo | TraceItemTailEventInfo | TraceItemCustomEventInfo | TraceItemHibernatableWebSocketEventInfo) | null; + readonly eventTimestamp: number | null; + readonly logs: TraceLog[]; + readonly exceptions: TraceException[]; + readonly diagnosticsChannelEvents: TraceDiagnosticChannelEvent[]; + readonly scriptName: string | null; + readonly entrypoint?: string; + readonly scriptVersion?: ScriptVersion; + readonly dispatchNamespace?: string; + readonly scriptTags?: string[]; + readonly durableObjectId?: string; + readonly outcome: string; + readonly executionModel: string; + readonly truncated: boolean; + readonly cpuTime: number; + readonly wallTime: number; +} +interface TraceItemAlarmEventInfo { + readonly scheduledTime: Date; +} +interface TraceItemCustomEventInfo { +} +interface TraceItemScheduledEventInfo { + readonly scheduledTime: number; + readonly cron: string; +} +interface TraceItemQueueEventInfo { + readonly queue: string; + readonly batchSize: number; +} +interface TraceItemEmailEventInfo { + readonly mailFrom: string; + readonly rcptTo: string; + readonly rawSize: number; +} +interface TraceItemTailEventInfo { + readonly consumedEvents: TraceItemTailEventInfoTailItem[]; +} +interface TraceItemTailEventInfoTailItem { + readonly scriptName: string | null; +} +interface TraceItemFetchEventInfo { + readonly response?: TraceItemFetchEventInfoResponse; + readonly request: TraceItemFetchEventInfoRequest; +} +interface TraceItemFetchEventInfoRequest { + readonly cf?: any; + readonly headers: Record; + readonly method: string; + readonly 
url: string; + getUnredacted(): TraceItemFetchEventInfoRequest; +} +interface TraceItemFetchEventInfoResponse { + readonly status: number; +} +interface TraceItemJsRpcEventInfo { + readonly rpcMethod: string; +} +interface TraceItemHibernatableWebSocketEventInfo { + readonly getWebSocketEvent: TraceItemHibernatableWebSocketEventInfoMessage | TraceItemHibernatableWebSocketEventInfoClose | TraceItemHibernatableWebSocketEventInfoError; +} +interface TraceItemHibernatableWebSocketEventInfoMessage { + readonly webSocketEventType: string; +} +interface TraceItemHibernatableWebSocketEventInfoClose { + readonly webSocketEventType: string; + readonly code: number; + readonly wasClean: boolean; +} +interface TraceItemHibernatableWebSocketEventInfoError { + readonly webSocketEventType: string; +} +interface TraceLog { + readonly timestamp: number; + readonly level: string; + readonly message: any; +} +interface TraceException { + readonly timestamp: number; + readonly message: string; + readonly name: string; + readonly stack?: string; +} +interface TraceDiagnosticChannelEvent { + readonly timestamp: number; + readonly channel: string; + readonly message: any; +} +interface TraceMetrics { + readonly cpuTime: number; + readonly wallTime: number; +} +interface UnsafeTraceMetrics { + fromTrace(item: TraceItem): TraceMetrics; +} +/** + * The **`URL`** interface is used to parse, construct, normalize, and encode URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL) + */ +declare class URL { + constructor(url: string | URL, base?: string | URL); + /** + * The **`origin`** read-only property of the URL interface returns a string containing the Unicode serialization of the origin of the represented URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/origin) + */ + get origin(): string; + /** + * The **`href`** property of the URL interface is a string containing the whole URL. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) + */ + get href(): string; + /** + * The **`href`** property of the URL interface is a string containing the whole URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) + */ + set href(value: string); + /** + * The **`protocol`** property of the URL interface is a string containing the protocol or scheme of the URL, including the final `':'`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) + */ + get protocol(): string; + /** + * The **`protocol`** property of the URL interface is a string containing the protocol or scheme of the URL, including the final `':'`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) + */ + set protocol(value: string); + /** + * The **`username`** property of the URL interface is a string containing the username component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) + */ + get username(): string; + /** + * The **`username`** property of the URL interface is a string containing the username component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) + */ + set username(value: string); + /** + * The **`password`** property of the URL interface is a string containing the password component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) + */ + get password(): string; + /** + * The **`password`** property of the URL interface is a string containing the password component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) + */ + set password(value: string); + /** + * The **`host`** property of the URL interface is a string containing the host, which is the URL.hostname, and then, if the port of the URL is nonempty, a `':'`, followed by the URL.port of the URL. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) + */ + get host(): string; + /** + * The **`host`** property of the URL interface is a string containing the host, which is the URL.hostname, and then, if the port of the URL is nonempty, a `':'`, followed by the URL.port of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) + */ + set host(value: string); + /** + * The **`hostname`** property of the URL interface is a string containing either the domain name or IP address of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) + */ + get hostname(): string; + /** + * The **`hostname`** property of the URL interface is a string containing either the domain name or IP address of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) + */ + set hostname(value: string); + /** + * The **`port`** property of the URL interface is a string containing the port number of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) + */ + get port(): string; + /** + * The **`port`** property of the URL interface is a string containing the port number of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) + */ + set port(value: string); + /** + * The **`pathname`** property of the URL interface represents a location in a hierarchical structure. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) + */ + get pathname(): string; + /** + * The **`pathname`** property of the URL interface represents a location in a hierarchical structure. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) + */ + set pathname(value: string); + /** + * The **`search`** property of the URL interface is a search string, also called a _query string_, that is a string containing a `'?'` followed by the parameters of the URL. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) + */ + get search(): string; + /** + * The **`search`** property of the URL interface is a search string, also called a _query string_, that is a string containing a `'?'` followed by the parameters of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) + */ + set search(value: string); + /** + * The **`hash`** property of the URL interface is a string containing a `'#'` followed by the fragment identifier of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) + */ + get hash(): string; + /** + * The **`hash`** property of the URL interface is a string containing a `'#'` followed by the fragment identifier of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) + */ + set hash(value: string); + /** + * The **`searchParams`** read-only property of the access to the [MISSING: httpmethod('GET')] decoded query arguments contained in the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/searchParams) + */ + get searchParams(): URLSearchParams; + /** + * The **`toJSON()`** method of the URL interface returns a string containing a serialized version of the URL, although in practice it seems to have the same effect as ```js-nolint toJSON() ``` None. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/toJSON) + */ + toJSON(): string; + /*function toString() { [native code] }*/ + toString(): string; + /** + * The **`URL.canParse()`** static method of the URL interface returns a boolean indicating whether or not an absolute URL, or a relative URL combined with a base URL, are parsable and valid. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/canParse_static) + */ + static canParse(url: string, base?: string): boolean; + /** + * The **`URL.parse()`** static method of the URL interface returns a newly created URL object representing the URL defined by the parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/parse_static) + */ + static parse(url: string, base?: string): URL | null; + /** + * The **`createObjectURL()`** static method of the URL interface creates a string containing a URL representing the object given in the parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/createObjectURL_static) + */ + static createObjectURL(object: File | Blob): string; + /** + * The **`revokeObjectURL()`** static method of the URL interface releases an existing object URL which was previously created by calling Call this method when you've finished using an object URL to let the browser know not to keep the reference to the file any longer. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/revokeObjectURL_static) + */ + static revokeObjectURL(object_url: string): void; +} +/** + * The **`URLSearchParams`** interface defines utility methods to work with the query string of a URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams) + */ +declare class URLSearchParams { + constructor(init?: (Iterable> | Record | string)); + /** + * The **`size`** read-only property of the URLSearchParams interface indicates the total number of search parameter entries. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/size) + */ + get size(): number; + /** + * The **`append()`** method of the URLSearchParams interface appends a specified key/value pair as a new search parameter. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/append) + */ + append(name: string, value: string): void; + /** + * The **`delete()`** method of the URLSearchParams interface deletes specified parameters and their associated value(s) from the list of all search parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/delete) + */ + delete(name: string, value?: string): void; + /** + * The **`get()`** method of the URLSearchParams interface returns the first value associated to the given search parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/get) + */ + get(name: string): string | null; + /** + * The **`getAll()`** method of the URLSearchParams interface returns all the values associated with a given search parameter as an array. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/getAll) + */ + getAll(name: string): string[]; + /** + * The **`has()`** method of the URLSearchParams interface returns a boolean value that indicates whether the specified parameter is in the search parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/has) + */ + has(name: string, value?: string): boolean; + /** + * The **`set()`** method of the URLSearchParams interface sets the value associated with a given search parameter to the given value. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/set) + */ + set(name: string, value: string): void; + /** + * The **`URLSearchParams.sort()`** method sorts all key/value pairs contained in this object in place and returns `undefined`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/sort) + */ + sort(): void; + /* Returns an array of key, value pairs for every entry in the search params. 
*/ + entries(): IterableIterator<[ + key: string, + value: string + ]>; + /* Returns a list of keys in the search params. */ + keys(): IterableIterator; + /* Returns a list of values in the search params. */ + values(): IterableIterator; + forEach(callback: (this: This, value: string, key: string, parent: URLSearchParams) => void, thisArg?: This): void; + /*function toString() { [native code] }*/ + toString(): string; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: string + ]>; +} +declare class URLPattern { + constructor(input?: (string | URLPatternInit), baseURL?: (string | URLPatternOptions), patternOptions?: URLPatternOptions); + get protocol(): string; + get username(): string; + get password(): string; + get hostname(): string; + get port(): string; + get pathname(): string; + get search(): string; + get hash(): string; + get hasRegExpGroups(): boolean; + test(input?: (string | URLPatternInit), baseURL?: string): boolean; + exec(input?: (string | URLPatternInit), baseURL?: string): URLPatternResult | null; +} +interface URLPatternInit { + protocol?: string; + username?: string; + password?: string; + hostname?: string; + port?: string; + pathname?: string; + search?: string; + hash?: string; + baseURL?: string; +} +interface URLPatternComponentResult { + input: string; + groups: Record; +} +interface URLPatternResult { + inputs: (string | URLPatternInit)[]; + protocol: URLPatternComponentResult; + username: URLPatternComponentResult; + password: URLPatternComponentResult; + hostname: URLPatternComponentResult; + port: URLPatternComponentResult; + pathname: URLPatternComponentResult; + search: URLPatternComponentResult; + hash: URLPatternComponentResult; +} +interface URLPatternOptions { + ignoreCase?: boolean; +} +/** + * A `CloseEvent` is sent to clients using WebSockets when the connection is closed. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent) + */ +declare class CloseEvent extends Event { + constructor(type: string, initializer?: CloseEventInit); + /** + * The **`code`** read-only property of the CloseEvent interface returns a WebSocket connection close code indicating the reason the connection was closed. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/code) + */ + readonly code: number; + /** + * The **`reason`** read-only property of the CloseEvent interface returns the WebSocket connection close reason the server gave for closing the connection; that is, a concise human-readable prose explanation for the closure. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/reason) + */ + readonly reason: string; + /** + * The **`wasClean`** read-only property of the CloseEvent interface returns `true` if the connection closed cleanly. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/wasClean) + */ + readonly wasClean: boolean; +} +interface CloseEventInit { + code?: number; + reason?: string; + wasClean?: boolean; +} +type WebSocketEventMap = { + close: CloseEvent; + message: MessageEvent; + open: Event; + error: ErrorEvent; +}; +/** + * The `WebSocket` object provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket) + */ +declare var WebSocket: { + prototype: WebSocket; + new (url: string, protocols?: (string[] | string)): WebSocket; + readonly READY_STATE_CONNECTING: number; + readonly CONNECTING: number; + readonly READY_STATE_OPEN: number; + readonly OPEN: number; + readonly READY_STATE_CLOSING: number; + readonly CLOSING: number; + readonly READY_STATE_CLOSED: number; + readonly CLOSED: number; +}; +/** + * The `WebSocket` object provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket) + */ +interface WebSocket extends EventTarget { + accept(): void; + /** + * The **`WebSocket.send()`** method enqueues the specified data to be transmitted to the server over the WebSocket connection, increasing the value of `bufferedAmount` by the number of bytes needed to contain the data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/send) + */ + send(message: (ArrayBuffer | ArrayBufferView) | string): void; + /** + * The **`WebSocket.close()`** method closes the already `CLOSED`, this method does nothing. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/close) + */ + close(code?: number, reason?: string): void; + serializeAttachment(attachment: any): void; + deserializeAttachment(): any | null; + /** + * The **`WebSocket.readyState`** read-only property returns the current state of the WebSocket connection. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/readyState) + */ + readyState: number; + /** + * The **`WebSocket.url`** read-only property returns the absolute URL of the WebSocket as resolved by the constructor. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/url) + */ + url: string | null; + /** + * The **`WebSocket.protocol`** read-only property returns the name of the sub-protocol the server selected; this will be one of the strings specified in the `protocols` parameter when creating the WebSocket object, or the empty string if no connection is established. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/protocol) + */ + protocol: string | null; + /** + * The **`WebSocket.extensions`** read-only property returns the extensions selected by the server. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/extensions) + */ + extensions: string | null; +} +declare const WebSocketPair: { + new (): { + 0: WebSocket; + 1: WebSocket; + }; +}; +interface SqlStorage { + exec>(query: string, ...bindings: any[]): SqlStorageCursor; + get databaseSize(): number; + Cursor: typeof SqlStorageCursor; + Statement: typeof SqlStorageStatement; +} +declare abstract class SqlStorageStatement { +} +type SqlStorageValue = ArrayBuffer | string | number | null; +declare abstract class SqlStorageCursor> { + next(): { + done?: false; + value: T; + } | { + done: true; + value?: never; + }; + toArray(): T[]; + one(): T; + raw(): IterableIterator; + columnNames: string[]; + get rowsRead(): number; + get rowsWritten(): number; + [Symbol.iterator](): IterableIterator; +} +interface Socket { + get readable(): ReadableStream; + get writable(): WritableStream; + get closed(): Promise; + get opened(): Promise; + get upgraded(): boolean; + get secureTransport(): "on" | "off" | "starttls"; + close(): Promise; + startTls(options?: TlsOptions): Socket; +} +interface SocketOptions { + secureTransport?: string; + allowHalfOpen: boolean; + highWaterMark?: (number | bigint); +} +interface SocketAddress { + hostname: string; + port: number; +} +interface TlsOptions { + expectedServerHostname?: string; +} +interface SocketInfo { + 
remoteAddress?: string; + localAddress?: string; +} +/** + * The **`EventSource`** interface is web content's interface to server-sent events. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource) + */ +declare class EventSource extends EventTarget { + constructor(url: string, init?: EventSourceEventSourceInit); + /** + * The **`close()`** method of the EventSource interface closes the connection, if one is made, and sets the ```js-nolint close() ``` None. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/close) + */ + close(): void; + /** + * The **`url`** read-only property of the URL of the source. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/url) + */ + get url(): string; + /** + * The **`withCredentials`** read-only property of the the `EventSource` object was instantiated with CORS credentials set. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/withCredentials) + */ + get withCredentials(): boolean; + /** + * The **`readyState`** read-only property of the connection. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/readyState) + */ + get readyState(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ + get onopen(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ + set onopen(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ + get onmessage(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ + set onmessage(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ + get onerror(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ + set onerror(value: any | null); + static readonly CONNECTING: number; + static readonly OPEN: number; + static readonly CLOSED: number; + static from(stream: ReadableStream): EventSource; +} +interface EventSourceEventSourceInit { + withCredentials?: boolean; + fetcher?: Fetcher; +} +interface Container { + get running(): boolean; + start(options?: ContainerStartupOptions): void; + monitor(): Promise; + destroy(error?: any): Promise; + signal(signo: number): void; + getTcpPort(port: number): Fetcher; + setInactivityTimeout(durationMs: number | bigint): Promise; +} +interface ContainerStartupOptions { + entrypoint?: string[]; + enableInternet: boolean; + env?: Record; + hardTimeout?: (number | bigint); +} +/** + * The **`MessagePort`** interface of the Channel Messaging API represents one of the two ports of a MessageChannel, allowing messages to be sent from one port and listening out for them arriving at the other. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort) + */ +declare abstract class MessagePort extends EventTarget { + /** + * The **`postMessage()`** method of the transfers ownership of objects to other browsing contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort/postMessage) + */ + postMessage(data?: any, options?: (any[] | MessagePortPostMessageOptions)): void; + /** + * The **`close()`** method of the MessagePort interface disconnects the port, so it is no longer active. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort/close) + */ + close(): void; + /** + * The **`start()`** method of the MessagePort interface starts the sending of messages queued on the port. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort/start) + */ + start(): void; + get onmessage(): any | null; + set onmessage(value: any | null); +} +/** + * The **`MessageChannel`** interface of the Channel Messaging API allows us to create a new message channel and send data through it via its two MessagePort properties. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageChannel) + */ +declare class MessageChannel { + constructor(); + /** + * The **`port1`** read-only property of the the port attached to the context that originated the channel. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageChannel/port1) + */ + readonly port1: MessagePort; + /** + * The **`port2`** read-only property of the the port attached to the context at the other end of the channel, which the message is initially sent to. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageChannel/port2) + */ + readonly port2: MessagePort; +} +interface MessagePortPostMessageOptions { + transfer?: any[]; +} +type LoopbackForExport Rpc.EntrypointBranded) | ExportedHandler | undefined = undefined> = T extends new (...args: any[]) => Rpc.WorkerEntrypointBranded ? LoopbackServiceStub> : T extends new (...args: any[]) => Rpc.DurableObjectBranded ? LoopbackDurableObjectClass> : T extends ExportedHandler ? LoopbackServiceStub : undefined; +type LoopbackServiceStub = Fetcher & (T extends CloudflareWorkersModule.WorkerEntrypoint ? (opts: { + props?: Props; +}) => Fetcher : (opts: { + props?: any; +}) => Fetcher); +type LoopbackDurableObjectClass = DurableObjectClass & (T extends CloudflareWorkersModule.DurableObject ? (opts: { + props?: Props; +}) => DurableObjectClass : (opts: { + props?: any; +}) => DurableObjectClass); +interface SyncKvStorage { + get(key: string): T | undefined; + list(options?: SyncKvListOptions): Iterable<[ + string, + T + ]>; + put(key: string, value: T): void; + delete(key: string): boolean; +} +interface SyncKvListOptions { + start?: string; + startAfter?: string; + end?: string; + prefix?: string; + reverse?: boolean; + limit?: number; +} +interface WorkerStub { + getEntrypoint(name?: string, options?: WorkerStubEntrypointOptions): Fetcher; +} +interface WorkerStubEntrypointOptions { + props?: any; +} +interface WorkerLoader { + get(name: string | null, getCode: () => WorkerLoaderWorkerCode | Promise): WorkerStub; +} +interface WorkerLoaderModule { + js?: string; + cjs?: string; + text?: string; + data?: ArrayBuffer; + json?: any; + py?: string; + wasm?: ArrayBuffer; +} +interface WorkerLoaderWorkerCode { + compatibilityDate: string; + compatibilityFlags?: string[]; + allowExperimental?: boolean; + mainModule: string; + modules: Record; + env?: any; + globalOutbound?: (Fetcher | null); + tails?: Fetcher[]; + streamingTails?: Fetcher[]; +} +/** +* The Workers 
runtime supports a subset of the Performance API, used to measure timing and performance, +* as well as timing of subrequests and other operations. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) +*/ +declare abstract class Performance { + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancetimeorigin) */ + get timeOrigin(): number; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancenow) */ + now(): number; +} +type AiImageClassificationInput = { + image: number[]; +}; +type AiImageClassificationOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiImageClassification { + inputs: AiImageClassificationInput; + postProcessedOutputs: AiImageClassificationOutput; +} +type AiImageToTextInput = { + image: number[]; + prompt?: string; + max_tokens?: number; + temperature?: number; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + raw?: boolean; + messages?: RoleScopedChatInput[]; +}; +type AiImageToTextOutput = { + description: string; +}; +declare abstract class BaseAiImageToText { + inputs: AiImageToTextInput; + postProcessedOutputs: AiImageToTextOutput; +} +type AiImageTextToTextInput = { + image: string; + prompt?: string; + max_tokens?: number; + temperature?: number; + ignore_eos?: boolean; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + raw?: boolean; + messages?: RoleScopedChatInput[]; +}; +type AiImageTextToTextOutput = { + description: string; +}; +declare abstract class BaseAiImageTextToText { + inputs: AiImageTextToTextInput; + postProcessedOutputs: AiImageTextToTextOutput; +} +type AiMultimodalEmbeddingsInput = { + image: string; + text: string[]; +}; +type 
AiIMultimodalEmbeddingsOutput = { + data: number[][]; + shape: number[]; +}; +declare abstract class BaseAiMultimodalEmbeddings { + inputs: AiImageTextToTextInput; + postProcessedOutputs: AiImageTextToTextOutput; +} +type AiObjectDetectionInput = { + image: number[]; +}; +type AiObjectDetectionOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiObjectDetection { + inputs: AiObjectDetectionInput; + postProcessedOutputs: AiObjectDetectionOutput; +} +type AiSentenceSimilarityInput = { + source: string; + sentences: string[]; +}; +type AiSentenceSimilarityOutput = number[]; +declare abstract class BaseAiSentenceSimilarity { + inputs: AiSentenceSimilarityInput; + postProcessedOutputs: AiSentenceSimilarityOutput; +} +type AiAutomaticSpeechRecognitionInput = { + audio: number[]; +}; +type AiAutomaticSpeechRecognitionOutput = { + text?: string; + words?: { + word: string; + start: number; + end: number; + }[]; + vtt?: string; +}; +declare abstract class BaseAiAutomaticSpeechRecognition { + inputs: AiAutomaticSpeechRecognitionInput; + postProcessedOutputs: AiAutomaticSpeechRecognitionOutput; +} +type AiSummarizationInput = { + input_text: string; + max_length?: number; +}; +type AiSummarizationOutput = { + summary: string; +}; +declare abstract class BaseAiSummarization { + inputs: AiSummarizationInput; + postProcessedOutputs: AiSummarizationOutput; +} +type AiTextClassificationInput = { + text: string; +}; +type AiTextClassificationOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiTextClassification { + inputs: AiTextClassificationInput; + postProcessedOutputs: AiTextClassificationOutput; +} +type AiTextEmbeddingsInput = { + text: string | string[]; +}; +type AiTextEmbeddingsOutput = { + shape: number[]; + data: number[][]; +}; +declare abstract class BaseAiTextEmbeddings { + inputs: AiTextEmbeddingsInput; + postProcessedOutputs: AiTextEmbeddingsOutput; +} +type RoleScopedChatInput = { + role: "user" | 
"assistant" | "system" | "tool" | (string & NonNullable); + content: string; + name?: string; +}; +type AiTextGenerationToolLegacyInput = { + name: string; + description: string; + parameters?: { + type: "object" | (string & NonNullable); + properties: { + [key: string]: { + type: string; + description?: string; + }; + }; + required: string[]; + }; +}; +type AiTextGenerationToolInput = { + type: "function" | (string & NonNullable); + function: { + name: string; + description: string; + parameters?: { + type: "object" | (string & NonNullable); + properties: { + [key: string]: { + type: string; + description?: string; + }; + }; + required: string[]; + }; + }; +}; +type AiTextGenerationFunctionsInput = { + name: string; + code: string; +}; +type AiTextGenerationResponseFormat = { + type: string; + json_schema?: any; +}; +type AiTextGenerationInput = { + prompt?: string; + raw?: boolean; + stream?: boolean; + max_tokens?: number; + temperature?: number; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + messages?: RoleScopedChatInput[]; + response_format?: AiTextGenerationResponseFormat; + tools?: AiTextGenerationToolInput[] | AiTextGenerationToolLegacyInput[] | (object & NonNullable); + functions?: AiTextGenerationFunctionsInput[]; +}; +type AiTextGenerationToolLegacyOutput = { + name: string; + arguments: unknown; +}; +type AiTextGenerationToolOutput = { + id: string; + type: "function"; + function: { + name: string; + arguments: string; + }; +}; +type UsageTags = { + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; +}; +type AiTextGenerationOutput = { + response?: string; + tool_calls?: AiTextGenerationToolLegacyOutput[] & AiTextGenerationToolOutput[]; + usage?: UsageTags; +}; +declare abstract class BaseAiTextGeneration { + inputs: AiTextGenerationInput; + postProcessedOutputs: AiTextGenerationOutput; +} +type AiTextToSpeechInput = { + prompt: 
string; + lang?: string; +}; +type AiTextToSpeechOutput = Uint8Array | { + audio: string; +}; +declare abstract class BaseAiTextToSpeech { + inputs: AiTextToSpeechInput; + postProcessedOutputs: AiTextToSpeechOutput; +} +type AiTextToImageInput = { + prompt: string; + negative_prompt?: string; + height?: number; + width?: number; + image?: number[]; + image_b64?: string; + mask?: number[]; + num_steps?: number; + strength?: number; + guidance?: number; + seed?: number; +}; +type AiTextToImageOutput = ReadableStream; +declare abstract class BaseAiTextToImage { + inputs: AiTextToImageInput; + postProcessedOutputs: AiTextToImageOutput; +} +type AiTranslationInput = { + text: string; + target_lang: string; + source_lang?: string; +}; +type AiTranslationOutput = { + translated_text?: string; +}; +declare abstract class BaseAiTranslation { + inputs: AiTranslationInput; + postProcessedOutputs: AiTranslationOutput; +} +/** + * Workers AI support for OpenAI's Responses API + * Reference: https://github.com/openai/openai-node/blob/master/src/resources/responses/responses.ts + * + * It's a stripped down version from its source. + * It currently supports basic function calling, json mode and accepts images as input. + * + * It does not include types for WebSearch, CodeInterpreter, FileInputs, MCP, CustomTools. + * We plan to add those incrementally as model + platform capabilities evolve. 
+ */ +type ResponsesInput = { + background?: boolean | null; + conversation?: string | ResponseConversationParam | null; + include?: Array | null; + input?: string | ResponseInput; + instructions?: string | null; + max_output_tokens?: number | null; + parallel_tool_calls?: boolean | null; + previous_response_id?: string | null; + prompt_cache_key?: string; + reasoning?: Reasoning | null; + safety_identifier?: string; + service_tier?: "auto" | "default" | "flex" | "scale" | "priority" | null; + stream?: boolean | null; + stream_options?: StreamOptions | null; + temperature?: number | null; + text?: ResponseTextConfig; + tool_choice?: ToolChoiceOptions | ToolChoiceFunction; + tools?: Array; + top_p?: number | null; + truncation?: "auto" | "disabled" | null; +}; +type ResponsesOutput = { + id?: string; + created_at?: number; + output_text?: string; + error?: ResponseError | null; + incomplete_details?: ResponseIncompleteDetails | null; + instructions?: string | Array | null; + object?: "response"; + output?: Array; + parallel_tool_calls?: boolean; + temperature?: number | null; + tool_choice?: ToolChoiceOptions | ToolChoiceFunction; + tools?: Array; + top_p?: number | null; + max_output_tokens?: number | null; + previous_response_id?: string | null; + prompt?: ResponsePrompt | null; + reasoning?: Reasoning | null; + safety_identifier?: string; + service_tier?: "auto" | "default" | "flex" | "scale" | "priority" | null; + status?: ResponseStatus; + text?: ResponseTextConfig; + truncation?: "auto" | "disabled" | null; + usage?: ResponseUsage; +}; +type EasyInputMessage = { + content: string | ResponseInputMessageContentList; + role: "user" | "assistant" | "system" | "developer"; + type?: "message"; +}; +type ResponsesFunctionTool = { + name: string; + parameters: { + [key: string]: unknown; + } | null; + strict: boolean | null; + type: "function"; + description?: string | null; +}; +type ResponseIncompleteDetails = { + reason?: "max_output_tokens" | "content_filter"; +}; 
+type ResponsePrompt = { + id: string; + variables?: { + [key: string]: string | ResponseInputText | ResponseInputImage; + } | null; + version?: string | null; +}; +type Reasoning = { + effort?: ReasoningEffort | null; + generate_summary?: "auto" | "concise" | "detailed" | null; + summary?: "auto" | "concise" | "detailed" | null; +}; +type ResponseContent = ResponseInputText | ResponseInputImage | ResponseOutputText | ResponseOutputRefusal | ResponseContentReasoningText; +type ResponseContentReasoningText = { + text: string; + type: "reasoning_text"; +}; +type ResponseConversationParam = { + id: string; +}; +type ResponseCreatedEvent = { + response: Response; + sequence_number: number; + type: "response.created"; +}; +type ResponseCustomToolCallOutput = { + call_id: string; + output: string | Array; + type: "custom_tool_call_output"; + id?: string; +}; +type ResponseError = { + code: "server_error" | "rate_limit_exceeded" | "invalid_prompt" | "vector_store_timeout" | "invalid_image" | "invalid_image_format" | "invalid_base64_image" | "invalid_image_url" | "image_too_large" | "image_too_small" | "image_parse_error" | "image_content_policy_violation" | "invalid_image_mode" | "image_file_too_large" | "unsupported_image_media_type" | "empty_image_file" | "failed_to_download_image" | "image_file_not_found"; + message: string; +}; +type ResponseErrorEvent = { + code: string | null; + message: string; + param: string | null; + sequence_number: number; + type: "error"; +}; +type ResponseFailedEvent = { + response: Response; + sequence_number: number; + type: "response.failed"; +}; +type ResponseFormatText = { + type: "text"; +}; +type ResponseFormatJSONObject = { + type: "json_object"; +}; +type ResponseFormatTextConfig = ResponseFormatText | ResponseFormatTextJSONSchemaConfig | ResponseFormatJSONObject; +type ResponseFormatTextJSONSchemaConfig = { + name: string; + schema: { + [key: string]: unknown; + }; + type: "json_schema"; + description?: string; + strict?: boolean | 
null; +}; +type ResponseFunctionCallArgumentsDeltaEvent = { + delta: string; + item_id: string; + output_index: number; + sequence_number: number; + type: "response.function_call_arguments.delta"; +}; +type ResponseFunctionCallArgumentsDoneEvent = { + arguments: string; + item_id: string; + name: string; + output_index: number; + sequence_number: number; + type: "response.function_call_arguments.done"; +}; +type ResponseFunctionCallOutputItem = ResponseInputTextContent | ResponseInputImageContent; +type ResponseFunctionCallOutputItemList = Array; +type ResponseFunctionToolCall = { + arguments: string; + call_id: string; + name: string; + type: "function_call"; + id?: string; + status?: "in_progress" | "completed" | "incomplete"; +}; +interface ResponseFunctionToolCallItem extends ResponseFunctionToolCall { + id: string; +} +type ResponseFunctionToolCallOutputItem = { + id: string; + call_id: string; + output: string | Array; + type: "function_call_output"; + status?: "in_progress" | "completed" | "incomplete"; +}; +type ResponseIncludable = "message.input_image.image_url" | "message.output_text.logprobs"; +type ResponseIncompleteEvent = { + response: Response; + sequence_number: number; + type: "response.incomplete"; +}; +type ResponseInput = Array; +type ResponseInputContent = ResponseInputText | ResponseInputImage; +type ResponseInputImage = { + detail: "low" | "high" | "auto"; + type: "input_image"; + /** + * Base64 encoded image + */ + image_url?: string | null; +}; +type ResponseInputImageContent = { + type: "input_image"; + detail?: "low" | "high" | "auto" | null; + /** + * Base64 encoded image + */ + image_url?: string | null; +}; +type ResponseInputItem = EasyInputMessage | ResponseInputItemMessage | ResponseOutputMessage | ResponseFunctionToolCall | ResponseInputItemFunctionCallOutput | ResponseReasoningItem; +type ResponseInputItemFunctionCallOutput = { + call_id: string; + output: string | ResponseFunctionCallOutputItemList; + type: 
"function_call_output"; + id?: string | null; + status?: "in_progress" | "completed" | "incomplete" | null; +}; +type ResponseInputItemMessage = { + content: ResponseInputMessageContentList; + role: "user" | "system" | "developer"; + status?: "in_progress" | "completed" | "incomplete"; + type?: "message"; +}; +type ResponseInputMessageContentList = Array; +type ResponseInputMessageItem = { + id: string; + content: ResponseInputMessageContentList; + role: "user" | "system" | "developer"; + status?: "in_progress" | "completed" | "incomplete"; + type?: "message"; +}; +type ResponseInputText = { + text: string; + type: "input_text"; +}; +type ResponseInputTextContent = { + text: string; + type: "input_text"; +}; +type ResponseItem = ResponseInputMessageItem | ResponseOutputMessage | ResponseFunctionToolCallItem | ResponseFunctionToolCallOutputItem; +type ResponseOutputItem = ResponseOutputMessage | ResponseFunctionToolCall | ResponseReasoningItem; +type ResponseOutputItemAddedEvent = { + item: ResponseOutputItem; + output_index: number; + sequence_number: number; + type: "response.output_item.added"; +}; +type ResponseOutputItemDoneEvent = { + item: ResponseOutputItem; + output_index: number; + sequence_number: number; + type: "response.output_item.done"; +}; +type ResponseOutputMessage = { + id: string; + content: Array; + role: "assistant"; + status: "in_progress" | "completed" | "incomplete"; + type: "message"; +}; +type ResponseOutputRefusal = { + refusal: string; + type: "refusal"; +}; +type ResponseOutputText = { + text: string; + type: "output_text"; + logprobs?: Array; +}; +type ResponseReasoningItem = { + id: string; + summary: Array; + type: "reasoning"; + content?: Array; + encrypted_content?: string | null; + status?: "in_progress" | "completed" | "incomplete"; +}; +type ResponseReasoningSummaryItem = { + text: string; + type: "summary_text"; +}; +type ResponseReasoningContentItem = { + text: string; + type: "reasoning_text"; +}; +type 
ResponseReasoningTextDeltaEvent = { + content_index: number; + delta: string; + item_id: string; + output_index: number; + sequence_number: number; + type: "response.reasoning_text.delta"; +}; +type ResponseReasoningTextDoneEvent = { + content_index: number; + item_id: string; + output_index: number; + sequence_number: number; + text: string; + type: "response.reasoning_text.done"; +}; +type ResponseRefusalDeltaEvent = { + content_index: number; + delta: string; + item_id: string; + output_index: number; + sequence_number: number; + type: "response.refusal.delta"; +}; +type ResponseRefusalDoneEvent = { + content_index: number; + item_id: string; + output_index: number; + refusal: string; + sequence_number: number; + type: "response.refusal.done"; +}; +type ResponseStatus = "completed" | "failed" | "in_progress" | "cancelled" | "queued" | "incomplete"; +type ResponseStreamEvent = ResponseCompletedEvent | ResponseCreatedEvent | ResponseErrorEvent | ResponseFunctionCallArgumentsDeltaEvent | ResponseFunctionCallArgumentsDoneEvent | ResponseFailedEvent | ResponseIncompleteEvent | ResponseOutputItemAddedEvent | ResponseOutputItemDoneEvent | ResponseReasoningTextDeltaEvent | ResponseReasoningTextDoneEvent | ResponseRefusalDeltaEvent | ResponseRefusalDoneEvent | ResponseTextDeltaEvent | ResponseTextDoneEvent; +type ResponseCompletedEvent = { + response: Response; + sequence_number: number; + type: "response.completed"; +}; +type ResponseTextConfig = { + format?: ResponseFormatTextConfig; + verbosity?: "low" | "medium" | "high" | null; +}; +type ResponseTextDeltaEvent = { + content_index: number; + delta: string; + item_id: string; + logprobs: Array; + output_index: number; + sequence_number: number; + type: "response.output_text.delta"; +}; +type ResponseTextDoneEvent = { + content_index: number; + item_id: string; + logprobs: Array; + output_index: number; + sequence_number: number; + text: string; + type: "response.output_text.done"; +}; +type Logprob = { + token: 
string; + logprob: number; + top_logprobs?: Array; +}; +type TopLogprob = { + token?: string; + logprob?: number; +}; +type ResponseUsage = { + input_tokens: number; + output_tokens: number; + total_tokens: number; +}; +type Tool = ResponsesFunctionTool; +type ToolChoiceFunction = { + name: string; + type: "function"; +}; +type ToolChoiceOptions = "none"; +type ReasoningEffort = "minimal" | "low" | "medium" | "high" | null; +type StreamOptions = { + include_obfuscation?: boolean; +}; +type Ai_Cf_Baai_Bge_Base_En_V1_5_Input = { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; + }[]; +}; +type Ai_Cf_Baai_Bge_Base_En_V1_5_Output = { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} | Ai_Cf_Baai_Bge_Base_En_V1_5_AsyncResponse; +interface Ai_Cf_Baai_Bge_Base_En_V1_5_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Base_En_V1_5 { + inputs: Ai_Cf_Baai_Bge_Base_En_V1_5_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Base_En_V1_5_Output; +} +type Ai_Cf_Openai_Whisper_Input = string | { + /** + * An array of integers that represent the audio data constrained to 8-bit unsigned integer values + */ + audio: number[]; +}; +interface Ai_Cf_Openai_Whisper_Output { + /** + * The transcription + */ + text: string; + word_count?: number; + words?: { + word?: string; + /** + * The second this word begins in the recording + */ + start?: number; + /** + * The ending second when the word completes + */ + end?: number; + }[]; + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper { + inputs: Ai_Cf_Openai_Whisper_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Output; +} +type Ai_Cf_Meta_M2M100_1_2B_Input = { + /** + * The text to be translated + */ + text: string; + /** + * The language code of the source text (e.g., 'en' for English). Defaults to 'en' if not specified + */ + source_lang?: string; + /** + * The language code to translate the text into (e.g., 'es' for Spanish) + */ + target_lang: string; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + /** + * The text to be translated + */ + text: string; + /** + * The language code of the source text (e.g., 'en' for English). Defaults to 'en' if not specified + */ + source_lang?: string; + /** + * The language code to translate the text into (e.g., 'es' for Spanish) + */ + target_lang: string; + }[]; +}; +type Ai_Cf_Meta_M2M100_1_2B_Output = { + /** + * The translated text in the target language + */ + translated_text?: string; +} | Ai_Cf_Meta_M2M100_1_2B_AsyncResponse; +interface Ai_Cf_Meta_M2M100_1_2B_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Meta_M2M100_1_2B { + inputs: Ai_Cf_Meta_M2M100_1_2B_Input; + postProcessedOutputs: Ai_Cf_Meta_M2M100_1_2B_Output; +} +type Ai_Cf_Baai_Bge_Small_En_V1_5_Input = { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; + }[]; +}; +type Ai_Cf_Baai_Bge_Small_En_V1_5_Output = { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} | Ai_Cf_Baai_Bge_Small_En_V1_5_AsyncResponse; +interface Ai_Cf_Baai_Bge_Small_En_V1_5_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Small_En_V1_5 { + inputs: Ai_Cf_Baai_Bge_Small_En_V1_5_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Small_En_V1_5_Output; +} +type Ai_Cf_Baai_Bge_Large_En_V1_5_Input = { + text: string | string[]; + /** + * The pooling method used in the embedding process. 
`cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; + }[]; +}; +type Ai_Cf_Baai_Bge_Large_En_V1_5_Output = { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} | Ai_Cf_Baai_Bge_Large_En_V1_5_AsyncResponse; +interface Ai_Cf_Baai_Bge_Large_En_V1_5_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Large_En_V1_5 { + inputs: Ai_Cf_Baai_Bge_Large_En_V1_5_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Large_En_V1_5_Output; +} +type Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input = string | { + /** + * The input text prompt for the model to generate a response. + */ + prompt?: string; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * Controls the creativity of the AI's responses by adjusting how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; + image: number[] | (string & NonNullable); + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; +}; +interface Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output { + description?: string; +} +declare abstract class Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M { + inputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input; + postProcessedOutputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output; +} +type Ai_Cf_Openai_Whisper_Tiny_En_Input = string | { + /** + * An array of integers that represent the audio data constrained to 8-bit unsigned integer values + */ + audio: number[]; +}; +interface Ai_Cf_Openai_Whisper_Tiny_En_Output { + /** + * The transcription + */ + text: string; + word_count?: number; + words?: { + word?: string; + /** + * The second this word begins in the recording + */ + start?: number; + /** + * The ending second when the word completes + */ + end?: number; + }[]; + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper_Tiny_En { + inputs: Ai_Cf_Openai_Whisper_Tiny_En_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Tiny_En_Output; +} +interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input { + /** + * Base64 encoded value of the audio data. 
+ */ + audio: string; + /** + * Supported tasks are 'translate' or 'transcribe'. + */ + task?: string; + /** + * The language of the audio being transcribed or translated. + */ + language?: string; + /** + * Preprocess the audio with a voice activity detection model. + */ + vad_filter?: boolean; + /** + * A text prompt to help provide context to the model on the contents of the audio. + */ + initial_prompt?: string; + /** + * The prefix it appended the the beginning of the output of the transcription and can guide the transcription result. + */ + prefix?: string; +} +interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output { + transcription_info?: { + /** + * The language of the audio being transcribed or translated. + */ + language?: string; + /** + * The confidence level or probability of the detected language being accurate, represented as a decimal between 0 and 1. + */ + language_probability?: number; + /** + * The total duration of the original audio file, in seconds. + */ + duration?: number; + /** + * The duration of the audio after applying Voice Activity Detection (VAD) to remove silent or irrelevant sections, in seconds. + */ + duration_after_vad?: number; + }; + /** + * The complete transcription of the audio. + */ + text: string; + /** + * The total number of words in the transcription. + */ + word_count?: number; + segments?: { + /** + * The starting time of the segment within the audio, in seconds. + */ + start?: number; + /** + * The ending time of the segment within the audio, in seconds. + */ + end?: number; + /** + * The transcription of the segment. + */ + text?: string; + /** + * The temperature used in the decoding process, controlling randomness in predictions. Lower values result in more deterministic outputs. + */ + temperature?: number; + /** + * The average log probability of the predictions for the words in this segment, indicating overall confidence. 
+ */ + avg_logprob?: number; + /** + * The compression ratio of the input to the output, measuring how much the text was compressed during the transcription process. + */ + compression_ratio?: number; + /** + * The probability that the segment contains no speech, represented as a decimal between 0 and 1. + */ + no_speech_prob?: number; + words?: { + /** + * The individual word transcribed from the audio. + */ + word?: string; + /** + * The starting time of the word within the audio, in seconds. + */ + start?: number; + /** + * The ending time of the word within the audio, in seconds. + */ + end?: number; + }[]; + }[]; + /** + * The transcription in WebVTT format, which includes timing and text information for use in subtitles. + */ + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo { + inputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output; +} +type Ai_Cf_Baai_Bge_M3_Input = Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts | Ai_Cf_Baai_Bge_M3_Input_Embedding | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: (Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts_1 | Ai_Cf_Baai_Bge_M3_Input_Embedding_1)[]; +}; +interface Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts { + /** + * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts + */ + query?: string; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +interface Ai_Cf_Baai_Bge_M3_Input_Embedding { + text: string | string[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? 
+ */ + truncate_inputs?: boolean; +} +interface Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts_1 { + /** + * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts + */ + query?: string; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +interface Ai_Cf_Baai_Bge_M3_Input_Embedding_1 { + text: string | string[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +type Ai_Cf_Baai_Bge_M3_Output = Ai_Cf_Baai_Bge_M3_Ouput_Query | Ai_Cf_Baai_Bge_M3_Output_EmbeddingFor_Contexts | Ai_Cf_Baai_Bge_M3_Ouput_Embedding | Ai_Cf_Baai_Bge_M3_AsyncResponse; +interface Ai_Cf_Baai_Bge_M3_Ouput_Query { + response?: { + /** + * Index of the context in the request + */ + id?: number; + /** + * Score of the context under the index. + */ + score?: number; + }[]; +} +interface Ai_Cf_Baai_Bge_M3_Output_EmbeddingFor_Contexts { + response?: number[][]; + shape?: number[]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} +interface Ai_Cf_Baai_Bge_M3_Ouput_Embedding { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} +interface Ai_Cf_Baai_Bge_M3_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_M3 { + inputs: Ai_Cf_Baai_Bge_M3_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_M3_Output; +} +interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input { + /** + * A text description of the image you want to generate. + */ + prompt: string; + /** + * The number of diffusion steps; higher values can improve quality but take longer. + */ + steps?: number; +} +interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output { + /** + * The generated image in Base64 format. + */ + image?: string; +} +declare abstract class Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell { + inputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input; + postProcessedOutputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output; +} +type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input = Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Prompt | Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Messages; +interface Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + image?: number[] | (string & NonNullable); + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. 
Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; +} +interface Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + image?: number[] | (string & NonNullable); + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. 
+ */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * If true, the response will be streamed back incrementally. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Controls the creativity of the AI's responses by adjusting how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. 
+ */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output = { + /** + * The generated text response from the model + */ + response?: string; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct { + inputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output; +} +type Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Input = Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Prompt | Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Messages | Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Async_Batch; +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. 
+ */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. 
+ */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. 
+ */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Async_Batch { + requests?: { + /** + * User-supplied reference. This field will be present in the response as well it can be used to reference the request and response. It's NOT validated to be unique. + */ + external_reference?: string; + /** + * Prompt for the text generation model + */ + prompt?: string; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; + response_format?: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_2; + }[]; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_2 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +} | string | Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_AsyncResponse; +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast { + inputs: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Output; +} +interface Ai_Cf_Meta_Llama_Guard_3_8B_Input { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender must alternate between 'user' and 'assistant'. + */ + role: "user" | "assistant"; + /** + * The content of the message as a string. + */ + content: string; + }[]; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. 
+ */ + temperature?: number; + /** + * Dictate the output format of the generated response. + */ + response_format?: { + /** + * Set to json_object to process and output generated text as JSON. + */ + type?: string; + }; +} +interface Ai_Cf_Meta_Llama_Guard_3_8B_Output { + response?: string | { + /** + * Whether the conversation is safe or not. + */ + safe?: boolean; + /** + * A list of what hazard categories predicted for the conversation, if the conversation is deemed unsafe. + */ + categories?: string[]; + }; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +declare abstract class Base_Ai_Cf_Meta_Llama_Guard_3_8B { + inputs: Ai_Cf_Meta_Llama_Guard_3_8B_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_Guard_3_8B_Output; +} +interface Ai_Cf_Baai_Bge_Reranker_Base_Input { + /** + * A query you wish to perform against the provided contexts. + */ + /** + * Number of returned results starting with the best score. + */ + top_k?: number; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; +} +interface Ai_Cf_Baai_Bge_Reranker_Base_Output { + response?: { + /** + * Index of the context in the request + */ + id?: number; + /** + * Score of the context under the index. 
+ */ + score?: number; + }[]; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Reranker_Base { + inputs: Ai_Cf_Baai_Bge_Reranker_Base_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Reranker_Base_Output; +} +type Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Input = Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Prompt | Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Messages; +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. 
+ */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct { + inputs: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Output; +} +type Ai_Cf_Qwen_Qwq_32B_Input = Ai_Cf_Qwen_Qwq_32B_Prompt | Ai_Cf_Qwen_Qwq_32B_Messages; +interface Ai_Cf_Qwen_Qwq_32B_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwq_32B_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. 
+ */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Qwen_Qwq_32B_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Qwen_Qwq_32B { + inputs: Ai_Cf_Qwen_Qwq_32B_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwq_32B_Output; +} +type Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Input = Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Prompt | Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Messages; +interface Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. 
+ */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fulfilled for the response. 
+ */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +type Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct { + inputs: Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Input; + postProcessedOutputs: Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Output; +} +type Ai_Cf_Google_Gemma_3_12B_It_Input = Ai_Cf_Google_Gemma_3_12B_It_Prompt | Ai_Cf_Google_Gemma_3_12B_It_Messages; +interface Ai_Cf_Google_Gemma_3_12B_It_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Google_Gemma_3_12B_It_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[]; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. 
+ */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. 
+ */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Google_Gemma_3_12B_It_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Google_Gemma_3_12B_It { + inputs: Ai_Cf_Google_Gemma_3_12B_It_Input; + postProcessedOutputs: Ai_Cf_Google_Gemma_3_12B_It_Output; +} +type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input = Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt | Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages | Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Async_Batch; +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. 
+ */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. 
+ */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Async_Batch { + requests: (Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt_Inner | Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages_Inner)[]; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt_Inner { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. 
+ */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages_Inner { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. 
+ */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The tool call id. + */ + id?: string; + /** + * Specifies the type of tool (e.g., 'function'). + */ + type?: string; + /** + * Details of the function tool. 
+ */ + function?: { + /** + * The name of the tool to be called + */ + name?: string; + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + }; + }[]; +}; +declare abstract class Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct { + inputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output; +} +type Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Input = Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Async_Batch; +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. 
+ */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. 
+ */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Async_Batch { + requests: (Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt_1 | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages_1)[]; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt_1 { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_2; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_2 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages_1 { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. 
+ */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_3; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_3 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Output = Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Chat_Completion_Response | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Text_Completion_Response | string | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_AsyncResponse; +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Chat_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "chat.completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index?: number; + /** + * The message generated by the model + */ + message?: { + /** + * Role of the message author + */ + role: string; + /** + * The content of the message + */ + content: string; + /** + * Internal reasoning content (if available) + */ + reasoning_content?: string; + /** + * Tool calls made by the assistant + */ + tool_calls?: { + /** + * Unique identifier for the tool call + */ + id: string; + /** + * Type of tool call + */ + type: "function"; + function: { + /** + * Name of the function to call + */ + name: string; + /** + * JSON string of arguments for the function + */ + arguments: string; + }; + }[]; + }; + /** + * Reason why the model stopped generating + */ + finish_reason?: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; 
+ /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Text_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "text_completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index: number; + /** + * The generated text completion + */ + text: string; + /** + * Reason why the model stopped generating + */ + finish_reason: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8 { + inputs: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Output; +} +interface Ai_Cf_Deepgram_Nova_3_Input { + audio: { + body: object; + contentType: string; + }; + /** + * Sets how the model will interpret strings submitted to the custom_topic param. When strict, the model will only return topics submitted using the custom_topic param. When extended, the model will return its own detected topics in addition to those submitted using the custom_topic param. 
+ */ + custom_topic_mode?: "extended" | "strict"; + /** + * Custom topics you want the model to detect within your input audio or text if present Submit up to 100 + */ + custom_topic?: string; + /** + * Sets how the model will interpret intents submitted to the custom_intent param. When strict, the model will only return intents submitted using the custom_intent param. When extended, the model will return its own detected intents in addition those submitted using the custom_intents param + */ + custom_intent_mode?: "extended" | "strict"; + /** + * Custom intents you want the model to detect within your input audio if present + */ + custom_intent?: string; + /** + * Identifies and extracts key entities from content in submitted audio + */ + detect_entities?: boolean; + /** + * Identifies the dominant language spoken in submitted audio + */ + detect_language?: boolean; + /** + * Recognize speaker changes. Each word in the transcript will be assigned a speaker number starting at 0 + */ + diarize?: boolean; + /** + * Identify and extract key entities from content in submitted audio + */ + dictation?: boolean; + /** + * Specify the expected encoding of your submitted audio + */ + encoding?: "linear16" | "flac" | "mulaw" | "amr-nb" | "amr-wb" | "opus" | "speex" | "g729"; + /** + * Arbitrary key-value pairs that are attached to the API response for usage in downstream processing + */ + extra?: string; + /** + * Filler Words can help transcribe interruptions in your audio, like 'uh' and 'um' + */ + filler_words?: boolean; + /** + * Key term prompting can boost or suppress specialized terminology and brands. + */ + keyterm?: string; + /** + * Keywords can boost or suppress specialized terminology and brands. + */ + keywords?: string; + /** + * The BCP-47 language tag that hints at the primary spoken language. Depending on the Model and API endpoint you choose only certain languages are available. 
+ */ + language?: string; + /** + * Spoken measurements will be converted to their corresponding abbreviations. + */ + measurements?: boolean; + /** + * Opts out requests from the Deepgram Model Improvement Program. Refer to our Docs for pricing impacts before setting this to true. https://dpgr.am/deepgram-mip. + */ + mip_opt_out?: boolean; + /** + * Mode of operation for the model representing broad area of topic that will be talked about in the supplied audio + */ + mode?: "general" | "medical" | "finance"; + /** + * Transcribe each audio channel independently. + */ + multichannel?: boolean; + /** + * Numerals converts numbers from written format to numerical format. + */ + numerals?: boolean; + /** + * Splits audio into paragraphs to improve transcript readability. + */ + paragraphs?: boolean; + /** + * Profanity Filter looks for recognized profanity and converts it to the nearest recognized non-profane word or removes it from the transcript completely. + */ + profanity_filter?: boolean; + /** + * Add punctuation and capitalization to the transcript. + */ + punctuate?: boolean; + /** + * Redaction removes sensitive information from your transcripts. + */ + redact?: string; + /** + * Search for terms or phrases in submitted audio and replaces them. + */ + replace?: string; + /** + * Search for terms or phrases in submitted audio. + */ + search?: string; + /** + * Recognizes the sentiment throughout a transcript or text. + */ + sentiment?: boolean; + /** + * Apply formatting to transcript output. When set to true, additional formatting will be applied to transcripts to improve readability. + */ + smart_format?: boolean; + /** + * Detect topics throughout a transcript or text. + */ + topics?: boolean; + /** + * Segments speech into meaningful semantic units. + */ + utterances?: boolean; + /** + * Seconds to wait before detecting a pause between words in submitted audio. 
+ */ + utt_split?: number; + /** + * The number of channels in the submitted audio + */ + channels?: number; + /** + * Specifies whether the streaming endpoint should provide ongoing transcription updates as more audio is received. When set to true, the endpoint sends continuous updates, meaning transcription results may evolve over time. Note: Supported only for webosockets. + */ + interim_results?: boolean; + /** + * Indicates how long model will wait to detect whether a speaker has finished speaking or pauses for a significant period of time. When set to a value, the streaming endpoint immediately finalizes the transcription for the processed time range and returns the transcript with a speech_final parameter set to true. Can also be set to false to disable endpointing + */ + endpointing?: string; + /** + * Indicates that speech has started. You'll begin receiving Speech Started messages upon speech starting. Note: Supported only for webosockets. + */ + vad_events?: boolean; + /** + * Indicates how long model will wait to send an UtteranceEnd message after a word has been transcribed. Use with interim_results. Note: Supported only for webosockets. 
+ */ + utterance_end_ms?: boolean; +} +interface Ai_Cf_Deepgram_Nova_3_Output { + results?: { + channels?: { + alternatives?: { + confidence?: number; + transcript?: string; + words?: { + confidence?: number; + end?: number; + start?: number; + word?: string; + }[]; + }[]; + }[]; + summary?: { + result?: string; + short?: string; + }; + sentiments?: { + segments?: { + text?: string; + start_word?: number; + end_word?: number; + sentiment?: string; + sentiment_score?: number; + }[]; + average?: { + sentiment?: string; + sentiment_score?: number; + }; + }; + }; +} +declare abstract class Base_Ai_Cf_Deepgram_Nova_3 { + inputs: Ai_Cf_Deepgram_Nova_3_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Nova_3_Output; +} +interface Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Input { + queries?: string | string[]; + /** + * Optional instruction for the task + */ + instruction?: string; + documents?: string | string[]; + text?: string | string[]; +} +interface Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Output { + data?: number[][]; + shape?: number[]; +} +declare abstract class Base_Ai_Cf_Qwen_Qwen3_Embedding_0_6B { + inputs: Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Output; +} +type Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Input = { + /** + * readable stream with audio data and content-type specified for that data + */ + audio: { + body: object; + contentType: string; + }; + /** + * type of data PCM data that's sent to the inference server as raw array + */ + dtype?: "uint8" | "float32" | "float64"; +} | { + /** + * base64 encoded audio data + */ + audio: string; + /** + * type of data PCM data that's sent to the inference server as raw array + */ + dtype?: "uint8" | "float32" | "float64"; +}; +interface Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Output { + /** + * if true, end-of-turn was detected + */ + is_complete?: boolean; + /** + * probability of the end-of-turn detection + */ + probability?: number; +} +declare abstract class Base_Ai_Cf_Pipecat_Ai_Smart_Turn_V2 { 
+ inputs: Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Input; + postProcessedOutputs: Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Output; +} +declare abstract class Base_Ai_Cf_Openai_Gpt_Oss_120B { + inputs: ResponsesInput; + postProcessedOutputs: ResponsesOutput; +} +declare abstract class Base_Ai_Cf_Openai_Gpt_Oss_20B { + inputs: ResponsesInput; + postProcessedOutputs: ResponsesOutput; +} +interface Ai_Cf_Leonardo_Phoenix_1_0_Input { + /** + * A text description of the image you want to generate. + */ + prompt: string; + /** + * Controls how closely the generated image should adhere to the prompt; higher values make the image more aligned with the prompt + */ + guidance?: number; + /** + * Random seed for reproducibility of the image generation + */ + seed?: number; + /** + * The height of the generated image in pixels + */ + height?: number; + /** + * The width of the generated image in pixels + */ + width?: number; + /** + * The number of diffusion steps; higher values can improve quality but take longer + */ + num_steps?: number; + /** + * Specify what to exclude from the generated images + */ + negative_prompt?: string; +} +/** + * The generated image in JPEG format + */ +type Ai_Cf_Leonardo_Phoenix_1_0_Output = string; +declare abstract class Base_Ai_Cf_Leonardo_Phoenix_1_0 { + inputs: Ai_Cf_Leonardo_Phoenix_1_0_Input; + postProcessedOutputs: Ai_Cf_Leonardo_Phoenix_1_0_Output; +} +interface Ai_Cf_Leonardo_Lucid_Origin_Input { + /** + * A text description of the image you want to generate. 
+ */ + prompt: string; + /** + * Controls how closely the generated image should adhere to the prompt; higher values make the image more aligned with the prompt + */ + guidance?: number; + /** + * Random seed for reproducibility of the image generation + */ + seed?: number; + /** + * The height of the generated image in pixels + */ + height?: number; + /** + * The width of the generated image in pixels + */ + width?: number; + /** + * The number of diffusion steps; higher values can improve quality but take longer + */ + num_steps?: number; + /** + * The number of diffusion steps; higher values can improve quality but take longer + */ + steps?: number; +} +interface Ai_Cf_Leonardo_Lucid_Origin_Output { + /** + * The generated image in Base64 format. + */ + image?: string; +} +declare abstract class Base_Ai_Cf_Leonardo_Lucid_Origin { + inputs: Ai_Cf_Leonardo_Lucid_Origin_Input; + postProcessedOutputs: Ai_Cf_Leonardo_Lucid_Origin_Output; +} +interface Ai_Cf_Deepgram_Aura_1_Input { + /** + * Speaker used to produce the audio. + */ + speaker?: "angus" | "asteria" | "arcas" | "orion" | "orpheus" | "athena" | "luna" | "zeus" | "perseus" | "helios" | "hera" | "stella"; + /** + * Encoding of the output audio. + */ + encoding?: "linear16" | "flac" | "mulaw" | "alaw" | "mp3" | "opus" | "aac"; + /** + * Container specifies the file format wrapper for the output audio. The available options depend on the encoding type.. + */ + container?: "none" | "wav" | "ogg"; + /** + * The text content to be converted to speech + */ + text: string; + /** + * Sample Rate specifies the sample rate for the output audio. Based on the encoding, different sample rates are supported. For some encodings, the sample rate is not configurable + */ + sample_rate?: number; + /** + * The bitrate of the audio in bits per second. Choose from predefined ranges or specific values based on the encoding type. 
+ */ + bit_rate?: number; +} +/** + * The generated audio in MP3 format + */ +type Ai_Cf_Deepgram_Aura_1_Output = string; +declare abstract class Base_Ai_Cf_Deepgram_Aura_1 { + inputs: Ai_Cf_Deepgram_Aura_1_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Aura_1_Output; +} +interface Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Input { + /** + * Input text to translate. Can be a single string or a list of strings. + */ + text: string | string[]; + /** + * Target language to translate to + */ + target_language: "asm_Beng" | "awa_Deva" | "ben_Beng" | "bho_Deva" | "brx_Deva" | "doi_Deva" | "eng_Latn" | "gom_Deva" | "gon_Deva" | "guj_Gujr" | "hin_Deva" | "hne_Deva" | "kan_Knda" | "kas_Arab" | "kas_Deva" | "kha_Latn" | "lus_Latn" | "mag_Deva" | "mai_Deva" | "mal_Mlym" | "mar_Deva" | "mni_Beng" | "mni_Mtei" | "npi_Deva" | "ory_Orya" | "pan_Guru" | "san_Deva" | "sat_Olck" | "snd_Arab" | "snd_Deva" | "tam_Taml" | "tel_Telu" | "urd_Arab" | "unr_Deva"; +} +interface Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Output { + /** + * Translated texts + */ + translations: string[]; +} +declare abstract class Base_Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B { + inputs: Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Input; + postProcessedOutputs: Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Output; +} +type Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Input = Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Async_Batch; +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. 
+ */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. 
+ */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Async_Batch { + requests: (Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt_1 | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages_1)[]; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt_1 { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_2; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. 
+ */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_2 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages_1 { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. 
+ */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_3; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. 
+ */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_3 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Output = Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Chat_Completion_Response | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Text_Completion_Response | string | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_AsyncResponse; +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Chat_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "chat.completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index?: number; + /** + * The message generated by the model + */ + message?: { + /** + * Role of the message author + */ + role: string; + /** + * The content of the message + */ + content: string; + /** + * Internal reasoning content (if available) + */ + reasoning_content?: string; + /** + * Tool calls made by the assistant + */ + tool_calls?: { + /** + * Unique identifier for the tool call + */ + id: string; + /** + * Type of tool call + */ + type: "function"; + function: { + /** + * Name of the function to call + */ + name: string; + /** + * JSON string of arguments for the function + */ + arguments: string; + }; + }[]; + }; + /** + * Reason why the model stopped 
generating + */ + finish_reason?: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Text_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "text_completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index: number; + /** + * The generated text completion + */ + text: string; + /** + * Reason why the model stopped generating + */ + finish_reason: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It { + inputs: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Input; + postProcessedOutputs: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Output; +} +interface Ai_Cf_Pfnet_Plamo_Embedding_1B_Input { + /** + * Input text to embed. Can be a single string or a list of strings. + */ + text: string | string[]; +} +interface Ai_Cf_Pfnet_Plamo_Embedding_1B_Output { + /** + * Embedding vectors, where each vector is a list of floats. + */ + data: number[][]; + /** + * Shape of the embedding data as [number_of_embeddings, embedding_dimension]. + * + * @minItems 2 + * @maxItems 2 + */ + shape: [ + number, + number + ]; +} +declare abstract class Base_Ai_Cf_Pfnet_Plamo_Embedding_1B { + inputs: Ai_Cf_Pfnet_Plamo_Embedding_1B_Input; + postProcessedOutputs: Ai_Cf_Pfnet_Plamo_Embedding_1B_Output; +} +interface Ai_Cf_Deepgram_Flux_Input { + /** + * Encoding of the audio stream. Currently only supports raw signed little-endian 16-bit PCM. + */ + encoding: "linear16"; + /** + * Sample rate of the audio stream in Hz. + */ + sample_rate: string; + /** + * End-of-turn confidence required to fire an eager end-of-turn event. When set, enables EagerEndOfTurn and TurnResumed events. Valid Values 0.3 - 0.9. + */ + eager_eot_threshold?: string; + /** + * End-of-turn confidence required to finish a turn. Valid Values 0.5 - 0.9. + */ + eot_threshold?: string; + /** + * A turn will be finished when this much time has passed after speech, regardless of EOT confidence. + */ + eot_timeout_ms?: string; + /** + * Keyterm prompting can improve recognition of specialized terminology. Pass multiple keyterm query parameters to boost multiple keyterms. + */ + keyterm?: string; + /** + * Opts out requests from the Deepgram Model Improvement Program. Refer to Deepgram Docs for pricing impacts before setting this to true. 
https://dpgr.am/deepgram-mip + */ + mip_opt_out?: "true" | "false"; + /** + * Label your requests for the purpose of identification during usage reporting + */ + tag?: string; +} +/** + * Output will be returned as websocket messages. + */ +interface Ai_Cf_Deepgram_Flux_Output { + /** + * The unique identifier of the request (uuid) + */ + request_id?: string; + /** + * Starts at 0 and increments for each message the server sends to the client. + */ + sequence_id?: number; + /** + * The type of event being reported. + */ + event?: "Update" | "StartOfTurn" | "EagerEndOfTurn" | "TurnResumed" | "EndOfTurn"; + /** + * The index of the current turn + */ + turn_index?: number; + /** + * Start time in seconds of the audio range that was transcribed + */ + audio_window_start?: number; + /** + * End time in seconds of the audio range that was transcribed + */ + audio_window_end?: number; + /** + * Text that was said over the course of the current turn + */ + transcript?: string; + /** + * The words in the transcript + */ + words?: { + /** + * The individual punctuated, properly-cased word from the transcript + */ + word: string; + /** + * Confidence that this word was transcribed correctly + */ + confidence: number; + }[]; + /** + * Confidence that no more speech is coming in this turn + */ + end_of_turn_confidence?: number; +} +declare abstract class Base_Ai_Cf_Deepgram_Flux { + inputs: Ai_Cf_Deepgram_Flux_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Flux_Output; +} +interface Ai_Cf_Deepgram_Aura_2_En_Input { + /** + * Speaker used to produce the audio. 
+ */ + speaker?: "amalthea" | "andromeda" | "apollo" | "arcas" | "aries" | "asteria" | "athena" | "atlas" | "aurora" | "callista" | "cora" | "cordelia" | "delia" | "draco" | "electra" | "harmonia" | "helena" | "hera" | "hermes" | "hyperion" | "iris" | "janus" | "juno" | "jupiter" | "luna" | "mars" | "minerva" | "neptune" | "odysseus" | "ophelia" | "orion" | "orpheus" | "pandora" | "phoebe" | "pluto" | "saturn" | "thalia" | "theia" | "vesta" | "zeus"; + /** + * Encoding of the output audio. + */ + encoding?: "linear16" | "flac" | "mulaw" | "alaw" | "mp3" | "opus" | "aac"; + /** + * Container specifies the file format wrapper for the output audio. The available options depend on the encoding type.. + */ + container?: "none" | "wav" | "ogg"; + /** + * The text content to be converted to speech + */ + text: string; + /** + * Sample Rate specifies the sample rate for the output audio. Based on the encoding, different sample rates are supported. For some encodings, the sample rate is not configurable + */ + sample_rate?: number; + /** + * The bitrate of the audio in bits per second. Choose from predefined ranges or specific values based on the encoding type. + */ + bit_rate?: number; +} +/** + * The generated audio in MP3 format + */ +type Ai_Cf_Deepgram_Aura_2_En_Output = string; +declare abstract class Base_Ai_Cf_Deepgram_Aura_2_En { + inputs: Ai_Cf_Deepgram_Aura_2_En_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Aura_2_En_Output; +} +interface Ai_Cf_Deepgram_Aura_2_Es_Input { + /** + * Speaker used to produce the audio. + */ + speaker?: "sirio" | "nestor" | "carina" | "celeste" | "alvaro" | "diana" | "aquila" | "selena" | "estrella" | "javier"; + /** + * Encoding of the output audio. + */ + encoding?: "linear16" | "flac" | "mulaw" | "alaw" | "mp3" | "opus" | "aac"; + /** + * Container specifies the file format wrapper for the output audio. The available options depend on the encoding type.. 
+ */ + container?: "none" | "wav" | "ogg"; + /** + * The text content to be converted to speech + */ + text: string; + /** + * Sample Rate specifies the sample rate for the output audio. Based on the encoding, different sample rates are supported. For some encodings, the sample rate is not configurable + */ + sample_rate?: number; + /** + * The bitrate of the audio in bits per second. Choose from predefined ranges or specific values based on the encoding type. + */ + bit_rate?: number; +} +/** + * The generated audio in MP3 format + */ +type Ai_Cf_Deepgram_Aura_2_Es_Output = string; +declare abstract class Base_Ai_Cf_Deepgram_Aura_2_Es { + inputs: Ai_Cf_Deepgram_Aura_2_Es_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Aura_2_Es_Output; +} +interface AiModels { + "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification; + "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage; + "@cf/runwayml/stable-diffusion-v1-5-inpainting": BaseAiTextToImage; + "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage; + "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage; + "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage; + "@cf/myshell-ai/melotts": BaseAiTextToSpeech; + "@cf/google/embeddinggemma-300m": BaseAiTextEmbeddings; + "@cf/microsoft/resnet-50": BaseAiImageClassification; + "@cf/meta/llama-2-7b-chat-int8": BaseAiTextGeneration; + "@cf/mistral/mistral-7b-instruct-v0.1": BaseAiTextGeneration; + "@cf/meta/llama-2-7b-chat-fp16": BaseAiTextGeneration; + "@hf/thebloke/llama-2-13b-chat-awq": BaseAiTextGeneration; + "@hf/thebloke/mistral-7b-instruct-v0.1-awq": BaseAiTextGeneration; + "@hf/thebloke/zephyr-7b-beta-awq": BaseAiTextGeneration; + "@hf/thebloke/openhermes-2.5-mistral-7b-awq": BaseAiTextGeneration; + "@hf/thebloke/neural-chat-7b-v3-1-awq": BaseAiTextGeneration; + "@hf/thebloke/llamaguard-7b-awq": BaseAiTextGeneration; + "@hf/thebloke/deepseek-coder-6.7b-base-awq": BaseAiTextGeneration; + 
"@hf/thebloke/deepseek-coder-6.7b-instruct-awq": BaseAiTextGeneration; + "@cf/deepseek-ai/deepseek-math-7b-instruct": BaseAiTextGeneration; + "@cf/defog/sqlcoder-7b-2": BaseAiTextGeneration; + "@cf/openchat/openchat-3.5-0106": BaseAiTextGeneration; + "@cf/tiiuae/falcon-7b-instruct": BaseAiTextGeneration; + "@cf/thebloke/discolm-german-7b-v1-awq": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-0.5b-chat": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-7b-chat-awq": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-14b-chat-awq": BaseAiTextGeneration; + "@cf/tinyllama/tinyllama-1.1b-chat-v1.0": BaseAiTextGeneration; + "@cf/microsoft/phi-2": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-1.8b-chat": BaseAiTextGeneration; + "@cf/mistral/mistral-7b-instruct-v0.2-lora": BaseAiTextGeneration; + "@hf/nousresearch/hermes-2-pro-mistral-7b": BaseAiTextGeneration; + "@hf/nexusflow/starling-lm-7b-beta": BaseAiTextGeneration; + "@hf/google/gemma-7b-it": BaseAiTextGeneration; + "@cf/meta-llama/llama-2-7b-chat-hf-lora": BaseAiTextGeneration; + "@cf/google/gemma-2b-it-lora": BaseAiTextGeneration; + "@cf/google/gemma-7b-it-lora": BaseAiTextGeneration; + "@hf/mistral/mistral-7b-instruct-v0.2": BaseAiTextGeneration; + "@cf/meta/llama-3-8b-instruct": BaseAiTextGeneration; + "@cf/fblgit/una-cybertron-7b-v2-bf16": BaseAiTextGeneration; + "@cf/meta/llama-3-8b-instruct-awq": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct-fp8": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct-awq": BaseAiTextGeneration; + "@cf/meta/llama-3.2-3b-instruct": BaseAiTextGeneration; + "@cf/meta/llama-3.2-1b-instruct": BaseAiTextGeneration; + "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b": BaseAiTextGeneration; + "@cf/ibm-granite/granite-4.0-h-micro": BaseAiTextGeneration; + "@cf/facebook/bart-large-cnn": BaseAiSummarization; + "@cf/llava-hf/llava-1.5-7b-hf": BaseAiImageToText; + "@cf/baai/bge-base-en-v1.5": Base_Ai_Cf_Baai_Bge_Base_En_V1_5; + "@cf/openai/whisper": Base_Ai_Cf_Openai_Whisper; + "@cf/meta/m2m100-1.2b": 
Base_Ai_Cf_Meta_M2M100_1_2B; + "@cf/baai/bge-small-en-v1.5": Base_Ai_Cf_Baai_Bge_Small_En_V1_5; + "@cf/baai/bge-large-en-v1.5": Base_Ai_Cf_Baai_Bge_Large_En_V1_5; + "@cf/unum/uform-gen2-qwen-500m": Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M; + "@cf/openai/whisper-tiny-en": Base_Ai_Cf_Openai_Whisper_Tiny_En; + "@cf/openai/whisper-large-v3-turbo": Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo; + "@cf/baai/bge-m3": Base_Ai_Cf_Baai_Bge_M3; + "@cf/black-forest-labs/flux-1-schnell": Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell; + "@cf/meta/llama-3.2-11b-vision-instruct": Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct; + "@cf/meta/llama-3.3-70b-instruct-fp8-fast": Base_Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast; + "@cf/meta/llama-guard-3-8b": Base_Ai_Cf_Meta_Llama_Guard_3_8B; + "@cf/baai/bge-reranker-base": Base_Ai_Cf_Baai_Bge_Reranker_Base; + "@cf/qwen/qwen2.5-coder-32b-instruct": Base_Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct; + "@cf/qwen/qwq-32b": Base_Ai_Cf_Qwen_Qwq_32B; + "@cf/mistralai/mistral-small-3.1-24b-instruct": Base_Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct; + "@cf/google/gemma-3-12b-it": Base_Ai_Cf_Google_Gemma_3_12B_It; + "@cf/meta/llama-4-scout-17b-16e-instruct": Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct; + "@cf/qwen/qwen3-30b-a3b-fp8": Base_Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8; + "@cf/deepgram/nova-3": Base_Ai_Cf_Deepgram_Nova_3; + "@cf/qwen/qwen3-embedding-0.6b": Base_Ai_Cf_Qwen_Qwen3_Embedding_0_6B; + "@cf/pipecat-ai/smart-turn-v2": Base_Ai_Cf_Pipecat_Ai_Smart_Turn_V2; + "@cf/openai/gpt-oss-120b": Base_Ai_Cf_Openai_Gpt_Oss_120B; + "@cf/openai/gpt-oss-20b": Base_Ai_Cf_Openai_Gpt_Oss_20B; + "@cf/leonardo/phoenix-1.0": Base_Ai_Cf_Leonardo_Phoenix_1_0; + "@cf/leonardo/lucid-origin": Base_Ai_Cf_Leonardo_Lucid_Origin; + "@cf/deepgram/aura-1": Base_Ai_Cf_Deepgram_Aura_1; + "@cf/ai4bharat/indictrans2-en-indic-1B": Base_Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B; + "@cf/aisingapore/gemma-sea-lion-v4-27b-it": Base_Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It; + 
"@cf/pfnet/plamo-embedding-1b": Base_Ai_Cf_Pfnet_Plamo_Embedding_1B; + "@cf/deepgram/flux": Base_Ai_Cf_Deepgram_Flux; + "@cf/deepgram/aura-2-en": Base_Ai_Cf_Deepgram_Aura_2_En; + "@cf/deepgram/aura-2-es": Base_Ai_Cf_Deepgram_Aura_2_Es; +} +type AiOptions = { + /** + * Send requests as an asynchronous batch job, only works for supported models + * https://developers.cloudflare.com/workers-ai/features/batch-api + */ + queueRequest?: boolean; + /** + * Establish websocket connections, only works for supported models + */ + websocket?: boolean; + /** + * Tag your requests to group and view them in Cloudflare dashboard. + * + * Rules: + * Tags must only contain letters, numbers, and the symbols: : - . / @ + * Each tag can have maximum 50 characters. + * Maximum 5 tags are allowed each request. + * Duplicate tags will removed. + */ + tags?: string[]; + gateway?: GatewayOptions; + returnRawResponse?: boolean; + prefix?: string; + extraHeaders?: object; +}; +type AiModelsSearchParams = { + author?: string; + hide_experimental?: boolean; + page?: number; + per_page?: number; + search?: string; + source?: number; + task?: string; +}; +type AiModelsSearchObject = { + id: string; + source: number; + name: string; + description: string; + task: { + id: string; + name: string; + description: string; + }; + tags: string[]; + properties: { + property_id: string; + value: string; + }[]; +}; +interface InferenceUpstreamError extends Error { +} +interface AiInternalError extends Error { +} +type AiModelListType = Record; +declare abstract class Ai { + aiGatewayLogId: string | null; + gateway(gatewayId: string): AiGateway; + autorag(autoragId: string): AutoRAG; + run(model: Name, inputs: InputOptions, options?: Options): Promise; + models(params?: AiModelsSearchParams): Promise; + toMarkdown(): ToMarkdownService; + toMarkdown(files: MarkdownDocument[], options?: ConversionRequestOptions): Promise; + toMarkdown(files: MarkdownDocument, options?: ConversionRequestOptions): Promise; +} 
+type GatewayRetries = { + maxAttempts?: 1 | 2 | 3 | 4 | 5; + retryDelayMs?: number; + backoff?: 'constant' | 'linear' | 'exponential'; +}; +type GatewayOptions = { + id: string; + cacheKey?: string; + cacheTtl?: number; + skipCache?: boolean; + metadata?: Record; + collectLog?: boolean; + eventId?: string; + requestTimeoutMs?: number; + retries?: GatewayRetries; +}; +type UniversalGatewayOptions = Exclude & { + /** + ** @deprecated + */ + id?: string; +}; +type AiGatewayPatchLog = { + score?: number | null; + feedback?: -1 | 1 | null; + metadata?: Record | null; +}; +type AiGatewayLog = { + id: string; + provider: string; + model: string; + model_type?: string; + path: string; + duration: number; + request_type?: string; + request_content_type?: string; + status_code: number; + response_content_type?: string; + success: boolean; + cached: boolean; + tokens_in?: number; + tokens_out?: number; + metadata?: Record; + step?: number; + cost?: number; + custom_cost?: boolean; + request_size: number; + request_head?: string; + request_head_complete: boolean; + response_size: number; + response_head?: string; + response_head_complete: boolean; + created_at: Date; +}; +type AIGatewayProviders = 'workers-ai' | 'anthropic' | 'aws-bedrock' | 'azure-openai' | 'google-vertex-ai' | 'huggingface' | 'openai' | 'perplexity-ai' | 'replicate' | 'groq' | 'cohere' | 'google-ai-studio' | 'mistral' | 'grok' | 'openrouter' | 'deepseek' | 'cerebras' | 'cartesia' | 'elevenlabs' | 'adobe-firefly'; +type AIGatewayHeaders = { + 'cf-aig-metadata': Record | string; + 'cf-aig-custom-cost': { + per_token_in?: number; + per_token_out?: number; + } | { + total_cost?: number; + } | string; + 'cf-aig-cache-ttl': number | string; + 'cf-aig-skip-cache': boolean | string; + 'cf-aig-cache-key': string; + 'cf-aig-event-id': string; + 'cf-aig-request-timeout': number | string; + 'cf-aig-max-attempts': number | string; + 'cf-aig-retry-delay': number | string; + 'cf-aig-backoff': string; + 
'cf-aig-collect-log': boolean | string; + Authorization: string; + 'Content-Type': string; + [key: string]: string | number | boolean | object; +}; +type AIGatewayUniversalRequest = { + provider: AIGatewayProviders | string; // eslint-disable-line + endpoint: string; + headers: Partial; + query: unknown; +}; +interface AiGatewayInternalError extends Error { +} +interface AiGatewayLogNotFound extends Error { +} +declare abstract class AiGateway { + patchLog(logId: string, data: AiGatewayPatchLog): Promise; + getLog(logId: string): Promise; + run(data: AIGatewayUniversalRequest | AIGatewayUniversalRequest[], options?: { + gateway?: UniversalGatewayOptions; + extraHeaders?: object; + }): Promise; + getUrl(provider?: AIGatewayProviders | string): Promise; // eslint-disable-line +} +interface AutoRAGInternalError extends Error { +} +interface AutoRAGNotFoundError extends Error { +} +interface AutoRAGUnauthorizedError extends Error { +} +interface AutoRAGNameNotSetError extends Error { +} +type ComparisonFilter = { + key: string; + type: 'eq' | 'ne' | 'gt' | 'gte' | 'lt' | 'lte'; + value: string | number | boolean; +}; +type CompoundFilter = { + type: 'and' | 'or'; + filters: ComparisonFilter[]; +}; +type AutoRagSearchRequest = { + query: string; + filters?: CompoundFilter | ComparisonFilter; + max_num_results?: number; + ranking_options?: { + ranker?: string; + score_threshold?: number; + }; + reranking?: { + enabled?: boolean; + model?: string; + }; + rewrite_query?: boolean; +}; +type AutoRagAiSearchRequest = AutoRagSearchRequest & { + stream?: boolean; + system_prompt?: string; +}; +type AutoRagAiSearchRequestStreaming = Omit & { + stream: true; +}; +type AutoRagSearchResponse = { + object: 'vector_store.search_results.page'; + search_query: string; + data: { + file_id: string; + filename: string; + score: number; + attributes: Record; + content: { + type: 'text'; + text: string; + }[]; + }[]; + has_more: boolean; + next_page: string | null; +}; +type 
AutoRagListResponse = { + id: string; + enable: boolean; + type: string; + source: string; + vectorize_name: string; + paused: boolean; + status: string; +}[]; +type AutoRagAiSearchResponse = AutoRagSearchResponse & { + response: string; +}; +declare abstract class AutoRAG { + list(): Promise; + search(params: AutoRagSearchRequest): Promise; + aiSearch(params: AutoRagAiSearchRequestStreaming): Promise; + aiSearch(params: AutoRagAiSearchRequest): Promise; + aiSearch(params: AutoRagAiSearchRequest): Promise; +} +interface BasicImageTransformations { + /** + * Maximum width in image pixels. The value must be an integer. + */ + width?: number; + /** + * Maximum height in image pixels. The value must be an integer. + */ + height?: number; + /** + * Resizing mode as a string. It affects interpretation of width and height + * options: + * - scale-down: Similar to contain, but the image is never enlarged. If + * the image is larger than given width or height, it will be resized. + * Otherwise its original size will be kept. + * - contain: Resizes to maximum size that fits within the given width and + * height. If only a single dimension is given (e.g. only width), the + * image will be shrunk or enlarged to exactly match that dimension. + * Aspect ratio is always preserved. + * - cover: Resizes (shrinks or enlarges) to fill the entire area of width + * and height. If the image has an aspect ratio different from the ratio + * of width and height, it will be cropped to fit. + * - crop: The image will be shrunk and cropped to fit within the area + * specified by width and height. The image will not be enlarged. For images + * smaller than the given dimensions it's the same as scale-down. For + * images larger than the given dimensions, it's the same as cover. + * See also trim. + * - pad: Resizes to the maximum size that fits within the given width and + * height, and then fills the remaining area with a background color + * (white by default). 
Use of this mode is not recommended, as the same + * effect can be more efficiently achieved with the contain mode and the + * CSS object-fit: contain property. + * - squeeze: Stretches and deforms to the width and height given, even if it + * breaks aspect ratio + */ + fit?: "scale-down" | "contain" | "cover" | "crop" | "pad" | "squeeze"; + /** + * Image segmentation using artificial intelligence models. Sets pixels not + * within selected segment area to transparent e.g "foreground" sets every + * background pixel as transparent. + */ + segment?: "foreground"; + /** + * When cropping with fit: "cover", this defines the side or point that should + * be left uncropped. The value is either a string + * "left", "right", "top", "bottom", "auto", or "center" (the default), + * or an object {x, y} containing focal point coordinates in the original + * image expressed as fractions ranging from 0.0 (top or left) to 1.0 + * (bottom or right), 0.5 being the center. {fit: "cover", gravity: "top"} will + * crop bottom or left and right sides as necessary, but won’t crop anything + * from the top. {fit: "cover", gravity: {x:0.5, y:0.2}} will crop each side to + * preserve as much as possible around a point at 20% of the height of the + * source image. + */ + gravity?: 'face' | 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | BasicImageTransformationsGravityCoordinates; + /** + * Background color to add underneath the image. Applies only to images with + * transparency (such as PNG). Accepts any CSS color (#RRGGBB, rgba(…), + * hsl(…), etc.) + */ + background?: string; + /** + * Number of degrees (90, 180, 270) to rotate the image by. width and height + * options refer to axes after rotation. 
+ */ + rotate?: 0 | 90 | 180 | 270 | 360; +} +interface BasicImageTransformationsGravityCoordinates { + x?: number; + y?: number; + mode?: 'remainder' | 'box-center'; +} +/** + * In addition to the properties you can set in the RequestInit dict + * that you pass as an argument to the Request constructor, you can + * set certain properties of a `cf` object to control how Cloudflare + * features are applied to that new Request. + * + * Note: Currently, these properties cannot be tested in the + * playground. + */ +interface RequestInitCfProperties extends Record { + cacheEverything?: boolean; + /** + * A request's cache key is what determines if two requests are + * "the same" for caching purposes. If a request has the same cache key + * as some previous request, then we can serve the same cached response for + * both. (e.g. 'some-key') + * + * Only available for Enterprise customers. + */ + cacheKey?: string; + /** + * This allows you to append additional Cache-Tag response headers + * to the origin response without modifications to the origin server. + * This will allow for greater control over the Purge by Cache Tag feature + * utilizing changes only in the Workers process. + * + * Only available for Enterprise customers. + */ + cacheTags?: string[]; + /** + * Force response to be cached for a given number of seconds. (e.g. 300) + */ + cacheTtl?: number; + /** + * Force response to be cached for a given number of seconds based on the Origin status code. + * (e.g. { '200-299': 86400, '404': 1, '500-599': 0 }) + */ + cacheTtlByStatus?: Record; + scrapeShield?: boolean; + apps?: boolean; + image?: RequestInitCfPropertiesImage; + minify?: RequestInitCfPropertiesImageMinify; + mirage?: boolean; + polish?: "lossy" | "lossless" | "off"; + r2?: RequestInitCfPropertiesR2; + /** + * Redirects the request to an alternate origin server. You can use this, + * for example, to implement load balancing across several origins. 
+ * (e.g.us-east.example.com) + * + * Note - For security reasons, the hostname set in resolveOverride must + * be proxied on the same Cloudflare zone of the incoming request. + * Otherwise, the setting is ignored. CNAME hosts are allowed, so to + * resolve to a host under a different domain or a DNS only domain first + * declare a CNAME record within your own zone’s DNS mapping to the + * external hostname, set proxy on Cloudflare, then set resolveOverride + * to point to that CNAME record. + */ + resolveOverride?: string; +} +interface RequestInitCfPropertiesImageDraw extends BasicImageTransformations { + /** + * Absolute URL of the image file to use for the drawing. It can be any of + * the supported file formats. For drawing of watermarks or non-rectangular + * overlays we recommend using PNG or WebP images. + */ + url: string; + /** + * Floating-point number between 0 (transparent) and 1 (opaque). + * For example, opacity: 0.5 makes overlay semitransparent. + */ + opacity?: number; + /** + * - If set to true, the overlay image will be tiled to cover the entire + * area. This is useful for stock-photo-like watermarks. + * - If set to "x", the overlay image will be tiled horizontally only + * (form a line). + * - If set to "y", the overlay image will be tiled vertically only + * (form a line). + */ + repeat?: true | "x" | "y"; + /** + * Position of the overlay image relative to a given edge. Each property is + * an offset in pixels. 0 aligns exactly to the edge. For example, left: 10 + * positions left side of the overlay 10 pixels from the left edge of the + * image it's drawn over. bottom: 0 aligns bottom of the overlay with bottom + * of the background image. + * + * Setting both left & right, or both top & bottom is an error. + * + * If no position is specified, the image will be centered. 
+ */ + top?: number; + left?: number; + bottom?: number; + right?: number; +} +interface RequestInitCfPropertiesImage extends BasicImageTransformations { + /** + * Device Pixel Ratio. Default 1. Multiplier for width/height that makes it + * easier to specify higher-DPI sizes in . + */ + dpr?: number; + /** + * Allows you to trim your image. Takes dpr into account and is performed before + * resizing or rotation. + * + * It can be used as: + * - left, top, right, bottom - it will specify the number of pixels to cut + * off each side + * - width, height - the width/height you'd like to end up with - can be used + * in combination with the properties above + * - border - this will automatically trim the surroundings of an image based on + * it's color. It consists of three properties: + * - color: rgb or hex representation of the color you wish to trim (todo: verify the rgba bit) + * - tolerance: difference from color to treat as color + * - keep: the number of pixels of border to keep + */ + trim?: "border" | { + top?: number; + bottom?: number; + left?: number; + right?: number; + width?: number; + height?: number; + border?: boolean | { + color?: string; + tolerance?: number; + keep?: number; + }; + }; + /** + * Quality setting from 1-100 (useful values are in 60-90 range). Lower values + * make images look worse, but load faster. The default is 85. It applies only + * to JPEG and WebP images. It doesn’t have any effect on PNG. + */ + quality?: number | "low" | "medium-low" | "medium-high" | "high"; + /** + * Output format to generate. It can be: + * - avif: generate images in AVIF format. + * - webp: generate images in Google WebP format. Set quality to 100 to get + * the WebP-lossless format. + * - json: instead of generating an image, outputs information about the + * image, in JSON format. The JSON object will contain image size + * (before and after resizing), source image’s MIME type, file size, etc. + * - jpeg: generate images in JPEG format. 
+ * - png: generate images in PNG format. + */ + format?: "avif" | "webp" | "json" | "jpeg" | "png" | "baseline-jpeg" | "png-force" | "svg"; + /** + * Whether to preserve animation frames from input files. Default is true. + * Setting it to false reduces animations to still images. This setting is + * recommended when enlarging images or processing arbitrary user content, + * because large GIF animations can weigh tens or even hundreds of megabytes. + * It is also useful to set anim:false when using format:"json" to get the + * response quicker without the number of frames. + */ + anim?: boolean; + /** + * What EXIF data should be preserved in the output image. Note that EXIF + * rotation and embedded color profiles are always applied ("baked in" into + * the image), and aren't affected by this option. Note that if the Polish + * feature is enabled, all metadata may have been removed already and this + * option may have no effect. + * - keep: Preserve most of EXIF metadata, including GPS location if there's + * any. + * - copyright: Only keep the copyright tag, and discard everything else. + * This is the default behavior for JPEG files. + * - none: Discard all invisible EXIF metadata. Currently WebP and PNG + * output formats always discard metadata. + */ + metadata?: "keep" | "copyright" | "none"; + /** + * Strength of sharpening filter to apply to the image. Floating-point + * number between 0 (no sharpening, default) and 10 (maximum). 1.0 is a + * recommended value for downscaled images. + */ + sharpen?: number; + /** + * Radius of a blur filter (approximate gaussian). Maximum supported radius + * is 250. + */ + blur?: number; + /** + * Overlays are drawn in the order they appear in the array (last array + * entry is the topmost layer). + */ + draw?: RequestInitCfPropertiesImageDraw[]; + /** + * Fetching image from authenticated origin. Setting this property will + * pass authentication headers (Authorization, Cookie, etc.) through to + * the origin. 
+ */ + "origin-auth"?: "share-publicly"; + /** + * Adds a border around the image. The border is added after resizing. Border + * width takes dpr into account, and can be specified either using a single + * width property, or individually for each side. + */ + border?: { + color: string; + width: number; + } | { + color: string; + top: number; + right: number; + bottom: number; + left: number; + }; + /** + * Increase brightness by a factor. A value of 1.0 equals no change, a value + * of 0.5 equals half brightness, and a value of 2.0 equals twice as bright. + * 0 is ignored. + */ + brightness?: number; + /** + * Increase contrast by a factor. A value of 1.0 equals no change, a value of + * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is + * ignored. + */ + contrast?: number; + /** + * Increase exposure by a factor. A value of 1.0 equals no change, a value of + * 0.5 darkens the image, and a value of 2.0 lightens the image. 0 is ignored. + */ + gamma?: number; + /** + * Increase contrast by a factor. A value of 1.0 equals no change, a value of + * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is + * ignored. + */ + saturation?: number; + /** + * Flips the images horizontally, vertically, or both. Flipping is applied before + * rotation, so if you apply flip=h,rotate=90 then the image will be flipped + * horizontally, then rotated by 90 degrees. + */ + flip?: 'h' | 'v' | 'hv'; + /** + * Slightly reduces latency on a cache miss by selecting a + * quickest-to-compress file format, at a cost of increased file size and + * lower image quality. It will usually override the format option and choose + * JPEG over WebP or AVIF. We do not recommend using this option, except in + * unusual circumstances like resizing uncacheable dynamically-generated + * images. 
+ */ + compression?: "fast"; +} +interface RequestInitCfPropertiesImageMinify { + javascript?: boolean; + css?: boolean; + html?: boolean; +} +interface RequestInitCfPropertiesR2 { + /** + * Colo id of bucket that an object is stored in + */ + bucketColoId?: number; +} +/** + * Request metadata provided by Cloudflare's edge. + */ +type IncomingRequestCfProperties = IncomingRequestCfPropertiesBase & IncomingRequestCfPropertiesBotManagementEnterprise & IncomingRequestCfPropertiesCloudflareForSaaSEnterprise & IncomingRequestCfPropertiesGeographicInformation & IncomingRequestCfPropertiesCloudflareAccessOrApiShield; +interface IncomingRequestCfPropertiesBase extends Record { + /** + * [ASN](https://www.iana.org/assignments/as-numbers/as-numbers.xhtml) of the incoming request. + * + * @example 395747 + */ + asn?: number; + /** + * The organization which owns the ASN of the incoming request. + * + * @example "Google Cloud" + */ + asOrganization?: string; + /** + * The original value of the `Accept-Encoding` header if Cloudflare modified it. + * + * @example "gzip, deflate, br" + */ + clientAcceptEncoding?: string; + /** + * The number of milliseconds it took for the request to reach your worker. + * + * @example 22 + */ + clientTcpRtt?: number; + /** + * The three-letter [IATA](https://en.wikipedia.org/wiki/IATA_airport_code) + * airport code of the data center that the request hit. + * + * @example "DFW" + */ + colo: string; + /** + * Represents the upstream's response to a + * [TCP `keepalive` message](https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html) + * from cloudflare. + * + * For workers with no upstream, this will always be `1`. + * + * @example 3 + */ + edgeRequestKeepAliveStatus: IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus; + /** + * The HTTP Protocol the request used. + * + * @example "HTTP/2" + */ + httpProtocol: string; + /** + * The browser-requested prioritization information in the request object. 
+ * + * If no information was set, defaults to the empty string `""` + * + * @example "weight=192;exclusive=0;group=3;group-weight=127" + * @default "" + */ + requestPriority: string; + /** + * The TLS version of the connection to Cloudflare. + * In requests served over plaintext (without TLS), this property is the empty string `""`. + * + * @example "TLSv1.3" + */ + tlsVersion: string; + /** + * The cipher for the connection to Cloudflare. + * In requests served over plaintext (without TLS), this property is the empty string `""`. + * + * @example "AEAD-AES128-GCM-SHA256" + */ + tlsCipher: string; + /** + * Metadata containing the [`HELLO`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2) and [`FINISHED`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9) messages from this request's TLS handshake. + * + * If the incoming request was served over plaintext (without TLS) this field is undefined. + */ + tlsExportedAuthenticator?: IncomingRequestCfPropertiesExportedAuthenticatorMetadata; +} +interface IncomingRequestCfPropertiesBotManagementBase { + /** + * Cloudflare’s [level of certainty](https://developers.cloudflare.com/bots/concepts/bot-score/) that a request comes from a bot, + * represented as an integer percentage between `1` (almost certainly a bot) and `99` (almost certainly human). + * + * @example 54 + */ + score: number; + /** + * A boolean value that is true if the request comes from a good bot, like Google or Bing. + * Most customers choose to allow this traffic. For more details, see [Traffic from known bots](https://developers.cloudflare.com/firewall/known-issues-and-faq/#how-does-firewall-rules-handle-traffic-from-known-bots). + */ + verifiedBot: boolean; + /** + * A boolean value that is true if the request originates from a + * Cloudflare-verified proxy service. 
+ */ + corporateProxy: boolean; + /** + * A boolean value that's true if the request matches [file extensions](https://developers.cloudflare.com/bots/reference/static-resources/) for many types of static resources. + */ + staticResource: boolean; + /** + * List of IDs that correlate to the Bot Management heuristic detections made on a request (you can have multiple heuristic detections on the same request). + */ + detectionIds: number[]; +} +interface IncomingRequestCfPropertiesBotManagement { + /** + * Results of Cloudflare's Bot Management analysis + */ + botManagement: IncomingRequestCfPropertiesBotManagementBase; + /** + * Duplicate of `botManagement.score`. + * + * @deprecated + */ + clientTrustScore: number; +} +interface IncomingRequestCfPropertiesBotManagementEnterprise extends IncomingRequestCfPropertiesBotManagement { + /** + * Results of Cloudflare's Bot Management analysis + */ + botManagement: IncomingRequestCfPropertiesBotManagementBase & { + /** + * A [JA3 Fingerprint](https://developers.cloudflare.com/bots/concepts/ja3-fingerprint/) to help profile specific SSL/TLS clients + * across different destination IPs, Ports, and X509 certificates. + */ + ja3Hash: string; + }; +} +interface IncomingRequestCfPropertiesCloudflareForSaaSEnterprise { + /** + * Custom metadata set per-host in [Cloudflare for SaaS](https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/). + * + * This field is only present if you have Cloudflare for SaaS enabled on your account + * and you have followed the [required steps to enable it]((https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/domain-support/custom-metadata/)). + */ + hostMetadata?: HostMetadata; +} +interface IncomingRequestCfPropertiesCloudflareAccessOrApiShield { + /** + * Information about the client certificate presented to Cloudflare. 
+ * + * This is populated when the incoming request is served over TLS using + * either Cloudflare Access or API Shield (mTLS) + * and the presented SSL certificate has a valid + * [Certificate Serial Number](https://ldapwiki.com/wiki/Certificate%20Serial%20Number) + * (i.e., not `null` or `""`). + * + * Otherwise, a set of placeholder values are used. + * + * The property `certPresented` will be set to `"1"` when + * the object is populated (i.e. the above conditions were met). + */ + tlsClientAuth: IncomingRequestCfPropertiesTLSClientAuth | IncomingRequestCfPropertiesTLSClientAuthPlaceholder; +} +/** + * Metadata about the request's TLS handshake + */ +interface IncomingRequestCfPropertiesExportedAuthenticatorMetadata { + /** + * The client's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal + * + * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" + */ + clientHandshake: string; + /** + * The server's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal + * + * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" + */ + serverHandshake: string; + /** + * The client's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded in hexadecimal + * + * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" + */ + clientFinished: string; + /** + * The server's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded in hexadecimal + * + * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" + */ + serverFinished: string; +} +/** + * Geographic data about the request's origin. + */ +interface IncomingRequestCfPropertiesGeographicInformation { + /** + * The [ISO 3166-1 Alpha 2](https://www.iso.org/iso-3166-country-codes.html) country code the request originated from. 
+ * + * If your worker is [configured to accept TOR connections](https://support.cloudflare.com/hc/en-us/articles/203306930-Understanding-Cloudflare-Tor-support-and-Onion-Routing), this may also be `"T1"`, indicating a request that originated over TOR. + * + * If Cloudflare is unable to determine where the request originated this property is omitted. + * + * The country code `"T1"` is used for requests originating on TOR. + * + * @example "GB" + */ + country?: Iso3166Alpha2Code | "T1"; + /** + * If present, this property indicates that the request originated in the EU + * + * @example "1" + */ + isEUCountry?: "1"; + /** + * A two-letter code indicating the continent the request originated from. + * + * @example "AN" + */ + continent?: ContinentCode; + /** + * The city the request originated from + * + * @example "Austin" + */ + city?: string; + /** + * Postal code of the incoming request + * + * @example "78701" + */ + postalCode?: string; + /** + * Latitude of the incoming request + * + * @example "30.27130" + */ + latitude?: string; + /** + * Longitude of the incoming request + * + * @example "-97.74260" + */ + longitude?: string; + /** + * Timezone of the incoming request + * + * @example "America/Chicago" + */ + timezone?: string; + /** + * If known, the ISO 3166-2 name for the first level region associated with + * the IP address of the incoming request + * + * @example "Texas" + */ + region?: string; + /** + * If known, the ISO 3166-2 code for the first-level region associated with + * the IP address of the incoming request + * + * @example "TX" + */ + regionCode?: string; + /** + * Metro code (DMA) of the incoming request + * + * @example "635" + */ + metroCode?: string; +} +/** Data about the incoming request's TLS certificate */ +interface IncomingRequestCfPropertiesTLSClientAuth { + /** Always `"1"`, indicating that the certificate was presented */ + certPresented: "1"; + /** + * Result of certificate verification. 
+ * + * @example "FAILED:self signed certificate" + */ + certVerified: Exclude; + /** The presented certificate's revokation status. + * + * - A value of `"1"` indicates the certificate has been revoked + * - A value of `"0"` indicates the certificate has not been revoked + */ + certRevoked: "1" | "0"; + /** + * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) + * + * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certIssuerDN: string; + /** + * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) + * + * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certSubjectDN: string; + /** + * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) + * + * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certIssuerDNRFC2253: string; + /** + * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) + * + * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certSubjectDNRFC2253: string; + /** The certificate issuer's distinguished name (legacy policies) */ + certIssuerDNLegacy: string; + /** The certificate subject's distinguished name (legacy policies) */ + certSubjectDNLegacy: string; + /** + * The certificate's serial number + * + * @example "00936EACBE07F201DF" + */ + certSerial: string; + /** + * The certificate issuer's serial number + * + * @example "2489002934BDFEA34" + */ + certIssuerSerial: string; + /** + * The certificate's Subject Key Identifier + * + * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" + */ + certSKI: string; + /** + * The 
certificate issuer's Subject Key Identifier + * + * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" + */ + certIssuerSKI: string; + /** + * The certificate's SHA-1 fingerprint + * + * @example "6b9109f323999e52259cda7373ff0b4d26bd232e" + */ + certFingerprintSHA1: string; + /** + * The certificate's SHA-256 fingerprint + * + * @example "acf77cf37b4156a2708e34c4eb755f9b5dbbe5ebb55adfec8f11493438d19e6ad3f157f81fa3b98278453d5652b0c1fd1d71e5695ae4d709803a4d3f39de9dea" + */ + certFingerprintSHA256: string; + /** + * The effective starting date of the certificate + * + * @example "Dec 22 19:39:00 2018 GMT" + */ + certNotBefore: string; + /** + * The effective expiration date of the certificate + * + * @example "Dec 22 19:39:00 2018 GMT" + */ + certNotAfter: string; +} +/** Placeholder values for TLS Client Authorization */ +interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder { + certPresented: "0"; + certVerified: "NONE"; + certRevoked: "0"; + certIssuerDN: ""; + certSubjectDN: ""; + certIssuerDNRFC2253: ""; + certSubjectDNRFC2253: ""; + certIssuerDNLegacy: ""; + certSubjectDNLegacy: ""; + certSerial: ""; + certIssuerSerial: ""; + certSKI: ""; + certIssuerSKI: ""; + certFingerprintSHA1: ""; + certFingerprintSHA256: ""; + certNotBefore: ""; + certNotAfter: ""; +} +/** Possible outcomes of TLS verification */ +declare type CertVerificationStatus = +/** Authentication succeeded */ +"SUCCESS" +/** No certificate was presented */ + | "NONE" +/** Failed because the certificate was self-signed */ + | "FAILED:self signed certificate" +/** Failed because the certificate failed a trust chain check */ + | "FAILED:unable to verify the first certificate" +/** Failed because the certificate not yet valid */ + | "FAILED:certificate is not yet valid" +/** Failed because the certificate is expired */ + | "FAILED:certificate has expired" +/** Failed for another unspecified reason */ + | "FAILED"; +/** + * An upstream endpoint's response to a TCP `keepalive` 
message from Cloudflare. + */ +declare type IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus = 0 /** Unknown */ | 1 /** no keepalives (not found) */ | 2 /** no connection re-use, opening keepalive connection failed */ | 3 /** no connection re-use, keepalive accepted and saved */ | 4 /** connection re-use, refused by the origin server (`TCP FIN`) */ | 5; /** connection re-use, accepted by the origin server */ +/** ISO 3166-1 Alpha-2 codes */ +declare type Iso3166Alpha2Code = "AD" | "AE" | "AF" | "AG" | "AI" | "AL" | "AM" | "AO" | "AQ" | "AR" | "AS" | "AT" | "AU" | "AW" | "AX" | "AZ" | "BA" | "BB" | "BD" | "BE" | "BF" | "BG" | "BH" | "BI" | "BJ" | "BL" | "BM" | "BN" | "BO" | "BQ" | "BR" | "BS" | "BT" | "BV" | "BW" | "BY" | "BZ" | "CA" | "CC" | "CD" | "CF" | "CG" | "CH" | "CI" | "CK" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CV" | "CW" | "CX" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "DO" | "DZ" | "EC" | "EE" | "EG" | "EH" | "ER" | "ES" | "ET" | "FI" | "FJ" | "FK" | "FM" | "FO" | "FR" | "GA" | "GB" | "GD" | "GE" | "GF" | "GG" | "GH" | "GI" | "GL" | "GM" | "GN" | "GP" | "GQ" | "GR" | "GS" | "GT" | "GU" | "GW" | "GY" | "HK" | "HM" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IM" | "IN" | "IO" | "IQ" | "IR" | "IS" | "IT" | "JE" | "JM" | "JO" | "JP" | "KE" | "KG" | "KH" | "KI" | "KM" | "KN" | "KP" | "KR" | "KW" | "KY" | "KZ" | "LA" | "LB" | "LC" | "LI" | "LK" | "LR" | "LS" | "LT" | "LU" | "LV" | "LY" | "MA" | "MC" | "MD" | "ME" | "MF" | "MG" | "MH" | "MK" | "ML" | "MM" | "MN" | "MO" | "MP" | "MQ" | "MR" | "MS" | "MT" | "MU" | "MV" | "MW" | "MX" | "MY" | "MZ" | "NA" | "NC" | "NE" | "NF" | "NG" | "NI" | "NL" | "NO" | "NP" | "NR" | "NU" | "NZ" | "OM" | "PA" | "PE" | "PF" | "PG" | "PH" | "PK" | "PL" | "PM" | "PN" | "PR" | "PS" | "PT" | "PW" | "PY" | "QA" | "RE" | "RO" | "RS" | "RU" | "RW" | "SA" | "SB" | "SC" | "SD" | "SE" | "SG" | "SH" | "SI" | "SJ" | "SK" | "SL" | "SM" | "SN" | "SO" | "SR" | "SS" | "ST" | "SV" | "SX" | "SY" | "SZ" | "TC" | "TD" | "TF" | 
"TG" | "TH" | "TJ" | "TK" | "TL" | "TM" | "TN" | "TO" | "TR" | "TT" | "TV" | "TW" | "TZ" | "UA" | "UG" | "UM" | "US" | "UY" | "UZ" | "VA" | "VC" | "VE" | "VG" | "VI" | "VN" | "VU" | "WF" | "WS" | "YE" | "YT" | "ZA" | "ZM" | "ZW"; +/** The 2-letter continent codes Cloudflare uses */ +declare type ContinentCode = "AF" | "AN" | "AS" | "EU" | "NA" | "OC" | "SA"; +type CfProperties = IncomingRequestCfProperties | RequestInitCfProperties; +interface D1Meta { + duration: number; + size_after: number; + rows_read: number; + rows_written: number; + last_row_id: number; + changed_db: boolean; + changes: number; + /** + * The region of the database instance that executed the query. + */ + served_by_region?: string; + /** + * The three letters airport code of the colo that executed the query. + */ + served_by_colo?: string; + /** + * True if-and-only-if the database instance that executed the query was the primary. + */ + served_by_primary?: boolean; + timings?: { + /** + * The duration of the SQL query execution by the database instance. It doesn't include any network time. + */ + sql_duration_ms: number; + }; + /** + * Number of total attempts to execute the query, due to automatic retries. + * Note: All other fields in the response like `timings` only apply to the last attempt. + */ + total_attempts?: number; +} +interface D1Response { + success: true; + meta: D1Meta & Record; + error?: never; +} +type D1Result = D1Response & { + results: T[]; +}; +interface D1ExecResult { + count: number; + duration: number; +} +type D1SessionConstraint = +// Indicates that the first query should go to the primary, and the rest queries +// using the same D1DatabaseSession will go to any replica that is consistent with +// the bookmark maintained by the session (returned by the first query). 
+'first-primary' +// Indicates that the first query can go anywhere (primary or replica), and the rest queries +// using the same D1DatabaseSession will go to any replica that is consistent with +// the bookmark maintained by the session (returned by the first query). + | 'first-unconstrained'; +type D1SessionBookmark = string; +declare abstract class D1Database { + prepare(query: string): D1PreparedStatement; + batch(statements: D1PreparedStatement[]): Promise[]>; + exec(query: string): Promise; + /** + * Creates a new D1 Session anchored at the given constraint or the bookmark. + * All queries executed using the created session will have sequential consistency, + * meaning that all writes done through the session will be visible in subsequent reads. + * + * @param constraintOrBookmark Either the session constraint or the explicit bookmark to anchor the created session. + */ + withSession(constraintOrBookmark?: D1SessionBookmark | D1SessionConstraint): D1DatabaseSession; + /** + * @deprecated dump() will be removed soon, only applies to deprecated alpha v1 databases. + */ + dump(): Promise; +} +declare abstract class D1DatabaseSession { + prepare(query: string): D1PreparedStatement; + batch(statements: D1PreparedStatement[]): Promise[]>; + /** + * @returns The latest session bookmark across all executed queries on the session. + * If no query has been executed yet, `null` is returned. + */ + getBookmark(): D1SessionBookmark | null; +} +declare abstract class D1PreparedStatement { + bind(...values: unknown[]): D1PreparedStatement; + first(colName: string): Promise; + first>(): Promise; + run>(): Promise>; + all>(): Promise>; + raw(options: { + columnNames: true; + }): Promise<[ + string[], + ...T[] + ]>; + raw(options?: { + columnNames?: false; + }): Promise; +} +// `Disposable` was added to TypeScript's standard lib types in version 5.2. +// To support older TypeScript versions, define an empty `Disposable` interface. 
+// Users won't be able to use `using`/`Symbol.dispose` without upgrading to 5.2, +// but this will ensure type checking on older versions still passes. +// TypeScript's interface merging will ensure our empty interface is effectively +// ignored when `Disposable` is included in the standard lib. +interface Disposable { +} +/** + * The returned data after sending an email + */ +interface EmailSendResult { + /** + * The Email Message ID + */ + messageId: string; +} +/** + * An email message that can be sent from a Worker. + */ +interface EmailMessage { + /** + * Envelope From attribute of the email message. + */ + readonly from: string; + /** + * Envelope To attribute of the email message. + */ + readonly to: string; +} +/** + * An email message that is sent to a consumer Worker and can be rejected/forwarded. + */ +interface ForwardableEmailMessage extends EmailMessage { + /** + * Stream of the email message content. + */ + readonly raw: ReadableStream; + /** + * An [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). + */ + readonly headers: Headers; + /** + * Size of the email message content. + */ + readonly rawSize: number; + /** + * Reject this email message by returning a permanent SMTP error back to the connecting client including the given reason. + * @param reason The reject reason. + * @returns void + */ + setReject(reason: string): void; + /** + * Forward this email message to a verified destination address of the account. + * @param rcptTo Verified destination address. + * @param headers A [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). + * @returns A promise that resolves when the email message is forwarded. + */ + forward(rcptTo: string, headers?: Headers): Promise; + /** + * Reply to the sender of this email message with a new EmailMessage object. + * @param message The reply message. + * @returns A promise that resolves when the email message is replied. 
+ */ + reply(message: EmailMessage): Promise; +} +/** A file attachment for an email message */ +type EmailAttachment = { + disposition: 'inline'; + contentId: string; + filename: string; + type: string; + content: string | ArrayBuffer | ArrayBufferView; +} | { + disposition: 'attachment'; + contentId?: undefined; + filename: string; + type: string; + content: string | ArrayBuffer | ArrayBufferView; +}; +/** An Email Address */ +interface EmailAddress { + name: string; + email: string; +} +/** + * A binding that allows a Worker to send email messages. + */ +interface SendEmail { + send(message: EmailMessage): Promise; + send(builder: { + from: string | EmailAddress; + to: string | string[]; + subject: string; + replyTo?: string | EmailAddress; + cc?: string | string[]; + bcc?: string | string[]; + headers?: Record; + text?: string; + html?: string; + attachments?: EmailAttachment[]; + }): Promise; +} +declare abstract class EmailEvent extends ExtendableEvent { + readonly message: ForwardableEmailMessage; +} +declare type EmailExportedHandler = (message: ForwardableEmailMessage, env: Env, ctx: ExecutionContext) => void | Promise; +declare module "cloudflare:email" { + let _EmailMessage: { + prototype: EmailMessage; + new (from: string, to: string, raw: ReadableStream | string): EmailMessage; + }; + export { _EmailMessage as EmailMessage }; +} +/** + * Hello World binding to serve as an explanatory example. DO NOT USE + */ +interface HelloWorldBinding { + /** + * Retrieve the current stored value + */ + get(): Promise<{ + value: string; + ms?: number; + }>; + /** + * Set a new stored value + */ + set(value: string): Promise; +} +interface Hyperdrive { + /** + * Connect directly to Hyperdrive as if it's your database, returning a TCP socket. + * + * Calling this method returns an identical socket to if you call + * `connect("host:port")` using the `host` and `port` fields from this object. + * Pick whichever approach works better with your preferred DB client library. 
+ * + * Note that this socket is not yet authenticated -- it's expected that your + * code (or preferably, the client library of your choice) will authenticate + * using the information in this class's readonly fields. + */ + connect(): Socket; + /** + * A valid DB connection string that can be passed straight into the typical + * client library/driver/ORM. This will typically be the easiest way to use + * Hyperdrive. + */ + readonly connectionString: string; + /* + * A randomly generated hostname that is only valid within the context of the + * currently running Worker which, when passed into `connect()` function from + * the "cloudflare:sockets" module, will connect to the Hyperdrive instance + * for your database. + */ + readonly host: string; + /* + * The port that must be paired the the host field when connecting. + */ + readonly port: number; + /* + * The username to use when authenticating to your database via Hyperdrive. + * Unlike the host and password, this will be the same every time + */ + readonly user: string; + /* + * The randomly generated password to use when authenticating to your + * database via Hyperdrive. Like the host field, this password is only valid + * within the context of the currently running Worker instance from which + * it's read. + */ + readonly password: string; + /* + * The name of the database to connect to. + */ + readonly database: string; +} +// Copyright (c) 2024 Cloudflare, Inc. 
+// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +type ImageInfoResponse = { + format: 'image/svg+xml'; +} | { + format: string; + fileSize: number; + width: number; + height: number; +}; +type ImageTransform = { + width?: number; + height?: number; + background?: string; + blur?: number; + border?: { + color?: string; + width?: number; + } | { + top?: number; + bottom?: number; + left?: number; + right?: number; + }; + brightness?: number; + contrast?: number; + fit?: 'scale-down' | 'contain' | 'pad' | 'squeeze' | 'cover' | 'crop'; + flip?: 'h' | 'v' | 'hv'; + gamma?: number; + segment?: 'foreground'; + gravity?: 'face' | 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | { + x?: number; + y?: number; + mode: 'remainder' | 'box-center'; + }; + rotate?: 0 | 90 | 180 | 270; + saturation?: number; + sharpen?: number; + trim?: 'border' | { + top?: number; + bottom?: number; + left?: number; + right?: number; + width?: number; + height?: number; + border?: boolean | { + color?: string; + tolerance?: number; + keep?: number; + }; + }; +}; +type ImageDrawOptions = { + opacity?: number; + repeat?: boolean | string; + top?: number; + left?: number; + bottom?: number; + right?: number; +}; +type ImageInputOptions = { + encoding?: 'base64'; +}; +type ImageOutputOptions = { + format: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp' | 'image/avif' | 'rgb' | 'rgba'; + quality?: number; + background?: string; + anim?: boolean; +}; +interface ImagesBinding { + /** + * Get image metadata (type, width and height) + * @throws {@link ImagesError} with code 9412 if input is not an image + * @param stream The image bytes + */ + info(stream: ReadableStream, options?: ImageInputOptions): Promise; + /** + * Begin applying a series of transformations to an image + * @param stream The image bytes + * @returns A transform handle + */ + input(stream: ReadableStream, options?: ImageInputOptions): 
ImageTransformer; +} +interface ImageTransformer { + /** + * Apply transform next, returning a transform handle. + * You can then apply more transformations, draw, or retrieve the output. + * @param transform + */ + transform(transform: ImageTransform): ImageTransformer; + /** + * Draw an image on this transformer, returning a transform handle. + * You can then apply more transformations, draw, or retrieve the output. + * @param image The image (or transformer that will give the image) to draw + * @param options The options configuring how to draw the image + */ + draw(image: ReadableStream | ImageTransformer, options?: ImageDrawOptions): ImageTransformer; + /** + * Retrieve the image that results from applying the transforms to the + * provided input + * @param options Options that apply to the output e.g. output format + */ + output(options: ImageOutputOptions): Promise; +} +type ImageTransformationOutputOptions = { + encoding?: 'base64'; +}; +interface ImageTransformationResult { + /** + * The image as a response, ready to store in cache or return to users + */ + response(): Response; + /** + * The content type of the returned image + */ + contentType(): string; + /** + * The bytes of the response + */ + image(options?: ImageTransformationOutputOptions): ReadableStream; +} +interface ImagesError extends Error { + readonly code: number; + readonly message: string; + readonly stack?: string; +} +/** + * Media binding for transforming media streams. + * Provides the entry point for media transformation operations. + */ +interface MediaBinding { + /** + * Creates a media transformer from an input stream. + * @param media - The input media bytes + * @returns A MediaTransformer instance for applying transformations + */ + input(media: ReadableStream): MediaTransformer; +} +/** + * Media transformer for applying transformation operations to media content. + * Handles sizing, fitting, and other input transformation parameters. 
+ */ +interface MediaTransformer { + /** + * Applies transformation options to the media content. + * @param transform - Configuration for how the media should be transformed + * @returns A generator for producing the transformed media output + */ + transform(transform: MediaTransformationInputOptions): MediaTransformationGenerator; +} +/** + * Generator for producing media transformation results. + * Configures the output format and parameters for the transformed media. + */ +interface MediaTransformationGenerator { + /** + * Generates the final media output with specified options. + * @param output - Configuration for the output format and parameters + * @returns The final transformation result containing the transformed media + */ + output(output: MediaTransformationOutputOptions): MediaTransformationResult; +} +/** + * Result of a media transformation operation. + * Provides multiple ways to access the transformed media content. + */ +interface MediaTransformationResult { + /** + * Returns the transformed media as a readable stream of bytes. + * @returns A stream containing the transformed media data + */ + media(): ReadableStream; + /** + * Returns the transformed media as an HTTP response object. + * @returns The transformed media as a Response, ready to store in cache or return to users + */ + response(): Response; + /** + * Returns the MIME type of the transformed media. + * @returns The content type string (e.g., 'image/jpeg', 'video/mp4') + */ + contentType(): string; +} +/** + * Configuration options for transforming media input. + * Controls how the media should be resized and fitted. + */ +type MediaTransformationInputOptions = { + /** How the media should be resized to fit the specified dimensions */ + fit?: 'contain' | 'cover' | 'scale-down'; + /** Target width in pixels */ + width?: number; + /** Target height in pixels */ + height?: number; +}; +/** + * Configuration options for Media Transformations output. 
+ * Controls the format, timing, and type of the generated output. + */ +type MediaTransformationOutputOptions = { + /** + * Output mode determining the type of media to generate + */ + mode?: 'video' | 'spritesheet' | 'frame' | 'audio'; + /** Whether to include audio in the output */ + audio?: boolean; + /** + * Starting timestamp for frame extraction or start time for clips. (e.g. '2s'). + */ + time?: string; + /** + * Duration for video clips, audio extraction, and spritesheet generation (e.g. '5s'). + */ + duration?: string; + /** + * Number of frames in the spritesheet. + */ + imageCount?: number; + /** + * Output format for the generated media. + */ + format?: 'jpg' | 'png' | 'm4a'; +}; +/** + * Error object for media transformation operations. + * Extends the standard Error interface with additional media-specific information. + */ +interface MediaError extends Error { + readonly code: number; + readonly message: string; + readonly stack?: string; +} +declare module 'cloudflare:node' { + interface NodeStyleServer { + listen(...args: unknown[]): this; + address(): { + port?: number | null | undefined; + }; + } + export function httpServerHandler(port: number): ExportedHandler; + export function httpServerHandler(options: { + port: number; + }): ExportedHandler; + export function httpServerHandler(server: NodeStyleServer): ExportedHandler; +} +type Params

= Record; +type EventContext = { + request: Request>; + functionPath: string; + waitUntil: (promise: Promise) => void; + passThroughOnException: () => void; + next: (input?: Request | string, init?: RequestInit) => Promise; + env: Env & { + ASSETS: { + fetch: typeof fetch; + }; + }; + params: Params

; + data: Data; +}; +type PagesFunction = Record> = (context: EventContext) => Response | Promise; +type EventPluginContext = { + request: Request>; + functionPath: string; + waitUntil: (promise: Promise) => void; + passThroughOnException: () => void; + next: (input?: Request | string, init?: RequestInit) => Promise; + env: Env & { + ASSETS: { + fetch: typeof fetch; + }; + }; + params: Params

) => Result> : Result>; + // Type for the callable part of an `Provider` if `T` is callable. + // This is intersected with methods/properties. + type MaybeCallableProvider = T extends (...args: any[]) => any ? MethodOrProperty : unknown; + // Base type for all other types providing RPC-like interfaces. + // Rewrites all methods/properties to be `MethodOrProperty`s, while preserving callable types. + // `Reserved` names (e.g. stub method names like `dup()`) and symbols can't be accessed over RPC. + export type Provider = MaybeCallableProvider & Pick<{ + [K in keyof T]: MethodOrProperty; + }, Exclude>>; +} +declare namespace Cloudflare { + // Type of `env`. + // + // The specific project can extend `Env` by redeclaring it in project-specific files. Typescript + // will merge all declarations. + // + // You can use `wrangler types` to generate the `Env` type automatically. + interface Env { + } + // Project-specific parameters used to inform types. + // + // This interface is, again, intended to be declared in project-specific files, and then that + // declaration will be merged with this one. + // + // A project should have a declaration like this: + // + // interface GlobalProps { + // // Declares the main module's exports. Used to populate Cloudflare.Exports aka the type + // // of `ctx.exports`. + // mainModule: typeof import("my-main-module"); + // + // // Declares which of the main module's exports are configured with durable storage, and + // // thus should behave as Durable Object namsepace bindings. + // durableNamespaces: "MyDurableObject" | "AnotherDurableObject"; + // } + // + // You can use `wrangler types` to generate `GlobalProps` automatically. + interface GlobalProps { + } + // Evaluates to the type of a property in GlobalProps, defaulting to `Default` if it is not + // present. + type GlobalProp = K extends keyof GlobalProps ? GlobalProps[K] : Default; + // The type of the program's main module exports, if known. 
Requires `GlobalProps` to declare the + // `mainModule` property. + type MainModule = GlobalProp<"mainModule", {}>; + // The type of ctx.exports, which contains loopback bindings for all top-level exports. + type Exports = { + [K in keyof MainModule]: LoopbackForExport + // If the export is listed in `durableNamespaces`, then it is also a + // DurableObjectNamespace. + & (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace : DurableObjectNamespace : DurableObjectNamespace : {}); + }; +} +declare namespace CloudflareWorkersModule { + export type RpcStub = Rpc.Stub; + export const RpcStub: { + new (value: T): Rpc.Stub; + }; + export abstract class RpcTarget implements Rpc.RpcTargetBranded { + [Rpc.__RPC_TARGET_BRAND]: never; + } + // `protected` fields don't appear in `keyof`s, so can't be accessed over RPC + export abstract class WorkerEntrypoint implements Rpc.WorkerEntrypointBranded { + [Rpc.__WORKER_ENTRYPOINT_BRAND]: never; + protected ctx: ExecutionContext; + protected env: Env; + constructor(ctx: ExecutionContext, env: Env); + email?(message: ForwardableEmailMessage): void | Promise; + fetch?(request: Request): Response | Promise; + queue?(batch: MessageBatch): void | Promise; + scheduled?(controller: ScheduledController): void | Promise; + tail?(events: TraceItem[]): void | Promise; + tailStream?(event: TailStream.TailEvent): TailStream.TailEventHandlerType | Promise; + test?(controller: TestController): void | Promise; + trace?(traces: TraceItem[]): void | Promise; + } + export abstract class DurableObject implements Rpc.DurableObjectBranded { + [Rpc.__DURABLE_OBJECT_BRAND]: never; + protected ctx: DurableObjectState; + protected env: Env; + constructor(ctx: DurableObjectState, env: Env); + alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; + fetch?(request: Request): Response | Promise; + webSocketMessage?(ws: WebSocket, 
message: string | ArrayBuffer): void | Promise; + webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: boolean): void | Promise; + webSocketError?(ws: WebSocket, error: unknown): void | Promise; + } + export type WorkflowDurationLabel = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; + export type WorkflowSleepDuration = `${number} ${WorkflowDurationLabel}${'s' | ''}` | number; + export type WorkflowDelayDuration = WorkflowSleepDuration; + export type WorkflowTimeoutDuration = WorkflowSleepDuration; + export type WorkflowRetentionDuration = WorkflowSleepDuration; + export type WorkflowBackoff = 'constant' | 'linear' | 'exponential'; + export type WorkflowStepConfig = { + retries?: { + limit: number; + delay: WorkflowDelayDuration | number; + backoff?: WorkflowBackoff; + }; + timeout?: WorkflowTimeoutDuration | number; + }; + export type WorkflowEvent = { + payload: Readonly; + timestamp: Date; + instanceId: string; + }; + export type WorkflowStepEvent = { + payload: Readonly; + timestamp: Date; + type: string; + }; + export abstract class WorkflowStep { + do>(name: string, callback: () => Promise): Promise; + do>(name: string, config: WorkflowStepConfig, callback: () => Promise): Promise; + sleep: (name: string, duration: WorkflowSleepDuration) => Promise; + sleepUntil: (name: string, timestamp: Date | number) => Promise; + waitForEvent>(name: string, options: { + type: string; + timeout?: WorkflowTimeoutDuration | number; + }): Promise>; + } + export type WorkflowInstanceStatus = 'queued' | 'running' | 'paused' | 'errored' | 'terminated' | 'complete' | 'waiting' | 'waitingForPause' | 'unknown'; + export abstract class WorkflowEntrypoint | unknown = unknown> implements Rpc.WorkflowEntrypointBranded { + [Rpc.__WORKFLOW_ENTRYPOINT_BRAND]: never; + protected ctx: ExecutionContext; + protected env: Env; + constructor(ctx: ExecutionContext, env: Env); + run(event: Readonly>, step: WorkflowStep): Promise; + } + export function 
waitUntil(promise: Promise): void; + export function withEnv(newEnv: unknown, fn: () => unknown): unknown; + export function withExports(newExports: unknown, fn: () => unknown): unknown; + export function withEnvAndExports(newEnv: unknown, newExports: unknown, fn: () => unknown): unknown; + export const env: Cloudflare.Env; + export const exports: Cloudflare.Exports; +} +declare module 'cloudflare:workers' { + export = CloudflareWorkersModule; +} +interface SecretsStoreSecret { + /** + * Get a secret from the Secrets Store, returning a string of the secret value + * if it exists, or throws an error if it does not exist + */ + get(): Promise; +} +declare module "cloudflare:sockets" { + function _connect(address: string | SocketAddress, options?: SocketOptions): Socket; + export { _connect as connect }; +} +type MarkdownDocument = { + name: string; + blob: Blob; +}; +type ConversionResponse = { + id: string; + name: string; + mimeType: string; + format: 'markdown'; + tokens: number; + data: string; +} | { + id: string; + name: string; + mimeType: string; + format: 'error'; + error: string; +}; +type ImageConversionOptions = { + descriptionLanguage?: 'en' | 'es' | 'fr' | 'it' | 'pt' | 'de'; +}; +type EmbeddedImageConversionOptions = ImageConversionOptions & { + convert?: boolean; + maxConvertedImages?: number; +}; +type ConversionOptions = { + html?: { + images?: EmbeddedImageConversionOptions & { + convertOGImage?: boolean; + }; + hostname?: string; + }; + docx?: { + images?: EmbeddedImageConversionOptions; + }; + image?: ImageConversionOptions; + pdf?: { + images?: EmbeddedImageConversionOptions; + metadata?: boolean; + }; +}; +type ConversionRequestOptions = { + gateway?: GatewayOptions; + extraHeaders?: object; + conversionOptions?: ConversionOptions; +}; +type SupportedFileFormat = { + mimeType: string; + extension: string; +}; +declare abstract class ToMarkdownService { + transform(files: MarkdownDocument[], options?: ConversionRequestOptions): Promise; + 
transform(files: MarkdownDocument, options?: ConversionRequestOptions): Promise; + supported(): Promise; +} +declare namespace TailStream { + interface Header { + readonly name: string; + readonly value: string; + } + interface FetchEventInfo { + readonly type: "fetch"; + readonly method: string; + readonly url: string; + readonly cfJson?: object; + readonly headers: Header[]; + } + interface JsRpcEventInfo { + readonly type: "jsrpc"; + } + interface ScheduledEventInfo { + readonly type: "scheduled"; + readonly scheduledTime: Date; + readonly cron: string; + } + interface AlarmEventInfo { + readonly type: "alarm"; + readonly scheduledTime: Date; + } + interface QueueEventInfo { + readonly type: "queue"; + readonly queueName: string; + readonly batchSize: number; + } + interface EmailEventInfo { + readonly type: "email"; + readonly mailFrom: string; + readonly rcptTo: string; + readonly rawSize: number; + } + interface TraceEventInfo { + readonly type: "trace"; + readonly traces: (string | null)[]; + } + interface HibernatableWebSocketEventInfoMessage { + readonly type: "message"; + } + interface HibernatableWebSocketEventInfoError { + readonly type: "error"; + } + interface HibernatableWebSocketEventInfoClose { + readonly type: "close"; + readonly code: number; + readonly wasClean: boolean; + } + interface HibernatableWebSocketEventInfo { + readonly type: "hibernatableWebSocket"; + readonly info: HibernatableWebSocketEventInfoClose | HibernatableWebSocketEventInfoError | HibernatableWebSocketEventInfoMessage; + } + interface CustomEventInfo { + readonly type: "custom"; + } + interface FetchResponseInfo { + readonly type: "fetch"; + readonly statusCode: number; + } + type EventOutcome = "ok" | "canceled" | "exception" | "unknown" | "killSwitch" | "daemonDown" | "exceededCpu" | "exceededMemory" | "loadShed" | "responseStreamDisconnected" | "scriptNotFound"; + interface ScriptVersion { + readonly id: string; + readonly tag?: string; + readonly message?: string; + } + 
interface Onset { + readonly type: "onset"; + readonly attributes: Attribute[]; + // id for the span being opened by this Onset event. + readonly spanId: string; + readonly dispatchNamespace?: string; + readonly entrypoint?: string; + readonly executionModel: string; + readonly scriptName?: string; + readonly scriptTags?: string[]; + readonly scriptVersion?: ScriptVersion; + readonly info: FetchEventInfo | JsRpcEventInfo | ScheduledEventInfo | AlarmEventInfo | QueueEventInfo | EmailEventInfo | TraceEventInfo | HibernatableWebSocketEventInfo | CustomEventInfo; + } + interface Outcome { + readonly type: "outcome"; + readonly outcome: EventOutcome; + readonly cpuTime: number; + readonly wallTime: number; + } + interface SpanOpen { + readonly type: "spanOpen"; + readonly name: string; + // id for the span being opened by this SpanOpen event. + readonly spanId: string; + readonly info?: FetchEventInfo | JsRpcEventInfo | Attributes; + } + interface SpanClose { + readonly type: "spanClose"; + readonly outcome: EventOutcome; + } + interface DiagnosticChannelEvent { + readonly type: "diagnosticChannel"; + readonly channel: string; + readonly message: any; + } + interface Exception { + readonly type: "exception"; + readonly name: string; + readonly message: string; + readonly stack?: string; + } + interface Log { + readonly type: "log"; + readonly level: "debug" | "error" | "info" | "log" | "warn"; + readonly message: object; + } + interface DroppedEventsDiagnostic { + readonly diagnosticsType: "droppedEvents"; + readonly count: number; + } + interface StreamDiagnostic { + readonly type: 'streamDiagnostic'; + // To add new diagnostic types, define a new interface and add it to this union type. + readonly diagnostic: DroppedEventsDiagnostic; + } + // This marks the worker handler return information. + // This is separate from Outcome because the worker invocation can live for a long time after + // returning. 
For example - Websockets that return an http upgrade response but then continue + // streaming information or SSE http connections. + interface Return { + readonly type: "return"; + readonly info?: FetchResponseInfo; + } + interface Attribute { + readonly name: string; + readonly value: string | string[] | boolean | boolean[] | number | number[] | bigint | bigint[]; + } + interface Attributes { + readonly type: "attributes"; + readonly info: Attribute[]; + } + type EventType = Onset | Outcome | SpanOpen | SpanClose | DiagnosticChannelEvent | Exception | Log | StreamDiagnostic | Return | Attributes; + // Context in which this trace event lives. + interface SpanContext { + // Single id for the entire top-level invocation + // This should be a new traceId for the first worker stage invoked in the eyeball request and then + // same-account service-bindings should reuse the same traceId but cross-account service-bindings + // should use a new traceId. + readonly traceId: string; + // spanId in which this event is handled + // for Onset and SpanOpen events this would be the parent span id + // for Outcome and SpanClose these this would be the span id of the opening Onset and SpanOpen events + // For Hibernate and Mark this would be the span under which they were emitted. + // spanId is not set ONLY if: + // 1. This is an Onset event + // 2. We are not inheriting any SpanContext. (e.g. this is a cross-account service binding or a new top-level invocation) + readonly spanId?: string; + } + interface TailEvent { + // invocation id of the currently invoked worker stage. + // invocation id will always be unique to every Onset event and will be the same until the Outcome event. + readonly invocationId: string; + // Inherited spanContext for this event. 
+ readonly spanContext: SpanContext; + readonly timestamp: Date; + readonly sequence: number; + readonly event: Event; + } + type TailEventHandler = (event: TailEvent) => void | Promise; + type TailEventHandlerObject = { + outcome?: TailEventHandler; + spanOpen?: TailEventHandler; + spanClose?: TailEventHandler; + diagnosticChannel?: TailEventHandler; + exception?: TailEventHandler; + log?: TailEventHandler; + return?: TailEventHandler; + attributes?: TailEventHandler; + }; + type TailEventHandlerType = TailEventHandler | TailEventHandlerObject; +} +// Copyright (c) 2022-2023 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +/** + * Data types supported for holding vector metadata. + */ +type VectorizeVectorMetadataValue = string | number | boolean | string[]; +/** + * Additional information to associate with a vector. + */ +type VectorizeVectorMetadata = VectorizeVectorMetadataValue | Record; +type VectorFloatArray = Float32Array | Float64Array; +interface VectorizeError { + code?: number; + error: string; +} +/** + * Comparison logic/operation to use for metadata filtering. + * + * This list is expected to grow as support for more operations are released. + */ +type VectorizeVectorMetadataFilterOp = '$eq' | '$ne' | '$lt' | '$lte' | '$gt' | '$gte'; +type VectorizeVectorMetadataFilterCollectionOp = '$in' | '$nin'; +/** + * Filter criteria for vector metadata used to limit the retrieved query result set. + */ +type VectorizeVectorMetadataFilter = { + [field: string]: Exclude | null | { + [Op in VectorizeVectorMetadataFilterOp]?: Exclude | null; + } | { + [Op in VectorizeVectorMetadataFilterCollectionOp]?: Exclude[]; + }; +}; +/** + * Supported distance metrics for an index. + * Distance metrics determine how other "similar" vectors are determined. + */ +type VectorizeDistanceMetric = "euclidean" | "cosine" | "dot-product"; +/** + * Metadata return levels for a Vectorize query. 
+ * + * Default to "none". + * + * @property all Full metadata for the vector return set, including all fields (including those un-indexed) without truncation. This is a more expensive retrieval, as it requires additional fetching & reading of un-indexed data. + * @property indexed Return all metadata fields configured for indexing in the vector return set. This level of retrieval is "free" in that no additional overhead is incurred returning this data. However, note that indexed metadata is subject to truncation (especially for larger strings). + * @property none No indexed metadata will be returned. + */ +type VectorizeMetadataRetrievalLevel = "all" | "indexed" | "none"; +interface VectorizeQueryOptions { + topK?: number; + namespace?: string; + returnValues?: boolean; + returnMetadata?: boolean | VectorizeMetadataRetrievalLevel; + filter?: VectorizeVectorMetadataFilter; +} +/** + * Information about the configuration of an index. + */ +type VectorizeIndexConfig = { + dimensions: number; + metric: VectorizeDistanceMetric; +} | { + preset: string; // keep this generic, as we'll be adding more presets in the future and this is only in a read capacity +}; +/** + * Metadata about an existing index. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link VectorizeIndexInfo} for its post-beta equivalent. + */ +interface VectorizeIndexDetails { + /** The unique ID of the index */ + readonly id: string; + /** The name of the index. */ + name: string; + /** (optional) A human readable description for the index. */ + description?: string; + /** The index configuration, including the dimension size and distance metric. */ + config: VectorizeIndexConfig; + /** The number of records containing vectors within the index. */ + vectorsCount: number; +} +/** + * Metadata about an existing index. + */ +interface VectorizeIndexInfo { + /** The number of records containing vectors within the index. 
*/ + vectorCount: number; + /** Number of dimensions the index has been configured for. */ + dimensions: number; + /** ISO 8601 datetime of the last processed mutation on in the index. All changes before this mutation will be reflected in the index state. */ + processedUpToDatetime: number; + /** UUIDv4 of the last mutation processed by the index. All changes before this mutation will be reflected in the index state. */ + processedUpToMutation: number; +} +/** + * Represents a single vector value set along with its associated metadata. + */ +interface VectorizeVector { + /** The ID for the vector. This can be user-defined, and must be unique. It should uniquely identify the object, and is best set based on the ID of what the vector represents. */ + id: string; + /** The vector values */ + values: VectorFloatArray | number[]; + /** The namespace this vector belongs to. */ + namespace?: string; + /** Metadata associated with the vector. Includes the values of other fields and potentially additional details. */ + metadata?: Record; +} +/** + * Represents a matched vector for a query along with its score and (if specified) the matching vector information. + */ +type VectorizeMatch = Pick, "values"> & Omit & { + /** The score or rank for similarity, when returned as a result */ + score: number; +}; +/** + * A set of matching {@link VectorizeMatch} for a particular query. + */ +interface VectorizeMatches { + matches: VectorizeMatch[]; + count: number; +} +/** + * Results of an operation that performed a mutation on a set of vectors. + * Here, `ids` is a list of vectors that were successfully processed. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link VectorizeAsyncMutation} for its post-beta equivalent. + */ +interface VectorizeVectorMutation { + /* List of ids of vectors that were successfully processed. */ + ids: string[]; + /* Total count of the number of processed vectors. 
*/ + count: number; +} +/** + * Result type indicating a mutation on the Vectorize Index. + * Actual mutations are processed async where the `mutationId` is the unique identifier for the operation. + */ +interface VectorizeAsyncMutation { + /** The unique identifier for the async mutation operation containing the changeset. */ + mutationId: string; +} +/** + * A Vectorize Vector Search Index for querying vectors/embeddings. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link Vectorize} for its new implementation. + */ +declare abstract class VectorizeIndex { + /** + * Get information about the currently bound index. + * @returns A promise that resolves with information about the current index. + */ + public describe(): Promise; + /** + * Use the provided vector to perform a similarity search across the index. + * @param vector Input vector that will be used to drive the similarity search. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; + /** + * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. + * @param vectors List of vectors that will be inserted. + * @returns A promise that resolves with the ids & count of records that were successfully processed. + */ + public insert(vectors: VectorizeVector[]): Promise; + /** + * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. + * @param vectors List of vectors that will be upserted. + * @returns A promise that resolves with the ids & count of records that were successfully processed. + */ + public upsert(vectors: VectorizeVector[]): Promise; + /** + * Delete a list of vectors with a matching id. 
+ * @param ids List of vector ids that should be deleted. + * @returns A promise that resolves with the ids & count of records that were successfully processed (and thus deleted). + */ + public deleteByIds(ids: string[]): Promise; + /** + * Get a list of vectors with a matching id. + * @param ids List of vector ids that should be returned. + * @returns A promise that resolves with the raw unscored vectors matching the id set. + */ + public getByIds(ids: string[]): Promise; +} +/** + * A Vectorize Vector Search Index for querying vectors/embeddings. + * + * Mutations in this version are async, returning a mutation id. + */ +declare abstract class Vectorize { + /** + * Get information about the currently bound index. + * @returns A promise that resolves with information about the current index. + */ + public describe(): Promise; + /** + * Use the provided vector to perform a similarity search across the index. + * @param vector Input vector that will be used to drive the similarity search. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; + /** + * Use the provided vector-id to perform a similarity search across the index. + * @param vectorId Id for a vector in the index against which the index should be queried. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public queryById(vectorId: string, options?: VectorizeQueryOptions): Promise; + /** + * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. + * @param vectors List of vectors that will be inserted. + * @returns A promise that resolves with a unique identifier of a mutation containing the insert changeset. 
+ */ + public insert(vectors: VectorizeVector[]): Promise; + /** + * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. + * @param vectors List of vectors that will be upserted. + * @returns A promise that resolves with a unique identifier of a mutation containing the upsert changeset. + */ + public upsert(vectors: VectorizeVector[]): Promise; + /** + * Delete a list of vectors with a matching id. + * @param ids List of vector ids that should be deleted. + * @returns A promise that resolves with a unique identifier of a mutation containing the delete changeset. + */ + public deleteByIds(ids: string[]): Promise; + /** + * Get a list of vectors with a matching id. + * @param ids List of vector ids that should be returned. + * @returns A promise that resolves with the raw unscored vectors matching the id set. + */ + public getByIds(ids: string[]): Promise; +} +/** + * The interface for "version_metadata" binding + * providing metadata about the Worker Version using this binding. + */ +type WorkerVersionMetadata = { + /** The ID of the Worker Version using this binding */ + id: string; + /** The tag of the Worker Version using this binding */ + tag: string; + /** The timestamp of when the Worker Version was uploaded */ + timestamp: string; +}; +interface DynamicDispatchLimits { + /** + * Limit CPU time in milliseconds. + */ + cpuMs?: number; + /** + * Limit number of subrequests. + */ + subRequests?: number; +} +interface DynamicDispatchOptions { + /** + * Limit resources of invoked Worker script. + */ + limits?: DynamicDispatchLimits; + /** + * Arguments for outbound Worker script, if configured. + */ + outbound?: { + [key: string]: any; + }; +} +interface DispatchNamespace { + /** + * @param name Name of the Worker script. + * @param args Arguments to Worker script. + * @param options Options for Dynamic Dispatch invocation. 
+ * @returns A Fetcher object that allows you to send requests to the Worker script. + * @throws If the Worker script does not exist in this dispatch namespace, an error will be thrown. + */ + get(name: string, args?: { + [key: string]: any; + }, options?: DynamicDispatchOptions): Fetcher; +} +declare module 'cloudflare:workflows' { + /** + * NonRetryableError allows for a user to throw a fatal error + * that makes a Workflow instance fail immediately without triggering a retry + */ + export class NonRetryableError extends Error { + public constructor(message: string, name?: string); + } +} +declare abstract class Workflow { + /** + * Get a handle to an existing instance of the Workflow. + * @param id Id for the instance of this Workflow + * @returns A promise that resolves with a handle for the Instance + */ + public get(id: string): Promise; + /** + * Create a new instance and return a handle to it. If a provided id exists, an error will be thrown. + * @param options Options when creating an instance including id and params + * @returns A promise that resolves with a handle for the Instance + */ + public create(options?: WorkflowInstanceCreateOptions): Promise; + /** + * Create a batch of instances and return handle for all of them. If a provided id exists, an error will be thrown. + * `createBatch` is limited at 100 instances at a time or when the RPC limit for the batch (1MiB) is reached. + * @param batch List of Options when creating an instance including name and params + * @returns A promise that resolves with a list of handles for the created instances. + */ + public createBatch(batch: WorkflowInstanceCreateOptions[]): Promise; +} +type WorkflowDurationLabel = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; +type WorkflowSleepDuration = `${number} ${WorkflowDurationLabel}${'s' | ''}` | number; +type WorkflowRetentionDuration = WorkflowSleepDuration; +interface WorkflowInstanceCreateOptions { + /** + * An id for your Workflow instance. 
Must be unique within the Workflow. + */ + id?: string; + /** + * The event payload the Workflow instance is triggered with + */ + params?: PARAMS; + /** + * The retention policy for Workflow instance. + * Defaults to the maximum retention period available for the owner's account. + */ + retention?: { + successRetention?: WorkflowRetentionDuration; + errorRetention?: WorkflowRetentionDuration; + }; +} +type InstanceStatus = { + status: 'queued' // means that instance is waiting to be started (see concurrency limits) + | 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running + | 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish + | 'waitingForPause' // instance is finishing the current work to pause + | 'unknown'; + error?: { + name: string; + message: string; + }; + output?: unknown; +}; +interface WorkflowError { + code?: number; + message: string; +} +declare abstract class WorkflowInstance { + public id: string; + /** + * Pause the instance. + */ + public pause(): Promise; + /** + * Resume the instance. If it is already running, an error will be thrown. + */ + public resume(): Promise; + /** + * Terminate the instance. If it is errored, terminated or complete, an error will be thrown. + */ + public terminate(): Promise; + /** + * Restart the instance. + */ + public restart(): Promise; + /** + * Returns the current status of the instance. + */ + public status(): Promise; + /** + * Send an event to this instance. 
+ */ + public sendEvent({ type, payload, }: { + type: string; + payload: unknown; + }): Promise; +} diff --git a/apps/proxy/wrangler.jsonc b/apps/proxy/wrangler.jsonc index 9d89ae5..3ac218a 100644 --- a/apps/proxy/wrangler.jsonc +++ b/apps/proxy/wrangler.jsonc @@ -4,6 +4,14 @@ "main": "src/worker.ts", "compatibility_date": "2025-09-01", "compatibility_flags": ["nodejs_compat"], + "observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } + }, "durable_objects": { "bindings": [ { diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 2240ca1..68220fb 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -23,11 +23,14 @@ - Preserve `/health` response contract: `{ status, version, environment }`. - Keep the worker entrypoint in `src/server.ts`; use `src/index.ts` only as the package export wrapper. - Keep environment variables non-secret in `wrangler.jsonc` and secret values out of git. +- Keep Wrangler observability logging enabled (`observability.enabled=true`, `logs.enabled=true`, `invocation_logs=true`) so deploy/runtime failures are visible without ad-hoc debugging. +- Keep `worker-configuration.d.ts` committed and regenerate with `wrangler types --env dev` after `wrangler.jsonc` or binding changes. - Keep `.dev.vars` and `.env.example` synchronized when adding/changing runtime config fields (`ENVIRONMENT`, `APP_VERSION`, `PROXY_URL`, `EVENT_BUS_BACKEND`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). -- Use queue-backed event bus in `development`/`production` (`EVENT_BUS_BACKEND=queue` + `EVENT_BUS_QUEUE` binding) and memory backend in local development overrides (`EVENT_BUS_BACKEND=memory`). +- Use memory event bus in `development` while no downstream consumers exist (`EVENT_BUS_BACKEND=memory`). +- Keep production queue-backed (`EVENT_BUS_BACKEND=queue` + `EVENT_BUS_QUEUE`) until rollout policy changes. 
## Validation -- Validate config changes with `wrangler check` before deployment. +- Validate deployment config and bundle with `wrangler deploy --env --dry-run` before remote migration/deploy. - Run `pnpm -F @clawdentity/registry run test` and `pnpm -F @clawdentity/registry run typecheck` for app-level safety. - Keep Vitest path aliases pointed at workspace source (`packages/*/src/index.ts`) so tests do not depend on stale package `dist` outputs. diff --git a/apps/registry/tsconfig.json b/apps/registry/tsconfig.json index fb688c5..5309f4c 100644 --- a/apps/registry/tsconfig.json +++ b/apps/registry/tsconfig.json @@ -7,5 +7,6 @@ "jsx": "react-jsx", "jsxImportSource": "hono/jsx" }, - "include": ["src"] + "include": ["src"], + "exclude": ["worker-configuration.d.ts"] } diff --git a/apps/registry/worker-configuration.d.ts b/apps/registry/worker-configuration.d.ts new file mode 100644 index 0000000..a4e0230 --- /dev/null +++ b/apps/registry/worker-configuration.d.ts @@ -0,0 +1,10932 @@ +/* eslint-disable */ +// Generated by Wrangler by running `wrangler --cwd apps/registry types --env dev` (hash: 203d0da1e8a57c87486b2e73012c56e0) +// Runtime types generated with workerd@1.20260219.0 2025-09-01 nodejs_compat +declare namespace Cloudflare { + interface GlobalProps { + mainModule: typeof import("./src/server"); + } + interface Env { + DB: D1Database; + ENVIRONMENT: string; + APP_VERSION: string; + EVENT_BUS_BACKEND: string; + PROXY_URL: string; + REGISTRY_ISSUER_URL: string; + BOOTSTRAP_SECRET: string; + REGISTRY_SIGNING_KEY: string; + REGISTRY_SIGNING_KEYS: string; + } +} +interface Env extends Cloudflare.Env {} +type StringifyValues> = { + [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string; +}; +declare namespace NodeJS { + interface ProcessEnv extends StringifyValues> {} +} + +// Begin runtime types +/*! ***************************************************************************** +Copyright (c) Cloudflare. All rights reserved. 
+Copyright (c) Microsoft Corporation. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +/* eslint-disable */ +// noinspection JSUnusedGlobalSymbols +declare var onmessage: never; +/** + * The **`DOMException`** interface represents an abnormal event (called an **exception**) that occurs as a result of calling a method or accessing a property of a web API. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException) + */ +declare class DOMException extends Error { + constructor(message?: string, name?: string); + /** + * The **`message`** read-only property of the a message or description associated with the given error name. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/message) + */ + readonly message: string; + /** + * The **`name`** read-only property of the one of the strings associated with an error name. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/name) + */ + readonly name: string; + /** + * The **`code`** read-only property of the DOMException interface returns one of the legacy error code constants, or `0` if none match. 
+ * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/code) + */ + readonly code: number; + static readonly INDEX_SIZE_ERR: number; + static readonly DOMSTRING_SIZE_ERR: number; + static readonly HIERARCHY_REQUEST_ERR: number; + static readonly WRONG_DOCUMENT_ERR: number; + static readonly INVALID_CHARACTER_ERR: number; + static readonly NO_DATA_ALLOWED_ERR: number; + static readonly NO_MODIFICATION_ALLOWED_ERR: number; + static readonly NOT_FOUND_ERR: number; + static readonly NOT_SUPPORTED_ERR: number; + static readonly INUSE_ATTRIBUTE_ERR: number; + static readonly INVALID_STATE_ERR: number; + static readonly SYNTAX_ERR: number; + static readonly INVALID_MODIFICATION_ERR: number; + static readonly NAMESPACE_ERR: number; + static readonly INVALID_ACCESS_ERR: number; + static readonly VALIDATION_ERR: number; + static readonly TYPE_MISMATCH_ERR: number; + static readonly SECURITY_ERR: number; + static readonly NETWORK_ERR: number; + static readonly ABORT_ERR: number; + static readonly URL_MISMATCH_ERR: number; + static readonly QUOTA_EXCEEDED_ERR: number; + static readonly TIMEOUT_ERR: number; + static readonly INVALID_NODE_TYPE_ERR: number; + static readonly DATA_CLONE_ERR: number; + get stack(): any; + set stack(value: any); +} +type WorkerGlobalScopeEventMap = { + fetch: FetchEvent; + scheduled: ScheduledEvent; + queue: QueueEvent; + unhandledrejection: PromiseRejectionEvent; + rejectionhandled: PromiseRejectionEvent; +}; +declare abstract class WorkerGlobalScope extends EventTarget { + EventTarget: typeof EventTarget; +} +/* The **`console`** object provides access to the debugging console (e.g., the Web console in Firefox). * + * The **`console`** object provides access to the debugging console (e.g., the Web console in Firefox). 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console) + */ +interface Console { + "assert"(condition?: boolean, ...data: any[]): void; + /** + * The **`console.clear()`** static method clears the console if possible. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/clear_static) + */ + clear(): void; + /** + * The **`console.count()`** static method logs the number of times that this particular call to `count()` has been called. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/count_static) + */ + count(label?: string): void; + /** + * The **`console.countReset()`** static method resets counter used with console/count_static. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/countReset_static) + */ + countReset(label?: string): void; + /** + * The **`console.debug()`** static method outputs a message to the console at the 'debug' log level. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/debug_static) + */ + debug(...data: any[]): void; + /** + * The **`console.dir()`** static method displays a list of the properties of the specified JavaScript object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dir_static) + */ + dir(item?: any, options?: any): void; + /** + * The **`console.dirxml()`** static method displays an interactive tree of the descendant elements of the specified XML/HTML element. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dirxml_static) + */ + dirxml(...data: any[]): void; + /** + * The **`console.error()`** static method outputs a message to the console at the 'error' log level. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/error_static) + */ + error(...data: any[]): void; + /** + * The **`console.group()`** static method creates a new inline group in the Web console log, causing any subsequent console messages to be indented by an additional level, until console/groupEnd_static is called. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/group_static) + */ + group(...data: any[]): void; + /** + * The **`console.groupCollapsed()`** static method creates a new inline group in the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupCollapsed_static) + */ + groupCollapsed(...data: any[]): void; + /** + * The **`console.groupEnd()`** static method exits the current inline group in the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupEnd_static) + */ + groupEnd(): void; + /** + * The **`console.info()`** static method outputs a message to the console at the 'info' log level. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/info_static) + */ + info(...data: any[]): void; + /** + * The **`console.log()`** static method outputs a message to the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/log_static) + */ + log(...data: any[]): void; + /** + * The **`console.table()`** static method displays tabular data as a table. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/table_static) + */ + table(tabularData?: any, properties?: string[]): void; + /** + * The **`console.time()`** static method starts a timer you can use to track how long an operation takes. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/time_static) + */ + time(label?: string): void; + /** + * The **`console.timeEnd()`** static method stops a timer that was previously started by calling console/time_static. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timeEnd_static) + */ + timeEnd(label?: string): void; + /** + * The **`console.timeLog()`** static method logs the current value of a timer that was previously started by calling console/time_static. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timeLog_static) + */ + timeLog(label?: string, ...data: any[]): void; + timeStamp(label?: string): void; + /** + * The **`console.trace()`** static method outputs a stack trace to the console. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/trace_static) + */ + trace(...data: any[]): void; + /** + * The **`console.warn()`** static method outputs a warning message to the console at the 'warning' log level. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/warn_static) + */ + warn(...data: any[]): void; +} +declare const console: Console; +type BufferSource = ArrayBufferView | ArrayBuffer; +type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; +declare namespace WebAssembly { + class CompileError extends Error { + constructor(message?: string); + } + class RuntimeError extends Error { + constructor(message?: string); + } + type ValueType = "anyfunc" | "externref" | "f32" | "f64" | "i32" | "i64" | "v128"; + interface GlobalDescriptor { + value: ValueType; + mutable?: boolean; + } + class Global { + constructor(descriptor: GlobalDescriptor, value?: any); + value: any; + valueOf(): any; + } + type ImportValue = ExportValue | number; + type ModuleImports = Record; + type Imports = Record; + type ExportValue = Function | Global | Memory | Table; + type Exports = Record; + class Instance { + constructor(module: Module, imports?: Imports); + readonly exports: Exports; + } + interface MemoryDescriptor { + initial: number; + maximum?: number; + shared?: 
boolean; + } + class Memory { + constructor(descriptor: MemoryDescriptor); + readonly buffer: ArrayBuffer; + grow(delta: number): number; + } + type ImportExportKind = "function" | "global" | "memory" | "table"; + interface ModuleExportDescriptor { + kind: ImportExportKind; + name: string; + } + interface ModuleImportDescriptor { + kind: ImportExportKind; + module: string; + name: string; + } + abstract class Module { + static customSections(module: Module, sectionName: string): ArrayBuffer[]; + static exports(module: Module): ModuleExportDescriptor[]; + static imports(module: Module): ModuleImportDescriptor[]; + } + type TableKind = "anyfunc" | "externref"; + interface TableDescriptor { + element: TableKind; + initial: number; + maximum?: number; + } + class Table { + constructor(descriptor: TableDescriptor, value?: any); + readonly length: number; + get(index: number): any; + grow(delta: number, value?: any): number; + set(index: number, value?: any): void; + } + function instantiate(module: Module, imports?: Imports): Promise; + function validate(bytes: BufferSource): boolean; +} +/** + * The **`ServiceWorkerGlobalScope`** interface of the Service Worker API represents the global execution context of a service worker. + * Available only in secure contexts. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ServiceWorkerGlobalScope) + */ +interface ServiceWorkerGlobalScope extends WorkerGlobalScope { + DOMException: typeof DOMException; + WorkerGlobalScope: typeof WorkerGlobalScope; + btoa(data: string): string; + atob(data: string): string; + setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; + setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; + clearTimeout(timeoutId: number | null): void; + setInterval(callback: (...args: any[]) => void, msDelay?: number): number; + setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; + clearInterval(timeoutId: number | null): void; + queueMicrotask(task: Function): void; + structuredClone(value: T, options?: StructuredSerializeOptions): T; + reportError(error: any): void; + fetch(input: RequestInfo | URL, init?: RequestInit): Promise; + self: ServiceWorkerGlobalScope; + crypto: Crypto; + caches: CacheStorage; + scheduler: Scheduler; + performance: Performance; + Cloudflare: Cloudflare; + readonly origin: string; + Event: typeof Event; + ExtendableEvent: typeof ExtendableEvent; + CustomEvent: typeof CustomEvent; + PromiseRejectionEvent: typeof PromiseRejectionEvent; + FetchEvent: typeof FetchEvent; + TailEvent: typeof TailEvent; + TraceEvent: typeof TailEvent; + ScheduledEvent: typeof ScheduledEvent; + MessageEvent: typeof MessageEvent; + CloseEvent: typeof CloseEvent; + ReadableStreamDefaultReader: typeof ReadableStreamDefaultReader; + ReadableStreamBYOBReader: typeof ReadableStreamBYOBReader; + ReadableStream: typeof ReadableStream; + WritableStream: typeof WritableStream; + WritableStreamDefaultWriter: typeof WritableStreamDefaultWriter; + TransformStream: typeof TransformStream; + ByteLengthQueuingStrategy: typeof ByteLengthQueuingStrategy; + CountQueuingStrategy: typeof CountQueuingStrategy; + ErrorEvent: typeof ErrorEvent; + MessageChannel: typeof MessageChannel; + 
MessagePort: typeof MessagePort; + EventSource: typeof EventSource; + ReadableStreamBYOBRequest: typeof ReadableStreamBYOBRequest; + ReadableStreamDefaultController: typeof ReadableStreamDefaultController; + ReadableByteStreamController: typeof ReadableByteStreamController; + WritableStreamDefaultController: typeof WritableStreamDefaultController; + TransformStreamDefaultController: typeof TransformStreamDefaultController; + CompressionStream: typeof CompressionStream; + DecompressionStream: typeof DecompressionStream; + TextEncoderStream: typeof TextEncoderStream; + TextDecoderStream: typeof TextDecoderStream; + Headers: typeof Headers; + Body: typeof Body; + Request: typeof Request; + Response: typeof Response; + WebSocket: typeof WebSocket; + WebSocketPair: typeof WebSocketPair; + WebSocketRequestResponsePair: typeof WebSocketRequestResponsePair; + AbortController: typeof AbortController; + AbortSignal: typeof AbortSignal; + TextDecoder: typeof TextDecoder; + TextEncoder: typeof TextEncoder; + navigator: Navigator; + Navigator: typeof Navigator; + URL: typeof URL; + URLSearchParams: typeof URLSearchParams; + URLPattern: typeof URLPattern; + Blob: typeof Blob; + File: typeof File; + FormData: typeof FormData; + Crypto: typeof Crypto; + SubtleCrypto: typeof SubtleCrypto; + CryptoKey: typeof CryptoKey; + CacheStorage: typeof CacheStorage; + Cache: typeof Cache; + FixedLengthStream: typeof FixedLengthStream; + IdentityTransformStream: typeof IdentityTransformStream; + HTMLRewriter: typeof HTMLRewriter; +} +declare function addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; +declare function removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; +/** + * The **`dispatchEvent()`** method of the EventTarget sends an Event to the object, (synchronously) invoking the affected event listeners in the 
appropriate order. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) + */ +declare function dispatchEvent(event: WorkerGlobalScopeEventMap[keyof WorkerGlobalScopeEventMap]): boolean; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/btoa) */ +declare function btoa(data: string): string; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/atob) */ +declare function atob(data: string): string; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setTimeout) */ +declare function setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setTimeout) */ +declare function setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/clearTimeout) */ +declare function clearTimeout(timeoutId: number | null): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setInterval) */ +declare function setInterval(callback: (...args: any[]) => void, msDelay?: number): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/setInterval) */ +declare function setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/clearInterval) */ +declare function clearInterval(timeoutId: number | null): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/queueMicrotask) */ +declare function queueMicrotask(task: Function): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/structuredClone) */ +declare function structuredClone(value: T, options?: StructuredSerializeOptions): T; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/reportError) */ +declare function reportError(error: any): void; +/* [MDN 
Reference](https://developer.mozilla.org/docs/Web/API/Window/fetch) */ +declare function fetch(input: RequestInfo | URL, init?: RequestInit): Promise; +declare const self: ServiceWorkerGlobalScope; +/** +* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. +* The Workers runtime implements the full surface of this API, but with some differences in +* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) +* compared to those implemented in most browsers. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) +*/ +declare const crypto: Crypto; +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare const caches: CacheStorage; +declare const scheduler: Scheduler; +/** +* The Workers runtime supports a subset of the Performance API, used to measure timing and performance, +* as well as timing of subrequests and other operations. 
+* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) +*/ +declare const performance: Performance; +declare const Cloudflare: Cloudflare; +declare const origin: string; +declare const navigator: Navigator; +interface TestController { +} +interface ExecutionContext { + waitUntil(promise: Promise): void; + passThroughOnException(): void; + readonly props: Props; +} +type ExportedHandlerFetchHandler = (request: Request>, env: Env, ctx: ExecutionContext) => Response | Promise; +type ExportedHandlerTailHandler = (events: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTraceHandler = (traces: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTailStreamHandler = (event: TailStream.TailEvent, env: Env, ctx: ExecutionContext) => TailStream.TailEventHandlerType | Promise; +type ExportedHandlerScheduledHandler = (controller: ScheduledController, env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerQueueHandler = (batch: MessageBatch, env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTestHandler = (controller: TestController, env: Env, ctx: ExecutionContext) => void | Promise; +interface ExportedHandler { + fetch?: ExportedHandlerFetchHandler; + tail?: ExportedHandlerTailHandler; + trace?: ExportedHandlerTraceHandler; + tailStream?: ExportedHandlerTailStreamHandler; + scheduled?: ExportedHandlerScheduledHandler; + test?: ExportedHandlerTestHandler; + email?: EmailExportedHandler; + queue?: ExportedHandlerQueueHandler; +} +interface StructuredSerializeOptions { + transfer?: any[]; +} +declare abstract class Navigator { + sendBeacon(url: string, body?: BodyInit): boolean; + readonly userAgent: string; + readonly hardwareConcurrency: number; + readonly language: string; + readonly languages: string[]; +} +interface AlarmInvocationInfo { + readonly isRetry: boolean; + readonly retryCount: number; +} +interface 
Cloudflare { + readonly compatibilityFlags: Record; +} +interface DurableObject { + fetch(request: Request): Response | Promise; + alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; + webSocketMessage?(ws: WebSocket, message: string | ArrayBuffer): void | Promise; + webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: boolean): void | Promise; + webSocketError?(ws: WebSocket, error: unknown): void | Promise; +} +type DurableObjectStub = Fetcher & { + readonly id: DurableObjectId; + readonly name?: string; +}; +interface DurableObjectId { + toString(): string; + equals(other: DurableObjectId): boolean; + readonly name?: string; +} +declare abstract class DurableObjectNamespace { + newUniqueId(options?: DurableObjectNamespaceNewUniqueIdOptions): DurableObjectId; + idFromName(name: string): DurableObjectId; + idFromString(id: string): DurableObjectId; + get(id: DurableObjectId, options?: DurableObjectNamespaceGetDurableObjectOptions): DurableObjectStub; + getByName(name: string, options?: DurableObjectNamespaceGetDurableObjectOptions): DurableObjectStub; + jurisdiction(jurisdiction: DurableObjectJurisdiction): DurableObjectNamespace; +} +type DurableObjectJurisdiction = "eu" | "fedramp" | "fedramp-high"; +interface DurableObjectNamespaceNewUniqueIdOptions { + jurisdiction?: DurableObjectJurisdiction; +} +type DurableObjectLocationHint = "wnam" | "enam" | "sam" | "weur" | "eeur" | "apac" | "oc" | "afr" | "me"; +type DurableObjectRoutingMode = "primary-only"; +interface DurableObjectNamespaceGetDurableObjectOptions { + locationHint?: DurableObjectLocationHint; + routingMode?: DurableObjectRoutingMode; +} +interface DurableObjectClass<_T extends Rpc.DurableObjectBranded | undefined = undefined> { +} +interface DurableObjectState { + waitUntil(promise: Promise): void; + readonly props: Props; + readonly id: DurableObjectId; + readonly storage: DurableObjectStorage; + container?: Container; + blockConcurrencyWhile(callback: () => Promise): Promise; 
+ acceptWebSocket(ws: WebSocket, tags?: string[]): void; + getWebSockets(tag?: string): WebSocket[]; + setWebSocketAutoResponse(maybeReqResp?: WebSocketRequestResponsePair): void; + getWebSocketAutoResponse(): WebSocketRequestResponsePair | null; + getWebSocketAutoResponseTimestamp(ws: WebSocket): Date | null; + setHibernatableWebSocketEventTimeout(timeoutMs?: number): void; + getHibernatableWebSocketEventTimeout(): number | null; + getTags(ws: WebSocket): string[]; + abort(reason?: string): void; +} +interface DurableObjectTransaction { + get(key: string, options?: DurableObjectGetOptions): Promise; + get(keys: string[], options?: DurableObjectGetOptions): Promise>; + list(options?: DurableObjectListOptions): Promise>; + put(key: string, value: T, options?: DurableObjectPutOptions): Promise; + put(entries: Record, options?: DurableObjectPutOptions): Promise; + delete(key: string, options?: DurableObjectPutOptions): Promise; + delete(keys: string[], options?: DurableObjectPutOptions): Promise; + rollback(): void; + getAlarm(options?: DurableObjectGetAlarmOptions): Promise; + setAlarm(scheduledTime: number | Date, options?: DurableObjectSetAlarmOptions): Promise; + deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; +} +interface DurableObjectStorage { + get(key: string, options?: DurableObjectGetOptions): Promise; + get(keys: string[], options?: DurableObjectGetOptions): Promise>; + list(options?: DurableObjectListOptions): Promise>; + put(key: string, value: T, options?: DurableObjectPutOptions): Promise; + put(entries: Record, options?: DurableObjectPutOptions): Promise; + delete(key: string, options?: DurableObjectPutOptions): Promise; + delete(keys: string[], options?: DurableObjectPutOptions): Promise; + deleteAll(options?: DurableObjectPutOptions): Promise; + transaction(closure: (txn: DurableObjectTransaction) => Promise): Promise; + getAlarm(options?: DurableObjectGetAlarmOptions): Promise; + setAlarm(scheduledTime: number | Date, options?: 
DurableObjectSetAlarmOptions): Promise; + deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; + sync(): Promise; + sql: SqlStorage; + kv: SyncKvStorage; + transactionSync(closure: () => T): T; + getCurrentBookmark(): Promise; + getBookmarkForTime(timestamp: number | Date): Promise; + onNextSessionRestoreBookmark(bookmark: string): Promise; +} +interface DurableObjectListOptions { + start?: string; + startAfter?: string; + end?: string; + prefix?: string; + reverse?: boolean; + limit?: number; + allowConcurrency?: boolean; + noCache?: boolean; +} +interface DurableObjectGetOptions { + allowConcurrency?: boolean; + noCache?: boolean; +} +interface DurableObjectGetAlarmOptions { + allowConcurrency?: boolean; +} +interface DurableObjectPutOptions { + allowConcurrency?: boolean; + allowUnconfirmed?: boolean; + noCache?: boolean; +} +interface DurableObjectSetAlarmOptions { + allowConcurrency?: boolean; + allowUnconfirmed?: boolean; +} +declare class WebSocketRequestResponsePair { + constructor(request: string, response: string); + get request(): string; + get response(): string; +} +interface AnalyticsEngineDataset { + writeDataPoint(event?: AnalyticsEngineDataPoint): void; +} +interface AnalyticsEngineDataPoint { + indexes?: ((ArrayBuffer | string) | null)[]; + doubles?: number[]; + blobs?: ((ArrayBuffer | string) | null)[]; +} +/** + * The **`Event`** interface represents an event which takes place on an `EventTarget`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event) + */ +declare class Event { + constructor(type: string, init?: EventInit); + /** + * The **`type`** read-only property of the Event interface returns a string containing the event's type. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/type) + */ + get type(): string; + /** + * The **`eventPhase`** read-only property of the being evaluated. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/eventPhase) + */ + get eventPhase(): number; + /** + * The read-only **`composed`** property of the or not the event will propagate across the shadow DOM boundary into the standard DOM. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composed) + */ + get composed(): boolean; + /** + * The **`bubbles`** read-only property of the Event interface indicates whether the event bubbles up through the DOM tree or not. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/bubbles) + */ + get bubbles(): boolean; + /** + * The **`cancelable`** read-only property of the Event interface indicates whether the event can be canceled, and therefore prevented as if the event never happened. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelable) + */ + get cancelable(): boolean; + /** + * The **`defaultPrevented`** read-only property of the Event interface returns a boolean value indicating whether or not the call to Event.preventDefault() canceled the event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/defaultPrevented) + */ + get defaultPrevented(): boolean; + /** + * The Event property **`returnValue`** indicates whether the default action for this event has been prevented or not. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/returnValue) + */ + get returnValue(): boolean; + /** + * The **`currentTarget`** read-only property of the Event interface identifies the element to which the event handler has been attached. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/currentTarget) + */ + get currentTarget(): EventTarget | undefined; + /** + * The read-only **`target`** property of the dispatched. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/target) + */ + get target(): EventTarget | undefined; + /** + * The deprecated **`Event.srcElement`** is an alias for the Event.target property. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/srcElement) + */ + get srcElement(): EventTarget | undefined; + /** + * The **`timeStamp`** read-only property of the Event interface returns the time (in milliseconds) at which the event was created. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/timeStamp) + */ + get timeStamp(): number; + /** + * The **`isTrusted`** read-only property of the when the event was generated by the user agent (including via user actions and programmatic methods such as HTMLElement.focus()), and `false` when the event was dispatched via The only exception is the `click` event, which initializes the `isTrusted` property to `false` in user agents. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/isTrusted) + */ + get isTrusted(): boolean; + /** + * The **`cancelBubble`** property of the Event interface is deprecated. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) + */ + get cancelBubble(): boolean; + /** + * The **`cancelBubble`** property of the Event interface is deprecated. + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) + */ + set cancelBubble(value: boolean); + /** + * The **`stopImmediatePropagation()`** method of the If several listeners are attached to the same element for the same event type, they are called in the order in which they were added. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopImmediatePropagation) + */ + stopImmediatePropagation(): void; + /** + * The **`preventDefault()`** method of the Event interface tells the user agent that if the event does not get explicitly handled, its default action should not be taken as it normally would be. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/preventDefault) + */ + preventDefault(): void; + /** + * The **`stopPropagation()`** method of the Event interface prevents further propagation of the current event in the capturing and bubbling phases. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopPropagation) + */ + stopPropagation(): void; + /** + * The **`composedPath()`** method of the Event interface returns the event's path which is an array of the objects on which listeners will be invoked. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composedPath) + */ + composedPath(): EventTarget[]; + static readonly NONE: number; + static readonly CAPTURING_PHASE: number; + static readonly AT_TARGET: number; + static readonly BUBBLING_PHASE: number; +} +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} +type EventListener = (event: EventType) => void; +interface EventListenerObject { + handleEvent(event: EventType): void; +} +type EventListenerOrEventListenerObject = EventListener | EventListenerObject; +/** + * The **`EventTarget`** interface is implemented by objects that can receive events and may have listeners for them. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget) + */ +declare class EventTarget = Record> { + constructor(); + /** + * The **`addEventListener()`** method of the EventTarget interface sets up a function that will be called whenever the specified event is delivered to the target. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/addEventListener) + */ + addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; + /** + * The **`removeEventListener()`** method of the EventTarget interface removes an event listener previously registered with EventTarget.addEventListener() from the target. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/removeEventListener) + */ + removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; + /** + * The **`dispatchEvent()`** method of the EventTarget sends an Event to the object, (synchronously) invoking the affected event listeners in the appropriate order. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) + */ + dispatchEvent(event: EventMap[keyof EventMap]): boolean; +} +interface EventTargetEventListenerOptions { + capture?: boolean; +} +interface EventTargetAddEventListenerOptions { + capture?: boolean; + passive?: boolean; + once?: boolean; + signal?: AbortSignal; +} +interface EventTargetHandlerObject { + handleEvent: (event: Event) => any | undefined; +} +/** + * The **`AbortController`** interface represents a controller object that allows you to abort one or more Web requests as and when desired. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController) + */ +declare class AbortController { + constructor(); + /** + * The **`signal`** read-only property of the AbortController interface returns an AbortSignal object instance, which can be used to communicate with/abort an asynchronous operation as desired. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/signal) + */ + get signal(): AbortSignal; + /** + * The **`abort()`** method of the AbortController interface aborts an asynchronous operation before it has completed. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/abort) + */ + abort(reason?: any): void; +} +/** + * The **`AbortSignal`** interface represents a signal object that allows you to communicate with an asynchronous operation (such as a fetch request) and abort it if required via an AbortController object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal) + */ +declare abstract class AbortSignal extends EventTarget { + /** + * The **`AbortSignal.abort()`** static method returns an AbortSignal that is already set as aborted (and which does not trigger an AbortSignal/abort_event event). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_static) + */ + static abort(reason?: any): AbortSignal; + /** + * The **`AbortSignal.timeout()`** static method returns an AbortSignal that will automatically abort after a specified time. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/timeout_static) + */ + static timeout(delay: number): AbortSignal; + /** + * The **`AbortSignal.any()`** static method takes an iterable of abort signals and returns an AbortSignal. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/any_static) + */ + static any(signals: AbortSignal[]): AbortSignal; + /** + * The **`aborted`** read-only property returns a value that indicates whether the asynchronous operations the signal is communicating with are aborted (`true`) or not (`false`). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/aborted) + */ + get aborted(): boolean; + /** + * The **`reason`** read-only property returns a JavaScript value that indicates the abort reason. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/reason) + */ + get reason(): any; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ + get onabort(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ + set onabort(value: any | null); + /** + * The **`throwIfAborted()`** method throws the signal's abort AbortSignal.reason if the signal has been aborted; otherwise it does nothing. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/throwIfAborted) + */ + throwIfAborted(): void; +} +interface Scheduler { + wait(delay: number, maybeOptions?: SchedulerWaitOptions): Promise; +} +interface SchedulerWaitOptions { + signal?: AbortSignal; +} +/** + * The **`ExtendableEvent`** interface extends the lifetime of the `install` and `activate` events dispatched on the global scope as part of the service worker lifecycle. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent) + */ +declare abstract class ExtendableEvent extends Event { + /** + * The **`ExtendableEvent.waitUntil()`** method tells the event dispatcher that work is ongoing. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent/waitUntil) + */ + waitUntil(promise: Promise): void; +} +/** + * The **`CustomEvent`** interface represents events initialized by an application for any purpose. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent) + */ +declare class CustomEvent extends Event { + constructor(type: string, init?: CustomEventCustomEventInit); + /** + * The read-only **`detail`** property of the CustomEvent interface returns any data passed when initializing the event. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent/detail) + */ + get detail(): T; +} +interface CustomEventCustomEventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; + detail?: any; +} +/** + * The **`Blob`** interface represents a blob, which is a file-like object of immutable, raw data; they can be read as text or binary data, or converted into a ReadableStream so its methods can be used for processing the data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob) + */ +declare class Blob { + constructor(type?: ((ArrayBuffer | ArrayBufferView) | string | Blob)[], options?: BlobOptions); + /** + * The **`size`** read-only property of the Blob interface returns the size of the Blob or File in bytes. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) + */ + get size(): number; + /** + * The **`type`** read-only property of the Blob interface returns the MIME type of the file. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) + */ + get type(): string; + /** + * The **`slice()`** method of the Blob interface creates and returns a new `Blob` object which contains data from a subset of the blob on which it's called. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * The **`arrayBuffer()`** method of the Blob interface returns a Promise that resolves with the contents of the blob as binary data contained in an ArrayBuffer. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/arrayBuffer) + */ + arrayBuffer(): Promise; + /** + * The **`bytes()`** method of the Blob interface returns a Promise that resolves with a Uint8Array containing the contents of the blob as an array of bytes. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/bytes) + */ + bytes(): Promise; + /** + * The **`text()`** method of the string containing the contents of the blob, interpreted as UTF-8. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) + */ + text(): Promise; + /** + * The **`stream()`** method of the Blob interface returns a ReadableStream which upon reading returns the data contained within the `Blob`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/stream) + */ + stream(): ReadableStream; +} +interface BlobOptions { + type?: string; +} +/** + * The **`File`** interface provides information about files and allows JavaScript in a web page to access their content. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File) + */ +declare class File extends Blob { + constructor(bits: ((ArrayBuffer | ArrayBufferView) | string | Blob)[] | undefined, name: string, options?: FileOptions); + /** + * The **`name`** read-only property of the File interface returns the name of the file represented by a File object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) + */ + get name(): string; + /** + * The **`lastModified`** read-only property of the File interface provides the last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) + */ + get lastModified(): number; +} +interface FileOptions { + type?: string; + lastModified?: number; +} +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare abstract class CacheStorage { + /** + * The **`open()`** method of the the Cache object matching the `cacheName`. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CacheStorage/open) + */ + open(cacheName: string): Promise; + readonly default: Cache; +} +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare abstract class Cache { + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#delete) */ + delete(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#match) */ + match(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#put) */ + put(request: RequestInfo | URL, response: Response): Promise; +} +interface CacheQueryOptions { + ignoreMethod?: boolean; +} +/** +* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. +* The Workers runtime implements the full surface of this API, but with some differences in +* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) +* compared to those implemented in most browsers. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) +*/ +declare abstract class Crypto { + /** + * The **`Crypto.subtle`** read-only property returns a cryptographic operations. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/subtle) + */ + get subtle(): SubtleCrypto; + /** + * The **`Crypto.getRandomValues()`** method lets you get cryptographically strong random values. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/getRandomValues) + */ + getRandomValues(buffer: T): T; + /** + * The **`randomUUID()`** method of the Crypto interface is used to generate a v4 UUID using a cryptographically secure random number generator. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/randomUUID) + */ + randomUUID(): string; + DigestStream: typeof DigestStream; +} +/** + * The **`SubtleCrypto`** interface of the Web Crypto API provides a number of low-level cryptographic functions. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto) + */ +declare abstract class SubtleCrypto { + /** + * The **`encrypt()`** method of the SubtleCrypto interface encrypts data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/encrypt) + */ + encrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, plainText: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`decrypt()`** method of the SubtleCrypto interface decrypts some encrypted data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/decrypt) + */ + decrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, cipherText: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`sign()`** method of the SubtleCrypto interface generates a digital signature. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/sign) + */ + sign(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, data: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`verify()`** method of the SubtleCrypto interface verifies a digital signature. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/verify) + */ + verify(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, signature: ArrayBuffer | ArrayBufferView, data: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`digest()`** method of the SubtleCrypto interface generates a _digest_ of the given data, using the specified hash function. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/digest) + */ + digest(algorithm: string | SubtleCryptoHashAlgorithm, data: ArrayBuffer | ArrayBufferView): Promise; + /** + * The **`generateKey()`** method of the SubtleCrypto interface is used to generate a new key (for symmetric algorithms) or key pair (for public-key algorithms). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey) + */ + generateKey(algorithm: string | SubtleCryptoGenerateKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /** + * The **`deriveKey()`** method of the SubtleCrypto interface can be used to derive a secret key from a master key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey) + */ + deriveKey(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, derivedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /** + * The **`deriveBits()`** method of the key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveBits) + */ + deriveBits(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, length?: number | null): Promise; + /** + * The **`importKey()`** method of the SubtleCrypto interface imports a key: that is, it takes as input a key in an external, portable format and gives you a CryptoKey object that you can use in the Web Crypto API. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey) + */ + importKey(format: string, keyData: (ArrayBuffer | ArrayBufferView) | JsonWebKey, algorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /** + * The **`exportKey()`** method of the SubtleCrypto interface exports a key: that is, it takes as input a CryptoKey object and gives you the key in an external, portable format. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/exportKey) + */ + exportKey(format: string, key: CryptoKey): Promise; + /** + * The **`wrapKey()`** method of the SubtleCrypto interface 'wraps' a key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/wrapKey) + */ + wrapKey(format: string, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: string | SubtleCryptoEncryptAlgorithm): Promise; + /** + * The **`unwrapKey()`** method of the SubtleCrypto interface 'unwraps' a key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey) + */ + unwrapKey(format: string, wrappedKey: ArrayBuffer | ArrayBufferView, unwrappingKey: CryptoKey, unwrapAlgorithm: string | SubtleCryptoEncryptAlgorithm, unwrappedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + timingSafeEqual(a: ArrayBuffer | ArrayBufferView, b: ArrayBuffer | ArrayBufferView): boolean; +} +/** + * The **`CryptoKey`** interface of the Web Crypto API represents a cryptographic key obtained from one of the SubtleCrypto methods SubtleCrypto.generateKey, SubtleCrypto.deriveKey, SubtleCrypto.importKey, or SubtleCrypto.unwrapKey. + * Available only in secure contexts. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey) + */ +declare abstract class CryptoKey { + /** + * The read-only **`type`** property of the CryptoKey interface indicates which kind of key is represented by the object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/type) + */ + readonly type: string; + /** + * The read-only **`extractable`** property of the CryptoKey interface indicates whether or not the key may be extracted using `SubtleCrypto.exportKey()` or `SubtleCrypto.wrapKey()`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/extractable) + */ + readonly extractable: boolean; + /** + * The read-only **`algorithm`** property of the CryptoKey interface returns an object describing the algorithm for which this key can be used, and any associated extra parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/algorithm) + */ + readonly algorithm: CryptoKeyKeyAlgorithm | CryptoKeyAesKeyAlgorithm | CryptoKeyHmacKeyAlgorithm | CryptoKeyRsaKeyAlgorithm | CryptoKeyEllipticKeyAlgorithm | CryptoKeyArbitraryKeyAlgorithm; + /** + * The read-only **`usages`** property of the CryptoKey interface indicates what can be done with the key. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/usages) + */ + readonly usages: string[]; +} +interface CryptoKeyPair { + publicKey: CryptoKey; + privateKey: CryptoKey; +} +interface JsonWebKey { + kty: string; + use?: string; + key_ops?: string[]; + alg?: string; + ext?: boolean; + crv?: string; + x?: string; + y?: string; + d?: string; + n?: string; + e?: string; + p?: string; + q?: string; + dp?: string; + dq?: string; + qi?: string; + oth?: RsaOtherPrimesInfo[]; + k?: string; +} +interface RsaOtherPrimesInfo { + r?: string; + d?: string; + t?: string; +} +interface SubtleCryptoDeriveKeyAlgorithm { + name: string; + salt?: (ArrayBuffer | ArrayBufferView); + iterations?: number; + hash?: (string | SubtleCryptoHashAlgorithm); + $public?: CryptoKey; + info?: (ArrayBuffer | ArrayBufferView); +} +interface SubtleCryptoEncryptAlgorithm { + name: string; + iv?: (ArrayBuffer | ArrayBufferView); + additionalData?: (ArrayBuffer | ArrayBufferView); + tagLength?: number; + counter?: (ArrayBuffer | ArrayBufferView); + length?: number; + label?: (ArrayBuffer | ArrayBufferView); +} +interface SubtleCryptoGenerateKeyAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + modulusLength?: number; + publicExponent?: (ArrayBuffer | ArrayBufferView); + length?: number; + namedCurve?: string; +} +interface SubtleCryptoHashAlgorithm { + name: string; +} +interface SubtleCryptoImportKeyAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + length?: number; + namedCurve?: string; + compressed?: boolean; +} +interface SubtleCryptoSignAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + dataLength?: number; + saltLength?: number; +} +interface CryptoKeyKeyAlgorithm { + name: string; +} +interface CryptoKeyAesKeyAlgorithm { + name: string; + length: number; +} +interface CryptoKeyHmacKeyAlgorithm { + name: string; + hash: CryptoKeyKeyAlgorithm; + length: number; +} +interface 
CryptoKeyRsaKeyAlgorithm { + name: string; + modulusLength: number; + publicExponent: ArrayBuffer | ArrayBufferView; + hash?: CryptoKeyKeyAlgorithm; +} +interface CryptoKeyEllipticKeyAlgorithm { + name: string; + namedCurve: string; +} +interface CryptoKeyArbitraryKeyAlgorithm { + name: string; + hash?: CryptoKeyKeyAlgorithm; + namedCurve?: string; + length?: number; +} +declare class DigestStream extends WritableStream { + constructor(algorithm: string | SubtleCryptoHashAlgorithm); + readonly digest: Promise; + get bytesWritten(): number | bigint; +} +/** + * The **`TextDecoder`** interface represents a decoder for a specific text encoding, such as `UTF-8`, `ISO-8859-2`, `KOI8-R`, `GBK`, etc. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder) + */ +declare class TextDecoder { + constructor(label?: string, options?: TextDecoderConstructorOptions); + /** + * The **`TextDecoder.decode()`** method returns a string containing text decoded from the buffer passed as a parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder/decode) + */ + decode(input?: (ArrayBuffer | ArrayBufferView), options?: TextDecoderDecodeOptions): string; + get encoding(): string; + get fatal(): boolean; + get ignoreBOM(): boolean; +} +/** + * The **`TextEncoder`** interface takes a stream of code points as input and emits a stream of UTF-8 bytes. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder) + */ +declare class TextEncoder { + constructor(); + /** + * The **`TextEncoder.encode()`** method takes a string as input, and returns a Global_Objects/Uint8Array containing the text given in parameters encoded with the specific method for that TextEncoder object. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encode) + */ + encode(input?: string): Uint8Array; + /** + * The **`TextEncoder.encodeInto()`** method takes a string to encode and a destination Uint8Array to put resulting UTF-8 encoded text into, and returns a dictionary object indicating the progress of the encoding. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encodeInto) + */ + encodeInto(input: string, buffer: Uint8Array): TextEncoderEncodeIntoResult; + get encoding(): string; +} +interface TextDecoderConstructorOptions { + fatal: boolean; + ignoreBOM: boolean; +} +interface TextDecoderDecodeOptions { + stream: boolean; +} +interface TextEncoderEncodeIntoResult { + read: number; + written: number; +} +/** + * The **`ErrorEvent`** interface represents events providing information related to errors in scripts or in files. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent) + */ +declare class ErrorEvent extends Event { + constructor(type: string, init?: ErrorEventErrorEventInit); + /** + * The **`filename`** read-only property of the ErrorEvent interface returns a string containing the name of the script file in which the error occurred. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/filename) + */ + get filename(): string; + /** + * The **`message`** read-only property of the ErrorEvent interface returns a string containing a human-readable error message describing the problem. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/message) + */ + get message(): string; + /** + * The **`lineno`** read-only property of the ErrorEvent interface returns an integer containing the line number of the script file on which the error occurred. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/lineno) + */ + get lineno(): number; + /** + * The **`colno`** read-only property of the ErrorEvent interface returns an integer containing the column number of the script file on which the error occurred. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/colno) + */ + get colno(): number; + /** + * The **`error`** read-only property of the ErrorEvent interface returns a JavaScript value, such as an Error or DOMException, representing the error associated with this event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent/error) + */ + get error(): any; +} +interface ErrorEventErrorEventInit { + message?: string; + filename?: string; + lineno?: number; + colno?: number; + error?: any; +} +/** + * The **`MessageEvent`** interface represents a message received by a target object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent) + */ +declare class MessageEvent extends Event { + constructor(type: string, initializer: MessageEventInit); + /** + * The **`data`** read-only property of the The data sent by the message emitter; this can be any data type, depending on what originated this event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/data) + */ + readonly data: any; + /** + * The **`origin`** read-only property of the origin of the message emitter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/origin) + */ + readonly origin: string | null; + /** + * The **`lastEventId`** read-only property of the unique ID for the event. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/lastEventId) + */ + readonly lastEventId: string; + /** + * The **`source`** read-only property of the a WindowProxy, MessagePort, or a `MessageEventSource` (which can be a WindowProxy, message emitter. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/source) + */ + readonly source: MessagePort | null; + /** + * The **`ports`** read-only property of the containing all MessagePort objects sent with the message, in order. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/ports) + */ + readonly ports: MessagePort[]; +} +interface MessageEventInit { + data: ArrayBuffer | string; +} +/** + * The **`PromiseRejectionEvent`** interface represents events which are sent to the global script context when JavaScript Promises are rejected. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent) + */ +declare abstract class PromiseRejectionEvent extends Event { + /** + * The PromiseRejectionEvent interface's **`promise`** read-only property indicates the JavaScript rejected. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/promise) + */ + readonly promise: Promise; + /** + * The PromiseRejectionEvent **`reason`** read-only property is any JavaScript value or Object which provides the reason passed into Promise.reject(). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/reason) + */ + readonly reason: any; +} +/** + * The **`FormData`** interface provides a way to construct a set of key/value pairs representing form fields and their values, which can be sent using the Window/fetch, XMLHttpRequest.send() or navigator.sendBeacon() methods. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData) + */ +declare class FormData { + constructor(); + /** + * The **`append()`** method of the FormData interface appends a new value onto an existing key inside a `FormData` object, or adds the key if it does not already exist. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) + */ + append(name: string, value: string | Blob): void; + /** + * The **`append()`** method of the FormData interface appends a new value onto an existing key inside a `FormData` object, or adds the key if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) + */ + append(name: string, value: string): void; + /** + * The **`append()`** method of the FormData interface appends a new value onto an existing key inside a `FormData` object, or adds the key if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) + */ + append(name: string, value: Blob, filename?: string): void; + /** + * The **`delete()`** method of the FormData interface deletes a key and its value(s) from a `FormData` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/delete) + */ + delete(name: string): void; + /** + * The **`get()`** method of the FormData interface returns the first value associated with a given key from within a `FormData` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/get) + */ + get(name: string): (File | string) | null; + /** + * The **`getAll()`** method of the FormData interface returns all the values associated with a given key from within a `FormData` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/getAll) + */ + getAll(name: string): (File | string)[]; + /** + * The **`has()`** method of the FormData interface returns whether a `FormData` object contains a certain key. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/has) + */ + has(name: string): boolean; + /** + * The **`set()`** method of the FormData interface sets a new value for an existing key inside a `FormData` object, or adds the key/value if it does not already exist. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) + */ + set(name: string, value: string | Blob): void; + /** + * The **`set()`** method of the FormData interface sets a new value for an existing key inside a `FormData` object, or adds the key/value if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) + */ + set(name: string, value: string): void; + /** + * The **`set()`** method of the FormData interface sets a new value for an existing key inside a `FormData` object, or adds the key/value if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) + */ + set(name: string, value: Blob, filename?: string): void; + /* Returns an array of key, value pairs for every entry in the list. */ + entries(): IterableIterator<[ + key: string, + value: File | string + ]>; + /* Returns a list of keys in the list. */ + keys(): IterableIterator; + /* Returns a list of values in the list. 
*/ + values(): IterableIterator<(File | string)>; + forEach(callback: (this: This, value: File | string, key: string, parent: FormData) => void, thisArg?: This): void; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: File | string + ]>; +} +interface ContentOptions { + html?: boolean; +} +declare class HTMLRewriter { + constructor(); + on(selector: string, handlers: HTMLRewriterElementContentHandlers): HTMLRewriter; + onDocument(handlers: HTMLRewriterDocumentContentHandlers): HTMLRewriter; + transform(response: Response): Response; +} +interface HTMLRewriterElementContentHandlers { + element?(element: Element): void | Promise; + comments?(comment: Comment): void | Promise; + text?(element: Text): void | Promise; +} +interface HTMLRewriterDocumentContentHandlers { + doctype?(doctype: Doctype): void | Promise; + comments?(comment: Comment): void | Promise; + text?(text: Text): void | Promise; + end?(end: DocumentEnd): void | Promise; +} +interface Doctype { + readonly name: string | null; + readonly publicId: string | null; + readonly systemId: string | null; +} +interface Element { + tagName: string; + readonly attributes: IterableIterator; + readonly removed: boolean; + readonly namespaceURI: string; + getAttribute(name: string): string | null; + hasAttribute(name: string): boolean; + setAttribute(name: string, value: string): Element; + removeAttribute(name: string): Element; + before(content: string | ReadableStream | Response, options?: ContentOptions): Element; + after(content: string | ReadableStream | Response, options?: ContentOptions): Element; + prepend(content: string | ReadableStream | Response, options?: ContentOptions): Element; + append(content: string | ReadableStream | Response, options?: ContentOptions): Element; + replace(content: string | ReadableStream | Response, options?: ContentOptions): Element; + remove(): Element; + removeAndKeepContent(): Element; + setInnerContent(content: string | ReadableStream | Response, options?: 
ContentOptions): Element; + onEndTag(handler: (tag: EndTag) => void | Promise): void; +} +interface EndTag { + name: string; + before(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; + after(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; + remove(): EndTag; +} +interface Comment { + text: string; + readonly removed: boolean; + before(content: string, options?: ContentOptions): Comment; + after(content: string, options?: ContentOptions): Comment; + replace(content: string, options?: ContentOptions): Comment; + remove(): Comment; +} +interface Text { + readonly text: string; + readonly lastInTextNode: boolean; + readonly removed: boolean; + before(content: string | ReadableStream | Response, options?: ContentOptions): Text; + after(content: string | ReadableStream | Response, options?: ContentOptions): Text; + replace(content: string | ReadableStream | Response, options?: ContentOptions): Text; + remove(): Text; +} +interface DocumentEnd { + append(content: string, options?: ContentOptions): DocumentEnd; +} +/** + * This is the event type for `fetch` events dispatched on the ServiceWorkerGlobalScope. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent) + */ +declare abstract class FetchEvent extends ExtendableEvent { + /** + * The **`request`** read-only property of the the event handler. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/request) + */ + readonly request: Request; + /** + * The **`respondWith()`** method of allows you to provide a promise for a Response yourself. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/respondWith) + */ + respondWith(promise: Response | Promise): void; + passThroughOnException(): void; +} +type HeadersInit = Headers | Iterable> | Record; +/** + * The **`Headers`** interface of the Fetch API allows you to perform various actions on HTTP request and response headers. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers) + */ +declare class Headers { + constructor(init?: HeadersInit); + /** + * The **`get()`** method of the Headers interface returns a byte string of all the values of a header within a `Headers` object with a given name. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/get) + */ + get(name: string): string | null; + getAll(name: string): string[]; + /** + * The **`getSetCookie()`** method of the Headers interface returns an array containing the values of all Set-Cookie headers associated with a response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/getSetCookie) + */ + getSetCookie(): string[]; + /** + * The **`has()`** method of the Headers interface returns a boolean stating whether a `Headers` object contains a certain header. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/has) + */ + has(name: string): boolean; + /** + * The **`set()`** method of the Headers interface sets a new value for an existing header inside a `Headers` object, or adds the header if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/set) + */ + set(name: string, value: string): void; + /** + * The **`append()`** method of the Headers interface appends a new value onto an existing header inside a `Headers` object, or adds the header if it does not already exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/append) + */ + append(name: string, value: string): void; + /** + * The **`delete()`** method of the Headers interface deletes a header from the current `Headers` object. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/delete) + */ + delete(name: string): void; + forEach(callback: (this: This, value: string, key: string, parent: Headers) => void, thisArg?: This): void; + /* Returns an iterator allowing to go through all key/value pairs contained in this object. */ + entries(): IterableIterator<[ + key: string, + value: string + ]>; + /* Returns an iterator allowing to go through all keys of the key/value pairs contained in this object. */ + keys(): IterableIterator; + /* Returns an iterator allowing to go through all values of the key/value pairs contained in this object. */ + values(): IterableIterator; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: string + ]>; +} +type BodyInit = ReadableStream | string | ArrayBuffer | ArrayBufferView | Blob | URLSearchParams | FormData; +declare abstract class Body { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/body) */ + get body(): ReadableStream | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bodyUsed) */ + get bodyUsed(): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/arrayBuffer) */ + arrayBuffer(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bytes) */ + bytes(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/text) */ + text(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/json) */ + json(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/formData) */ + formData(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/blob) */ + blob(): Promise; +} +/** + * The **`Response`** interface of the Fetch API represents the response to a request. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) + */ +declare var Response: { + prototype: Response; + new (body?: BodyInit | null, init?: ResponseInit): Response; + error(): Response; + redirect(url: string, status?: number): Response; + json(any: any, maybeInit?: (ResponseInit | Response)): Response; +}; +/** + * The **`Response`** interface of the Fetch API represents the response to a request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) + */ +interface Response extends Body { + /** + * The **`clone()`** method of the Response interface creates a clone of a response object, identical in every way, but stored in a different variable. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/clone) + */ + clone(): Response; + /** + * The **`status`** read-only property of the Response interface contains the HTTP status codes of the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/status) + */ + status: number; + /** + * The **`statusText`** read-only property of the Response interface contains the status message corresponding to the HTTP status code in Response.status. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/statusText) + */ + statusText: string; + /** + * The **`headers`** read-only property of the with the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/headers) + */ + headers: Headers; + /** + * The **`ok`** read-only property of the Response interface contains a Boolean stating whether the response was successful (status in the range 200-299) or not. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/ok) + */ + ok: boolean; + /** + * The **`redirected`** read-only property of the Response interface indicates whether or not the response is the result of a request you made which was redirected. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/redirected) + */ + redirected: boolean; + /** + * The **`url`** read-only property of the Response interface contains the URL of the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/url) + */ + url: string; + webSocket: WebSocket | null; + cf: any | undefined; + /** + * The **`type`** read-only property of the Response interface contains the type of the response. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/type) + */ + type: "default" | "error"; +} +interface ResponseInit { + status?: number; + statusText?: string; + headers?: HeadersInit; + cf?: any; + webSocket?: (WebSocket | null); + encodeBody?: "automatic" | "manual"; +} +type RequestInfo> = Request | string; +/** + * The **`Request`** interface of the Fetch API represents a resource request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) + */ +declare var Request: { + prototype: Request; + new >(input: RequestInfo | URL, init?: RequestInit): Request; +}; +/** + * The **`Request`** interface of the Fetch API represents a resource request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) + */ +interface Request> extends Body { + /** + * The **`clone()`** method of the Request interface creates a copy of the current `Request` object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/clone) + */ + clone(): Request; + /** + * The **`method`** read-only property of the `POST`, etc.) A String indicating the method of the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/method) + */ + method: string; + /** + * The **`url`** read-only property of the Request interface contains the URL of the request. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/url) + */ + url: string; + /** + * The **`headers`** read-only property of the with the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/headers) + */ + headers: Headers; + /** + * The **`redirect`** read-only property of the Request interface contains the mode for how redirects are handled. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/redirect) + */ + redirect: string; + fetcher: Fetcher | null; + /** + * The read-only **`signal`** property of the Request interface returns the AbortSignal associated with the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/signal) + */ + signal: AbortSignal; + cf?: Cf; + /** + * The **`integrity`** read-only property of the Request interface contains the subresource integrity value of the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/integrity) + */ + integrity: string; + /** + * The **`keepalive`** read-only property of the Request interface contains the request's `keepalive` setting (`true` or `false`), which indicates whether the browser will keep the associated request alive if the page that initiated it is unloaded before the request is complete. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/keepalive) + */ + keepalive: boolean; + /** + * The **`cache`** read-only property of the Request interface contains the cache mode of the request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/cache) + */ + cache?: "no-store" | "no-cache"; +} +interface RequestInit { + /* A string to set request's method. */ + method?: string; + /* A Headers object, an object literal, or an array of two-item arrays to set request's headers. */ + headers?: HeadersInit; + /* A BodyInit object or null to set request's body. 
*/ + body?: BodyInit | null; + /* A string indicating whether request follows redirects, results in an error upon encountering a redirect, or returns the redirect (in an opaque fashion). Sets request's redirect. */ + redirect?: string; + fetcher?: (Fetcher | null); + cf?: Cf; + /* A string indicating how the request will interact with the browser's cache to set request's cache. */ + cache?: "no-store" | "no-cache"; + /* A cryptographic hash of the resource to be fetched by request. Sets request's integrity. */ + integrity?: string; + /* An AbortSignal to set request's signal. */ + signal?: (AbortSignal | null); + encodeResponseBody?: "automatic" | "manual"; +} +type Service Rpc.WorkerEntrypointBranded) | Rpc.WorkerEntrypointBranded | ExportedHandler | undefined = undefined> = T extends new (...args: any[]) => Rpc.WorkerEntrypointBranded ? Fetcher> : T extends Rpc.WorkerEntrypointBranded ? Fetcher : T extends Exclude ? never : Fetcher; +type Fetcher = (T extends Rpc.EntrypointBranded ? Rpc.Provider : unknown) & { + fetch(input: RequestInfo | URL, init?: RequestInit): Promise; + connect(address: SocketAddress | string, options?: SocketOptions): Socket; +}; +interface KVNamespaceListKey { + name: Key; + expiration?: number; + metadata?: Metadata; +} +type KVNamespaceListResult = { + list_complete: false; + keys: KVNamespaceListKey[]; + cursor: string; + cacheStatus: string | null; +} | { + list_complete: true; + keys: KVNamespaceListKey[]; + cacheStatus: string | null; +}; +interface KVNamespace { + get(key: Key, options?: Partial>): Promise; + get(key: Key, type: "text"): Promise; + get(key: Key, type: "json"): Promise; + get(key: Key, type: "arrayBuffer"): Promise; + get(key: Key, type: "stream"): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"text">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"json">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"arrayBuffer">): Promise; + get(key: Key, options?: 
KVNamespaceGetOptions<"stream">): Promise; + get(key: Array, type: "text"): Promise>; + get(key: Array, type: "json"): Promise>; + get(key: Array, options?: Partial>): Promise>; + get(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>; + get(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>; + list(options?: KVNamespaceListOptions): Promise>; + put(key: Key, value: string | ArrayBuffer | ArrayBufferView | ReadableStream, options?: KVNamespacePutOptions): Promise; + getWithMetadata(key: Key, options?: Partial>): Promise>; + getWithMetadata(key: Key, type: "text"): Promise>; + getWithMetadata(key: Key, type: "json"): Promise>; + getWithMetadata(key: Key, type: "arrayBuffer"): Promise>; + getWithMetadata(key: Key, type: "stream"): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"text">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"json">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"arrayBuffer">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"stream">): Promise>; + getWithMetadata(key: Array, type: "text"): Promise>>; + getWithMetadata(key: Array, type: "json"): Promise>>; + getWithMetadata(key: Array, options?: Partial>): Promise>>; + getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>>; + getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>>; + delete(key: Key): Promise; +} +interface KVNamespaceListOptions { + limit?: number; + prefix?: (string | null); + cursor?: (string | null); +} +interface KVNamespaceGetOptions { + type: Type; + cacheTtl?: number; +} +interface KVNamespacePutOptions { + expiration?: number; + expirationTtl?: number; + metadata?: (any | null); +} +interface KVNamespaceGetWithMetadataResult { + value: Value | null; + metadata: Metadata | null; + cacheStatus: string | null; +} +type QueueContentType = "text" | "bytes" | "json" | "v8"; +interface Queue { + send(message: 
Body, options?: QueueSendOptions): Promise; + sendBatch(messages: Iterable>, options?: QueueSendBatchOptions): Promise; +} +interface QueueSendOptions { + contentType?: QueueContentType; + delaySeconds?: number; +} +interface QueueSendBatchOptions { + delaySeconds?: number; +} +interface MessageSendRequest { + body: Body; + contentType?: QueueContentType; + delaySeconds?: number; +} +interface QueueRetryOptions { + delaySeconds?: number; +} +interface Message { + readonly id: string; + readonly timestamp: Date; + readonly body: Body; + readonly attempts: number; + retry(options?: QueueRetryOptions): void; + ack(): void; +} +interface QueueEvent extends ExtendableEvent { + readonly messages: readonly Message[]; + readonly queue: string; + retryAll(options?: QueueRetryOptions): void; + ackAll(): void; +} +interface MessageBatch { + readonly messages: readonly Message[]; + readonly queue: string; + retryAll(options?: QueueRetryOptions): void; + ackAll(): void; +} +interface R2Error extends Error { + readonly name: string; + readonly code: number; + readonly message: string; + readonly action: string; + readonly stack: any; +} +interface R2ListOptions { + limit?: number; + prefix?: string; + cursor?: string; + delimiter?: string; + startAfter?: string; + include?: ("httpMetadata" | "customMetadata")[]; +} +declare abstract class R2Bucket { + head(key: string): Promise; + get(key: string, options: R2GetOptions & { + onlyIf: R2Conditional | Headers; + }): Promise; + get(key: string, options?: R2GetOptions): Promise; + put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions & { + onlyIf: R2Conditional | Headers; + }): Promise; + put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions): Promise; + createMultipartUpload(key: string, options?: R2MultipartOptions): Promise; + resumeMultipartUpload(key: string, uploadId: string): R2MultipartUpload; + 
delete(keys: string | string[]): Promise; + list(options?: R2ListOptions): Promise; +} +interface R2MultipartUpload { + readonly key: string; + readonly uploadId: string; + uploadPart(partNumber: number, value: ReadableStream | (ArrayBuffer | ArrayBufferView) | string | Blob, options?: R2UploadPartOptions): Promise; + abort(): Promise; + complete(uploadedParts: R2UploadedPart[]): Promise; +} +interface R2UploadedPart { + partNumber: number; + etag: string; +} +declare abstract class R2Object { + readonly key: string; + readonly version: string; + readonly size: number; + readonly etag: string; + readonly httpEtag: string; + readonly checksums: R2Checksums; + readonly uploaded: Date; + readonly httpMetadata?: R2HTTPMetadata; + readonly customMetadata?: Record; + readonly range?: R2Range; + readonly storageClass: string; + readonly ssecKeyMd5?: string; + writeHttpMetadata(headers: Headers): void; +} +interface R2ObjectBody extends R2Object { + get body(): ReadableStream; + get bodyUsed(): boolean; + arrayBuffer(): Promise; + bytes(): Promise; + text(): Promise; + json(): Promise; + blob(): Promise; +} +type R2Range = { + offset: number; + length?: number; +} | { + offset?: number; + length: number; +} | { + suffix: number; +}; +interface R2Conditional { + etagMatches?: string; + etagDoesNotMatch?: string; + uploadedBefore?: Date; + uploadedAfter?: Date; + secondsGranularity?: boolean; +} +interface R2GetOptions { + onlyIf?: (R2Conditional | Headers); + range?: (R2Range | Headers); + ssecKey?: (ArrayBuffer | string); +} +interface R2PutOptions { + onlyIf?: (R2Conditional | Headers); + httpMetadata?: (R2HTTPMetadata | Headers); + customMetadata?: Record; + md5?: ((ArrayBuffer | ArrayBufferView) | string); + sha1?: ((ArrayBuffer | ArrayBufferView) | string); + sha256?: ((ArrayBuffer | ArrayBufferView) | string); + sha384?: ((ArrayBuffer | ArrayBufferView) | string); + sha512?: ((ArrayBuffer | ArrayBufferView) | string); + storageClass?: string; + ssecKey?: (ArrayBuffer 
| string); +} +interface R2MultipartOptions { + httpMetadata?: (R2HTTPMetadata | Headers); + customMetadata?: Record; + storageClass?: string; + ssecKey?: (ArrayBuffer | string); +} +interface R2Checksums { + readonly md5?: ArrayBuffer; + readonly sha1?: ArrayBuffer; + readonly sha256?: ArrayBuffer; + readonly sha384?: ArrayBuffer; + readonly sha512?: ArrayBuffer; + toJSON(): R2StringChecksums; +} +interface R2StringChecksums { + md5?: string; + sha1?: string; + sha256?: string; + sha384?: string; + sha512?: string; +} +interface R2HTTPMetadata { + contentType?: string; + contentLanguage?: string; + contentDisposition?: string; + contentEncoding?: string; + cacheControl?: string; + cacheExpiry?: Date; +} +type R2Objects = { + objects: R2Object[]; + delimitedPrefixes: string[]; +} & ({ + truncated: true; + cursor: string; +} | { + truncated: false; +}); +interface R2UploadPartOptions { + ssecKey?: (ArrayBuffer | string); +} +declare abstract class ScheduledEvent extends ExtendableEvent { + readonly scheduledTime: number; + readonly cron: string; + noRetry(): void; +} +interface ScheduledController { + readonly scheduledTime: number; + readonly cron: string; + noRetry(): void; +} +interface QueuingStrategy { + highWaterMark?: (number | bigint); + size?: (chunk: T) => number | bigint; +} +interface UnderlyingSink { + type?: string; + start?: (controller: WritableStreamDefaultController) => void | Promise; + write?: (chunk: W, controller: WritableStreamDefaultController) => void | Promise; + abort?: (reason: any) => void | Promise; + close?: () => void | Promise; +} +interface UnderlyingByteSource { + type: "bytes"; + autoAllocateChunkSize?: number; + start?: (controller: ReadableByteStreamController) => void | Promise; + pull?: (controller: ReadableByteStreamController) => void | Promise; + cancel?: (reason: any) => void | Promise; +} +interface UnderlyingSource { + type?: "" | undefined; + start?: (controller: ReadableStreamDefaultController) => void | Promise; + 
pull?: (controller: ReadableStreamDefaultController) => void | Promise; + cancel?: (reason: any) => void | Promise; + expectedLength?: (number | bigint); +} +interface Transformer { + readableType?: string; + writableType?: string; + start?: (controller: TransformStreamDefaultController) => void | Promise; + transform?: (chunk: I, controller: TransformStreamDefaultController) => void | Promise; + flush?: (controller: TransformStreamDefaultController) => void | Promise; + cancel?: (reason: any) => void | Promise; + expectedLength?: number; +} +interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. The way in which the piping process behaves under various error conditions can be customized with a number of passed options. It returns a promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate as follows: + * + * An error in this source readable stream will abort destination, unless preventAbort is truthy. The returned promise will be rejected with the source's error, or with any error that occurs during aborting the destination. + * + * An error in destination will cancel this source readable stream, unless preventCancel is truthy. The returned promise will be rejected with the destination's error, or with any error that occurs during canceling the source. + * + * When this source readable stream closes, destination will be closed, unless preventClose is truthy. The returned promise will be fulfilled once this process completes, unless an error is encountered while closing the destination, in which case it will be rejected with that error. 
+ * + * If destination starts out closed or closing, this source readable stream will be canceled, unless preventCancel is true. The returned promise will be rejected with an error indicating piping to a closed stream failed, or with any error that occurs during canceling the source. + * + * The signal option can be set to an AbortSignal to allow aborting an ongoing pipe operation via the corresponding AbortController. In this case, this source readable stream will be canceled, and destination aborted, unless the respective options preventCancel or preventAbort are set. + */ + preventClose?: boolean; + signal?: AbortSignal; +} +type ReadableStreamReadResult = { + done: false; + value: R; +} | { + done: true; + value?: undefined; +}; +/** + * The `ReadableStream` interface of the Streams API represents a readable stream of byte data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) + */ +interface ReadableStream { + /** + * The **`locked`** read-only property of the ReadableStream interface returns whether or not the readable stream is locked to a reader. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/locked) + */ + get locked(): boolean; + /** + * The **`cancel()`** method of the ReadableStream interface returns a Promise that resolves when the stream is canceled. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/cancel) + */ + cancel(reason?: any): Promise; + /** + * The **`getReader()`** method of the ReadableStream interface creates a reader and locks the stream to it. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) + */ + getReader(): ReadableStreamDefaultReader; + /** + * The **`getReader()`** method of the ReadableStream interface creates a reader and locks the stream to it. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) + */ + getReader(options: ReadableStreamGetReaderOptions): ReadableStreamBYOBReader; + /** + * The **`pipeThrough()`** method of the ReadableStream interface provides a chainable way of piping the current stream through a transform stream or any other writable/readable pair. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeThrough) + */ + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + /** + * The **`pipeTo()`** method of the ReadableStream interface pipes the current `ReadableStream` to a given WritableStream and returns a Promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeTo) + */ + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + /** + * The **`tee()`** method of the two-element array containing the two resulting branches as new ReadableStream instances. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/tee) + */ + tee(): [ + ReadableStream, + ReadableStream + ]; + values(options?: ReadableStreamValuesOptions): AsyncIterableIterator; + [Symbol.asyncIterator](options?: ReadableStreamValuesOptions): AsyncIterableIterator; +} +/** + * The `ReadableStream` interface of the Streams API represents a readable stream of byte data. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) + */ +declare const ReadableStream: { + prototype: ReadableStream; + new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new (underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; +}; +/** + * The **`ReadableStreamDefaultReader`** interface of the Streams API represents a default reader that can be used to read stream data supplied from a network (such as a fetch request). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader) + */ +declare class ReadableStreamDefaultReader { + constructor(stream: ReadableStream); + get closed(): Promise; + cancel(reason?: any): Promise; + /** + * The **`read()`** method of the ReadableStreamDefaultReader interface returns a Promise providing access to the next chunk in the stream's internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/read) + */ + read(): Promise>; + /** + * The **`releaseLock()`** method of the ReadableStreamDefaultReader interface releases the reader's lock on the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/releaseLock) + */ + releaseLock(): void; +} +/** + * The `ReadableStreamBYOBReader` interface of the Streams API defines a reader for a ReadableStream that supports zero-copy reading from an underlying byte source. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader) + */ +declare class ReadableStreamBYOBReader { + constructor(stream: ReadableStream); + get closed(): Promise; + cancel(reason?: any): Promise; + /** + * The **`read()`** method of the ReadableStreamBYOBReader interface is used to read data into a view on a user-supplied buffer from an associated readable byte stream. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/read) + */ + read(view: T): Promise>; + /** + * The **`releaseLock()`** method of the ReadableStreamBYOBReader interface releases the reader's lock on the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/releaseLock) + */ + releaseLock(): void; + readAtLeast(minElements: number, view: T): Promise>; +} +interface ReadableStreamBYOBReaderReadableStreamBYOBReaderReadOptions { + min?: number; +} +interface ReadableStreamGetReaderOptions { + /** + * Creates a ReadableStreamBYOBReader and locks the stream to the new reader. + * + * This call behaves the same way as the no-argument variant, except that it only works on readable byte streams, i.e. streams which were constructed specifically with the ability to handle "bring your own buffer" reading. The returned BYOB reader provides the ability to directly read individual chunks from the stream via its read() method, into developer-supplied buffers, allowing more precise control over allocation. + */ + mode: "byob"; +} +/** + * The **`ReadableStreamBYOBRequest`** interface of the Streams API represents a 'pull request' for data from an underlying source that will made as a zero-copy transfer to a consumer (bypassing the stream's internal queues). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest) + */ +declare abstract class ReadableStreamBYOBRequest { + /** + * The **`view`** getter property of the ReadableStreamBYOBRequest interface returns the current view. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/view) + */ + get view(): Uint8Array | null; + /** + * The **`respond()`** method of the ReadableStreamBYOBRequest interface is used to signal to the associated readable byte stream that the specified number of bytes were written into the ReadableStreamBYOBRequest.view. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respond) + */ + respond(bytesWritten: number): void; + /** + * The **`respondWithNewView()`** method of the ReadableStreamBYOBRequest interface specifies a new view that the consumer of the associated readable byte stream should write to instead of ReadableStreamBYOBRequest.view. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respondWithNewView) + */ + respondWithNewView(view: ArrayBuffer | ArrayBufferView): void; + get atLeast(): number | null; +} +/** + * The **`ReadableStreamDefaultController`** interface of the Streams API represents a controller allowing control of a ReadableStream's state and internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController) + */ +declare abstract class ReadableStreamDefaultController { + /** + * The **`desiredSize`** read-only property of the required to fill the stream's internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`close()`** method of the ReadableStreamDefaultController interface closes the associated stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/close) + */ + close(): void; + /** + * The **`enqueue()`** method of the ```js-nolint enqueue(chunk) ``` - `chunk` - : The chunk to enqueue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/enqueue) + */ + enqueue(chunk?: R): void; + /** + * The **`error()`** method of the with the associated stream to error. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/error) + */ + error(reason: any): void; +} +/** + * The **`ReadableByteStreamController`** interface of the Streams API represents a controller for a readable byte stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController) + */ +declare abstract class ReadableByteStreamController { + /** + * The **`byobRequest`** read-only property of the ReadableByteStreamController interface returns the current BYOB request, or `null` if there are no pending requests. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/byobRequest) + */ + get byobRequest(): ReadableStreamBYOBRequest | null; + /** + * The **`desiredSize`** read-only property of the ReadableByteStreamController interface returns the number of bytes required to fill the stream's internal queue to its 'desired size'. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`close()`** method of the ReadableByteStreamController interface closes the associated stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/close) + */ + close(): void; + /** + * The **`enqueue()`** method of the ReadableByteStreamController interface enqueues a given chunk on the associated readable byte stream (the chunk is copied into the stream's internal queues). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/enqueue) + */ + enqueue(chunk: ArrayBuffer | ArrayBufferView): void; + /** + * The **`error()`** method of the ReadableByteStreamController interface causes any future interactions with the associated stream to error with the specified reason. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/error) + */ + error(reason: any): void; +} +/** + * The **`WritableStreamDefaultController`** interface of the Streams API represents a controller allowing control of a WritableStream's state. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController) + */ +declare abstract class WritableStreamDefaultController { + /** + * The read-only **`signal`** property of the WritableStreamDefaultController interface returns the AbortSignal associated with the controller. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/signal) + */ + get signal(): AbortSignal; + /** + * The **`error()`** method of the with the associated stream to error. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/error) + */ + error(reason?: any): void; +} +/** + * The **`TransformStreamDefaultController`** interface of the Streams API provides methods to manipulate the associated ReadableStream and WritableStream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController) + */ +declare abstract class TransformStreamDefaultController { + /** + * The **`desiredSize`** read-only property of the TransformStreamDefaultController interface returns the desired size to fill the queue of the associated ReadableStream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`enqueue()`** method of the TransformStreamDefaultController interface enqueues the given chunk in the readable side of the stream. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/enqueue) + */ + enqueue(chunk?: O): void; + /** + * The **`error()`** method of the TransformStreamDefaultController interface errors both sides of the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/error) + */ + error(reason: any): void; + /** + * The **`terminate()`** method of the TransformStreamDefaultController interface closes the readable side and errors the writable side of the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/terminate) + */ + terminate(): void; +} +interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream through a transform stream (or any other { writable, readable } pair). It simply pipes the stream into the writable side of the supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. + */ + writable: WritableStream; +} +/** + * The **`WritableStream`** interface of the Streams API provides a standard abstraction for writing streaming data to a destination, known as a sink. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream) + */ +declare class WritableStream { + constructor(underlyingSink?: UnderlyingSink, queuingStrategy?: QueuingStrategy); + /** + * The **`locked`** read-only property of the WritableStream interface returns a boolean indicating whether the `WritableStream` is locked to a writer. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/locked) + */ + get locked(): boolean; + /** + * The **`abort()`** method of the WritableStream interface aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be immediately moved to an error state, with any queued writes discarded. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/abort) + */ + abort(reason?: any): Promise; + /** + * The **`close()`** method of the WritableStream interface closes the associated stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/close) + */ + close(): Promise; + /** + * The **`getWriter()`** method of the WritableStream interface returns a new instance of WritableStreamDefaultWriter and locks the stream to that instance. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/getWriter) + */ + getWriter(): WritableStreamDefaultWriter; +} +/** + * The **`WritableStreamDefaultWriter`** interface of the Streams API is the object returned by WritableStream.getWriter() and once created locks the writer to the `WritableStream` ensuring that no other streams can write to the underlying sink. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter) + */ +declare class WritableStreamDefaultWriter { + constructor(stream: WritableStream); + /** + * The **`closed`** read-only property of the the stream errors or the writer's lock is released. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/closed) + */ + get closed(): Promise; + /** + * The **`ready`** read-only property of the that resolves when the desired size of the stream's internal queue transitions from non-positive to positive, signaling that it is no longer applying backpressure. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/ready) + */ + get ready(): Promise; + /** + * The **`desiredSize`** read-only property of the to fill the stream's internal queue. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/desiredSize) + */ + get desiredSize(): number | null; + /** + * The **`abort()`** method of the the producer can no longer successfully write to the stream and it is to be immediately moved to an error state, with any queued writes discarded. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/abort) + */ + abort(reason?: any): Promise; + /** + * The **`close()`** method of the stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/close) + */ + close(): Promise; + /** + * The **`write()`** method of the operation. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/write) + */ + write(chunk?: W): Promise; + /** + * The **`releaseLock()`** method of the corresponding stream. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/releaseLock) + */ + releaseLock(): void; +} +/** + * The **`TransformStream`** interface of the Streams API represents a concrete implementation of the pipe chain _transform stream_ concept. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream) + */ +declare class TransformStream { + constructor(transformer?: Transformer, writableStrategy?: QueuingStrategy, readableStrategy?: QueuingStrategy); + /** + * The **`readable`** read-only property of the TransformStream interface returns the ReadableStream instance controlled by this `TransformStream`. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/readable) + */ + get readable(): ReadableStream; + /** + * The **`writable`** read-only property of the TransformStream interface returns the WritableStream instance controlled by this `TransformStream`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/writable) + */ + get writable(): WritableStream; +} +declare class FixedLengthStream extends IdentityTransformStream { + constructor(expectedLength: number | bigint, queuingStrategy?: IdentityTransformStreamQueuingStrategy); +} +declare class IdentityTransformStream extends TransformStream { + constructor(queuingStrategy?: IdentityTransformStreamQueuingStrategy); +} +interface IdentityTransformStreamQueuingStrategy { + highWaterMark?: (number | bigint); +} +interface ReadableStreamValuesOptions { + preventCancel?: boolean; +} +/** + * The **`CompressionStream`** interface of the Compression Streams API is an API for compressing a stream of data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CompressionStream) + */ +declare class CompressionStream extends TransformStream { + constructor(format: "gzip" | "deflate" | "deflate-raw"); +} +/** + * The **`DecompressionStream`** interface of the Compression Streams API is an API for decompressing a stream of data. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DecompressionStream) + */ +declare class DecompressionStream extends TransformStream { + constructor(format: "gzip" | "deflate" | "deflate-raw"); +} +/** + * The **`TextEncoderStream`** interface of the Encoding API converts a stream of strings into bytes in the UTF-8 encoding. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoderStream) + */ +declare class TextEncoderStream extends TransformStream { + constructor(); + get encoding(): string; +} +/** + * The **`TextDecoderStream`** interface of the Encoding API converts a stream of text in a binary encoding, such as UTF-8 etc., to a stream of strings. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoderStream) + */ +declare class TextDecoderStream extends TransformStream { + constructor(label?: string, options?: TextDecoderStreamTextDecoderStreamInit); + get encoding(): string; + get fatal(): boolean; + get ignoreBOM(): boolean; +} +interface TextDecoderStreamTextDecoderStreamInit { + fatal?: boolean; + ignoreBOM?: boolean; +} +/** + * The **`ByteLengthQueuingStrategy`** interface of the Streams API provides a built-in byte length queuing strategy that can be used when constructing streams. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy) + */ +declare class ByteLengthQueuingStrategy implements QueuingStrategy { + constructor(init: QueuingStrategyInit); + /** + * The read-only **`ByteLengthQueuingStrategy.highWaterMark`** property returns the total number of bytes that can be contained in the internal queue before backpressure is applied. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/highWaterMark) + */ + get highWaterMark(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/size) */ + get size(): (chunk?: any) => number; +} +/** + * The **`CountQueuingStrategy`** interface of the Streams API provides a built-in chunk counting queuing strategy that can be used when constructing streams. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy) + */ +declare class CountQueuingStrategy implements QueuingStrategy { + constructor(init: QueuingStrategyInit); + /** + * The read-only **`CountQueuingStrategy.highWaterMark`** property returns the total number of chunks that can be contained in the internal queue before backpressure is applied. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/highWaterMark) + */ + get highWaterMark(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/size) */ + get size(): (chunk?: any) => number; +} +interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water mark. + * + * Note that the provided high water mark will not be validated ahead of time. Instead, if it is negative, NaN, or not a number, the resulting ByteLengthQueuingStrategy will cause the corresponding stream constructor to throw. 
+ */ + highWaterMark: number; +} +interface ScriptVersion { + id?: string; + tag?: string; + message?: string; +} +declare abstract class TailEvent extends ExtendableEvent { + readonly events: TraceItem[]; + readonly traces: TraceItem[]; +} +interface TraceItem { + readonly event: (TraceItemFetchEventInfo | TraceItemJsRpcEventInfo | TraceItemScheduledEventInfo | TraceItemAlarmEventInfo | TraceItemQueueEventInfo | TraceItemEmailEventInfo | TraceItemTailEventInfo | TraceItemCustomEventInfo | TraceItemHibernatableWebSocketEventInfo) | null; + readonly eventTimestamp: number | null; + readonly logs: TraceLog[]; + readonly exceptions: TraceException[]; + readonly diagnosticsChannelEvents: TraceDiagnosticChannelEvent[]; + readonly scriptName: string | null; + readonly entrypoint?: string; + readonly scriptVersion?: ScriptVersion; + readonly dispatchNamespace?: string; + readonly scriptTags?: string[]; + readonly durableObjectId?: string; + readonly outcome: string; + readonly executionModel: string; + readonly truncated: boolean; + readonly cpuTime: number; + readonly wallTime: number; +} +interface TraceItemAlarmEventInfo { + readonly scheduledTime: Date; +} +interface TraceItemCustomEventInfo { +} +interface TraceItemScheduledEventInfo { + readonly scheduledTime: number; + readonly cron: string; +} +interface TraceItemQueueEventInfo { + readonly queue: string; + readonly batchSize: number; +} +interface TraceItemEmailEventInfo { + readonly mailFrom: string; + readonly rcptTo: string; + readonly rawSize: number; +} +interface TraceItemTailEventInfo { + readonly consumedEvents: TraceItemTailEventInfoTailItem[]; +} +interface TraceItemTailEventInfoTailItem { + readonly scriptName: string | null; +} +interface TraceItemFetchEventInfo { + readonly response?: TraceItemFetchEventInfoResponse; + readonly request: TraceItemFetchEventInfoRequest; +} +interface TraceItemFetchEventInfoRequest { + readonly cf?: any; + readonly headers: Record; + readonly method: string; + readonly 
url: string; + getUnredacted(): TraceItemFetchEventInfoRequest; +} +interface TraceItemFetchEventInfoResponse { + readonly status: number; +} +interface TraceItemJsRpcEventInfo { + readonly rpcMethod: string; +} +interface TraceItemHibernatableWebSocketEventInfo { + readonly getWebSocketEvent: TraceItemHibernatableWebSocketEventInfoMessage | TraceItemHibernatableWebSocketEventInfoClose | TraceItemHibernatableWebSocketEventInfoError; +} +interface TraceItemHibernatableWebSocketEventInfoMessage { + readonly webSocketEventType: string; +} +interface TraceItemHibernatableWebSocketEventInfoClose { + readonly webSocketEventType: string; + readonly code: number; + readonly wasClean: boolean; +} +interface TraceItemHibernatableWebSocketEventInfoError { + readonly webSocketEventType: string; +} +interface TraceLog { + readonly timestamp: number; + readonly level: string; + readonly message: any; +} +interface TraceException { + readonly timestamp: number; + readonly message: string; + readonly name: string; + readonly stack?: string; +} +interface TraceDiagnosticChannelEvent { + readonly timestamp: number; + readonly channel: string; + readonly message: any; +} +interface TraceMetrics { + readonly cpuTime: number; + readonly wallTime: number; +} +interface UnsafeTraceMetrics { + fromTrace(item: TraceItem): TraceMetrics; +} +/** + * The **`URL`** interface is used to parse, construct, normalize, and encode URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL) + */ +declare class URL { + constructor(url: string | URL, base?: string | URL); + /** + * The **`origin`** read-only property of the URL interface returns a string containing the Unicode serialization of the origin of the represented URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/origin) + */ + get origin(): string; + /** + * The **`href`** property of the URL interface is a string containing the whole URL. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) + */ + get href(): string; + /** + * The **`href`** property of the URL interface is a string containing the whole URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) + */ + set href(value: string); + /** + * The **`protocol`** property of the URL interface is a string containing the protocol or scheme of the URL, including the final `':'`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) + */ + get protocol(): string; + /** + * The **`protocol`** property of the URL interface is a string containing the protocol or scheme of the URL, including the final `':'`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) + */ + set protocol(value: string); + /** + * The **`username`** property of the URL interface is a string containing the username component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) + */ + get username(): string; + /** + * The **`username`** property of the URL interface is a string containing the username component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) + */ + set username(value: string); + /** + * The **`password`** property of the URL interface is a string containing the password component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) + */ + get password(): string; + /** + * The **`password`** property of the URL interface is a string containing the password component of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) + */ + set password(value: string); + /** + * The **`host`** property of the URL interface is a string containing the host, which is the URL.hostname, and then, if the port of the URL is nonempty, a `':'`, followed by the URL.port of the URL. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) + */ + get host(): string; + /** + * The **`host`** property of the URL interface is a string containing the host, which is the URL.hostname, and then, if the port of the URL is nonempty, a `':'`, followed by the URL.port of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) + */ + set host(value: string); + /** + * The **`hostname`** property of the URL interface is a string containing either the domain name or IP address of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) + */ + get hostname(): string; + /** + * The **`hostname`** property of the URL interface is a string containing either the domain name or IP address of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) + */ + set hostname(value: string); + /** + * The **`port`** property of the URL interface is a string containing the port number of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) + */ + get port(): string; + /** + * The **`port`** property of the URL interface is a string containing the port number of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) + */ + set port(value: string); + /** + * The **`pathname`** property of the URL interface represents a location in a hierarchical structure. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) + */ + get pathname(): string; + /** + * The **`pathname`** property of the URL interface represents a location in a hierarchical structure. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) + */ + set pathname(value: string); + /** + * The **`search`** property of the URL interface is a search string, also called a _query string_, that is a string containing a `'?'` followed by the parameters of the URL. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) + */ + get search(): string; + /** + * The **`search`** property of the URL interface is a search string, also called a _query string_, that is a string containing a `'?'` followed by the parameters of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) + */ + set search(value: string); + /** + * The **`hash`** property of the URL interface is a string containing a `'#'` followed by the fragment identifier of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) + */ + get hash(): string; + /** + * The **`hash`** property of the URL interface is a string containing a `'#'` followed by the fragment identifier of the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) + */ + set hash(value: string); + /** + * The **`searchParams`** read-only property of the access to the [MISSING: httpmethod('GET')] decoded query arguments contained in the URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/searchParams) + */ + get searchParams(): URLSearchParams; + /** + * The **`toJSON()`** method of the URL interface returns a string containing a serialized version of the URL, although in practice it seems to have the same effect as ```js-nolint toJSON() ``` None. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/toJSON) + */ + toJSON(): string; + /*function toString() { [native code] }*/ + toString(): string; + /** + * The **`URL.canParse()`** static method of the URL interface returns a boolean indicating whether or not an absolute URL, or a relative URL combined with a base URL, are parsable and valid. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/canParse_static) + */ + static canParse(url: string, base?: string): boolean; + /** + * The **`URL.parse()`** static method of the URL interface returns a newly created URL object representing the URL defined by the parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/parse_static) + */ + static parse(url: string, base?: string): URL | null; + /** + * The **`createObjectURL()`** static method of the URL interface creates a string containing a URL representing the object given in the parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/createObjectURL_static) + */ + static createObjectURL(object: File | Blob): string; + /** + * The **`revokeObjectURL()`** static method of the URL interface releases an existing object URL which was previously created by calling Call this method when you've finished using an object URL to let the browser know not to keep the reference to the file any longer. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/revokeObjectURL_static) + */ + static revokeObjectURL(object_url: string): void; +} +/** + * The **`URLSearchParams`** interface defines utility methods to work with the query string of a URL. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams) + */ +declare class URLSearchParams { + constructor(init?: (Iterable> | Record | string)); + /** + * The **`size`** read-only property of the URLSearchParams interface indicates the total number of search parameter entries. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/size) + */ + get size(): number; + /** + * The **`append()`** method of the URLSearchParams interface appends a specified key/value pair as a new search parameter. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/append) + */ + append(name: string, value: string): void; + /** + * The **`delete()`** method of the URLSearchParams interface deletes specified parameters and their associated value(s) from the list of all search parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/delete) + */ + delete(name: string, value?: string): void; + /** + * The **`get()`** method of the URLSearchParams interface returns the first value associated to the given search parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/get) + */ + get(name: string): string | null; + /** + * The **`getAll()`** method of the URLSearchParams interface returns all the values associated with a given search parameter as an array. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/getAll) + */ + getAll(name: string): string[]; + /** + * The **`has()`** method of the URLSearchParams interface returns a boolean value that indicates whether the specified parameter is in the search parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/has) + */ + has(name: string, value?: string): boolean; + /** + * The **`set()`** method of the URLSearchParams interface sets the value associated with a given search parameter to the given value. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/set) + */ + set(name: string, value: string): void; + /** + * The **`URLSearchParams.sort()`** method sorts all key/value pairs contained in this object in place and returns `undefined`. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/sort) + */ + sort(): void; + /* Returns an array of key, value pairs for every entry in the search params. 
*/ + entries(): IterableIterator<[ + key: string, + value: string + ]>; + /* Returns a list of keys in the search params. */ + keys(): IterableIterator; + /* Returns a list of values in the search params. */ + values(): IterableIterator; + forEach(callback: (this: This, value: string, key: string, parent: URLSearchParams) => void, thisArg?: This): void; + /*function toString() { [native code] }*/ + toString(): string; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: string + ]>; +} +declare class URLPattern { + constructor(input?: (string | URLPatternInit), baseURL?: (string | URLPatternOptions), patternOptions?: URLPatternOptions); + get protocol(): string; + get username(): string; + get password(): string; + get hostname(): string; + get port(): string; + get pathname(): string; + get search(): string; + get hash(): string; + get hasRegExpGroups(): boolean; + test(input?: (string | URLPatternInit), baseURL?: string): boolean; + exec(input?: (string | URLPatternInit), baseURL?: string): URLPatternResult | null; +} +interface URLPatternInit { + protocol?: string; + username?: string; + password?: string; + hostname?: string; + port?: string; + pathname?: string; + search?: string; + hash?: string; + baseURL?: string; +} +interface URLPatternComponentResult { + input: string; + groups: Record; +} +interface URLPatternResult { + inputs: (string | URLPatternInit)[]; + protocol: URLPatternComponentResult; + username: URLPatternComponentResult; + password: URLPatternComponentResult; + hostname: URLPatternComponentResult; + port: URLPatternComponentResult; + pathname: URLPatternComponentResult; + search: URLPatternComponentResult; + hash: URLPatternComponentResult; +} +interface URLPatternOptions { + ignoreCase?: boolean; +} +/** + * A `CloseEvent` is sent to clients using WebSockets when the connection is closed. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent) + */ +declare class CloseEvent extends Event { + constructor(type: string, initializer?: CloseEventInit); + /** + * The **`code`** read-only property of the CloseEvent interface returns a WebSocket connection close code indicating the reason the connection was closed. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/code) + */ + readonly code: number; + /** + * The **`reason`** read-only property of the CloseEvent interface returns the WebSocket connection close reason the server gave for closing the connection; that is, a concise human-readable prose explanation for the closure. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/reason) + */ + readonly reason: string; + /** + * The **`wasClean`** read-only property of the CloseEvent interface returns `true` if the connection closed cleanly. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/wasClean) + */ + readonly wasClean: boolean; +} +interface CloseEventInit { + code?: number; + reason?: string; + wasClean?: boolean; +} +type WebSocketEventMap = { + close: CloseEvent; + message: MessageEvent; + open: Event; + error: ErrorEvent; +}; +/** + * The `WebSocket` object provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. 
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket)
 */
declare var WebSocket: {
    prototype: WebSocket;
    new (url: string, protocols?: (string[] | string)): WebSocket;
    // READY_STATE_* are legacy aliases interleaved with the standard
    // CONNECTING/OPEN/CLOSING/CLOSED ready-state constants.
    readonly READY_STATE_CONNECTING: number;
    readonly CONNECTING: number;
    readonly READY_STATE_OPEN: number;
    readonly OPEN: number;
    readonly READY_STATE_CLOSING: number;
    readonly CLOSING: number;
    readonly READY_STATE_CLOSED: number;
    readonly CLOSED: number;
};
/**
 * The `WebSocket` object provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket)
 */
interface WebSocket extends EventTarget {
    // NOTE(review): no MDN reference — appears to be the Workers-specific accept() used with
    // the WebSocketPair declared elsewhere in this file; confirm against Workers runtime docs.
    accept(): void;
    /**
     * The **`WebSocket.send()`** method enqueues the specified data to be transmitted to the server over the WebSocket connection, increasing the value of `bufferedAmount` by the number of bytes needed to contain the data.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/send)
     */
    send(message: (ArrayBuffer | ArrayBufferView) | string): void;
    /**
     * The **`WebSocket.close()`** method closes the WebSocket connection or connection attempt, if any. If the connection is already `CLOSED`, this method does nothing.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/close)
     */
    close(code?: number, reason?: string): void;
    // NOTE(review): no MDN reference — these look like Workers-specific attachment helpers
    // (hibernation API); confirm against Workers runtime docs.
    serializeAttachment(attachment: any): void;
    deserializeAttachment(): any | null;
    /**
     * The **`WebSocket.readyState`** read-only property returns the current state of the WebSocket connection.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/readyState)
     */
    readyState: number;
    /**
     * The **`WebSocket.url`** read-only property returns the absolute URL of the WebSocket as resolved by the constructor.
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/url) + */ + url: string | null; + /** + * The **`WebSocket.protocol`** read-only property returns the name of the sub-protocol the server selected; this will be one of the strings specified in the `protocols` parameter when creating the WebSocket object, or the empty string if no connection is established. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/protocol) + */ + protocol: string | null; + /** + * The **`WebSocket.extensions`** read-only property returns the extensions selected by the server. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/extensions) + */ + extensions: string | null; +} +declare const WebSocketPair: { + new (): { + 0: WebSocket; + 1: WebSocket; + }; +}; +interface SqlStorage { + exec>(query: string, ...bindings: any[]): SqlStorageCursor; + get databaseSize(): number; + Cursor: typeof SqlStorageCursor; + Statement: typeof SqlStorageStatement; +} +declare abstract class SqlStorageStatement { +} +type SqlStorageValue = ArrayBuffer | string | number | null; +declare abstract class SqlStorageCursor> { + next(): { + done?: false; + value: T; + } | { + done: true; + value?: never; + }; + toArray(): T[]; + one(): T; + raw(): IterableIterator; + columnNames: string[]; + get rowsRead(): number; + get rowsWritten(): number; + [Symbol.iterator](): IterableIterator; +} +interface Socket { + get readable(): ReadableStream; + get writable(): WritableStream; + get closed(): Promise; + get opened(): Promise; + get upgraded(): boolean; + get secureTransport(): "on" | "off" | "starttls"; + close(): Promise; + startTls(options?: TlsOptions): Socket; +} +interface SocketOptions { + secureTransport?: string; + allowHalfOpen: boolean; + highWaterMark?: (number | bigint); +} +interface SocketAddress { + hostname: string; + port: number; +} +interface TlsOptions { + expectedServerHostname?: string; +} +interface SocketInfo { + 
    remoteAddress?: string;
    localAddress?: string;
}
/**
 * The **`EventSource`** interface is web content's interface to server-sent events.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource)
 */
declare class EventSource extends EventTarget {
    constructor(url: string, init?: EventSourceEventSourceInit);
    /**
     * The **`close()`** method of the EventSource interface closes the connection, if one is made, and sets the EventSource.readyState attribute to `2` (closed).
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/close)
     */
    close(): void;
    /**
     * The **`url`** read-only property of the EventSource interface returns a string representing the URL of the source.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/url)
     */
    get url(): string;
    /**
     * The **`withCredentials`** read-only property of the EventSource interface returns a boolean value indicating whether the `EventSource` object was instantiated with CORS credentials set.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/withCredentials)
     */
    get withCredentials(): boolean;
    /**
     * The **`readyState`** read-only property of the EventSource interface returns a number representing the state of the connection.
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/readyState) + */ + get readyState(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ + get onopen(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ + set onopen(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ + get onmessage(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ + set onmessage(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ + get onerror(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ + set onerror(value: any | null); + static readonly CONNECTING: number; + static readonly OPEN: number; + static readonly CLOSED: number; + static from(stream: ReadableStream): EventSource; +} +interface EventSourceEventSourceInit { + withCredentials?: boolean; + fetcher?: Fetcher; +} +interface Container { + get running(): boolean; + start(options?: ContainerStartupOptions): void; + monitor(): Promise; + destroy(error?: any): Promise; + signal(signo: number): void; + getTcpPort(port: number): Fetcher; + setInactivityTimeout(durationMs: number | bigint): Promise; +} +interface ContainerStartupOptions { + entrypoint?: string[]; + enableInternet: boolean; + env?: Record; + hardTimeout?: (number | bigint); +} +/** + * The **`MessagePort`** interface of the Channel Messaging API represents one of the two ports of a MessageChannel, allowing messages to be sent from one port and listening out for them arriving at the other. 
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort)
 */
declare abstract class MessagePort extends EventTarget {
    /**
     * The **`postMessage()`** method of the MessagePort interface sends a message from the port, and optionally, transfers ownership of objects to other browsing contexts.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort/postMessage)
     */
    postMessage(data?: any, options?: (any[] | MessagePortPostMessageOptions)): void;
    /**
     * The **`close()`** method of the MessagePort interface disconnects the port, so it is no longer active.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort/close)
     */
    close(): void;
    /**
     * The **`start()`** method of the MessagePort interface starts the sending of messages queued on the port.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessagePort/start)
     */
    start(): void;
    get onmessage(): any | null;
    set onmessage(value: any | null);
}
/**
 * The **`MessageChannel`** interface of the Channel Messaging API allows us to create a new message channel and send data through it via its two MessagePort properties.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageChannel)
 */
declare class MessageChannel {
    constructor();
    /**
     * The **`port1`** read-only property of the MessageChannel interface returns the first port of the message channel — the port attached to the context that originated the channel.
     *
     * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageChannel/port1)
     */
    readonly port1: MessagePort;
    /**
     * The **`port2`** read-only property of the MessageChannel interface returns the second port of the message channel — the port attached to the context at the other end of the channel, which the message is initially sent to.
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageChannel/port2) + */ + readonly port2: MessagePort; +} +interface MessagePortPostMessageOptions { + transfer?: any[]; +} +type LoopbackForExport Rpc.EntrypointBranded) | ExportedHandler | undefined = undefined> = T extends new (...args: any[]) => Rpc.WorkerEntrypointBranded ? LoopbackServiceStub> : T extends new (...args: any[]) => Rpc.DurableObjectBranded ? LoopbackDurableObjectClass> : T extends ExportedHandler ? LoopbackServiceStub : undefined; +type LoopbackServiceStub = Fetcher & (T extends CloudflareWorkersModule.WorkerEntrypoint ? (opts: { + props?: Props; +}) => Fetcher : (opts: { + props?: any; +}) => Fetcher); +type LoopbackDurableObjectClass = DurableObjectClass & (T extends CloudflareWorkersModule.DurableObject ? (opts: { + props?: Props; +}) => DurableObjectClass : (opts: { + props?: any; +}) => DurableObjectClass); +interface SyncKvStorage { + get(key: string): T | undefined; + list(options?: SyncKvListOptions): Iterable<[ + string, + T + ]>; + put(key: string, value: T): void; + delete(key: string): boolean; +} +interface SyncKvListOptions { + start?: string; + startAfter?: string; + end?: string; + prefix?: string; + reverse?: boolean; + limit?: number; +} +interface WorkerStub { + getEntrypoint(name?: string, options?: WorkerStubEntrypointOptions): Fetcher; +} +interface WorkerStubEntrypointOptions { + props?: any; +} +interface WorkerLoader { + get(name: string | null, getCode: () => WorkerLoaderWorkerCode | Promise): WorkerStub; +} +interface WorkerLoaderModule { + js?: string; + cjs?: string; + text?: string; + data?: ArrayBuffer; + json?: any; + py?: string; + wasm?: ArrayBuffer; +} +interface WorkerLoaderWorkerCode { + compatibilityDate: string; + compatibilityFlags?: string[]; + allowExperimental?: boolean; + mainModule: string; + modules: Record; + env?: any; + globalOutbound?: (Fetcher | null); + tails?: Fetcher[]; + streamingTails?: Fetcher[]; +} +/** +* The Workers 
runtime supports a subset of the Performance API, used to measure timing and performance, +* as well as timing of subrequests and other operations. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) +*/ +declare abstract class Performance { + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancetimeorigin) */ + get timeOrigin(): number; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancenow) */ + now(): number; +} +type AiImageClassificationInput = { + image: number[]; +}; +type AiImageClassificationOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiImageClassification { + inputs: AiImageClassificationInput; + postProcessedOutputs: AiImageClassificationOutput; +} +type AiImageToTextInput = { + image: number[]; + prompt?: string; + max_tokens?: number; + temperature?: number; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + raw?: boolean; + messages?: RoleScopedChatInput[]; +}; +type AiImageToTextOutput = { + description: string; +}; +declare abstract class BaseAiImageToText { + inputs: AiImageToTextInput; + postProcessedOutputs: AiImageToTextOutput; +} +type AiImageTextToTextInput = { + image: string; + prompt?: string; + max_tokens?: number; + temperature?: number; + ignore_eos?: boolean; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + raw?: boolean; + messages?: RoleScopedChatInput[]; +}; +type AiImageTextToTextOutput = { + description: string; +}; +declare abstract class BaseAiImageTextToText { + inputs: AiImageTextToTextInput; + postProcessedOutputs: AiImageTextToTextOutput; +} +type AiMultimodalEmbeddingsInput = { + image: string; + text: string[]; +}; +type 
AiIMultimodalEmbeddingsOutput = {
    data: number[][];
    shape: number[];
};
// NOTE(review): generated inconsistency — the class below reuses AiImageTextToTextInput/Output
// instead of AiMultimodalEmbeddingsInput (declared just above) and AiIMultimodalEmbeddingsOutput
// (note the stray "I" in that alias name, which nothing in view references). Left byte-identical
// because published declaration shapes may already be depended upon; confirm against the
// upstream generator before changing.
declare abstract class BaseAiMultimodalEmbeddings {
    inputs: AiImageTextToTextInput;
    postProcessedOutputs: AiImageTextToTextOutput;
}
type AiObjectDetectionInput = {
    image: number[];
};
type AiObjectDetectionOutput = {
    score?: number;
    label?: string;
}[];
declare abstract class BaseAiObjectDetection {
    inputs: AiObjectDetectionInput;
    postProcessedOutputs: AiObjectDetectionOutput;
}
type AiSentenceSimilarityInput = {
    source: string;
    sentences: string[];
};
type AiSentenceSimilarityOutput = number[];
declare abstract class BaseAiSentenceSimilarity {
    inputs: AiSentenceSimilarityInput;
    postProcessedOutputs: AiSentenceSimilarityOutput;
}
type AiAutomaticSpeechRecognitionInput = {
    audio: number[];
};
type AiAutomaticSpeechRecognitionOutput = {
    text?: string;
    words?: {
        word: string;
        start: number;
        end: number;
    }[];
    vtt?: string;
};
declare abstract class BaseAiAutomaticSpeechRecognition {
    inputs: AiAutomaticSpeechRecognitionInput;
    postProcessedOutputs: AiAutomaticSpeechRecognitionOutput;
}
type AiSummarizationInput = {
    input_text: string;
    max_length?: number;
};
type AiSummarizationOutput = {
    summary: string;
};
declare abstract class BaseAiSummarization {
    inputs: AiSummarizationInput;
    postProcessedOutputs: AiSummarizationOutput;
}
type AiTextClassificationInput = {
    text: string;
};
type AiTextClassificationOutput = {
    score?: number;
    label?: string;
}[];
declare abstract class BaseAiTextClassification {
    inputs: AiTextClassificationInput;
    postProcessedOutputs: AiTextClassificationOutput;
}
type AiTextEmbeddingsInput = {
    text: string | string[];
};
type AiTextEmbeddingsOutput = {
    shape: number[];
    data: number[][];
};
declare abstract class BaseAiTextEmbeddings {
    inputs: AiTextEmbeddingsInput;
    postProcessedOutputs: AiTextEmbeddingsOutput;
}
type RoleScopedChatInput = {
    role: "user" |
"assistant" | "system" | "tool" | (string & NonNullable); + content: string; + name?: string; +}; +type AiTextGenerationToolLegacyInput = { + name: string; + description: string; + parameters?: { + type: "object" | (string & NonNullable); + properties: { + [key: string]: { + type: string; + description?: string; + }; + }; + required: string[]; + }; +}; +type AiTextGenerationToolInput = { + type: "function" | (string & NonNullable); + function: { + name: string; + description: string; + parameters?: { + type: "object" | (string & NonNullable); + properties: { + [key: string]: { + type: string; + description?: string; + }; + }; + required: string[]; + }; + }; +}; +type AiTextGenerationFunctionsInput = { + name: string; + code: string; +}; +type AiTextGenerationResponseFormat = { + type: string; + json_schema?: any; +}; +type AiTextGenerationInput = { + prompt?: string; + raw?: boolean; + stream?: boolean; + max_tokens?: number; + temperature?: number; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + messages?: RoleScopedChatInput[]; + response_format?: AiTextGenerationResponseFormat; + tools?: AiTextGenerationToolInput[] | AiTextGenerationToolLegacyInput[] | (object & NonNullable); + functions?: AiTextGenerationFunctionsInput[]; +}; +type AiTextGenerationToolLegacyOutput = { + name: string; + arguments: unknown; +}; +type AiTextGenerationToolOutput = { + id: string; + type: "function"; + function: { + name: string; + arguments: string; + }; +}; +type UsageTags = { + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; +}; +type AiTextGenerationOutput = { + response?: string; + tool_calls?: AiTextGenerationToolLegacyOutput[] & AiTextGenerationToolOutput[]; + usage?: UsageTags; +}; +declare abstract class BaseAiTextGeneration { + inputs: AiTextGenerationInput; + postProcessedOutputs: AiTextGenerationOutput; +} +type AiTextToSpeechInput = { + prompt: 
string; + lang?: string; +}; +type AiTextToSpeechOutput = Uint8Array | { + audio: string; +}; +declare abstract class BaseAiTextToSpeech { + inputs: AiTextToSpeechInput; + postProcessedOutputs: AiTextToSpeechOutput; +} +type AiTextToImageInput = { + prompt: string; + negative_prompt?: string; + height?: number; + width?: number; + image?: number[]; + image_b64?: string; + mask?: number[]; + num_steps?: number; + strength?: number; + guidance?: number; + seed?: number; +}; +type AiTextToImageOutput = ReadableStream; +declare abstract class BaseAiTextToImage { + inputs: AiTextToImageInput; + postProcessedOutputs: AiTextToImageOutput; +} +type AiTranslationInput = { + text: string; + target_lang: string; + source_lang?: string; +}; +type AiTranslationOutput = { + translated_text?: string; +}; +declare abstract class BaseAiTranslation { + inputs: AiTranslationInput; + postProcessedOutputs: AiTranslationOutput; +} +/** + * Workers AI support for OpenAI's Responses API + * Reference: https://github.com/openai/openai-node/blob/master/src/resources/responses/responses.ts + * + * It's a stripped down version from its source. + * It currently supports basic function calling, json mode and accepts images as input. + * + * It does not include types for WebSearch, CodeInterpreter, FileInputs, MCP, CustomTools. + * We plan to add those incrementally as model + platform capabilities evolve. 
+ */ +type ResponsesInput = { + background?: boolean | null; + conversation?: string | ResponseConversationParam | null; + include?: Array | null; + input?: string | ResponseInput; + instructions?: string | null; + max_output_tokens?: number | null; + parallel_tool_calls?: boolean | null; + previous_response_id?: string | null; + prompt_cache_key?: string; + reasoning?: Reasoning | null; + safety_identifier?: string; + service_tier?: "auto" | "default" | "flex" | "scale" | "priority" | null; + stream?: boolean | null; + stream_options?: StreamOptions | null; + temperature?: number | null; + text?: ResponseTextConfig; + tool_choice?: ToolChoiceOptions | ToolChoiceFunction; + tools?: Array; + top_p?: number | null; + truncation?: "auto" | "disabled" | null; +}; +type ResponsesOutput = { + id?: string; + created_at?: number; + output_text?: string; + error?: ResponseError | null; + incomplete_details?: ResponseIncompleteDetails | null; + instructions?: string | Array | null; + object?: "response"; + output?: Array; + parallel_tool_calls?: boolean; + temperature?: number | null; + tool_choice?: ToolChoiceOptions | ToolChoiceFunction; + tools?: Array; + top_p?: number | null; + max_output_tokens?: number | null; + previous_response_id?: string | null; + prompt?: ResponsePrompt | null; + reasoning?: Reasoning | null; + safety_identifier?: string; + service_tier?: "auto" | "default" | "flex" | "scale" | "priority" | null; + status?: ResponseStatus; + text?: ResponseTextConfig; + truncation?: "auto" | "disabled" | null; + usage?: ResponseUsage; +}; +type EasyInputMessage = { + content: string | ResponseInputMessageContentList; + role: "user" | "assistant" | "system" | "developer"; + type?: "message"; +}; +type ResponsesFunctionTool = { + name: string; + parameters: { + [key: string]: unknown; + } | null; + strict: boolean | null; + type: "function"; + description?: string | null; +}; +type ResponseIncompleteDetails = { + reason?: "max_output_tokens" | "content_filter"; +}; 
+type ResponsePrompt = { + id: string; + variables?: { + [key: string]: string | ResponseInputText | ResponseInputImage; + } | null; + version?: string | null; +}; +type Reasoning = { + effort?: ReasoningEffort | null; + generate_summary?: "auto" | "concise" | "detailed" | null; + summary?: "auto" | "concise" | "detailed" | null; +}; +type ResponseContent = ResponseInputText | ResponseInputImage | ResponseOutputText | ResponseOutputRefusal | ResponseContentReasoningText; +type ResponseContentReasoningText = { + text: string; + type: "reasoning_text"; +}; +type ResponseConversationParam = { + id: string; +}; +type ResponseCreatedEvent = { + response: Response; + sequence_number: number; + type: "response.created"; +}; +type ResponseCustomToolCallOutput = { + call_id: string; + output: string | Array; + type: "custom_tool_call_output"; + id?: string; +}; +type ResponseError = { + code: "server_error" | "rate_limit_exceeded" | "invalid_prompt" | "vector_store_timeout" | "invalid_image" | "invalid_image_format" | "invalid_base64_image" | "invalid_image_url" | "image_too_large" | "image_too_small" | "image_parse_error" | "image_content_policy_violation" | "invalid_image_mode" | "image_file_too_large" | "unsupported_image_media_type" | "empty_image_file" | "failed_to_download_image" | "image_file_not_found"; + message: string; +}; +type ResponseErrorEvent = { + code: string | null; + message: string; + param: string | null; + sequence_number: number; + type: "error"; +}; +type ResponseFailedEvent = { + response: Response; + sequence_number: number; + type: "response.failed"; +}; +type ResponseFormatText = { + type: "text"; +}; +type ResponseFormatJSONObject = { + type: "json_object"; +}; +type ResponseFormatTextConfig = ResponseFormatText | ResponseFormatTextJSONSchemaConfig | ResponseFormatJSONObject; +type ResponseFormatTextJSONSchemaConfig = { + name: string; + schema: { + [key: string]: unknown; + }; + type: "json_schema"; + description?: string; + strict?: boolean | 
null; +}; +type ResponseFunctionCallArgumentsDeltaEvent = { + delta: string; + item_id: string; + output_index: number; + sequence_number: number; + type: "response.function_call_arguments.delta"; +}; +type ResponseFunctionCallArgumentsDoneEvent = { + arguments: string; + item_id: string; + name: string; + output_index: number; + sequence_number: number; + type: "response.function_call_arguments.done"; +}; +type ResponseFunctionCallOutputItem = ResponseInputTextContent | ResponseInputImageContent; +type ResponseFunctionCallOutputItemList = Array; +type ResponseFunctionToolCall = { + arguments: string; + call_id: string; + name: string; + type: "function_call"; + id?: string; + status?: "in_progress" | "completed" | "incomplete"; +}; +interface ResponseFunctionToolCallItem extends ResponseFunctionToolCall { + id: string; +} +type ResponseFunctionToolCallOutputItem = { + id: string; + call_id: string; + output: string | Array; + type: "function_call_output"; + status?: "in_progress" | "completed" | "incomplete"; +}; +type ResponseIncludable = "message.input_image.image_url" | "message.output_text.logprobs"; +type ResponseIncompleteEvent = { + response: Response; + sequence_number: number; + type: "response.incomplete"; +}; +type ResponseInput = Array; +type ResponseInputContent = ResponseInputText | ResponseInputImage; +type ResponseInputImage = { + detail: "low" | "high" | "auto"; + type: "input_image"; + /** + * Base64 encoded image + */ + image_url?: string | null; +}; +type ResponseInputImageContent = { + type: "input_image"; + detail?: "low" | "high" | "auto" | null; + /** + * Base64 encoded image + */ + image_url?: string | null; +}; +type ResponseInputItem = EasyInputMessage | ResponseInputItemMessage | ResponseOutputMessage | ResponseFunctionToolCall | ResponseInputItemFunctionCallOutput | ResponseReasoningItem; +type ResponseInputItemFunctionCallOutput = { + call_id: string; + output: string | ResponseFunctionCallOutputItemList; + type: 
"function_call_output"; + id?: string | null; + status?: "in_progress" | "completed" | "incomplete" | null; +}; +type ResponseInputItemMessage = { + content: ResponseInputMessageContentList; + role: "user" | "system" | "developer"; + status?: "in_progress" | "completed" | "incomplete"; + type?: "message"; +}; +type ResponseInputMessageContentList = Array; +type ResponseInputMessageItem = { + id: string; + content: ResponseInputMessageContentList; + role: "user" | "system" | "developer"; + status?: "in_progress" | "completed" | "incomplete"; + type?: "message"; +}; +type ResponseInputText = { + text: string; + type: "input_text"; +}; +type ResponseInputTextContent = { + text: string; + type: "input_text"; +}; +type ResponseItem = ResponseInputMessageItem | ResponseOutputMessage | ResponseFunctionToolCallItem | ResponseFunctionToolCallOutputItem; +type ResponseOutputItem = ResponseOutputMessage | ResponseFunctionToolCall | ResponseReasoningItem; +type ResponseOutputItemAddedEvent = { + item: ResponseOutputItem; + output_index: number; + sequence_number: number; + type: "response.output_item.added"; +}; +type ResponseOutputItemDoneEvent = { + item: ResponseOutputItem; + output_index: number; + sequence_number: number; + type: "response.output_item.done"; +}; +type ResponseOutputMessage = { + id: string; + content: Array; + role: "assistant"; + status: "in_progress" | "completed" | "incomplete"; + type: "message"; +}; +type ResponseOutputRefusal = { + refusal: string; + type: "refusal"; +}; +type ResponseOutputText = { + text: string; + type: "output_text"; + logprobs?: Array; +}; +type ResponseReasoningItem = { + id: string; + summary: Array; + type: "reasoning"; + content?: Array; + encrypted_content?: string | null; + status?: "in_progress" | "completed" | "incomplete"; +}; +type ResponseReasoningSummaryItem = { + text: string; + type: "summary_text"; +}; +type ResponseReasoningContentItem = { + text: string; + type: "reasoning_text"; +}; +type 
ResponseReasoningTextDeltaEvent = { + content_index: number; + delta: string; + item_id: string; + output_index: number; + sequence_number: number; + type: "response.reasoning_text.delta"; +}; +type ResponseReasoningTextDoneEvent = { + content_index: number; + item_id: string; + output_index: number; + sequence_number: number; + text: string; + type: "response.reasoning_text.done"; +}; +type ResponseRefusalDeltaEvent = { + content_index: number; + delta: string; + item_id: string; + output_index: number; + sequence_number: number; + type: "response.refusal.delta"; +}; +type ResponseRefusalDoneEvent = { + content_index: number; + item_id: string; + output_index: number; + refusal: string; + sequence_number: number; + type: "response.refusal.done"; +}; +type ResponseStatus = "completed" | "failed" | "in_progress" | "cancelled" | "queued" | "incomplete"; +type ResponseStreamEvent = ResponseCompletedEvent | ResponseCreatedEvent | ResponseErrorEvent | ResponseFunctionCallArgumentsDeltaEvent | ResponseFunctionCallArgumentsDoneEvent | ResponseFailedEvent | ResponseIncompleteEvent | ResponseOutputItemAddedEvent | ResponseOutputItemDoneEvent | ResponseReasoningTextDeltaEvent | ResponseReasoningTextDoneEvent | ResponseRefusalDeltaEvent | ResponseRefusalDoneEvent | ResponseTextDeltaEvent | ResponseTextDoneEvent; +type ResponseCompletedEvent = { + response: Response; + sequence_number: number; + type: "response.completed"; +}; +type ResponseTextConfig = { + format?: ResponseFormatTextConfig; + verbosity?: "low" | "medium" | "high" | null; +}; +type ResponseTextDeltaEvent = { + content_index: number; + delta: string; + item_id: string; + logprobs: Array; + output_index: number; + sequence_number: number; + type: "response.output_text.delta"; +}; +type ResponseTextDoneEvent = { + content_index: number; + item_id: string; + logprobs: Array; + output_index: number; + sequence_number: number; + text: string; + type: "response.output_text.done"; +}; +type Logprob = { + token: 
string; + logprob: number; + top_logprobs?: Array; +}; +type TopLogprob = { + token?: string; + logprob?: number; +}; +type ResponseUsage = { + input_tokens: number; + output_tokens: number; + total_tokens: number; +}; +type Tool = ResponsesFunctionTool; +type ToolChoiceFunction = { + name: string; + type: "function"; +}; +type ToolChoiceOptions = "none"; +type ReasoningEffort = "minimal" | "low" | "medium" | "high" | null; +type StreamOptions = { + include_obfuscation?: boolean; +}; +type Ai_Cf_Baai_Bge_Base_En_V1_5_Input = { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; + }[]; +}; +type Ai_Cf_Baai_Bge_Base_En_V1_5_Output = { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} | Ai_Cf_Baai_Bge_Base_En_V1_5_AsyncResponse; +interface Ai_Cf_Baai_Bge_Base_En_V1_5_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Base_En_V1_5 { + inputs: Ai_Cf_Baai_Bge_Base_En_V1_5_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Base_En_V1_5_Output; +} +type Ai_Cf_Openai_Whisper_Input = string | { + /** + * An array of integers that represent the audio data constrained to 8-bit unsigned integer values + */ + audio: number[]; +}; +interface Ai_Cf_Openai_Whisper_Output { + /** + * The transcription + */ + text: string; + word_count?: number; + words?: { + word?: string; + /** + * The second this word begins in the recording + */ + start?: number; + /** + * The ending second when the word completes + */ + end?: number; + }[]; + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper { + inputs: Ai_Cf_Openai_Whisper_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Output; +} +type Ai_Cf_Meta_M2M100_1_2B_Input = { + /** + * The text to be translated + */ + text: string; + /** + * The language code of the source text (e.g., 'en' for English). Defaults to 'en' if not specified + */ + source_lang?: string; + /** + * The language code to translate the text into (e.g., 'es' for Spanish) + */ + target_lang: string; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + /** + * The text to be translated + */ + text: string; + /** + * The language code of the source text (e.g., 'en' for English). Defaults to 'en' if not specified + */ + source_lang?: string; + /** + * The language code to translate the text into (e.g., 'es' for Spanish) + */ + target_lang: string; + }[]; +}; +type Ai_Cf_Meta_M2M100_1_2B_Output = { + /** + * The translated text in the target language + */ + translated_text?: string; +} | Ai_Cf_Meta_M2M100_1_2B_AsyncResponse; +interface Ai_Cf_Meta_M2M100_1_2B_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Meta_M2M100_1_2B { + inputs: Ai_Cf_Meta_M2M100_1_2B_Input; + postProcessedOutputs: Ai_Cf_Meta_M2M100_1_2B_Output; +} +type Ai_Cf_Baai_Bge_Small_En_V1_5_Input = { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; + }[]; +}; +type Ai_Cf_Baai_Bge_Small_En_V1_5_Output = { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} | Ai_Cf_Baai_Bge_Small_En_V1_5_AsyncResponse; +interface Ai_Cf_Baai_Bge_Small_En_V1_5_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Small_En_V1_5 { + inputs: Ai_Cf_Baai_Bge_Small_En_V1_5_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Small_En_V1_5_Output; +} +type Ai_Cf_Baai_Bge_Large_En_V1_5_Input = { + text: string | string[]; + /** + * The pooling method used in the embedding process. 
`cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; +} | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: { + text: string | string[]; + /** + * The pooling method used in the embedding process. `cls` pooling will generate more accurate embeddings on larger inputs - however, embeddings created with cls pooling are not compatible with embeddings generated with mean pooling. The default pooling method is `mean` in order for this to not be a breaking change, but we highly suggest using the new `cls` pooling for better accuracy. + */ + pooling?: "mean" | "cls"; + }[]; +}; +type Ai_Cf_Baai_Bge_Large_En_V1_5_Output = { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} | Ai_Cf_Baai_Bge_Large_En_V1_5_AsyncResponse; +interface Ai_Cf_Baai_Bge_Large_En_V1_5_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Large_En_V1_5 { + inputs: Ai_Cf_Baai_Bge_Large_En_V1_5_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Large_En_V1_5_Output; +} +type Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input = string | { + /** + * The input text prompt for the model to generate a response. + */ + prompt?: string; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * Controls the creativity of the AI's responses by adjusting how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; + image: number[] | (string & NonNullable); + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; +}; +interface Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output { + description?: string; +} +declare abstract class Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M { + inputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input; + postProcessedOutputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output; +} +type Ai_Cf_Openai_Whisper_Tiny_En_Input = string | { + /** + * An array of integers that represent the audio data constrained to 8-bit unsigned integer values + */ + audio: number[]; +}; +interface Ai_Cf_Openai_Whisper_Tiny_En_Output { + /** + * The transcription + */ + text: string; + word_count?: number; + words?: { + word?: string; + /** + * The second this word begins in the recording + */ + start?: number; + /** + * The ending second when the word completes + */ + end?: number; + }[]; + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper_Tiny_En { + inputs: Ai_Cf_Openai_Whisper_Tiny_En_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Tiny_En_Output; +} +interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input { + /** + * Base64 encoded value of the audio data. 
+ */ + audio: string; + /** + * Supported tasks are 'translate' or 'transcribe'. + */ + task?: string; + /** + * The language of the audio being transcribed or translated. + */ + language?: string; + /** + * Preprocess the audio with a voice activity detection model. + */ + vad_filter?: boolean; + /** + * A text prompt to help provide context to the model on the contents of the audio. + */ + initial_prompt?: string; + /** + * The prefix it appended the the beginning of the output of the transcription and can guide the transcription result. + */ + prefix?: string; +} +interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output { + transcription_info?: { + /** + * The language of the audio being transcribed or translated. + */ + language?: string; + /** + * The confidence level or probability of the detected language being accurate, represented as a decimal between 0 and 1. + */ + language_probability?: number; + /** + * The total duration of the original audio file, in seconds. + */ + duration?: number; + /** + * The duration of the audio after applying Voice Activity Detection (VAD) to remove silent or irrelevant sections, in seconds. + */ + duration_after_vad?: number; + }; + /** + * The complete transcription of the audio. + */ + text: string; + /** + * The total number of words in the transcription. + */ + word_count?: number; + segments?: { + /** + * The starting time of the segment within the audio, in seconds. + */ + start?: number; + /** + * The ending time of the segment within the audio, in seconds. + */ + end?: number; + /** + * The transcription of the segment. + */ + text?: string; + /** + * The temperature used in the decoding process, controlling randomness in predictions. Lower values result in more deterministic outputs. + */ + temperature?: number; + /** + * The average log probability of the predictions for the words in this segment, indicating overall confidence. 
+ */ + avg_logprob?: number; + /** + * The compression ratio of the input to the output, measuring how much the text was compressed during the transcription process. + */ + compression_ratio?: number; + /** + * The probability that the segment contains no speech, represented as a decimal between 0 and 1. + */ + no_speech_prob?: number; + words?: { + /** + * The individual word transcribed from the audio. + */ + word?: string; + /** + * The starting time of the word within the audio, in seconds. + */ + start?: number; + /** + * The ending time of the word within the audio, in seconds. + */ + end?: number; + }[]; + }[]; + /** + * The transcription in WebVTT format, which includes timing and text information for use in subtitles. + */ + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo { + inputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output; +} +type Ai_Cf_Baai_Bge_M3_Input = Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts | Ai_Cf_Baai_Bge_M3_Input_Embedding | { + /** + * Batch of the embeddings requests to run using async-queue + */ + requests: (Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts_1 | Ai_Cf_Baai_Bge_M3_Input_Embedding_1)[]; +}; +interface Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts { + /** + * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts + */ + query?: string; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +interface Ai_Cf_Baai_Bge_M3_Input_Embedding { + text: string | string[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? 
+ */ + truncate_inputs?: boolean; +} +interface Ai_Cf_Baai_Bge_M3_Input_QueryAnd_Contexts_1 { + /** + * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts + */ + query?: string; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +interface Ai_Cf_Baai_Bge_M3_Input_Embedding_1 { + text: string | string[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +type Ai_Cf_Baai_Bge_M3_Output = Ai_Cf_Baai_Bge_M3_Ouput_Query | Ai_Cf_Baai_Bge_M3_Output_EmbeddingFor_Contexts | Ai_Cf_Baai_Bge_M3_Ouput_Embedding | Ai_Cf_Baai_Bge_M3_AsyncResponse; +interface Ai_Cf_Baai_Bge_M3_Ouput_Query { + response?: { + /** + * Index of the context in the request + */ + id?: number; + /** + * Score of the context under the index. + */ + score?: number; + }[]; +} +interface Ai_Cf_Baai_Bge_M3_Output_EmbeddingFor_Contexts { + response?: number[][]; + shape?: number[]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} +interface Ai_Cf_Baai_Bge_M3_Ouput_Embedding { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} +interface Ai_Cf_Baai_Bge_M3_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Baai_Bge_M3 { + inputs: Ai_Cf_Baai_Bge_M3_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_M3_Output; +} +interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input { + /** + * A text description of the image you want to generate. + */ + prompt: string; + /** + * The number of diffusion steps; higher values can improve quality but take longer. + */ + steps?: number; +} +interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output { + /** + * The generated image in Base64 format. + */ + image?: string; +} +declare abstract class Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell { + inputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input; + postProcessedOutputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output; +} +type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input = Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Prompt | Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Messages; +interface Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + image?: number[] | (string & NonNullable); + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. 
Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; +} +interface Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + image?: number[] | (string & NonNullable); + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. 
+ */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * If true, the response will be streamed back incrementally. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Controls the creativity of the AI's responses by adjusting how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. 
+ */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output = { + /** + * The generated text response from the model + */ + response?: string; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct { + inputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output; +} +type Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Input = Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Prompt | Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Messages | Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Async_Batch; +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. 
+ */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. 
+ */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. 
+ */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Async_Batch { + requests?: { + /** + * User-supplied reference. This field will be present in the response as well it can be used to reference the request and response. It's NOT validated to be unique. + */ + external_reference?: string; + /** + * Prompt for the text generation model + */ + prompt?: string; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; + response_format?: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_2; + }[]; +} +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_JSON_Mode_2 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +} | string | Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_AsyncResponse; +interface Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast { + inputs: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast_Output; +} +interface Ai_Cf_Meta_Llama_Guard_3_8B_Input { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender must alternate between 'user' and 'assistant'. + */ + role: "user" | "assistant"; + /** + * The content of the message as a string. + */ + content: string; + }[]; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. 
+ */ + temperature?: number; + /** + * Dictate the output format of the generated response. + */ + response_format?: { + /** + * Set to json_object to process and output generated text as JSON. + */ + type?: string; + }; +} +interface Ai_Cf_Meta_Llama_Guard_3_8B_Output { + response?: string | { + /** + * Whether the conversation is safe or not. + */ + safe?: boolean; + /** + * A list of what hazard categories predicted for the conversation, if the conversation is deemed unsafe. + */ + categories?: string[]; + }; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +declare abstract class Base_Ai_Cf_Meta_Llama_Guard_3_8B { + inputs: Ai_Cf_Meta_Llama_Guard_3_8B_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_Guard_3_8B_Output; +} +interface Ai_Cf_Baai_Bge_Reranker_Base_Input { + /** + * A query you wish to perform against the provided contexts. + */ + /** + * Number of returned results starting with the best score. + */ + top_k?: number; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; +} +interface Ai_Cf_Baai_Bge_Reranker_Base_Output { + response?: { + /** + * Index of the context in the request + */ + id?: number; + /** + * Score of the context under the index. 
+ */ + score?: number; + }[]; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Reranker_Base { + inputs: Ai_Cf_Baai_Bge_Reranker_Base_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Reranker_Base_Output; +} +type Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Input = Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Prompt | Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Messages; +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. 
+ */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct { + inputs: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct_Output; +} +type Ai_Cf_Qwen_Qwq_32B_Input = Ai_Cf_Qwen_Qwq_32B_Prompt | Ai_Cf_Qwen_Qwq_32B_Messages; +interface Ai_Cf_Qwen_Qwq_32B_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwq_32B_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. 
+ */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Qwen_Qwq_32B_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Qwen_Qwq_32B { + inputs: Ai_Cf_Qwen_Qwq_32B_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwq_32B_Output; +} +type Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Input = Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Prompt | Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Messages; +interface Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. 
+ */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fulfilled for the response. 
+ */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +type Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct { + inputs: Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Input; + postProcessedOutputs: Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct_Output; +} +type Ai_Cf_Google_Gemma_3_12B_It_Input = Ai_Cf_Google_Gemma_3_12B_It_Prompt | Ai_Cf_Google_Gemma_3_12B_It_Messages; +interface Ai_Cf_Google_Gemma_3_12B_It_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Google_Gemma_3_12B_It_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[]; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. 
+ */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. 
+ */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Google_Gemma_3_12B_It_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +}; +declare abstract class Base_Ai_Cf_Google_Gemma_3_12B_It { + inputs: Ai_Cf_Google_Gemma_3_12B_It_Input; + postProcessedOutputs: Ai_Cf_Google_Gemma_3_12B_It_Output; +} +type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input = Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt | Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages | Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Async_Batch; +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. 
+ */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. 
+ */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Async_Batch { + requests: (Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt_Inner | Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages_Inner)[]; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Prompt_Inner { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. 
+ */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Messages_Inner { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_JSON_Mode; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. 
+ */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The tool call id. + */ + id?: string; + /** + * Specifies the type of tool (e.g., 'function'). + */ + type?: string; + /** + * Details of the function tool. 
+ */ + function?: { + /** + * The name of the tool to be called + */ + name?: string; + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + }; + }[]; +}; +declare abstract class Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct { + inputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output; +} +type Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Input = Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Async_Batch; +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. 
+ */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. 
+ */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Async_Batch { + requests: (Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt_1 | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages_1)[]; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Prompt_1 { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_2; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_2 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Messages_1 { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. 
+ */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_3; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_JSON_Mode_3 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Output = Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Chat_Completion_Response | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Text_Completion_Response | string | Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_AsyncResponse; +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Chat_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "chat.completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index?: number; + /** + * The message generated by the model + */ + message?: { + /** + * Role of the message author + */ + role: string; + /** + * The content of the message + */ + content: string; + /** + * Internal reasoning content (if available) + */ + reasoning_content?: string; + /** + * Tool calls made by the assistant + */ + tool_calls?: { + /** + * Unique identifier for the tool call + */ + id: string; + /** + * Type of tool call + */ + type: "function"; + function: { + /** + * Name of the function to call + */ + name: string; + /** + * JSON string of arguments for the function + */ + arguments: string; + }; + }[]; + }; + /** + * Reason why the model stopped generating + */ + finish_reason?: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; 
+ /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Text_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "text_completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index: number; + /** + * The generated text completion + */ + text: string; + /** + * Reason why the model stopped generating + */ + finish_reason: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +interface Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_AsyncResponse { + /** + * The async request id that can be used to obtain the results. + */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8 { + inputs: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8_Output; +} +interface Ai_Cf_Deepgram_Nova_3_Input { + audio: { + body: object; + contentType: string; + }; + /** + * Sets how the model will interpret strings submitted to the custom_topic param. When strict, the model will only return topics submitted using the custom_topic param. When extended, the model will return its own detected topics in addition to those submitted using the custom_topic param. 
+ */ + custom_topic_mode?: "extended" | "strict"; + /** + * Custom topics you want the model to detect within your input audio or text if present Submit up to 100 + */ + custom_topic?: string; + /** + * Sets how the model will interpret intents submitted to the custom_intent param. When strict, the model will only return intents submitted using the custom_intent param. When extended, the model will return its own detected intents in addition those submitted using the custom_intents param + */ + custom_intent_mode?: "extended" | "strict"; + /** + * Custom intents you want the model to detect within your input audio if present + */ + custom_intent?: string; + /** + * Identifies and extracts key entities from content in submitted audio + */ + detect_entities?: boolean; + /** + * Identifies the dominant language spoken in submitted audio + */ + detect_language?: boolean; + /** + * Recognize speaker changes. Each word in the transcript will be assigned a speaker number starting at 0 + */ + diarize?: boolean; + /** + * Identify and extract key entities from content in submitted audio + */ + dictation?: boolean; + /** + * Specify the expected encoding of your submitted audio + */ + encoding?: "linear16" | "flac" | "mulaw" | "amr-nb" | "amr-wb" | "opus" | "speex" | "g729"; + /** + * Arbitrary key-value pairs that are attached to the API response for usage in downstream processing + */ + extra?: string; + /** + * Filler Words can help transcribe interruptions in your audio, like 'uh' and 'um' + */ + filler_words?: boolean; + /** + * Key term prompting can boost or suppress specialized terminology and brands. + */ + keyterm?: string; + /** + * Keywords can boost or suppress specialized terminology and brands. + */ + keywords?: string; + /** + * The BCP-47 language tag that hints at the primary spoken language. Depending on the Model and API endpoint you choose only certain languages are available. 
+ */ + language?: string; + /** + * Spoken measurements will be converted to their corresponding abbreviations. + */ + measurements?: boolean; + /** + * Opts out requests from the Deepgram Model Improvement Program. Refer to our Docs for pricing impacts before setting this to true. https://dpgr.am/deepgram-mip. + */ + mip_opt_out?: boolean; + /** + * Mode of operation for the model representing broad area of topic that will be talked about in the supplied audio + */ + mode?: "general" | "medical" | "finance"; + /** + * Transcribe each audio channel independently. + */ + multichannel?: boolean; + /** + * Numerals converts numbers from written format to numerical format. + */ + numerals?: boolean; + /** + * Splits audio into paragraphs to improve transcript readability. + */ + paragraphs?: boolean; + /** + * Profanity Filter looks for recognized profanity and converts it to the nearest recognized non-profane word or removes it from the transcript completely. + */ + profanity_filter?: boolean; + /** + * Add punctuation and capitalization to the transcript. + */ + punctuate?: boolean; + /** + * Redaction removes sensitive information from your transcripts. + */ + redact?: string; + /** + * Search for terms or phrases in submitted audio and replaces them. + */ + replace?: string; + /** + * Search for terms or phrases in submitted audio. + */ + search?: string; + /** + * Recognizes the sentiment throughout a transcript or text. + */ + sentiment?: boolean; + /** + * Apply formatting to transcript output. When set to true, additional formatting will be applied to transcripts to improve readability. + */ + smart_format?: boolean; + /** + * Detect topics throughout a transcript or text. + */ + topics?: boolean; + /** + * Segments speech into meaningful semantic units. + */ + utterances?: boolean; + /** + * Seconds to wait before detecting a pause between words in submitted audio. 
 + */ + utt_split?: number; + /** + * The number of channels in the submitted audio + */ + channels?: number; + /** + * Specifies whether the streaming endpoint should provide ongoing transcription updates as more audio is received. When set to true, the endpoint sends continuous updates, meaning transcription results may evolve over time. Note: Supported only for websockets. + */ + interim_results?: boolean; + /** + * Indicates how long the model will wait to detect whether a speaker has finished speaking or pauses for a significant period of time. When set to a value, the streaming endpoint immediately finalizes the transcription for the processed time range and returns the transcript with a speech_final parameter set to true. Can also be set to false to disable endpointing + */ + endpointing?: string; + /** + * Indicates that speech has started. You'll begin receiving Speech Started messages upon speech starting. Note: Supported only for websockets. + */ + vad_events?: boolean; + /** + * Indicates how long the model will wait to send an UtteranceEnd message after a word has been transcribed. Use with interim_results. Note: Supported only for websockets. 
+ */ + utterance_end_ms?: boolean; +} +interface Ai_Cf_Deepgram_Nova_3_Output { + results?: { + channels?: { + alternatives?: { + confidence?: number; + transcript?: string; + words?: { + confidence?: number; + end?: number; + start?: number; + word?: string; + }[]; + }[]; + }[]; + summary?: { + result?: string; + short?: string; + }; + sentiments?: { + segments?: { + text?: string; + start_word?: number; + end_word?: number; + sentiment?: string; + sentiment_score?: number; + }[]; + average?: { + sentiment?: string; + sentiment_score?: number; + }; + }; + }; +} +declare abstract class Base_Ai_Cf_Deepgram_Nova_3 { + inputs: Ai_Cf_Deepgram_Nova_3_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Nova_3_Output; +} +interface Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Input { + queries?: string | string[]; + /** + * Optional instruction for the task + */ + instruction?: string; + documents?: string | string[]; + text?: string | string[]; +} +interface Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Output { + data?: number[][]; + shape?: number[]; +} +declare abstract class Base_Ai_Cf_Qwen_Qwen3_Embedding_0_6B { + inputs: Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Input; + postProcessedOutputs: Ai_Cf_Qwen_Qwen3_Embedding_0_6B_Output; +} +type Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Input = { + /** + * readable stream with audio data and content-type specified for that data + */ + audio: { + body: object; + contentType: string; + }; + /** + * type of data PCM data that's sent to the inference server as raw array + */ + dtype?: "uint8" | "float32" | "float64"; +} | { + /** + * base64 encoded audio data + */ + audio: string; + /** + * type of data PCM data that's sent to the inference server as raw array + */ + dtype?: "uint8" | "float32" | "float64"; +}; +interface Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Output { + /** + * if true, end-of-turn was detected + */ + is_complete?: boolean; + /** + * probability of the end-of-turn detection + */ + probability?: number; +} +declare abstract class Base_Ai_Cf_Pipecat_Ai_Smart_Turn_V2 { 
+ inputs: Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Input; + postProcessedOutputs: Ai_Cf_Pipecat_Ai_Smart_Turn_V2_Output; +} +declare abstract class Base_Ai_Cf_Openai_Gpt_Oss_120B { + inputs: ResponsesInput; + postProcessedOutputs: ResponsesOutput; +} +declare abstract class Base_Ai_Cf_Openai_Gpt_Oss_20B { + inputs: ResponsesInput; + postProcessedOutputs: ResponsesOutput; +} +interface Ai_Cf_Leonardo_Phoenix_1_0_Input { + /** + * A text description of the image you want to generate. + */ + prompt: string; + /** + * Controls how closely the generated image should adhere to the prompt; higher values make the image more aligned with the prompt + */ + guidance?: number; + /** + * Random seed for reproducibility of the image generation + */ + seed?: number; + /** + * The height of the generated image in pixels + */ + height?: number; + /** + * The width of the generated image in pixels + */ + width?: number; + /** + * The number of diffusion steps; higher values can improve quality but take longer + */ + num_steps?: number; + /** + * Specify what to exclude from the generated images + */ + negative_prompt?: string; +} +/** + * The generated image in JPEG format + */ +type Ai_Cf_Leonardo_Phoenix_1_0_Output = string; +declare abstract class Base_Ai_Cf_Leonardo_Phoenix_1_0 { + inputs: Ai_Cf_Leonardo_Phoenix_1_0_Input; + postProcessedOutputs: Ai_Cf_Leonardo_Phoenix_1_0_Output; +} +interface Ai_Cf_Leonardo_Lucid_Origin_Input { + /** + * A text description of the image you want to generate. 
+ */ + prompt: string; + /** + * Controls how closely the generated image should adhere to the prompt; higher values make the image more aligned with the prompt + */ + guidance?: number; + /** + * Random seed for reproducibility of the image generation + */ + seed?: number; + /** + * The height of the generated image in pixels + */ + height?: number; + /** + * The width of the generated image in pixels + */ + width?: number; + /** + * The number of diffusion steps; higher values can improve quality but take longer + */ + num_steps?: number; + /** + * The number of diffusion steps; higher values can improve quality but take longer + */ + steps?: number; +} +interface Ai_Cf_Leonardo_Lucid_Origin_Output { + /** + * The generated image in Base64 format. + */ + image?: string; +} +declare abstract class Base_Ai_Cf_Leonardo_Lucid_Origin { + inputs: Ai_Cf_Leonardo_Lucid_Origin_Input; + postProcessedOutputs: Ai_Cf_Leonardo_Lucid_Origin_Output; +} +interface Ai_Cf_Deepgram_Aura_1_Input { + /** + * Speaker used to produce the audio. + */ + speaker?: "angus" | "asteria" | "arcas" | "orion" | "orpheus" | "athena" | "luna" | "zeus" | "perseus" | "helios" | "hera" | "stella"; + /** + * Encoding of the output audio. + */ + encoding?: "linear16" | "flac" | "mulaw" | "alaw" | "mp3" | "opus" | "aac"; + /** + * Container specifies the file format wrapper for the output audio. The available options depend on the encoding type.. + */ + container?: "none" | "wav" | "ogg"; + /** + * The text content to be converted to speech + */ + text: string; + /** + * Sample Rate specifies the sample rate for the output audio. Based on the encoding, different sample rates are supported. For some encodings, the sample rate is not configurable + */ + sample_rate?: number; + /** + * The bitrate of the audio in bits per second. Choose from predefined ranges or specific values based on the encoding type. 
+ */ + bit_rate?: number; +} +/** + * The generated audio in MP3 format + */ +type Ai_Cf_Deepgram_Aura_1_Output = string; +declare abstract class Base_Ai_Cf_Deepgram_Aura_1 { + inputs: Ai_Cf_Deepgram_Aura_1_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Aura_1_Output; +} +interface Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Input { + /** + * Input text to translate. Can be a single string or a list of strings. + */ + text: string | string[]; + /** + * Target language to translate to + */ + target_language: "asm_Beng" | "awa_Deva" | "ben_Beng" | "bho_Deva" | "brx_Deva" | "doi_Deva" | "eng_Latn" | "gom_Deva" | "gon_Deva" | "guj_Gujr" | "hin_Deva" | "hne_Deva" | "kan_Knda" | "kas_Arab" | "kas_Deva" | "kha_Latn" | "lus_Latn" | "mag_Deva" | "mai_Deva" | "mal_Mlym" | "mar_Deva" | "mni_Beng" | "mni_Mtei" | "npi_Deva" | "ory_Orya" | "pan_Guru" | "san_Deva" | "sat_Olck" | "snd_Arab" | "snd_Deva" | "tam_Taml" | "tel_Telu" | "urd_Arab" | "unr_Deva"; +} +interface Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Output { + /** + * Translated texts + */ + translations: string[]; +} +declare abstract class Base_Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B { + inputs: Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Input; + postProcessedOutputs: Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B_Output; +} +type Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Input = Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Async_Batch; +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. 
+ */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. 
+ */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_1; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. 
Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_1 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Async_Batch { + requests: (Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt_1 | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages_1)[]; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Prompt_1 { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_2; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. 
+ */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_2 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Messages_1 { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. 
+ */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + response_format?: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_3; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. 
+ */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_JSON_Mode_3 { + type?: "json_object" | "json_schema"; + json_schema?: unknown; +} +type Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Output = Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Chat_Completion_Response | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Text_Completion_Response | string | Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_AsyncResponse; +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Chat_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "chat.completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index?: number; + /** + * The message generated by the model + */ + message?: { + /** + * Role of the message author + */ + role: string; + /** + * The content of the message + */ + content: string; + /** + * Internal reasoning content (if available) + */ + reasoning_content?: string; + /** + * Tool calls made by the assistant + */ + tool_calls?: { + /** + * Unique identifier for the tool call + */ + id: string; + /** + * Type of tool call + */ + type: "function"; + function: { + /** + * Name of the function to call + */ + name: string; + /** + * JSON string of arguments for the function + */ + arguments: string; + }; + }[]; + }; + /** + * Reason why the model stopped 
generating + */ + finish_reason?: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Text_Completion_Response { + /** + * Unique identifier for the completion + */ + id?: string; + /** + * Object type identifier + */ + object?: "text_completion"; + /** + * Unix timestamp of when the completion was created + */ + created?: number; + /** + * Model used for the completion + */ + model?: string; + /** + * List of completion choices + */ + choices?: { + /** + * Index of the choice in the list + */ + index: number; + /** + * The generated text completion + */ + text: string; + /** + * Reason why the model stopped generating + */ + finish_reason: string; + /** + * Stop reason (may be null) + */ + stop_reason?: string | null; + /** + * Log probabilities (if requested) + */ + logprobs?: {} | null; + /** + * Log probabilities for the prompt (if requested) + */ + prompt_logprobs?: {} | null; + }[]; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +interface Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_AsyncResponse { + /** + * The async request id that can be used to obtain the results. 
+ */ + request_id?: string; +} +declare abstract class Base_Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It { + inputs: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Input; + postProcessedOutputs: Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It_Output; +} +interface Ai_Cf_Pfnet_Plamo_Embedding_1B_Input { + /** + * Input text to embed. Can be a single string or a list of strings. + */ + text: string | string[]; +} +interface Ai_Cf_Pfnet_Plamo_Embedding_1B_Output { + /** + * Embedding vectors, where each vector is a list of floats. + */ + data: number[][]; + /** + * Shape of the embedding data as [number_of_embeddings, embedding_dimension]. + * + * @minItems 2 + * @maxItems 2 + */ + shape: [ + number, + number + ]; +} +declare abstract class Base_Ai_Cf_Pfnet_Plamo_Embedding_1B { + inputs: Ai_Cf_Pfnet_Plamo_Embedding_1B_Input; + postProcessedOutputs: Ai_Cf_Pfnet_Plamo_Embedding_1B_Output; +} +interface Ai_Cf_Deepgram_Flux_Input { + /** + * Encoding of the audio stream. Currently only supports raw signed little-endian 16-bit PCM. + */ + encoding: "linear16"; + /** + * Sample rate of the audio stream in Hz. + */ + sample_rate: string; + /** + * End-of-turn confidence required to fire an eager end-of-turn event. When set, enables EagerEndOfTurn and TurnResumed events. Valid Values 0.3 - 0.9. + */ + eager_eot_threshold?: string; + /** + * End-of-turn confidence required to finish a turn. Valid Values 0.5 - 0.9. + */ + eot_threshold?: string; + /** + * A turn will be finished when this much time has passed after speech, regardless of EOT confidence. + */ + eot_timeout_ms?: string; + /** + * Keyterm prompting can improve recognition of specialized terminology. Pass multiple keyterm query parameters to boost multiple keyterms. + */ + keyterm?: string; + /** + * Opts out requests from the Deepgram Model Improvement Program. Refer to Deepgram Docs for pricing impacts before setting this to true. 
https://dpgr.am/deepgram-mip + */ + mip_opt_out?: "true" | "false"; + /** + * Label your requests for the purpose of identification during usage reporting + */ + tag?: string; +} +/** + * Output will be returned as websocket messages. + */ +interface Ai_Cf_Deepgram_Flux_Output { + /** + * The unique identifier of the request (uuid) + */ + request_id?: string; + /** + * Starts at 0 and increments for each message the server sends to the client. + */ + sequence_id?: number; + /** + * The type of event being reported. + */ + event?: "Update" | "StartOfTurn" | "EagerEndOfTurn" | "TurnResumed" | "EndOfTurn"; + /** + * The index of the current turn + */ + turn_index?: number; + /** + * Start time in seconds of the audio range that was transcribed + */ + audio_window_start?: number; + /** + * End time in seconds of the audio range that was transcribed + */ + audio_window_end?: number; + /** + * Text that was said over the course of the current turn + */ + transcript?: string; + /** + * The words in the transcript + */ + words?: { + /** + * The individual punctuated, properly-cased word from the transcript + */ + word: string; + /** + * Confidence that this word was transcribed correctly + */ + confidence: number; + }[]; + /** + * Confidence that no more speech is coming in this turn + */ + end_of_turn_confidence?: number; +} +declare abstract class Base_Ai_Cf_Deepgram_Flux { + inputs: Ai_Cf_Deepgram_Flux_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Flux_Output; +} +interface Ai_Cf_Deepgram_Aura_2_En_Input { + /** + * Speaker used to produce the audio. 
+ */ + speaker?: "amalthea" | "andromeda" | "apollo" | "arcas" | "aries" | "asteria" | "athena" | "atlas" | "aurora" | "callista" | "cora" | "cordelia" | "delia" | "draco" | "electra" | "harmonia" | "helena" | "hera" | "hermes" | "hyperion" | "iris" | "janus" | "juno" | "jupiter" | "luna" | "mars" | "minerva" | "neptune" | "odysseus" | "ophelia" | "orion" | "orpheus" | "pandora" | "phoebe" | "pluto" | "saturn" | "thalia" | "theia" | "vesta" | "zeus"; + /** + * Encoding of the output audio. + */ + encoding?: "linear16" | "flac" | "mulaw" | "alaw" | "mp3" | "opus" | "aac"; + /** + * Container specifies the file format wrapper for the output audio. The available options depend on the encoding type.. + */ + container?: "none" | "wav" | "ogg"; + /** + * The text content to be converted to speech + */ + text: string; + /** + * Sample Rate specifies the sample rate for the output audio. Based on the encoding, different sample rates are supported. For some encodings, the sample rate is not configurable + */ + sample_rate?: number; + /** + * The bitrate of the audio in bits per second. Choose from predefined ranges or specific values based on the encoding type. + */ + bit_rate?: number; +} +/** + * The generated audio in MP3 format + */ +type Ai_Cf_Deepgram_Aura_2_En_Output = string; +declare abstract class Base_Ai_Cf_Deepgram_Aura_2_En { + inputs: Ai_Cf_Deepgram_Aura_2_En_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Aura_2_En_Output; +} +interface Ai_Cf_Deepgram_Aura_2_Es_Input { + /** + * Speaker used to produce the audio. + */ + speaker?: "sirio" | "nestor" | "carina" | "celeste" | "alvaro" | "diana" | "aquila" | "selena" | "estrella" | "javier"; + /** + * Encoding of the output audio. + */ + encoding?: "linear16" | "flac" | "mulaw" | "alaw" | "mp3" | "opus" | "aac"; + /** + * Container specifies the file format wrapper for the output audio. The available options depend on the encoding type.. 
+ */ + container?: "none" | "wav" | "ogg"; + /** + * The text content to be converted to speech + */ + text: string; + /** + * Sample Rate specifies the sample rate for the output audio. Based on the encoding, different sample rates are supported. For some encodings, the sample rate is not configurable + */ + sample_rate?: number; + /** + * The bitrate of the audio in bits per second. Choose from predefined ranges or specific values based on the encoding type. + */ + bit_rate?: number; +} +/** + * The generated audio in MP3 format + */ +type Ai_Cf_Deepgram_Aura_2_Es_Output = string; +declare abstract class Base_Ai_Cf_Deepgram_Aura_2_Es { + inputs: Ai_Cf_Deepgram_Aura_2_Es_Input; + postProcessedOutputs: Ai_Cf_Deepgram_Aura_2_Es_Output; +} +interface AiModels { + "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification; + "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage; + "@cf/runwayml/stable-diffusion-v1-5-inpainting": BaseAiTextToImage; + "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage; + "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage; + "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage; + "@cf/myshell-ai/melotts": BaseAiTextToSpeech; + "@cf/google/embeddinggemma-300m": BaseAiTextEmbeddings; + "@cf/microsoft/resnet-50": BaseAiImageClassification; + "@cf/meta/llama-2-7b-chat-int8": BaseAiTextGeneration; + "@cf/mistral/mistral-7b-instruct-v0.1": BaseAiTextGeneration; + "@cf/meta/llama-2-7b-chat-fp16": BaseAiTextGeneration; + "@hf/thebloke/llama-2-13b-chat-awq": BaseAiTextGeneration; + "@hf/thebloke/mistral-7b-instruct-v0.1-awq": BaseAiTextGeneration; + "@hf/thebloke/zephyr-7b-beta-awq": BaseAiTextGeneration; + "@hf/thebloke/openhermes-2.5-mistral-7b-awq": BaseAiTextGeneration; + "@hf/thebloke/neural-chat-7b-v3-1-awq": BaseAiTextGeneration; + "@hf/thebloke/llamaguard-7b-awq": BaseAiTextGeneration; + "@hf/thebloke/deepseek-coder-6.7b-base-awq": BaseAiTextGeneration; + 
"@hf/thebloke/deepseek-coder-6.7b-instruct-awq": BaseAiTextGeneration; + "@cf/deepseek-ai/deepseek-math-7b-instruct": BaseAiTextGeneration; + "@cf/defog/sqlcoder-7b-2": BaseAiTextGeneration; + "@cf/openchat/openchat-3.5-0106": BaseAiTextGeneration; + "@cf/tiiuae/falcon-7b-instruct": BaseAiTextGeneration; + "@cf/thebloke/discolm-german-7b-v1-awq": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-0.5b-chat": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-7b-chat-awq": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-14b-chat-awq": BaseAiTextGeneration; + "@cf/tinyllama/tinyllama-1.1b-chat-v1.0": BaseAiTextGeneration; + "@cf/microsoft/phi-2": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-1.8b-chat": BaseAiTextGeneration; + "@cf/mistral/mistral-7b-instruct-v0.2-lora": BaseAiTextGeneration; + "@hf/nousresearch/hermes-2-pro-mistral-7b": BaseAiTextGeneration; + "@hf/nexusflow/starling-lm-7b-beta": BaseAiTextGeneration; + "@hf/google/gemma-7b-it": BaseAiTextGeneration; + "@cf/meta-llama/llama-2-7b-chat-hf-lora": BaseAiTextGeneration; + "@cf/google/gemma-2b-it-lora": BaseAiTextGeneration; + "@cf/google/gemma-7b-it-lora": BaseAiTextGeneration; + "@hf/mistral/mistral-7b-instruct-v0.2": BaseAiTextGeneration; + "@cf/meta/llama-3-8b-instruct": BaseAiTextGeneration; + "@cf/fblgit/una-cybertron-7b-v2-bf16": BaseAiTextGeneration; + "@cf/meta/llama-3-8b-instruct-awq": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct-fp8": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct-awq": BaseAiTextGeneration; + "@cf/meta/llama-3.2-3b-instruct": BaseAiTextGeneration; + "@cf/meta/llama-3.2-1b-instruct": BaseAiTextGeneration; + "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b": BaseAiTextGeneration; + "@cf/ibm-granite/granite-4.0-h-micro": BaseAiTextGeneration; + "@cf/facebook/bart-large-cnn": BaseAiSummarization; + "@cf/llava-hf/llava-1.5-7b-hf": BaseAiImageToText; + "@cf/baai/bge-base-en-v1.5": Base_Ai_Cf_Baai_Bge_Base_En_V1_5; + "@cf/openai/whisper": Base_Ai_Cf_Openai_Whisper; + "@cf/meta/m2m100-1.2b": 
Base_Ai_Cf_Meta_M2M100_1_2B; + "@cf/baai/bge-small-en-v1.5": Base_Ai_Cf_Baai_Bge_Small_En_V1_5; + "@cf/baai/bge-large-en-v1.5": Base_Ai_Cf_Baai_Bge_Large_En_V1_5; + "@cf/unum/uform-gen2-qwen-500m": Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M; + "@cf/openai/whisper-tiny-en": Base_Ai_Cf_Openai_Whisper_Tiny_En; + "@cf/openai/whisper-large-v3-turbo": Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo; + "@cf/baai/bge-m3": Base_Ai_Cf_Baai_Bge_M3; + "@cf/black-forest-labs/flux-1-schnell": Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell; + "@cf/meta/llama-3.2-11b-vision-instruct": Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct; + "@cf/meta/llama-3.3-70b-instruct-fp8-fast": Base_Ai_Cf_Meta_Llama_3_3_70B_Instruct_Fp8_Fast; + "@cf/meta/llama-guard-3-8b": Base_Ai_Cf_Meta_Llama_Guard_3_8B; + "@cf/baai/bge-reranker-base": Base_Ai_Cf_Baai_Bge_Reranker_Base; + "@cf/qwen/qwen2.5-coder-32b-instruct": Base_Ai_Cf_Qwen_Qwen2_5_Coder_32B_Instruct; + "@cf/qwen/qwq-32b": Base_Ai_Cf_Qwen_Qwq_32B; + "@cf/mistralai/mistral-small-3.1-24b-instruct": Base_Ai_Cf_Mistralai_Mistral_Small_3_1_24B_Instruct; + "@cf/google/gemma-3-12b-it": Base_Ai_Cf_Google_Gemma_3_12B_It; + "@cf/meta/llama-4-scout-17b-16e-instruct": Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct; + "@cf/qwen/qwen3-30b-a3b-fp8": Base_Ai_Cf_Qwen_Qwen3_30B_A3B_Fp8; + "@cf/deepgram/nova-3": Base_Ai_Cf_Deepgram_Nova_3; + "@cf/qwen/qwen3-embedding-0.6b": Base_Ai_Cf_Qwen_Qwen3_Embedding_0_6B; + "@cf/pipecat-ai/smart-turn-v2": Base_Ai_Cf_Pipecat_Ai_Smart_Turn_V2; + "@cf/openai/gpt-oss-120b": Base_Ai_Cf_Openai_Gpt_Oss_120B; + "@cf/openai/gpt-oss-20b": Base_Ai_Cf_Openai_Gpt_Oss_20B; + "@cf/leonardo/phoenix-1.0": Base_Ai_Cf_Leonardo_Phoenix_1_0; + "@cf/leonardo/lucid-origin": Base_Ai_Cf_Leonardo_Lucid_Origin; + "@cf/deepgram/aura-1": Base_Ai_Cf_Deepgram_Aura_1; + "@cf/ai4bharat/indictrans2-en-indic-1B": Base_Ai_Cf_Ai4Bharat_Indictrans2_En_Indic_1B; + "@cf/aisingapore/gemma-sea-lion-v4-27b-it": Base_Ai_Cf_Aisingapore_Gemma_Sea_Lion_V4_27B_It; + 
"@cf/pfnet/plamo-embedding-1b": Base_Ai_Cf_Pfnet_Plamo_Embedding_1B; + "@cf/deepgram/flux": Base_Ai_Cf_Deepgram_Flux; + "@cf/deepgram/aura-2-en": Base_Ai_Cf_Deepgram_Aura_2_En; + "@cf/deepgram/aura-2-es": Base_Ai_Cf_Deepgram_Aura_2_Es; +} +type AiOptions = { + /** + * Send requests as an asynchronous batch job, only works for supported models + * https://developers.cloudflare.com/workers-ai/features/batch-api + */ + queueRequest?: boolean; + /** + * Establish websocket connections, only works for supported models + */ + websocket?: boolean; + /** + * Tag your requests to group and view them in Cloudflare dashboard. + * + * Rules: + * Tags must only contain letters, numbers, and the symbols: : - . / @ + * Each tag can have maximum 50 characters. + * Maximum 5 tags are allowed each request. + * Duplicate tags will removed. + */ + tags?: string[]; + gateway?: GatewayOptions; + returnRawResponse?: boolean; + prefix?: string; + extraHeaders?: object; +}; +type AiModelsSearchParams = { + author?: string; + hide_experimental?: boolean; + page?: number; + per_page?: number; + search?: string; + source?: number; + task?: string; +}; +type AiModelsSearchObject = { + id: string; + source: number; + name: string; + description: string; + task: { + id: string; + name: string; + description: string; + }; + tags: string[]; + properties: { + property_id: string; + value: string; + }[]; +}; +interface InferenceUpstreamError extends Error { +} +interface AiInternalError extends Error { +} +type AiModelListType = Record; +declare abstract class Ai { + aiGatewayLogId: string | null; + gateway(gatewayId: string): AiGateway; + autorag(autoragId: string): AutoRAG; + run(model: Name, inputs: InputOptions, options?: Options): Promise; + models(params?: AiModelsSearchParams): Promise; + toMarkdown(): ToMarkdownService; + toMarkdown(files: MarkdownDocument[], options?: ConversionRequestOptions): Promise; + toMarkdown(files: MarkdownDocument, options?: ConversionRequestOptions): Promise; +} 
+type GatewayRetries = { + maxAttempts?: 1 | 2 | 3 | 4 | 5; + retryDelayMs?: number; + backoff?: 'constant' | 'linear' | 'exponential'; +}; +type GatewayOptions = { + id: string; + cacheKey?: string; + cacheTtl?: number; + skipCache?: boolean; + metadata?: Record; + collectLog?: boolean; + eventId?: string; + requestTimeoutMs?: number; + retries?: GatewayRetries; +}; +type UniversalGatewayOptions = Exclude & { + /** + ** @deprecated + */ + id?: string; +}; +type AiGatewayPatchLog = { + score?: number | null; + feedback?: -1 | 1 | null; + metadata?: Record | null; +}; +type AiGatewayLog = { + id: string; + provider: string; + model: string; + model_type?: string; + path: string; + duration: number; + request_type?: string; + request_content_type?: string; + status_code: number; + response_content_type?: string; + success: boolean; + cached: boolean; + tokens_in?: number; + tokens_out?: number; + metadata?: Record; + step?: number; + cost?: number; + custom_cost?: boolean; + request_size: number; + request_head?: string; + request_head_complete: boolean; + response_size: number; + response_head?: string; + response_head_complete: boolean; + created_at: Date; +}; +type AIGatewayProviders = 'workers-ai' | 'anthropic' | 'aws-bedrock' | 'azure-openai' | 'google-vertex-ai' | 'huggingface' | 'openai' | 'perplexity-ai' | 'replicate' | 'groq' | 'cohere' | 'google-ai-studio' | 'mistral' | 'grok' | 'openrouter' | 'deepseek' | 'cerebras' | 'cartesia' | 'elevenlabs' | 'adobe-firefly'; +type AIGatewayHeaders = { + 'cf-aig-metadata': Record | string; + 'cf-aig-custom-cost': { + per_token_in?: number; + per_token_out?: number; + } | { + total_cost?: number; + } | string; + 'cf-aig-cache-ttl': number | string; + 'cf-aig-skip-cache': boolean | string; + 'cf-aig-cache-key': string; + 'cf-aig-event-id': string; + 'cf-aig-request-timeout': number | string; + 'cf-aig-max-attempts': number | string; + 'cf-aig-retry-delay': number | string; + 'cf-aig-backoff': string; + 
'cf-aig-collect-log': boolean | string; + Authorization: string; + 'Content-Type': string; + [key: string]: string | number | boolean | object; +}; +type AIGatewayUniversalRequest = { + provider: AIGatewayProviders | string; // eslint-disable-line + endpoint: string; + headers: Partial; + query: unknown; +}; +interface AiGatewayInternalError extends Error { +} +interface AiGatewayLogNotFound extends Error { +} +declare abstract class AiGateway { + patchLog(logId: string, data: AiGatewayPatchLog): Promise; + getLog(logId: string): Promise; + run(data: AIGatewayUniversalRequest | AIGatewayUniversalRequest[], options?: { + gateway?: UniversalGatewayOptions; + extraHeaders?: object; + }): Promise; + getUrl(provider?: AIGatewayProviders | string): Promise; // eslint-disable-line +} +interface AutoRAGInternalError extends Error { +} +interface AutoRAGNotFoundError extends Error { +} +interface AutoRAGUnauthorizedError extends Error { +} +interface AutoRAGNameNotSetError extends Error { +} +type ComparisonFilter = { + key: string; + type: 'eq' | 'ne' | 'gt' | 'gte' | 'lt' | 'lte'; + value: string | number | boolean; +}; +type CompoundFilter = { + type: 'and' | 'or'; + filters: ComparisonFilter[]; +}; +type AutoRagSearchRequest = { + query: string; + filters?: CompoundFilter | ComparisonFilter; + max_num_results?: number; + ranking_options?: { + ranker?: string; + score_threshold?: number; + }; + reranking?: { + enabled?: boolean; + model?: string; + }; + rewrite_query?: boolean; +}; +type AutoRagAiSearchRequest = AutoRagSearchRequest & { + stream?: boolean; + system_prompt?: string; +}; +type AutoRagAiSearchRequestStreaming = Omit & { + stream: true; +}; +type AutoRagSearchResponse = { + object: 'vector_store.search_results.page'; + search_query: string; + data: { + file_id: string; + filename: string; + score: number; + attributes: Record; + content: { + type: 'text'; + text: string; + }[]; + }[]; + has_more: boolean; + next_page: string | null; +}; +type 
AutoRagListResponse = { + id: string; + enable: boolean; + type: string; + source: string; + vectorize_name: string; + paused: boolean; + status: string; +}[]; +type AutoRagAiSearchResponse = AutoRagSearchResponse & { + response: string; +}; +declare abstract class AutoRAG { + list(): Promise; + search(params: AutoRagSearchRequest): Promise; + aiSearch(params: AutoRagAiSearchRequestStreaming): Promise; + aiSearch(params: AutoRagAiSearchRequest): Promise; + aiSearch(params: AutoRagAiSearchRequest): Promise; +} +interface BasicImageTransformations { + /** + * Maximum width in image pixels. The value must be an integer. + */ + width?: number; + /** + * Maximum height in image pixels. The value must be an integer. + */ + height?: number; + /** + * Resizing mode as a string. It affects interpretation of width and height + * options: + * - scale-down: Similar to contain, but the image is never enlarged. If + * the image is larger than given width or height, it will be resized. + * Otherwise its original size will be kept. + * - contain: Resizes to maximum size that fits within the given width and + * height. If only a single dimension is given (e.g. only width), the + * image will be shrunk or enlarged to exactly match that dimension. + * Aspect ratio is always preserved. + * - cover: Resizes (shrinks or enlarges) to fill the entire area of width + * and height. If the image has an aspect ratio different from the ratio + * of width and height, it will be cropped to fit. + * - crop: The image will be shrunk and cropped to fit within the area + * specified by width and height. The image will not be enlarged. For images + * smaller than the given dimensions it's the same as scale-down. For + * images larger than the given dimensions, it's the same as cover. + * See also trim. + * - pad: Resizes to the maximum size that fits within the given width and + * height, and then fills the remaining area with a background color + * (white by default). 
Use of this mode is not recommended, as the same + * effect can be more efficiently achieved with the contain mode and the + * CSS object-fit: contain property. + * - squeeze: Stretches and deforms to the width and height given, even if it + * breaks aspect ratio + */ + fit?: "scale-down" | "contain" | "cover" | "crop" | "pad" | "squeeze"; + /** + * Image segmentation using artificial intelligence models. Sets pixels not + * within selected segment area to transparent e.g "foreground" sets every + * background pixel as transparent. + */ + segment?: "foreground"; + /** + * When cropping with fit: "cover", this defines the side or point that should + * be left uncropped. The value is either a string + * "left", "right", "top", "bottom", "auto", or "center" (the default), + * or an object {x, y} containing focal point coordinates in the original + * image expressed as fractions ranging from 0.0 (top or left) to 1.0 + * (bottom or right), 0.5 being the center. {fit: "cover", gravity: "top"} will + * crop bottom or left and right sides as necessary, but won’t crop anything + * from the top. {fit: "cover", gravity: {x:0.5, y:0.2}} will crop each side to + * preserve as much as possible around a point at 20% of the height of the + * source image. + */ + gravity?: 'face' | 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | BasicImageTransformationsGravityCoordinates; + /** + * Background color to add underneath the image. Applies only to images with + * transparency (such as PNG). Accepts any CSS color (#RRGGBB, rgba(…), + * hsl(…), etc.) + */ + background?: string; + /** + * Number of degrees (90, 180, 270) to rotate the image by. width and height + * options refer to axes after rotation. 
+ */ + rotate?: 0 | 90 | 180 | 270 | 360; +} +interface BasicImageTransformationsGravityCoordinates { + x?: number; + y?: number; + mode?: 'remainder' | 'box-center'; +} +/** + * In addition to the properties you can set in the RequestInit dict + * that you pass as an argument to the Request constructor, you can + * set certain properties of a `cf` object to control how Cloudflare + * features are applied to that new Request. + * + * Note: Currently, these properties cannot be tested in the + * playground. + */ +interface RequestInitCfProperties extends Record { + cacheEverything?: boolean; + /** + * A request's cache key is what determines if two requests are + * "the same" for caching purposes. If a request has the same cache key + * as some previous request, then we can serve the same cached response for + * both. (e.g. 'some-key') + * + * Only available for Enterprise customers. + */ + cacheKey?: string; + /** + * This allows you to append additional Cache-Tag response headers + * to the origin response without modifications to the origin server. + * This will allow for greater control over the Purge by Cache Tag feature + * utilizing changes only in the Workers process. + * + * Only available for Enterprise customers. + */ + cacheTags?: string[]; + /** + * Force response to be cached for a given number of seconds. (e.g. 300) + */ + cacheTtl?: number; + /** + * Force response to be cached for a given number of seconds based on the Origin status code. + * (e.g. { '200-299': 86400, '404': 1, '500-599': 0 }) + */ + cacheTtlByStatus?: Record; + scrapeShield?: boolean; + apps?: boolean; + image?: RequestInitCfPropertiesImage; + minify?: RequestInitCfPropertiesImageMinify; + mirage?: boolean; + polish?: "lossy" | "lossless" | "off"; + r2?: RequestInitCfPropertiesR2; + /** + * Redirects the request to an alternate origin server. You can use this, + * for example, to implement load balancing across several origins. 
+ * (e.g.us-east.example.com) + * + * Note - For security reasons, the hostname set in resolveOverride must + * be proxied on the same Cloudflare zone of the incoming request. + * Otherwise, the setting is ignored. CNAME hosts are allowed, so to + * resolve to a host under a different domain or a DNS only domain first + * declare a CNAME record within your own zone’s DNS mapping to the + * external hostname, set proxy on Cloudflare, then set resolveOverride + * to point to that CNAME record. + */ + resolveOverride?: string; +} +interface RequestInitCfPropertiesImageDraw extends BasicImageTransformations { + /** + * Absolute URL of the image file to use for the drawing. It can be any of + * the supported file formats. For drawing of watermarks or non-rectangular + * overlays we recommend using PNG or WebP images. + */ + url: string; + /** + * Floating-point number between 0 (transparent) and 1 (opaque). + * For example, opacity: 0.5 makes overlay semitransparent. + */ + opacity?: number; + /** + * - If set to true, the overlay image will be tiled to cover the entire + * area. This is useful for stock-photo-like watermarks. + * - If set to "x", the overlay image will be tiled horizontally only + * (form a line). + * - If set to "y", the overlay image will be tiled vertically only + * (form a line). + */ + repeat?: true | "x" | "y"; + /** + * Position of the overlay image relative to a given edge. Each property is + * an offset in pixels. 0 aligns exactly to the edge. For example, left: 10 + * positions left side of the overlay 10 pixels from the left edge of the + * image it's drawn over. bottom: 0 aligns bottom of the overlay with bottom + * of the background image. + * + * Setting both left & right, or both top & bottom is an error. + * + * If no position is specified, the image will be centered. 
+ */ + top?: number; + left?: number; + bottom?: number; + right?: number; +} +interface RequestInitCfPropertiesImage extends BasicImageTransformations { + /** + * Device Pixel Ratio. Default 1. Multiplier for width/height that makes it + * easier to specify higher-DPI sizes in . + */ + dpr?: number; + /** + * Allows you to trim your image. Takes dpr into account and is performed before + * resizing or rotation. + * + * It can be used as: + * - left, top, right, bottom - it will specify the number of pixels to cut + * off each side + * - width, height - the width/height you'd like to end up with - can be used + * in combination with the properties above + * - border - this will automatically trim the surroundings of an image based on + * it's color. It consists of three properties: + * - color: rgb or hex representation of the color you wish to trim (todo: verify the rgba bit) + * - tolerance: difference from color to treat as color + * - keep: the number of pixels of border to keep + */ + trim?: "border" | { + top?: number; + bottom?: number; + left?: number; + right?: number; + width?: number; + height?: number; + border?: boolean | { + color?: string; + tolerance?: number; + keep?: number; + }; + }; + /** + * Quality setting from 1-100 (useful values are in 60-90 range). Lower values + * make images look worse, but load faster. The default is 85. It applies only + * to JPEG and WebP images. It doesn’t have any effect on PNG. + */ + quality?: number | "low" | "medium-low" | "medium-high" | "high"; + /** + * Output format to generate. It can be: + * - avif: generate images in AVIF format. + * - webp: generate images in Google WebP format. Set quality to 100 to get + * the WebP-lossless format. + * - json: instead of generating an image, outputs information about the + * image, in JSON format. The JSON object will contain image size + * (before and after resizing), source image’s MIME type, file size, etc. + * - jpeg: generate images in JPEG format. 
+ * - png: generate images in PNG format. + */ + format?: "avif" | "webp" | "json" | "jpeg" | "png" | "baseline-jpeg" | "png-force" | "svg"; + /** + * Whether to preserve animation frames from input files. Default is true. + * Setting it to false reduces animations to still images. This setting is + * recommended when enlarging images or processing arbitrary user content, + * because large GIF animations can weigh tens or even hundreds of megabytes. + * It is also useful to set anim:false when using format:"json" to get the + * response quicker without the number of frames. + */ + anim?: boolean; + /** + * What EXIF data should be preserved in the output image. Note that EXIF + * rotation and embedded color profiles are always applied ("baked in" into + * the image), and aren't affected by this option. Note that if the Polish + * feature is enabled, all metadata may have been removed already and this + * option may have no effect. + * - keep: Preserve most of EXIF metadata, including GPS location if there's + * any. + * - copyright: Only keep the copyright tag, and discard everything else. + * This is the default behavior for JPEG files. + * - none: Discard all invisible EXIF metadata. Currently WebP and PNG + * output formats always discard metadata. + */ + metadata?: "keep" | "copyright" | "none"; + /** + * Strength of sharpening filter to apply to the image. Floating-point + * number between 0 (no sharpening, default) and 10 (maximum). 1.0 is a + * recommended value for downscaled images. + */ + sharpen?: number; + /** + * Radius of a blur filter (approximate gaussian). Maximum supported radius + * is 250. + */ + blur?: number; + /** + * Overlays are drawn in the order they appear in the array (last array + * entry is the topmost layer). + */ + draw?: RequestInitCfPropertiesImageDraw[]; + /** + * Fetching image from authenticated origin. Setting this property will + * pass authentication headers (Authorization, Cookie, etc.) through to + * the origin. 
+ */ + "origin-auth"?: "share-publicly"; + /** + * Adds a border around the image. The border is added after resizing. Border + * width takes dpr into account, and can be specified either using a single + * width property, or individually for each side. + */ + border?: { + color: string; + width: number; + } | { + color: string; + top: number; + right: number; + bottom: number; + left: number; + }; + /** + * Increase brightness by a factor. A value of 1.0 equals no change, a value + * of 0.5 equals half brightness, and a value of 2.0 equals twice as bright. + * 0 is ignored. + */ + brightness?: number; + /** + * Increase contrast by a factor. A value of 1.0 equals no change, a value of + * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is + * ignored. + */ + contrast?: number; + /** + * Increase exposure by a factor. A value of 1.0 equals no change, a value of + * 0.5 darkens the image, and a value of 2.0 lightens the image. 0 is ignored. + */ + gamma?: number; + /** + * Increase contrast by a factor. A value of 1.0 equals no change, a value of + * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is + * ignored. + */ + saturation?: number; + /** + * Flips the images horizontally, vertically, or both. Flipping is applied before + * rotation, so if you apply flip=h,rotate=90 then the image will be flipped + * horizontally, then rotated by 90 degrees. + */ + flip?: 'h' | 'v' | 'hv'; + /** + * Slightly reduces latency on a cache miss by selecting a + * quickest-to-compress file format, at a cost of increased file size and + * lower image quality. It will usually override the format option and choose + * JPEG over WebP or AVIF. We do not recommend using this option, except in + * unusual circumstances like resizing uncacheable dynamically-generated + * images. 
+ */ + compression?: "fast"; +} +interface RequestInitCfPropertiesImageMinify { + javascript?: boolean; + css?: boolean; + html?: boolean; +} +interface RequestInitCfPropertiesR2 { + /** + * Colo id of bucket that an object is stored in + */ + bucketColoId?: number; +} +/** + * Request metadata provided by Cloudflare's edge. + */ +type IncomingRequestCfProperties = IncomingRequestCfPropertiesBase & IncomingRequestCfPropertiesBotManagementEnterprise & IncomingRequestCfPropertiesCloudflareForSaaSEnterprise & IncomingRequestCfPropertiesGeographicInformation & IncomingRequestCfPropertiesCloudflareAccessOrApiShield; +interface IncomingRequestCfPropertiesBase extends Record { + /** + * [ASN](https://www.iana.org/assignments/as-numbers/as-numbers.xhtml) of the incoming request. + * + * @example 395747 + */ + asn?: number; + /** + * The organization which owns the ASN of the incoming request. + * + * @example "Google Cloud" + */ + asOrganization?: string; + /** + * The original value of the `Accept-Encoding` header if Cloudflare modified it. + * + * @example "gzip, deflate, br" + */ + clientAcceptEncoding?: string; + /** + * The number of milliseconds it took for the request to reach your worker. + * + * @example 22 + */ + clientTcpRtt?: number; + /** + * The three-letter [IATA](https://en.wikipedia.org/wiki/IATA_airport_code) + * airport code of the data center that the request hit. + * + * @example "DFW" + */ + colo: string; + /** + * Represents the upstream's response to a + * [TCP `keepalive` message](https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html) + * from cloudflare. + * + * For workers with no upstream, this will always be `1`. + * + * @example 3 + */ + edgeRequestKeepAliveStatus: IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus; + /** + * The HTTP Protocol the request used. + * + * @example "HTTP/2" + */ + httpProtocol: string; + /** + * The browser-requested prioritization information in the request object. 
+ * + * If no information was set, defaults to the empty string `""` + * + * @example "weight=192;exclusive=0;group=3;group-weight=127" + * @default "" + */ + requestPriority: string; + /** + * The TLS version of the connection to Cloudflare. + * In requests served over plaintext (without TLS), this property is the empty string `""`. + * + * @example "TLSv1.3" + */ + tlsVersion: string; + /** + * The cipher for the connection to Cloudflare. + * In requests served over plaintext (without TLS), this property is the empty string `""`. + * + * @example "AEAD-AES128-GCM-SHA256" + */ + tlsCipher: string; + /** + * Metadata containing the [`HELLO`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2) and [`FINISHED`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9) messages from this request's TLS handshake. + * + * If the incoming request was served over plaintext (without TLS) this field is undefined. + */ + tlsExportedAuthenticator?: IncomingRequestCfPropertiesExportedAuthenticatorMetadata; +} +interface IncomingRequestCfPropertiesBotManagementBase { + /** + * Cloudflare’s [level of certainty](https://developers.cloudflare.com/bots/concepts/bot-score/) that a request comes from a bot, + * represented as an integer percentage between `1` (almost certainly a bot) and `99` (almost certainly human). + * + * @example 54 + */ + score: number; + /** + * A boolean value that is true if the request comes from a good bot, like Google or Bing. + * Most customers choose to allow this traffic. For more details, see [Traffic from known bots](https://developers.cloudflare.com/firewall/known-issues-and-faq/#how-does-firewall-rules-handle-traffic-from-known-bots). + */ + verifiedBot: boolean; + /** + * A boolean value that is true if the request originates from a + * Cloudflare-verified proxy service. 
+ */ + corporateProxy: boolean; + /** + * A boolean value that's true if the request matches [file extensions](https://developers.cloudflare.com/bots/reference/static-resources/) for many types of static resources. + */ + staticResource: boolean; + /** + * List of IDs that correlate to the Bot Management heuristic detections made on a request (you can have multiple heuristic detections on the same request). + */ + detectionIds: number[]; +} +interface IncomingRequestCfPropertiesBotManagement { + /** + * Results of Cloudflare's Bot Management analysis + */ + botManagement: IncomingRequestCfPropertiesBotManagementBase; + /** + * Duplicate of `botManagement.score`. + * + * @deprecated + */ + clientTrustScore: number; +} +interface IncomingRequestCfPropertiesBotManagementEnterprise extends IncomingRequestCfPropertiesBotManagement { + /** + * Results of Cloudflare's Bot Management analysis + */ + botManagement: IncomingRequestCfPropertiesBotManagementBase & { + /** + * A [JA3 Fingerprint](https://developers.cloudflare.com/bots/concepts/ja3-fingerprint/) to help profile specific SSL/TLS clients + * across different destination IPs, Ports, and X509 certificates. + */ + ja3Hash: string; + }; +} +interface IncomingRequestCfPropertiesCloudflareForSaaSEnterprise { + /** + * Custom metadata set per-host in [Cloudflare for SaaS](https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/). + * + * This field is only present if you have Cloudflare for SaaS enabled on your account + * and you have followed the [required steps to enable it]((https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/domain-support/custom-metadata/)). + */ + hostMetadata?: HostMetadata; +} +interface IncomingRequestCfPropertiesCloudflareAccessOrApiShield { + /** + * Information about the client certificate presented to Cloudflare. 
+ * + * This is populated when the incoming request is served over TLS using + * either Cloudflare Access or API Shield (mTLS) + * and the presented SSL certificate has a valid + * [Certificate Serial Number](https://ldapwiki.com/wiki/Certificate%20Serial%20Number) + * (i.e., not `null` or `""`). + * + * Otherwise, a set of placeholder values are used. + * + * The property `certPresented` will be set to `"1"` when + * the object is populated (i.e. the above conditions were met). + */ + tlsClientAuth: IncomingRequestCfPropertiesTLSClientAuth | IncomingRequestCfPropertiesTLSClientAuthPlaceholder; +} +/** + * Metadata about the request's TLS handshake + */ +interface IncomingRequestCfPropertiesExportedAuthenticatorMetadata { + /** + * The client's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal + * + * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" + */ + clientHandshake: string; + /** + * The server's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal + * + * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" + */ + serverHandshake: string; + /** + * The client's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded in hexadecimal + * + * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" + */ + clientFinished: string; + /** + * The server's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded in hexadecimal + * + * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" + */ + serverFinished: string; +} +/** + * Geographic data about the request's origin. + */ +interface IncomingRequestCfPropertiesGeographicInformation { + /** + * The [ISO 3166-1 Alpha 2](https://www.iso.org/iso-3166-country-codes.html) country code the request originated from. 
+ * + * If your worker is [configured to accept TOR connections](https://support.cloudflare.com/hc/en-us/articles/203306930-Understanding-Cloudflare-Tor-support-and-Onion-Routing), this may also be `"T1"`, indicating a request that originated over TOR. + * + * If Cloudflare is unable to determine where the request originated this property is omitted. + * + * The country code `"T1"` is used for requests originating on TOR. + * + * @example "GB" + */ + country?: Iso3166Alpha2Code | "T1"; + /** + * If present, this property indicates that the request originated in the EU + * + * @example "1" + */ + isEUCountry?: "1"; + /** + * A two-letter code indicating the continent the request originated from. + * + * @example "AN" + */ + continent?: ContinentCode; + /** + * The city the request originated from + * + * @example "Austin" + */ + city?: string; + /** + * Postal code of the incoming request + * + * @example "78701" + */ + postalCode?: string; + /** + * Latitude of the incoming request + * + * @example "30.27130" + */ + latitude?: string; + /** + * Longitude of the incoming request + * + * @example "-97.74260" + */ + longitude?: string; + /** + * Timezone of the incoming request + * + * @example "America/Chicago" + */ + timezone?: string; + /** + * If known, the ISO 3166-2 name for the first level region associated with + * the IP address of the incoming request + * + * @example "Texas" + */ + region?: string; + /** + * If known, the ISO 3166-2 code for the first-level region associated with + * the IP address of the incoming request + * + * @example "TX" + */ + regionCode?: string; + /** + * Metro code (DMA) of the incoming request + * + * @example "635" + */ + metroCode?: string; +} +/** Data about the incoming request's TLS certificate */ +interface IncomingRequestCfPropertiesTLSClientAuth { + /** Always `"1"`, indicating that the certificate was presented */ + certPresented: "1"; + /** + * Result of certificate verification. 
+ * + * @example "FAILED:self signed certificate" + */ + certVerified: Exclude; + /** The presented certificate's revokation status. + * + * - A value of `"1"` indicates the certificate has been revoked + * - A value of `"0"` indicates the certificate has not been revoked + */ + certRevoked: "1" | "0"; + /** + * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) + * + * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certIssuerDN: string; + /** + * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) + * + * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certSubjectDN: string; + /** + * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) + * + * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certIssuerDNRFC2253: string; + /** + * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) + * + * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certSubjectDNRFC2253: string; + /** The certificate issuer's distinguished name (legacy policies) */ + certIssuerDNLegacy: string; + /** The certificate subject's distinguished name (legacy policies) */ + certSubjectDNLegacy: string; + /** + * The certificate's serial number + * + * @example "00936EACBE07F201DF" + */ + certSerial: string; + /** + * The certificate issuer's serial number + * + * @example "2489002934BDFEA34" + */ + certIssuerSerial: string; + /** + * The certificate's Subject Key Identifier + * + * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" + */ + certSKI: string; + /** + * The 
certificate issuer's Subject Key Identifier + * + * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" + */ + certIssuerSKI: string; + /** + * The certificate's SHA-1 fingerprint + * + * @example "6b9109f323999e52259cda7373ff0b4d26bd232e" + */ + certFingerprintSHA1: string; + /** + * The certificate's SHA-256 fingerprint + * + * @example "acf77cf37b4156a2708e34c4eb755f9b5dbbe5ebb55adfec8f11493438d19e6ad3f157f81fa3b98278453d5652b0c1fd1d71e5695ae4d709803a4d3f39de9dea" + */ + certFingerprintSHA256: string; + /** + * The effective starting date of the certificate + * + * @example "Dec 22 19:39:00 2018 GMT" + */ + certNotBefore: string; + /** + * The effective expiration date of the certificate + * + * @example "Dec 22 19:39:00 2018 GMT" + */ + certNotAfter: string; +} +/** Placeholder values for TLS Client Authorization */ +interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder { + certPresented: "0"; + certVerified: "NONE"; + certRevoked: "0"; + certIssuerDN: ""; + certSubjectDN: ""; + certIssuerDNRFC2253: ""; + certSubjectDNRFC2253: ""; + certIssuerDNLegacy: ""; + certSubjectDNLegacy: ""; + certSerial: ""; + certIssuerSerial: ""; + certSKI: ""; + certIssuerSKI: ""; + certFingerprintSHA1: ""; + certFingerprintSHA256: ""; + certNotBefore: ""; + certNotAfter: ""; +} +/** Possible outcomes of TLS verification */ +declare type CertVerificationStatus = +/** Authentication succeeded */ +"SUCCESS" +/** No certificate was presented */ + | "NONE" +/** Failed because the certificate was self-signed */ + | "FAILED:self signed certificate" +/** Failed because the certificate failed a trust chain check */ + | "FAILED:unable to verify the first certificate" +/** Failed because the certificate not yet valid */ + | "FAILED:certificate is not yet valid" +/** Failed because the certificate is expired */ + | "FAILED:certificate has expired" +/** Failed for another unspecified reason */ + | "FAILED"; +/** + * An upstream endpoint's response to a TCP `keepalive` 
message from Cloudflare. + */ +declare type IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus = 0 /** Unknown */ | 1 /** no keepalives (not found) */ | 2 /** no connection re-use, opening keepalive connection failed */ | 3 /** no connection re-use, keepalive accepted and saved */ | 4 /** connection re-use, refused by the origin server (`TCP FIN`) */ | 5; /** connection re-use, accepted by the origin server */ +/** ISO 3166-1 Alpha-2 codes */ +declare type Iso3166Alpha2Code = "AD" | "AE" | "AF" | "AG" | "AI" | "AL" | "AM" | "AO" | "AQ" | "AR" | "AS" | "AT" | "AU" | "AW" | "AX" | "AZ" | "BA" | "BB" | "BD" | "BE" | "BF" | "BG" | "BH" | "BI" | "BJ" | "BL" | "BM" | "BN" | "BO" | "BQ" | "BR" | "BS" | "BT" | "BV" | "BW" | "BY" | "BZ" | "CA" | "CC" | "CD" | "CF" | "CG" | "CH" | "CI" | "CK" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CV" | "CW" | "CX" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "DO" | "DZ" | "EC" | "EE" | "EG" | "EH" | "ER" | "ES" | "ET" | "FI" | "FJ" | "FK" | "FM" | "FO" | "FR" | "GA" | "GB" | "GD" | "GE" | "GF" | "GG" | "GH" | "GI" | "GL" | "GM" | "GN" | "GP" | "GQ" | "GR" | "GS" | "GT" | "GU" | "GW" | "GY" | "HK" | "HM" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IM" | "IN" | "IO" | "IQ" | "IR" | "IS" | "IT" | "JE" | "JM" | "JO" | "JP" | "KE" | "KG" | "KH" | "KI" | "KM" | "KN" | "KP" | "KR" | "KW" | "KY" | "KZ" | "LA" | "LB" | "LC" | "LI" | "LK" | "LR" | "LS" | "LT" | "LU" | "LV" | "LY" | "MA" | "MC" | "MD" | "ME" | "MF" | "MG" | "MH" | "MK" | "ML" | "MM" | "MN" | "MO" | "MP" | "MQ" | "MR" | "MS" | "MT" | "MU" | "MV" | "MW" | "MX" | "MY" | "MZ" | "NA" | "NC" | "NE" | "NF" | "NG" | "NI" | "NL" | "NO" | "NP" | "NR" | "NU" | "NZ" | "OM" | "PA" | "PE" | "PF" | "PG" | "PH" | "PK" | "PL" | "PM" | "PN" | "PR" | "PS" | "PT" | "PW" | "PY" | "QA" | "RE" | "RO" | "RS" | "RU" | "RW" | "SA" | "SB" | "SC" | "SD" | "SE" | "SG" | "SH" | "SI" | "SJ" | "SK" | "SL" | "SM" | "SN" | "SO" | "SR" | "SS" | "ST" | "SV" | "SX" | "SY" | "SZ" | "TC" | "TD" | "TF" | 
"TG" | "TH" | "TJ" | "TK" | "TL" | "TM" | "TN" | "TO" | "TR" | "TT" | "TV" | "TW" | "TZ" | "UA" | "UG" | "UM" | "US" | "UY" | "UZ" | "VA" | "VC" | "VE" | "VG" | "VI" | "VN" | "VU" | "WF" | "WS" | "YE" | "YT" | "ZA" | "ZM" | "ZW"; +/** The 2-letter continent codes Cloudflare uses */ +declare type ContinentCode = "AF" | "AN" | "AS" | "EU" | "NA" | "OC" | "SA"; +type CfProperties = IncomingRequestCfProperties | RequestInitCfProperties; +interface D1Meta { + duration: number; + size_after: number; + rows_read: number; + rows_written: number; + last_row_id: number; + changed_db: boolean; + changes: number; + /** + * The region of the database instance that executed the query. + */ + served_by_region?: string; + /** + * The three letters airport code of the colo that executed the query. + */ + served_by_colo?: string; + /** + * True if-and-only-if the database instance that executed the query was the primary. + */ + served_by_primary?: boolean; + timings?: { + /** + * The duration of the SQL query execution by the database instance. It doesn't include any network time. + */ + sql_duration_ms: number; + }; + /** + * Number of total attempts to execute the query, due to automatic retries. + * Note: All other fields in the response like `timings` only apply to the last attempt. + */ + total_attempts?: number; +} +interface D1Response { + success: true; + meta: D1Meta & Record; + error?: never; +} +type D1Result = D1Response & { + results: T[]; +}; +interface D1ExecResult { + count: number; + duration: number; +} +type D1SessionConstraint = +// Indicates that the first query should go to the primary, and the rest queries +// using the same D1DatabaseSession will go to any replica that is consistent with +// the bookmark maintained by the session (returned by the first query). 
+'first-primary' +// Indicates that the first query can go anywhere (primary or replica), and the rest queries +// using the same D1DatabaseSession will go to any replica that is consistent with +// the bookmark maintained by the session (returned by the first query). + | 'first-unconstrained'; +type D1SessionBookmark = string; +declare abstract class D1Database { + prepare(query: string): D1PreparedStatement; + batch(statements: D1PreparedStatement[]): Promise[]>; + exec(query: string): Promise; + /** + * Creates a new D1 Session anchored at the given constraint or the bookmark. + * All queries executed using the created session will have sequential consistency, + * meaning that all writes done through the session will be visible in subsequent reads. + * + * @param constraintOrBookmark Either the session constraint or the explicit bookmark to anchor the created session. + */ + withSession(constraintOrBookmark?: D1SessionBookmark | D1SessionConstraint): D1DatabaseSession; + /** + * @deprecated dump() will be removed soon, only applies to deprecated alpha v1 databases. + */ + dump(): Promise; +} +declare abstract class D1DatabaseSession { + prepare(query: string): D1PreparedStatement; + batch(statements: D1PreparedStatement[]): Promise[]>; + /** + * @returns The latest session bookmark across all executed queries on the session. + * If no query has been executed yet, `null` is returned. + */ + getBookmark(): D1SessionBookmark | null; +} +declare abstract class D1PreparedStatement { + bind(...values: unknown[]): D1PreparedStatement; + first(colName: string): Promise; + first>(): Promise; + run>(): Promise>; + all>(): Promise>; + raw(options: { + columnNames: true; + }): Promise<[ + string[], + ...T[] + ]>; + raw(options?: { + columnNames?: false; + }): Promise; +} +// `Disposable` was added to TypeScript's standard lib types in version 5.2. +// To support older TypeScript versions, define an empty `Disposable` interface. 
+// Users won't be able to use `using`/`Symbol.dispose` without upgrading to 5.2, +// but this will ensure type checking on older versions still passes. +// TypeScript's interface merging will ensure our empty interface is effectively +// ignored when `Disposable` is included in the standard lib. +interface Disposable { +} +/** + * The returned data after sending an email + */ +interface EmailSendResult { + /** + * The Email Message ID + */ + messageId: string; +} +/** + * An email message that can be sent from a Worker. + */ +interface EmailMessage { + /** + * Envelope From attribute of the email message. + */ + readonly from: string; + /** + * Envelope To attribute of the email message. + */ + readonly to: string; +} +/** + * An email message that is sent to a consumer Worker and can be rejected/forwarded. + */ +interface ForwardableEmailMessage extends EmailMessage { + /** + * Stream of the email message content. + */ + readonly raw: ReadableStream; + /** + * An [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). + */ + readonly headers: Headers; + /** + * Size of the email message content. + */ + readonly rawSize: number; + /** + * Reject this email message by returning a permanent SMTP error back to the connecting client including the given reason. + * @param reason The reject reason. + * @returns void + */ + setReject(reason: string): void; + /** + * Forward this email message to a verified destination address of the account. + * @param rcptTo Verified destination address. + * @param headers A [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). + * @returns A promise that resolves when the email message is forwarded. + */ + forward(rcptTo: string, headers?: Headers): Promise; + /** + * Reply to the sender of this email message with a new EmailMessage object. + * @param message The reply message. + * @returns A promise that resolves when the email message is replied. 
+ */ + reply(message: EmailMessage): Promise; +} +/** A file attachment for an email message */ +type EmailAttachment = { + disposition: 'inline'; + contentId: string; + filename: string; + type: string; + content: string | ArrayBuffer | ArrayBufferView; +} | { + disposition: 'attachment'; + contentId?: undefined; + filename: string; + type: string; + content: string | ArrayBuffer | ArrayBufferView; +}; +/** An Email Address */ +interface EmailAddress { + name: string; + email: string; +} +/** + * A binding that allows a Worker to send email messages. + */ +interface SendEmail { + send(message: EmailMessage): Promise; + send(builder: { + from: string | EmailAddress; + to: string | string[]; + subject: string; + replyTo?: string | EmailAddress; + cc?: string | string[]; + bcc?: string | string[]; + headers?: Record; + text?: string; + html?: string; + attachments?: EmailAttachment[]; + }): Promise; +} +declare abstract class EmailEvent extends ExtendableEvent { + readonly message: ForwardableEmailMessage; +} +declare type EmailExportedHandler = (message: ForwardableEmailMessage, env: Env, ctx: ExecutionContext) => void | Promise; +declare module "cloudflare:email" { + let _EmailMessage: { + prototype: EmailMessage; + new (from: string, to: string, raw: ReadableStream | string): EmailMessage; + }; + export { _EmailMessage as EmailMessage }; +} +/** + * Hello World binding to serve as an explanatory example. DO NOT USE + */ +interface HelloWorldBinding { + /** + * Retrieve the current stored value + */ + get(): Promise<{ + value: string; + ms?: number; + }>; + /** + * Set a new stored value + */ + set(value: string): Promise; +} +interface Hyperdrive { + /** + * Connect directly to Hyperdrive as if it's your database, returning a TCP socket. + * + * Calling this method returns an identical socket to if you call + * `connect("host:port")` using the `host` and `port` fields from this object. + * Pick whichever approach works better with your preferred DB client library. 
+ * + * Note that this socket is not yet authenticated -- it's expected that your + * code (or preferably, the client library of your choice) will authenticate + * using the information in this class's readonly fields. + */ + connect(): Socket; + /** + * A valid DB connection string that can be passed straight into the typical + * client library/driver/ORM. This will typically be the easiest way to use + * Hyperdrive. + */ + readonly connectionString: string; + /* + * A randomly generated hostname that is only valid within the context of the + * currently running Worker which, when passed into `connect()` function from + * the "cloudflare:sockets" module, will connect to the Hyperdrive instance + * for your database. + */ + readonly host: string; + /* + * The port that must be paired the the host field when connecting. + */ + readonly port: number; + /* + * The username to use when authenticating to your database via Hyperdrive. + * Unlike the host and password, this will be the same every time + */ + readonly user: string; + /* + * The randomly generated password to use when authenticating to your + * database via Hyperdrive. Like the host field, this password is only valid + * within the context of the currently running Worker instance from which + * it's read. + */ + readonly password: string; + /* + * The name of the database to connect to. + */ + readonly database: string; +} +// Copyright (c) 2024 Cloudflare, Inc. 
+// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +type ImageInfoResponse = { + format: 'image/svg+xml'; +} | { + format: string; + fileSize: number; + width: number; + height: number; +}; +type ImageTransform = { + width?: number; + height?: number; + background?: string; + blur?: number; + border?: { + color?: string; + width?: number; + } | { + top?: number; + bottom?: number; + left?: number; + right?: number; + }; + brightness?: number; + contrast?: number; + fit?: 'scale-down' | 'contain' | 'pad' | 'squeeze' | 'cover' | 'crop'; + flip?: 'h' | 'v' | 'hv'; + gamma?: number; + segment?: 'foreground'; + gravity?: 'face' | 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | { + x?: number; + y?: number; + mode: 'remainder' | 'box-center'; + }; + rotate?: 0 | 90 | 180 | 270; + saturation?: number; + sharpen?: number; + trim?: 'border' | { + top?: number; + bottom?: number; + left?: number; + right?: number; + width?: number; + height?: number; + border?: boolean | { + color?: string; + tolerance?: number; + keep?: number; + }; + }; +}; +type ImageDrawOptions = { + opacity?: number; + repeat?: boolean | string; + top?: number; + left?: number; + bottom?: number; + right?: number; +}; +type ImageInputOptions = { + encoding?: 'base64'; +}; +type ImageOutputOptions = { + format: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp' | 'image/avif' | 'rgb' | 'rgba'; + quality?: number; + background?: string; + anim?: boolean; +}; +interface ImagesBinding { + /** + * Get image metadata (type, width and height) + * @throws {@link ImagesError} with code 9412 if input is not an image + * @param stream The image bytes + */ + info(stream: ReadableStream, options?: ImageInputOptions): Promise; + /** + * Begin applying a series of transformations to an image + * @param stream The image bytes + * @returns A transform handle + */ + input(stream: ReadableStream, options?: ImageInputOptions): 
ImageTransformer; +} +interface ImageTransformer { + /** + * Apply transform next, returning a transform handle. + * You can then apply more transformations, draw, or retrieve the output. + * @param transform + */ + transform(transform: ImageTransform): ImageTransformer; + /** + * Draw an image on this transformer, returning a transform handle. + * You can then apply more transformations, draw, or retrieve the output. + * @param image The image (or transformer that will give the image) to draw + * @param options The options configuring how to draw the image + */ + draw(image: ReadableStream | ImageTransformer, options?: ImageDrawOptions): ImageTransformer; + /** + * Retrieve the image that results from applying the transforms to the + * provided input + * @param options Options that apply to the output e.g. output format + */ + output(options: ImageOutputOptions): Promise; +} +type ImageTransformationOutputOptions = { + encoding?: 'base64'; +}; +interface ImageTransformationResult { + /** + * The image as a response, ready to store in cache or return to users + */ + response(): Response; + /** + * The content type of the returned image + */ + contentType(): string; + /** + * The bytes of the response + */ + image(options?: ImageTransformationOutputOptions): ReadableStream; +} +interface ImagesError extends Error { + readonly code: number; + readonly message: string; + readonly stack?: string; +} +/** + * Media binding for transforming media streams. + * Provides the entry point for media transformation operations. + */ +interface MediaBinding { + /** + * Creates a media transformer from an input stream. + * @param media - The input media bytes + * @returns A MediaTransformer instance for applying transformations + */ + input(media: ReadableStream): MediaTransformer; +} +/** + * Media transformer for applying transformation operations to media content. + * Handles sizing, fitting, and other input transformation parameters. 
+ */ +interface MediaTransformer { + /** + * Applies transformation options to the media content. + * @param transform - Configuration for how the media should be transformed + * @returns A generator for producing the transformed media output + */ + transform(transform: MediaTransformationInputOptions): MediaTransformationGenerator; +} +/** + * Generator for producing media transformation results. + * Configures the output format and parameters for the transformed media. + */ +interface MediaTransformationGenerator { + /** + * Generates the final media output with specified options. + * @param output - Configuration for the output format and parameters + * @returns The final transformation result containing the transformed media + */ + output(output: MediaTransformationOutputOptions): MediaTransformationResult; +} +/** + * Result of a media transformation operation. + * Provides multiple ways to access the transformed media content. + */ +interface MediaTransformationResult { + /** + * Returns the transformed media as a readable stream of bytes. + * @returns A stream containing the transformed media data + */ + media(): ReadableStream; + /** + * Returns the transformed media as an HTTP response object. + * @returns The transformed media as a Response, ready to store in cache or return to users + */ + response(): Response; + /** + * Returns the MIME type of the transformed media. + * @returns The content type string (e.g., 'image/jpeg', 'video/mp4') + */ + contentType(): string; +} +/** + * Configuration options for transforming media input. + * Controls how the media should be resized and fitted. + */ +type MediaTransformationInputOptions = { + /** How the media should be resized to fit the specified dimensions */ + fit?: 'contain' | 'cover' | 'scale-down'; + /** Target width in pixels */ + width?: number; + /** Target height in pixels */ + height?: number; +}; +/** + * Configuration options for Media Transformations output. 
+ * Controls the format, timing, and type of the generated output. + */ +type MediaTransformationOutputOptions = { + /** + * Output mode determining the type of media to generate + */ + mode?: 'video' | 'spritesheet' | 'frame' | 'audio'; + /** Whether to include audio in the output */ + audio?: boolean; + /** + * Starting timestamp for frame extraction or start time for clips. (e.g. '2s'). + */ + time?: string; + /** + * Duration for video clips, audio extraction, and spritesheet generation (e.g. '5s'). + */ + duration?: string; + /** + * Number of frames in the spritesheet. + */ + imageCount?: number; + /** + * Output format for the generated media. + */ + format?: 'jpg' | 'png' | 'm4a'; +}; +/** + * Error object for media transformation operations. + * Extends the standard Error interface with additional media-specific information. + */ +interface MediaError extends Error { + readonly code: number; + readonly message: string; + readonly stack?: string; +} +declare module 'cloudflare:node' { + interface NodeStyleServer { + listen(...args: unknown[]): this; + address(): { + port?: number | null | undefined; + }; + } + export function httpServerHandler(port: number): ExportedHandler; + export function httpServerHandler(options: { + port: number; + }): ExportedHandler; + export function httpServerHandler(server: NodeStyleServer): ExportedHandler; +} +type Params

= Record; +type EventContext = { + request: Request>; + functionPath: string; + waitUntil: (promise: Promise) => void; + passThroughOnException: () => void; + next: (input?: Request | string, init?: RequestInit) => Promise; + env: Env & { + ASSETS: { + fetch: typeof fetch; + }; + }; + params: Params

; + data: Data; +}; +type PagesFunction = Record> = (context: EventContext) => Response | Promise; +type EventPluginContext = { + request: Request>; + functionPath: string; + waitUntil: (promise: Promise) => void; + passThroughOnException: () => void; + next: (input?: Request | string, init?: RequestInit) => Promise; + env: Env & { + ASSETS: { + fetch: typeof fetch; + }; + }; + params: Params

; + data: Data; + pluginArgs: PluginArgs; +}; +type PagesPluginFunction = Record, PluginArgs = unknown> = (context: EventPluginContext) => Response | Promise; +declare module "assets:*" { + export const onRequest: PagesFunction; +} +// Copyright (c) 2022-2023 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +declare module "cloudflare:pipelines" { + export abstract class PipelineTransformationEntrypoint { + protected env: Env; + protected ctx: ExecutionContext; + constructor(ctx: ExecutionContext, env: Env); + /** + * run receives an array of PipelineRecord which can be + * transformed and returned to the pipeline + * @param records Incoming records from the pipeline to be transformed + * @param metadata Information about the specific pipeline calling the transformation entrypoint + * @returns A promise containing the transformed PipelineRecord array + */ + public run(records: I[], metadata: PipelineBatchMetadata): Promise; + } + export type PipelineRecord = Record; + export type PipelineBatchMetadata = { + pipelineId: string; + pipelineName: string; + }; + export interface Pipeline { + /** + * The Pipeline interface represents the type of a binding to a Pipeline + * + * @param records The records to send to the pipeline + */ + send(records: T[]): Promise; + } +} +// PubSubMessage represents an incoming PubSub message. +// The message includes metadata about the broker, the client, and the payload +// itself. +// https://developers.cloudflare.com/pub-sub/ +interface PubSubMessage { + // Message ID + readonly mid: number; + // MQTT broker FQDN in the form mqtts://BROKER.NAMESPACE.cloudflarepubsub.com:PORT + readonly broker: string; + // The MQTT topic the message was sent on. + readonly topic: string; + // The client ID of the client that published this message. 
+ readonly clientId: string; + // The unique identifier (JWT ID) used by the client to authenticate, if token + // auth was used. + readonly jti?: string; + // A Unix timestamp (seconds from Jan 1, 1970), set when the Pub/Sub Broker + // received the message from the client. + readonly receivedAt: number; + // An (optional) string with the MIME type of the payload, if set by the + // client. + readonly contentType: string; + // Set to 1 when the payload is a UTF-8 string + // https://docs.oasis-open.org/mqtt/mqtt/v5.0/os/mqtt-v5.0-os.html#_Toc3901063 + readonly payloadFormatIndicator: number; + // Pub/Sub (MQTT) payloads can be UTF-8 strings, or byte arrays. + // You can use payloadFormatIndicator to inspect this before decoding. + payload: string | Uint8Array; +} +// JsonWebKey extended by kid parameter +interface JsonWebKeyWithKid extends JsonWebKey { + // Key Identifier of the JWK + readonly kid: string; +} +interface RateLimitOptions { + key: string; +} +interface RateLimitOutcome { + success: boolean; +} +interface RateLimit { + /** + * Rate limit a request based on the provided options. + * @see https://developers.cloudflare.com/workers/runtime-apis/bindings/rate-limit/ + * @returns A promise that resolves with the outcome of the rate limit. + */ + limit(options: RateLimitOptions): Promise; +} +// Namespace for RPC utility types. Unfortunately, we can't use a `module` here as these types need +// to referenced by `Fetcher`. This is included in the "importable" version of the types which +// strips all `module` blocks. +declare namespace Rpc { + // Branded types for identifying `WorkerEntrypoint`/`DurableObject`/`Target`s. + // TypeScript uses *structural* typing meaning anything with the same shape as type `T` is a `T`. + // For the classes exported by `cloudflare:workers` we want *nominal* typing (i.e. 
we only want to + // accept `WorkerEntrypoint` from `cloudflare:workers`, not any other class with the same shape) + export const __RPC_STUB_BRAND: '__RPC_STUB_BRAND'; + export const __RPC_TARGET_BRAND: '__RPC_TARGET_BRAND'; + export const __WORKER_ENTRYPOINT_BRAND: '__WORKER_ENTRYPOINT_BRAND'; + export const __DURABLE_OBJECT_BRAND: '__DURABLE_OBJECT_BRAND'; + export const __WORKFLOW_ENTRYPOINT_BRAND: '__WORKFLOW_ENTRYPOINT_BRAND'; + export interface RpcTargetBranded { + [__RPC_TARGET_BRAND]: never; + } + export interface WorkerEntrypointBranded { + [__WORKER_ENTRYPOINT_BRAND]: never; + } + export interface DurableObjectBranded { + [__DURABLE_OBJECT_BRAND]: never; + } + export interface WorkflowEntrypointBranded { + [__WORKFLOW_ENTRYPOINT_BRAND]: never; + } + export type EntrypointBranded = WorkerEntrypointBranded | DurableObjectBranded | WorkflowEntrypointBranded; + // Types that can be used through `Stub`s + export type Stubable = RpcTargetBranded | ((...args: any[]) => any); + // Types that can be passed over RPC + // The reason for using a generic type here is to build a serializable subset of structured + // cloneable composite types. This allows types defined with the "interface" keyword to pass the + // serializable check as well. Otherwise, only types defined with the "type" keyword would pass. + type Serializable = + // Structured cloneables + BaseType + // Structured cloneable composites + | Map ? Serializable : never, T extends Map ? Serializable : never> | Set ? Serializable : never> | ReadonlyArray ? Serializable : never> | { + [K in keyof T]: K extends number | string ? Serializable : never; + } + // Special types + | Stub + // Serialized as stubs, see `Stubify` + | Stubable; + // Base type for all RPC stubs, including common memory management methods. + // `T` is used as a marker type for unwrapping `Stub`s later. 
+ interface StubBase extends Disposable { + [__RPC_STUB_BRAND]: T; + dup(): this; + } + export type Stub = Provider & StubBase; + // This represents all the types that can be sent as-is over an RPC boundary + type BaseType = void | undefined | null | boolean | number | bigint | string | TypedArray | ArrayBuffer | DataView | Date | Error | RegExp | ReadableStream | WritableStream | Request | Response | Headers; + // Recursively rewrite all `Stubable` types with `Stub`s + // prettier-ignore + type Stubify = T extends Stubable ? Stub : T extends Map ? Map, Stubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { + [key: string | number]: any; + } ? { + [K in keyof T]: Stubify; + } : T; + // Recursively rewrite all `Stub`s with the corresponding `T`s. + // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies: + // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`. + // prettier-ignore + type Unstubify = T extends StubBase ? V : T extends Map ? Map, Unstubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { + [key: string | number]: unknown; + } ? { + [K in keyof T]: Unstubify; + } : T; + type UnstubifyAll = { + [I in keyof A]: Unstubify; + }; + // Utility type for adding `Provider`/`Disposable`s to `object` types only. + // Note `unknown & T` is equivalent to `T`. + type MaybeProvider = T extends object ? Provider : unknown; + type MaybeDisposable = T extends object ? Disposable : unknown; + // Type for method return or property on an RPC interface. + // - Stubable types are replaced by stubs. + // - Serializable types are passed by value, with stubable types replaced by stubs + // and a top-level `Disposer`. + // Everything else can't be passed over PRC. + // Technically, we use custom thenables here, but they quack like `Promise`s. 
+ // Intersecting with `(Maybe)Provider` allows pipelining. + // prettier-ignore + type Result = R extends Stubable ? Promise> & Provider : R extends Serializable ? Promise & MaybeDisposable> & MaybeProvider : never; + // Type for method or property on an RPC interface. + // For methods, unwrap `Stub`s in parameters, and rewrite returns to be `Result`s. + // Unwrapping `Stub`s allows calling with `Stubable` arguments. + // For properties, rewrite types to be `Result`s. + // In each case, unwrap `Promise`s. + type MethodOrProperty = V extends (...args: infer P) => infer R ? (...args: UnstubifyAll

) => Result> : Result>; + // Type for the callable part of an `Provider` if `T` is callable. + // This is intersected with methods/properties. + type MaybeCallableProvider = T extends (...args: any[]) => any ? MethodOrProperty : unknown; + // Base type for all other types providing RPC-like interfaces. + // Rewrites all methods/properties to be `MethodOrProperty`s, while preserving callable types. + // `Reserved` names (e.g. stub method names like `dup()`) and symbols can't be accessed over RPC. + export type Provider = MaybeCallableProvider & Pick<{ + [K in keyof T]: MethodOrProperty; + }, Exclude>>; +} +declare namespace Cloudflare { + // Type of `env`. + // + // The specific project can extend `Env` by redeclaring it in project-specific files. Typescript + // will merge all declarations. + // + // You can use `wrangler types` to generate the `Env` type automatically. + interface Env { + } + // Project-specific parameters used to inform types. + // + // This interface is, again, intended to be declared in project-specific files, and then that + // declaration will be merged with this one. + // + // A project should have a declaration like this: + // + // interface GlobalProps { + // // Declares the main module's exports. Used to populate Cloudflare.Exports aka the type + // // of `ctx.exports`. + // mainModule: typeof import("my-main-module"); + // + // // Declares which of the main module's exports are configured with durable storage, and + // // thus should behave as Durable Object namsepace bindings. + // durableNamespaces: "MyDurableObject" | "AnotherDurableObject"; + // } + // + // You can use `wrangler types` to generate `GlobalProps` automatically. + interface GlobalProps { + } + // Evaluates to the type of a property in GlobalProps, defaulting to `Default` if it is not + // present. + type GlobalProp = K extends keyof GlobalProps ? GlobalProps[K] : Default; + // The type of the program's main module exports, if known. 
Requires `GlobalProps` to declare the + // `mainModule` property. + type MainModule = GlobalProp<"mainModule", {}>; + // The type of ctx.exports, which contains loopback bindings for all top-level exports. + type Exports = { + [K in keyof MainModule]: LoopbackForExport + // If the export is listed in `durableNamespaces`, then it is also a + // DurableObjectNamespace. + & (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace : DurableObjectNamespace : DurableObjectNamespace : {}); + }; +} +declare namespace CloudflareWorkersModule { + export type RpcStub = Rpc.Stub; + export const RpcStub: { + new (value: T): Rpc.Stub; + }; + export abstract class RpcTarget implements Rpc.RpcTargetBranded { + [Rpc.__RPC_TARGET_BRAND]: never; + } + // `protected` fields don't appear in `keyof`s, so can't be accessed over RPC + export abstract class WorkerEntrypoint implements Rpc.WorkerEntrypointBranded { + [Rpc.__WORKER_ENTRYPOINT_BRAND]: never; + protected ctx: ExecutionContext; + protected env: Env; + constructor(ctx: ExecutionContext, env: Env); + email?(message: ForwardableEmailMessage): void | Promise; + fetch?(request: Request): Response | Promise; + queue?(batch: MessageBatch): void | Promise; + scheduled?(controller: ScheduledController): void | Promise; + tail?(events: TraceItem[]): void | Promise; + tailStream?(event: TailStream.TailEvent): TailStream.TailEventHandlerType | Promise; + test?(controller: TestController): void | Promise; + trace?(traces: TraceItem[]): void | Promise; + } + export abstract class DurableObject implements Rpc.DurableObjectBranded { + [Rpc.__DURABLE_OBJECT_BRAND]: never; + protected ctx: DurableObjectState; + protected env: Env; + constructor(ctx: DurableObjectState, env: Env); + alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; + fetch?(request: Request): Response | Promise; + webSocketMessage?(ws: WebSocket, 
message: string | ArrayBuffer): void | Promise; + webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: boolean): void | Promise; + webSocketError?(ws: WebSocket, error: unknown): void | Promise; + } + export type WorkflowDurationLabel = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; + export type WorkflowSleepDuration = `${number} ${WorkflowDurationLabel}${'s' | ''}` | number; + export type WorkflowDelayDuration = WorkflowSleepDuration; + export type WorkflowTimeoutDuration = WorkflowSleepDuration; + export type WorkflowRetentionDuration = WorkflowSleepDuration; + export type WorkflowBackoff = 'constant' | 'linear' | 'exponential'; + export type WorkflowStepConfig = { + retries?: { + limit: number; + delay: WorkflowDelayDuration | number; + backoff?: WorkflowBackoff; + }; + timeout?: WorkflowTimeoutDuration | number; + }; + export type WorkflowEvent = { + payload: Readonly; + timestamp: Date; + instanceId: string; + }; + export type WorkflowStepEvent = { + payload: Readonly; + timestamp: Date; + type: string; + }; + export abstract class WorkflowStep { + do>(name: string, callback: () => Promise): Promise; + do>(name: string, config: WorkflowStepConfig, callback: () => Promise): Promise; + sleep: (name: string, duration: WorkflowSleepDuration) => Promise; + sleepUntil: (name: string, timestamp: Date | number) => Promise; + waitForEvent>(name: string, options: { + type: string; + timeout?: WorkflowTimeoutDuration | number; + }): Promise>; + } + export type WorkflowInstanceStatus = 'queued' | 'running' | 'paused' | 'errored' | 'terminated' | 'complete' | 'waiting' | 'waitingForPause' | 'unknown'; + export abstract class WorkflowEntrypoint | unknown = unknown> implements Rpc.WorkflowEntrypointBranded { + [Rpc.__WORKFLOW_ENTRYPOINT_BRAND]: never; + protected ctx: ExecutionContext; + protected env: Env; + constructor(ctx: ExecutionContext, env: Env); + run(event: Readonly>, step: WorkflowStep): Promise; + } + export function 
waitUntil(promise: Promise): void; + export function withEnv(newEnv: unknown, fn: () => unknown): unknown; + export function withExports(newExports: unknown, fn: () => unknown): unknown; + export function withEnvAndExports(newEnv: unknown, newExports: unknown, fn: () => unknown): unknown; + export const env: Cloudflare.Env; + export const exports: Cloudflare.Exports; +} +declare module 'cloudflare:workers' { + export = CloudflareWorkersModule; +} +interface SecretsStoreSecret { + /** + * Get a secret from the Secrets Store, returning a string of the secret value + * if it exists, or throws an error if it does not exist + */ + get(): Promise; +} +declare module "cloudflare:sockets" { + function _connect(address: string | SocketAddress, options?: SocketOptions): Socket; + export { _connect as connect }; +} +type MarkdownDocument = { + name: string; + blob: Blob; +}; +type ConversionResponse = { + id: string; + name: string; + mimeType: string; + format: 'markdown'; + tokens: number; + data: string; +} | { + id: string; + name: string; + mimeType: string; + format: 'error'; + error: string; +}; +type ImageConversionOptions = { + descriptionLanguage?: 'en' | 'es' | 'fr' | 'it' | 'pt' | 'de'; +}; +type EmbeddedImageConversionOptions = ImageConversionOptions & { + convert?: boolean; + maxConvertedImages?: number; +}; +type ConversionOptions = { + html?: { + images?: EmbeddedImageConversionOptions & { + convertOGImage?: boolean; + }; + hostname?: string; + }; + docx?: { + images?: EmbeddedImageConversionOptions; + }; + image?: ImageConversionOptions; + pdf?: { + images?: EmbeddedImageConversionOptions; + metadata?: boolean; + }; +}; +type ConversionRequestOptions = { + gateway?: GatewayOptions; + extraHeaders?: object; + conversionOptions?: ConversionOptions; +}; +type SupportedFileFormat = { + mimeType: string; + extension: string; +}; +declare abstract class ToMarkdownService { + transform(files: MarkdownDocument[], options?: ConversionRequestOptions): Promise; + 
transform(files: MarkdownDocument, options?: ConversionRequestOptions): Promise; + supported(): Promise; +} +declare namespace TailStream { + interface Header { + readonly name: string; + readonly value: string; + } + interface FetchEventInfo { + readonly type: "fetch"; + readonly method: string; + readonly url: string; + readonly cfJson?: object; + readonly headers: Header[]; + } + interface JsRpcEventInfo { + readonly type: "jsrpc"; + } + interface ScheduledEventInfo { + readonly type: "scheduled"; + readonly scheduledTime: Date; + readonly cron: string; + } + interface AlarmEventInfo { + readonly type: "alarm"; + readonly scheduledTime: Date; + } + interface QueueEventInfo { + readonly type: "queue"; + readonly queueName: string; + readonly batchSize: number; + } + interface EmailEventInfo { + readonly type: "email"; + readonly mailFrom: string; + readonly rcptTo: string; + readonly rawSize: number; + } + interface TraceEventInfo { + readonly type: "trace"; + readonly traces: (string | null)[]; + } + interface HibernatableWebSocketEventInfoMessage { + readonly type: "message"; + } + interface HibernatableWebSocketEventInfoError { + readonly type: "error"; + } + interface HibernatableWebSocketEventInfoClose { + readonly type: "close"; + readonly code: number; + readonly wasClean: boolean; + } + interface HibernatableWebSocketEventInfo { + readonly type: "hibernatableWebSocket"; + readonly info: HibernatableWebSocketEventInfoClose | HibernatableWebSocketEventInfoError | HibernatableWebSocketEventInfoMessage; + } + interface CustomEventInfo { + readonly type: "custom"; + } + interface FetchResponseInfo { + readonly type: "fetch"; + readonly statusCode: number; + } + type EventOutcome = "ok" | "canceled" | "exception" | "unknown" | "killSwitch" | "daemonDown" | "exceededCpu" | "exceededMemory" | "loadShed" | "responseStreamDisconnected" | "scriptNotFound"; + interface ScriptVersion { + readonly id: string; + readonly tag?: string; + readonly message?: string; + } + 
interface Onset { + readonly type: "onset"; + readonly attributes: Attribute[]; + // id for the span being opened by this Onset event. + readonly spanId: string; + readonly dispatchNamespace?: string; + readonly entrypoint?: string; + readonly executionModel: string; + readonly scriptName?: string; + readonly scriptTags?: string[]; + readonly scriptVersion?: ScriptVersion; + readonly info: FetchEventInfo | JsRpcEventInfo | ScheduledEventInfo | AlarmEventInfo | QueueEventInfo | EmailEventInfo | TraceEventInfo | HibernatableWebSocketEventInfo | CustomEventInfo; + } + interface Outcome { + readonly type: "outcome"; + readonly outcome: EventOutcome; + readonly cpuTime: number; + readonly wallTime: number; + } + interface SpanOpen { + readonly type: "spanOpen"; + readonly name: string; + // id for the span being opened by this SpanOpen event. + readonly spanId: string; + readonly info?: FetchEventInfo | JsRpcEventInfo | Attributes; + } + interface SpanClose { + readonly type: "spanClose"; + readonly outcome: EventOutcome; + } + interface DiagnosticChannelEvent { + readonly type: "diagnosticChannel"; + readonly channel: string; + readonly message: any; + } + interface Exception { + readonly type: "exception"; + readonly name: string; + readonly message: string; + readonly stack?: string; + } + interface Log { + readonly type: "log"; + readonly level: "debug" | "error" | "info" | "log" | "warn"; + readonly message: object; + } + interface DroppedEventsDiagnostic { + readonly diagnosticsType: "droppedEvents"; + readonly count: number; + } + interface StreamDiagnostic { + readonly type: 'streamDiagnostic'; + // To add new diagnostic types, define a new interface and add it to this union type. + readonly diagnostic: DroppedEventsDiagnostic; + } + // This marks the worker handler return information. + // This is separate from Outcome because the worker invocation can live for a long time after + // returning. 
For example - Websockets that return an http upgrade response but then continue + // streaming information or SSE http connections. + interface Return { + readonly type: "return"; + readonly info?: FetchResponseInfo; + } + interface Attribute { + readonly name: string; + readonly value: string | string[] | boolean | boolean[] | number | number[] | bigint | bigint[]; + } + interface Attributes { + readonly type: "attributes"; + readonly info: Attribute[]; + } + type EventType = Onset | Outcome | SpanOpen | SpanClose | DiagnosticChannelEvent | Exception | Log | StreamDiagnostic | Return | Attributes; + // Context in which this trace event lives. + interface SpanContext { + // Single id for the entire top-level invocation + // This should be a new traceId for the first worker stage invoked in the eyeball request and then + // same-account service-bindings should reuse the same traceId but cross-account service-bindings + // should use a new traceId. + readonly traceId: string; + // spanId in which this event is handled + // for Onset and SpanOpen events this would be the parent span id + // for Outcome and SpanClose these this would be the span id of the opening Onset and SpanOpen events + // For Hibernate and Mark this would be the span under which they were emitted. + // spanId is not set ONLY if: + // 1. This is an Onset event + // 2. We are not inheriting any SpanContext. (e.g. this is a cross-account service binding or a new top-level invocation) + readonly spanId?: string; + } + interface TailEvent { + // invocation id of the currently invoked worker stage. + // invocation id will always be unique to every Onset event and will be the same until the Outcome event. + readonly invocationId: string; + // Inherited spanContext for this event. 
+ readonly spanContext: SpanContext; + readonly timestamp: Date; + readonly sequence: number; + readonly event: Event; + } + type TailEventHandler = (event: TailEvent) => void | Promise; + type TailEventHandlerObject = { + outcome?: TailEventHandler; + spanOpen?: TailEventHandler; + spanClose?: TailEventHandler; + diagnosticChannel?: TailEventHandler; + exception?: TailEventHandler; + log?: TailEventHandler; + return?: TailEventHandler; + attributes?: TailEventHandler; + }; + type TailEventHandlerType = TailEventHandler | TailEventHandlerObject; +} +// Copyright (c) 2022-2023 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +/** + * Data types supported for holding vector metadata. + */ +type VectorizeVectorMetadataValue = string | number | boolean | string[]; +/** + * Additional information to associate with a vector. + */ +type VectorizeVectorMetadata = VectorizeVectorMetadataValue | Record; +type VectorFloatArray = Float32Array | Float64Array; +interface VectorizeError { + code?: number; + error: string; +} +/** + * Comparison logic/operation to use for metadata filtering. + * + * This list is expected to grow as support for more operations are released. + */ +type VectorizeVectorMetadataFilterOp = '$eq' | '$ne' | '$lt' | '$lte' | '$gt' | '$gte'; +type VectorizeVectorMetadataFilterCollectionOp = '$in' | '$nin'; +/** + * Filter criteria for vector metadata used to limit the retrieved query result set. + */ +type VectorizeVectorMetadataFilter = { + [field: string]: Exclude | null | { + [Op in VectorizeVectorMetadataFilterOp]?: Exclude | null; + } | { + [Op in VectorizeVectorMetadataFilterCollectionOp]?: Exclude[]; + }; +}; +/** + * Supported distance metrics for an index. + * Distance metrics determine how other "similar" vectors are determined. + */ +type VectorizeDistanceMetric = "euclidean" | "cosine" | "dot-product"; +/** + * Metadata return levels for a Vectorize query. 
+ * + * Default to "none". + * + * @property all Full metadata for the vector return set, including all fields (including those un-indexed) without truncation. This is a more expensive retrieval, as it requires additional fetching & reading of un-indexed data. + * @property indexed Return all metadata fields configured for indexing in the vector return set. This level of retrieval is "free" in that no additional overhead is incurred returning this data. However, note that indexed metadata is subject to truncation (especially for larger strings). + * @property none No indexed metadata will be returned. + */ +type VectorizeMetadataRetrievalLevel = "all" | "indexed" | "none"; +interface VectorizeQueryOptions { + topK?: number; + namespace?: string; + returnValues?: boolean; + returnMetadata?: boolean | VectorizeMetadataRetrievalLevel; + filter?: VectorizeVectorMetadataFilter; +} +/** + * Information about the configuration of an index. + */ +type VectorizeIndexConfig = { + dimensions: number; + metric: VectorizeDistanceMetric; +} | { + preset: string; // keep this generic, as we'll be adding more presets in the future and this is only in a read capacity +}; +/** + * Metadata about an existing index. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link VectorizeIndexInfo} for its post-beta equivalent. + */ +interface VectorizeIndexDetails { + /** The unique ID of the index */ + readonly id: string; + /** The name of the index. */ + name: string; + /** (optional) A human readable description for the index. */ + description?: string; + /** The index configuration, including the dimension size and distance metric. */ + config: VectorizeIndexConfig; + /** The number of records containing vectors within the index. */ + vectorsCount: number; +} +/** + * Metadata about an existing index. + */ +interface VectorizeIndexInfo { + /** The number of records containing vectors within the index. 
*/ + vectorCount: number; + /** Number of dimensions the index has been configured for. */ + dimensions: number; + /** ISO 8601 datetime of the last processed mutation on in the index. All changes before this mutation will be reflected in the index state. */ + processedUpToDatetime: number; + /** UUIDv4 of the last mutation processed by the index. All changes before this mutation will be reflected in the index state. */ + processedUpToMutation: number; +} +/** + * Represents a single vector value set along with its associated metadata. + */ +interface VectorizeVector { + /** The ID for the vector. This can be user-defined, and must be unique. It should uniquely identify the object, and is best set based on the ID of what the vector represents. */ + id: string; + /** The vector values */ + values: VectorFloatArray | number[]; + /** The namespace this vector belongs to. */ + namespace?: string; + /** Metadata associated with the vector. Includes the values of other fields and potentially additional details. */ + metadata?: Record; +} +/** + * Represents a matched vector for a query along with its score and (if specified) the matching vector information. + */ +type VectorizeMatch = Pick, "values"> & Omit & { + /** The score or rank for similarity, when returned as a result */ + score: number; +}; +/** + * A set of matching {@link VectorizeMatch} for a particular query. + */ +interface VectorizeMatches { + matches: VectorizeMatch[]; + count: number; +} +/** + * Results of an operation that performed a mutation on a set of vectors. + * Here, `ids` is a list of vectors that were successfully processed. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link VectorizeAsyncMutation} for its post-beta equivalent. + */ +interface VectorizeVectorMutation { + /* List of ids of vectors that were successfully processed. */ + ids: string[]; + /* Total count of the number of processed vectors. 
*/ + count: number; +} +/** + * Result type indicating a mutation on the Vectorize Index. + * Actual mutations are processed async where the `mutationId` is the unique identifier for the operation. + */ +interface VectorizeAsyncMutation { + /** The unique identifier for the async mutation operation containing the changeset. */ + mutationId: string; +} +/** + * A Vectorize Vector Search Index for querying vectors/embeddings. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link Vectorize} for its new implementation. + */ +declare abstract class VectorizeIndex { + /** + * Get information about the currently bound index. + * @returns A promise that resolves with information about the current index. + */ + public describe(): Promise; + /** + * Use the provided vector to perform a similarity search across the index. + * @param vector Input vector that will be used to drive the similarity search. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; + /** + * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. + * @param vectors List of vectors that will be inserted. + * @returns A promise that resolves with the ids & count of records that were successfully processed. + */ + public insert(vectors: VectorizeVector[]): Promise; + /** + * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. + * @param vectors List of vectors that will be upserted. + * @returns A promise that resolves with the ids & count of records that were successfully processed. + */ + public upsert(vectors: VectorizeVector[]): Promise; + /** + * Delete a list of vectors with a matching id. 
+ * @param ids List of vector ids that should be deleted. + * @returns A promise that resolves with the ids & count of records that were successfully processed (and thus deleted). + */ + public deleteByIds(ids: string[]): Promise; + /** + * Get a list of vectors with a matching id. + * @param ids List of vector ids that should be returned. + * @returns A promise that resolves with the raw unscored vectors matching the id set. + */ + public getByIds(ids: string[]): Promise; +} +/** + * A Vectorize Vector Search Index for querying vectors/embeddings. + * + * Mutations in this version are async, returning a mutation id. + */ +declare abstract class Vectorize { + /** + * Get information about the currently bound index. + * @returns A promise that resolves with information about the current index. + */ + public describe(): Promise; + /** + * Use the provided vector to perform a similarity search across the index. + * @param vector Input vector that will be used to drive the similarity search. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; + /** + * Use the provided vector-id to perform a similarity search across the index. + * @param vectorId Id for a vector in the index against which the index should be queried. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public queryById(vectorId: string, options?: VectorizeQueryOptions): Promise; + /** + * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. + * @param vectors List of vectors that will be inserted. + * @returns A promise that resolves with a unique identifier of a mutation containing the insert changeset. 
+ */ + public insert(vectors: VectorizeVector[]): Promise; + /** + * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. + * @param vectors List of vectors that will be upserted. + * @returns A promise that resolves with a unique identifier of a mutation containing the upsert changeset. + */ + public upsert(vectors: VectorizeVector[]): Promise; + /** + * Delete a list of vectors with a matching id. + * @param ids List of vector ids that should be deleted. + * @returns A promise that resolves with a unique identifier of a mutation containing the delete changeset. + */ + public deleteByIds(ids: string[]): Promise; + /** + * Get a list of vectors with a matching id. + * @param ids List of vector ids that should be returned. + * @returns A promise that resolves with the raw unscored vectors matching the id set. + */ + public getByIds(ids: string[]): Promise; +} +/** + * The interface for "version_metadata" binding + * providing metadata about the Worker Version using this binding. + */ +type WorkerVersionMetadata = { + /** The ID of the Worker Version using this binding */ + id: string; + /** The tag of the Worker Version using this binding */ + tag: string; + /** The timestamp of when the Worker Version was uploaded */ + timestamp: string; +}; +interface DynamicDispatchLimits { + /** + * Limit CPU time in milliseconds. + */ + cpuMs?: number; + /** + * Limit number of subrequests. + */ + subRequests?: number; +} +interface DynamicDispatchOptions { + /** + * Limit resources of invoked Worker script. + */ + limits?: DynamicDispatchLimits; + /** + * Arguments for outbound Worker script, if configured. + */ + outbound?: { + [key: string]: any; + }; +} +interface DispatchNamespace { + /** + * @param name Name of the Worker script. + * @param args Arguments to Worker script. + * @param options Options for Dynamic Dispatch invocation. 
+ * @returns A Fetcher object that allows you to send requests to the Worker script. + * @throws If the Worker script does not exist in this dispatch namespace, an error will be thrown. + */ + get(name: string, args?: { + [key: string]: any; + }, options?: DynamicDispatchOptions): Fetcher; +} +declare module 'cloudflare:workflows' { + /** + * NonRetryableError allows for a user to throw a fatal error + * that makes a Workflow instance fail immediately without triggering a retry + */ + export class NonRetryableError extends Error { + public constructor(message: string, name?: string); + } +} +declare abstract class Workflow { + /** + * Get a handle to an existing instance of the Workflow. + * @param id Id for the instance of this Workflow + * @returns A promise that resolves with a handle for the Instance + */ + public get(id: string): Promise; + /** + * Create a new instance and return a handle to it. If a provided id exists, an error will be thrown. + * @param options Options when creating an instance including id and params + * @returns A promise that resolves with a handle for the Instance + */ + public create(options?: WorkflowInstanceCreateOptions): Promise; + /** + * Create a batch of instances and return handle for all of them. If a provided id exists, an error will be thrown. + * `createBatch` is limited at 100 instances at a time or when the RPC limit for the batch (1MiB) is reached. + * @param batch List of Options when creating an instance including name and params + * @returns A promise that resolves with a list of handles for the created instances. + */ + public createBatch(batch: WorkflowInstanceCreateOptions[]): Promise; +} +type WorkflowDurationLabel = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; +type WorkflowSleepDuration = `${number} ${WorkflowDurationLabel}${'s' | ''}` | number; +type WorkflowRetentionDuration = WorkflowSleepDuration; +interface WorkflowInstanceCreateOptions { + /** + * An id for your Workflow instance. 
Must be unique within the Workflow. + */ + id?: string; + /** + * The event payload the Workflow instance is triggered with + */ + params?: PARAMS; + /** + * The retention policy for Workflow instance. + * Defaults to the maximum retention period available for the owner's account. + */ + retention?: { + successRetention?: WorkflowRetentionDuration; + errorRetention?: WorkflowRetentionDuration; + }; +} +type InstanceStatus = { + status: 'queued' // means that instance is waiting to be started (see concurrency limits) + | 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running + | 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish + | 'waitingForPause' // instance is finishing the current work to pause + | 'unknown'; + error?: { + name: string; + message: string; + }; + output?: unknown; +}; +interface WorkflowError { + code?: number; + message: string; +} +declare abstract class WorkflowInstance { + public id: string; + /** + * Pause the instance. + */ + public pause(): Promise; + /** + * Resume the instance. If it is already running, an error will be thrown. + */ + public resume(): Promise; + /** + * Terminate the instance. If it is errored, terminated or complete, an error will be thrown. + */ + public terminate(): Promise; + /** + * Restart the instance. + */ + public restart(): Promise; + /** + * Returns the current status of the instance. + */ + public status(): Promise; + /** + * Send an event to this instance. 
+ */ + public sendEvent({ type, payload, }: { + type: string; + payload: unknown; + }): Promise; +} diff --git a/apps/registry/wrangler.jsonc b/apps/registry/wrangler.jsonc index 9f0b051..4c87871 100644 --- a/apps/registry/wrangler.jsonc +++ b/apps/registry/wrangler.jsonc @@ -4,6 +4,14 @@ "main": "src/server.ts", "compatibility_date": "2025-09-01", "compatibility_flags": ["nodejs_compat"], + "observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } + }, "env": { "dev": { "name": "clawdentity-registry-dev", @@ -21,19 +29,11 @@ "custom_domain": true } ], - "queues": { - "producers": [ - { - "binding": "EVENT_BUS_QUEUE", - "queue": "clawdentity-events-dev" - } - ] - }, "vars": { "ENVIRONMENT": "development", "PROXY_URL": "https://dev.proxy.clawdentity.com", "REGISTRY_ISSUER_URL": "https://dev.registry.clawdentity.com", - "EVENT_BUS_BACKEND": "queue" + "EVENT_BUS_BACKEND": "memory" } }, "production": { diff --git a/biome.json b/biome.json index 34f16ca..b8fe526 100644 --- a/biome.json +++ b/biome.json @@ -8,7 +8,8 @@ "*.json", "!**/dist", "!**/drizzle/meta", - "!**/.wrangler" + "!**/.wrangler", + "!**/worker-configuration.d.ts" ] }, "formatter": { diff --git a/package.json b/package.json index d5135bb..d35bb4a 100644 --- a/package.json +++ b/package.json @@ -32,6 +32,6 @@ "tsup": "^8.5.1", "typescript": "5.8.3", "vitest": "^4.0.18", - "wrangler": "^4.64.0" + "wrangler": "^4.67.0" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ba5e04d..f2f1bde 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -30,8 +30,8 @@ importers: specifier: ^4.0.18 version: 4.0.18(@types/node@22.19.11)(yaml@2.8.2) wrangler: - specifier: ^4.64.0 - version: 4.64.0(@cloudflare/workers-types@4.20260210.0) + specifier: ^4.67.0 + version: 4.67.0 apps/cli: dependencies: @@ -250,41 +250,41 @@ packages: resolution: {integrity: sha512-SIOD2DxrRRwQ+jgzlXCqoEFiKOFqaPjhnNTGKXSRLvp1HiOvapLaFG2kEr9dYQTYe8rKrd9uvDUzmAITeNyaHQ==} engines: 
{node: '>=18.0.0'} - '@cloudflare/unenv-preset@2.12.1': - resolution: {integrity: sha512-tP/Wi+40aBJovonSNJSsS7aFJY0xjuckKplmzDs2Xat06BJ68B6iG7YDUWXJL8gNn0gqW7YC5WhlYhO3QbugQA==} + '@cloudflare/unenv-preset@2.14.0': + resolution: {integrity: sha512-XKAkWhi1nBdNsSEoNG9nkcbyvfUrSjSf+VYVPfOto3gLTZVc3F4g6RASCMh6IixBKCG2yDgZKQIHGKtjcnLnKg==} peerDependencies: unenv: 2.0.0-rc.24 - workerd: ^1.20260115.0 + workerd: ^1.20260218.0 peerDependenciesMeta: workerd: optional: true - '@cloudflare/workerd-darwin-64@1.20260210.0': - resolution: {integrity: sha512-e3vMgzr8ZM6VjpJVFrnMBhjvFhlMIkhT+BLpBk3pKaWsrXao+azDlmzzxB3Zf4CZ8LmCEtaP7n5d2mNGL6Dqww==} + '@cloudflare/workerd-darwin-64@1.20260219.0': + resolution: {integrity: sha512-k+xM+swQBQnkrvwobjRPxyeYwjLSludJusR0PqeHe+h6X9QIRGgw3s1AO38lXQsqzMSgG5709oOXSF19NKVVaQ==} engines: {node: '>=16'} cpu: [x64] os: [darwin] - '@cloudflare/workerd-darwin-arm64@1.20260210.0': - resolution: {integrity: sha512-ng2uLJVMrI5VrcAS26gDGM+qxCuWD4ZA8VR4i88RdyM8TLn+AqPFisrvn7AMA+QSv0+ck+ZdFtXek7qNp2gNuA==} + '@cloudflare/workerd-darwin-arm64@1.20260219.0': + resolution: {integrity: sha512-EyfQdsG1KcIVAf4qndT00LZly7sLFm1VxMWHBvOFB/EVYF2sE5HZ0dPbe+yrax5p3eS0oLZthR8ynhz4UulMUQ==} engines: {node: '>=16'} cpu: [arm64] os: [darwin] - '@cloudflare/workerd-linux-64@1.20260210.0': - resolution: {integrity: sha512-frn2/+6DV59h13JbGSk9ATvJw3uORWssFIKZ/G/to+WRrIDQgCpSrjLtGbFSSn5eBEhYOvwxPKc7IrppkmIj/w==} + '@cloudflare/workerd-linux-64@1.20260219.0': + resolution: {integrity: sha512-N0UHXILYYa6htFO/uC92uAqusvynbSbOcHcrVXMKqP9Jy7eqXGMovyKIrNgzYnKIszNB+0lfUYdGI3Wci07LuA==} engines: {node: '>=16'} cpu: [x64] os: [linux] - '@cloudflare/workerd-linux-arm64@1.20260210.0': - resolution: {integrity: sha512-0fmxEHaDcAF+7gcqnBcQdBCOzNvGz3mTMwqxEYJc5xZgFwQf65/dYK5fnV8z56GVNqu88NEnLMG3DD2G7Ey1vw==} + '@cloudflare/workerd-linux-arm64@1.20260219.0': + resolution: {integrity: sha512-835pjQ9uuAtwPBOAkPf+oxH3mNE5mqWuE3H7hJsul7WZsRD2FDcariyoT2AW6xyOePILrn4uMnmG1KGc9m/8Pg==} 
engines: {node: '>=16'} cpu: [arm64] os: [linux] - '@cloudflare/workerd-windows-64@1.20260210.0': - resolution: {integrity: sha512-G/Apjk/QLNnwbu8B0JO9FuAJKHNr+gl8X3G/7qaUrpwIkPx5JFQElVE6LKk4teSrycvAy5AzLFAL0lOB1xsUIQ==} + '@cloudflare/workerd-windows-64@1.20260219.0': + resolution: {integrity: sha512-i7qcuOsuAxqqn1n5Ar3Rh1dHUL9vNmpF9FcdMTT84jIrdm5UNrPZz5grJthPmpB9LTcreT9iiP6qKbzGjnCwPA==} engines: {node: '>=16'} cpu: [x64] os: [win32] @@ -1890,8 +1890,8 @@ packages: resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} engines: {node: '>=18'} - miniflare@4.20260210.0: - resolution: {integrity: sha512-HXR6m53IOqEzq52DuGF1x7I1K6lSIqzhbCbQXv/cTmPnPJmNkr7EBtLDm4nfSkOvlDtnwDCLUjWII5fyGJI5Tw==} + miniflare@4.20260219.0: + resolution: {integrity: sha512-EIb5wXbWUnnC60XU2aiFOPNd4fgTXzECkwRSOXZ1vdcY9WZaEE9rVf+h+Apw+WkOHRkp3Dr9/ZhQ5y1R+9iZ4Q==} engines: {node: '>=18.0.0'} hasBin: true @@ -2407,17 +2407,17 @@ packages: engines: {node: '>=8'} hasBin: true - workerd@1.20260210.0: - resolution: {integrity: sha512-Sb0WXhrvf+XHQigP2trAxQnXo7wxZFC4PWnn6I7LhFxiTvzxvOAqMEiLkIz58wggRCb54T/KAA8hdjkTniR5FA==} + workerd@1.20260219.0: + resolution: {integrity: sha512-l4U4iT5H8jNV6+EK23ExnUV2z6JvqQtQPrT8XCm4G8RpwC9EPpYTOO9s/ImMPJKe1WSbQUQoJ4k8Nd83fz8skQ==} engines: {node: '>=16'} hasBin: true - wrangler@4.64.0: - resolution: {integrity: sha512-0PBiVEbshQT4Av/KLHbOAks4ioIKp/eAO7Xr2BgAX5v7cFYYgeOvudBrbtZa/hDDIA6858QuJnTQ8mI+cm8Vqw==} + wrangler@4.67.0: + resolution: {integrity: sha512-58OoVth7bqm0nqsRgcI67gHbpp0IfR1JIBqDY0XR1FzRu9Qkjn6v2iJAdFf82QcVBFhaMBYQi88WqYGswq5wlQ==} engines: {node: '>=20.0.0'} hasBin: true peerDependencies: - '@cloudflare/workers-types': ^4.20260210.0 + '@cloudflare/workers-types': ^4.20260219.0 peerDependenciesMeta: '@cloudflare/workers-types': optional: true @@ -2537,25 +2537,25 @@ snapshots: '@cloudflare/kv-asset-handler@0.4.2': {} - 
'@cloudflare/unenv-preset@2.12.1(unenv@2.0.0-rc.24)(workerd@1.20260210.0)': + '@cloudflare/unenv-preset@2.14.0(unenv@2.0.0-rc.24)(workerd@1.20260219.0)': dependencies: unenv: 2.0.0-rc.24 optionalDependencies: - workerd: 1.20260210.0 + workerd: 1.20260219.0 - '@cloudflare/workerd-darwin-64@1.20260210.0': + '@cloudflare/workerd-darwin-64@1.20260219.0': optional: true - '@cloudflare/workerd-darwin-arm64@1.20260210.0': + '@cloudflare/workerd-darwin-arm64@1.20260219.0': optional: true - '@cloudflare/workerd-linux-64@1.20260210.0': + '@cloudflare/workerd-linux-64@1.20260219.0': optional: true - '@cloudflare/workerd-linux-arm64@1.20260210.0': + '@cloudflare/workerd-linux-arm64@1.20260219.0': optional: true - '@cloudflare/workerd-windows-64@1.20260210.0': + '@cloudflare/workerd-windows-64@1.20260219.0': optional: true '@cloudflare/workers-types@4.20260210.0': {} @@ -3741,12 +3741,12 @@ snapshots: mimic-function@5.0.1: {} - miniflare@4.20260210.0: + miniflare@4.20260219.0: dependencies: '@cspotcode/source-map-support': 0.8.1 sharp: 0.34.5 undici: 7.18.2 - workerd: 1.20260210.0 + workerd: 1.20260219.0 ws: 8.18.0 youch: 4.1.0-beta.10 transitivePeerDependencies: @@ -4287,26 +4287,25 @@ snapshots: siginfo: 2.0.0 stackback: 0.0.2 - workerd@1.20260210.0: + workerd@1.20260219.0: optionalDependencies: - '@cloudflare/workerd-darwin-64': 1.20260210.0 - '@cloudflare/workerd-darwin-arm64': 1.20260210.0 - '@cloudflare/workerd-linux-64': 1.20260210.0 - '@cloudflare/workerd-linux-arm64': 1.20260210.0 - '@cloudflare/workerd-windows-64': 1.20260210.0 + '@cloudflare/workerd-darwin-64': 1.20260219.0 + '@cloudflare/workerd-darwin-arm64': 1.20260219.0 + '@cloudflare/workerd-linux-64': 1.20260219.0 + '@cloudflare/workerd-linux-arm64': 1.20260219.0 + '@cloudflare/workerd-windows-64': 1.20260219.0 - wrangler@4.64.0(@cloudflare/workers-types@4.20260210.0): + wrangler@4.67.0: dependencies: '@cloudflare/kv-asset-handler': 0.4.2 - '@cloudflare/unenv-preset': 
2.12.1(unenv@2.0.0-rc.24)(workerd@1.20260210.0) + '@cloudflare/unenv-preset': 2.14.0(unenv@2.0.0-rc.24)(workerd@1.20260219.0) blake3-wasm: 2.1.5 esbuild: 0.27.3 - miniflare: 4.20260210.0 + miniflare: 4.20260219.0 path-to-regexp: 6.3.0 unenv: 2.0.0-rc.24 - workerd: 1.20260210.0 + workerd: 1.20260219.0 optionalDependencies: - '@cloudflare/workers-types': 4.20260210.0 fsevents: 2.3.3 transitivePeerDependencies: - bufferutil diff --git a/tsconfig.base.json b/tsconfig.base.json index 05832e8..0e2bffd 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,5 +21,5 @@ "@clawdentity/sdk/testing": ["packages/sdk/src/testing/index.ts"] } }, - "exclude": ["node_modules", "dist"] + "exclude": ["node_modules", "dist", "**/worker-configuration.d.ts"] } From af64d7652d0d637e6b69a7999d467763b950a18a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 23:43:11 +0530 Subject: [PATCH 099/190] feat(cli): add relay websocket test and update deploy workflow --- .github/AGENTS.md | 13 +- .github/workflows/deploy-develop.yml | 166 ++++++++++++++- apps/cli/package.json | 2 +- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/openclaw.test.ts | 137 +++++++++++++ apps/cli/src/commands/openclaw.ts | 273 +++++++++++++++++++++++++ 6 files changed, 578 insertions(+), 14 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index a1aca7e..1ab34dc 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -23,9 +23,12 @@ - proxy (`apps/proxy`, env `dev`) after registry health passes - Install dependencies before any `pnpm exec wrangler ...` command so Wrangler is available on clean runners. - Regenerate Worker type bindings in CI (`wrangler types --env dev`) and fail on git diff drift for `worker-configuration.d.ts` to prevent stale runtime binding types from shipping. 
-- Sync proxy internal-service credentials from GitHub secrets after dependency install and before proxy deploy: - - `REGISTRY_INTERNAL_SERVICE_ID` - - `REGISTRY_INTERNAL_SERVICE_SECRET` +- Resolve proxy internal-service credentials during deploy after registry health verification: + - Use `REGISTRY_ADMIN_API_KEY` if present. + - Otherwise bootstrap one-time admin using `REGISTRY_BOOTSTRAP_SECRET` (fresh DB only). + - Create or rotate internal service `proxy-dev` with scope `identity.read`. + - Sync resulting `REGISTRY_INTERNAL_SERVICE_ID` and `REGISTRY_INTERNAL_SERVICE_SECRET` into proxy Worker secrets before proxy deploy. + - Never print auth payload bodies from bootstrap/internal-service API failures; keep error logs code-only to avoid secret leakage. - Add a Wrangler preflight dry-run for both workers before mutating remote state (migrations/deploy): - `wrangler deploy --env dev --dry-run --var APP_VERSION:` - Verify registry health at `https://dev.registry.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. @@ -43,7 +46,9 @@ - Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. ## Secrets and Permissions -- Required deploy secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`, `REGISTRY_INTERNAL_SERVICE_ID`, `REGISTRY_INTERNAL_SERVICE_SECRET`. +- Required deploy secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`, and one of: + - `REGISTRY_ADMIN_API_KEY` (recommended for steady-state deploys) + - `REGISTRY_BOOTSTRAP_SECRET` (required for first deploy on fresh DB) - Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. - Optional deploy secret: `PROXY_HEALTH_URL` (only needed if proxy workers.dev URL cannot be resolved in CI output). - Required publish secret: `NPM_TOKEN`. 
diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 99e92a8..d7a6ce5 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -23,8 +23,7 @@ jobs: CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} APP_VERSION: ${{ github.sha }} PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} - REGISTRY_INTERNAL_SERVICE_ID: ${{ secrets.REGISTRY_INTERNAL_SERVICE_ID }} - REGISTRY_INTERNAL_SERVICE_SECRET: ${{ secrets.REGISTRY_INTERNAL_SERVICE_SECRET }} + REGISTRY_ADMIN_API_KEY: ${{ secrets.REGISTRY_ADMIN_API_KEY }} steps: - name: Checkout uses: actions/checkout@v4 @@ -42,11 +41,15 @@ jobs: version: 10.23.0 - name: Validate required secrets + env: + REGISTRY_BOOTSTRAP_SECRET: ${{ secrets.REGISTRY_BOOTSTRAP_SECRET }} run: | test -n "${CLOUDFLARE_API_TOKEN}" test -n "${CLOUDFLARE_ACCOUNT_ID}" - test -n "${REGISTRY_INTERNAL_SERVICE_ID}" - test -n "${REGISTRY_INTERNAL_SERVICE_SECRET}" + if [ -z "${REGISTRY_ADMIN_API_KEY}" ] && [ -z "${REGISTRY_BOOTSTRAP_SECRET}" ]; then + echo "Either REGISTRY_ADMIN_API_KEY or REGISTRY_BOOTSTRAP_SECRET must be configured." 
>&2 + exit 1 + fi - name: Install dependencies run: pnpm install --frozen-lockfile @@ -57,11 +60,6 @@ jobs: pnpm exec wrangler --cwd apps/proxy types --env dev git diff --exit-code -- apps/registry/worker-configuration.d.ts apps/proxy/worker-configuration.d.ts - - name: Sync proxy internal service credentials - run: | - printf "%s" "${REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev - printf "%s" "${REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev - - name: Lint run: pnpm lint @@ -161,6 +159,156 @@ jobs: time.sleep(delay_seconds) PY + - name: Resolve registry admin API key + env: + REGISTRY_BOOTSTRAP_SECRET: ${{ secrets.REGISTRY_BOOTSTRAP_SECRET }} + run: | + if [ -n "${REGISTRY_ADMIN_API_KEY}" ]; then + echo "::add-mask::${REGISTRY_ADMIN_API_KEY}" + fi + if [ -n "${REGISTRY_BOOTSTRAP_SECRET}" ]; then + echo "::add-mask::${REGISTRY_BOOTSTRAP_SECRET}" + fi + python3 - <<'PY' + import json, os, sys, urllib.request, urllib.error + + registry_url = "https://dev.registry.clawdentity.com" + admin_pat = os.environ.get("REGISTRY_ADMIN_API_KEY", "").strip() + bootstrap_secret = os.environ.get("REGISTRY_BOOTSTRAP_SECRET", "").strip() + github_env = os.environ["GITHUB_ENV"] + + if admin_pat: + with open(github_env, "a", encoding="utf-8") as f: + f.write(f"RESOLVED_REGISTRY_ADMIN_API_KEY={admin_pat}\n") + print("Using REGISTRY_ADMIN_API_KEY from GitHub secrets.") + sys.exit(0) + + if not bootstrap_secret: + raise SystemExit( + "REGISTRY_ADMIN_API_KEY is missing and REGISTRY_BOOTSTRAP_SECRET is not set." 
+ ) + + payload = { + "displayName": "Deploy Admin", + "apiKeyName": "deploy-admin", + } + req = urllib.request.Request( + f"{registry_url}/v1/admin/bootstrap", + method="POST", + data=json.dumps(payload).encode("utf-8"), + headers={ + "Content-Type": "application/json", + "x-bootstrap-secret": bootstrap_secret, + "Accept": "application/json", + "User-Agent": "Clawdentity-CI/1.0", + }, + ) + + try: + with urllib.request.urlopen(req, timeout=15) as resp: + body = json.load(resp) + except urllib.error.HTTPError as exc: + try: + body = json.loads(exc.read().decode("utf-8")) + except Exception: + body = {} + + code = body.get("error", {}).get("code") + if code == "ADMIN_BOOTSTRAP_ALREADY_COMPLETED": + raise SystemExit( + "Admin bootstrap already completed. Configure REGISTRY_ADMIN_API_KEY in GitHub secrets." + ) + raise SystemExit(f"Admin bootstrap failed ({exc.code}).") + + token = body.get("apiKey", {}).get("token", "").strip() + if not token: + raise SystemExit("Bootstrap succeeded but response missing apiKey.token.") + + with open(github_env, "a", encoding="utf-8") as f: + f.write(f"RESOLVED_REGISTRY_ADMIN_API_KEY={token}\n") + print("Resolved deploy admin API key via bootstrap.") + PY + + - name: Provision proxy internal service credentials + run: | + python3 - <<'PY' + import json, os, urllib.request, urllib.error + + registry_url = "https://dev.registry.clawdentity.com" + admin_pat = os.environ.get("RESOLVED_REGISTRY_ADMIN_API_KEY", "").strip() + if not admin_pat: + raise SystemExit("RESOLVED_REGISTRY_ADMIN_API_KEY is not set.") + + headers = { + "Authorization": f"Bearer {admin_pat}", + "Content-Type": "application/json", + "Accept": "application/json", + "User-Agent": "Clawdentity-CI/1.0", + } + github_env = os.environ["GITHUB_ENV"] + service_name = "proxy-dev" + service_scopes = ["identity.read"] + + def request_json(path, method="GET", payload=None): + data = None + if payload is not None: + data = json.dumps(payload).encode("utf-8") + req = 
urllib.request.Request( + f"{registry_url}{path}", + method=method, + data=data, + headers=headers, + ) + try: + with urllib.request.urlopen(req, timeout=20) as resp: + return resp.status, json.load(resp) + except urllib.error.HTTPError as exc: + raise SystemExit( + f"Registry request failed ({method} {path}, {exc.code})." + ) + + _, list_body = request_json("/v1/admin/internal-services") + existing = None + for service in list_body.get("internalServices", []): + if service.get("name") == service_name: + existing = service + break + + if existing: + _, result = request_json( + f"/v1/admin/internal-services/{existing['id']}/rotate", + method="POST", + payload={"scopes": service_scopes}, + ) + action = "rotated" + else: + _, result = request_json( + "/v1/admin/internal-services", + method="POST", + payload={"name": service_name, "scopes": service_scopes}, + ) + action = "created" + + internal = result.get("internalService", {}) + service_id = (internal.get("id") or "").strip() + service_secret = (internal.get("secret") or "").strip() + if not service_id or not service_secret: + raise SystemExit(f"Internal service response missing id/secret: {result}") + + with open(github_env, "a", encoding="utf-8") as f: + f.write(f"RESOLVED_REGISTRY_INTERNAL_SERVICE_ID={service_id}\n") + f.write(f"RESOLVED_REGISTRY_INTERNAL_SERVICE_SECRET={service_secret}\n") + + print( + f"Successfully {action} internal service '{service_name}' (id={service_id})." 
+ ) + PY + + - name: Sync proxy internal service credentials + run: | + printf "%s" "${RESOLVED_REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev + printf "%s" "${RESOLVED_REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev + - name: Deploy proxy to dev environment run: | mkdir -p artifacts diff --git a/apps/cli/package.json b/apps/cli/package.json index e3fff3f..f6ec40e 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,6 +1,6 @@ { "name": "clawdentity", - "version": "0.0.23", + "version": "0.0.24", "type": "module", "publishConfig": { "access": "public" diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 4107efb..31ba0da 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -108,6 +108,7 @@ - Relay probe must target local OpenClaw `POST /hooks/send-to-peer` with deterministic payload fields (`peer`, `sessionId`, `message`). - Relay test output must summarize endpoint, HTTP status, and remediation guidance when delivery fails. - `openclaw relay test --json` must emit a stable result envelope and include preflight details when preflight failed. +- `openclaw relay ws-test --peer ` must verify paired-peer selection plus connector websocket readiness using connector `/v1/status`, and return deterministic remediation when websocket connectivity is down. ## Pair Command Rules - `pair start ` must call proxy `/pair/start` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. 
diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 3450407..91f8d6b 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -14,6 +14,7 @@ import { decodeOpenclawInviteCode, runOpenclawDoctor, runOpenclawRelayTest, + runOpenclawRelayWebsocketTest, setupOpenclawRelay, setupOpenclawRelayFromInvite, setupOpenclawSelfReady, @@ -1799,4 +1800,140 @@ describe("openclaw command helpers", () => { sandbox.cleanup(); } }); + + it("returns relay websocket test success when connector websocket is connected", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayWebsocketTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("success"); + expect(result.message).toBe( + "Connector websocket is connected for paired relay", + ); + expect(result.connectorStatusUrl).toBe( + "http://127.0.0.1:19400/v1/status", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("auto-selects peer for relay websocket test when exactly one peer is configured", async () => { + const sandbox = createSandbox(); + 
seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayWebsocketTest({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("success"); + expect(result.peerAlias).toBe("beta"); + } finally { + sandbox.cleanup(); + } + }); + + it("returns relay websocket test failure when connector websocket is disconnected", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const disconnectedConnectorFetch: typeof fetch = async () => + new Response( + JSON.stringify({ + status: "ok", + websocketConnected: false, + }), + { + status: 200, + headers: { + "content-type": "application/json", + }, + }, + ); + + const result = await runOpenclawRelayWebsocketTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: disconnectedConnectorFetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect(result.status).toBe("failure"); + expect(result.message).toBe("Connector 
websocket is not connected"); + expect(result.remediationHint).toBe( + "Run: clawdentity openclaw setup ", + ); + } finally { + sandbox.cleanup(); + } + }); }); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 814bf17..53402d8 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -149,6 +149,15 @@ type OpenclawRelayTestOptions = { json?: boolean; }; +type OpenclawRelayWebsocketTestOptions = { + peer?: string; + homeDir?: string; + openclawDir?: string; + fetchImpl?: typeof fetch; + resolveConfigImpl?: typeof resolveConfig; + json?: boolean; +}; + type OpenclawGatewayDeviceApprovalInput = { requestId: string; openclawDir: string; @@ -286,6 +295,18 @@ export type OpenclawRelayTestResult = { preflight?: OpenclawDoctorResult; }; +export type OpenclawRelayWebsocketTestResult = { + status: "success" | "failure"; + checkedAt: string; + peerAlias: string; + message: string; + connectorBaseUrl?: string; + connectorStatusUrl?: string; + remediationHint?: string; + details?: Record; + preflight?: OpenclawDoctorResult; +}; + function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } @@ -2096,6 +2117,23 @@ function printRelayTestResult(result: OpenclawRelayTestResult): void { } } +function printRelayWebsocketTestResult( + result: OpenclawRelayWebsocketTestResult, +): void { + writeStdoutLine(`Relay websocket test status: ${result.status}`); + writeStdoutLine(`Peer alias: ${result.peerAlias}`); + if (typeof result.connectorBaseUrl === "string") { + writeStdoutLine(`Connector base URL: ${result.connectorBaseUrl}`); + } + if (typeof result.connectorStatusUrl === "string") { + writeStdoutLine(`Connector status URL: ${result.connectorStatusUrl}`); + } + writeStdoutLine(`Message: ${result.message}`); + if (result.remediationHint) { + writeStdoutLine(`Fix: ${result.remediationHint}`); + } +} + function toSendToPeerEndpoint(openclawBaseUrl: string): string { const 
normalizedBase = openclawBaseUrl.endsWith("/") ? openclawBaseUrl @@ -2103,6 +2141,75 @@ function toSendToPeerEndpoint(openclawBaseUrl: string): string { return new URL(OPENCLAW_SEND_TO_PEER_HOOK_PATH, normalizedBase).toString(); } +async function resolveSelectedAgentName(input: { + homeDir: string; +}): Promise<{ agentName: string; selectedAgentPath: string }> { + const selectedAgentPath = resolveOpenclawAgentNamePath(input.homeDir); + let selectedAgentRaw: string; + try { + selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_SELECTED_AGENT_MISSING", + "Selected agent marker is missing", + { selectedAgentPath }, + ); + } + throw createCliError( + "CLI_OPENCLAW_SELECTED_AGENT_INVALID", + "Selected agent marker is invalid", + { selectedAgentPath }, + ); + } + + try { + return { + agentName: assertValidAgentName(selectedAgentRaw.trim()), + selectedAgentPath, + }; + } catch { + throw createCliError( + "CLI_OPENCLAW_SELECTED_AGENT_INVALID", + "Selected agent marker is invalid", + { selectedAgentPath }, + ); + } +} + +async function resolveConnectorAssignment(input: { + homeDir: string; + agentName: string; +}): Promise<{ + connectorAssignmentsPath: string; + connectorBaseUrl: string; + connectorStatusUrl: string; +}> { + const connectorAssignmentsPath = resolveConnectorAssignmentsPath( + input.homeDir, + ); + const connectorAssignments = await loadConnectorAssignments( + connectorAssignmentsPath, + ); + const assignment = connectorAssignments.agents[input.agentName]; + if (assignment === undefined) { + throw createCliError( + "CLI_OPENCLAW_CONNECTOR_ASSIGNMENT_MISSING", + "Connector assignment is missing for selected agent", + { + connectorAssignmentsPath, + agentName: input.agentName, + }, + ); + } + + return { + connectorAssignmentsPath, + connectorBaseUrl: assignment.connectorBaseUrl, + connectorStatusUrl: 
resolveConnectorStatusUrl(assignment.connectorBaseUrl), + }; +} + export async function runOpenclawDoctor( options: OpenclawDoctorOptions = {}, ): Promise { @@ -3272,6 +3379,129 @@ export async function runOpenclawRelayTest( }; } +export async function runOpenclawRelayWebsocketTest( + options: OpenclawRelayWebsocketTestOptions, +): Promise { + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); + const checkedAt = nowIso(); + + let peerAlias: string; + try { + peerAlias = await resolveRelayProbePeerAlias({ + homeDir, + peerAliasOption: options.peer, + }); + } catch (error) { + const appError = error instanceof AppError ? error : undefined; + return { + status: "failure", + checkedAt, + peerAlias: "unresolved", + message: appError?.message ?? "Unable to resolve relay peer alias", + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, + details: appError?.details as Record | undefined, + }; + } + + const preflight = await runOpenclawDoctor({ + homeDir, + openclawDir, + peerAlias, + resolveConfigImpl: options.resolveConfigImpl, + includeConnectorRuntimeCheck: false, + }); + if (preflight.status === "unhealthy") { + const firstFailure = preflight.checks.find( + (check) => check.status === "fail", + ); + return { + status: "failure", + checkedAt, + peerAlias, + message: + firstFailure === undefined + ? "Preflight checks failed" + : `Preflight failed: ${firstFailure.label}`, + remediationHint: firstFailure?.remediationHint, + preflight, + }; + } + + const fetchImpl = options.fetchImpl ?? 
globalThis.fetch; + if (typeof fetchImpl !== "function") { + return { + status: "failure", + checkedAt, + peerAlias, + message: "fetch implementation is unavailable", + remediationHint: + "Run relay websocket test in a Node runtime with fetch support", + preflight, + }; + } + + let connectorBaseUrl: string | undefined; + let connectorStatusUrl: string | undefined; + try { + const selectedAgent = await resolveSelectedAgentName({ homeDir }); + const connectorAssignment = await resolveConnectorAssignment({ + homeDir, + agentName: selectedAgent.agentName, + }); + connectorBaseUrl = connectorAssignment.connectorBaseUrl; + connectorStatusUrl = connectorAssignment.connectorStatusUrl; + } catch (error) { + const appError = error instanceof AppError ? error : undefined; + return { + status: "failure", + checkedAt, + peerAlias, + connectorBaseUrl, + connectorStatusUrl, + message: + appError?.message ?? + "Unable to resolve connector assignment for websocket test", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: appError?.details as Record | undefined, + preflight, + }; + } + + const connectorStatus = await fetchConnectorHealthStatus({ + connectorBaseUrl, + fetchImpl, + }); + if (!connectorStatus.connected) { + return { + status: "failure", + checkedAt, + peerAlias, + connectorBaseUrl, + connectorStatusUrl: connectorStatus.statusUrl, + message: "Connector websocket is not connected", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: + connectorStatus.reason === undefined + ? 
undefined + : { + reason: connectorStatus.reason, + }, + preflight, + }; + } + + return { + status: "success", + checkedAt, + peerAlias, + connectorBaseUrl, + connectorStatusUrl: connectorStatus.statusUrl, + message: "Connector websocket is connected for paired relay", + preflight, + }; +} + export function createOpenclawInviteCode( options: OpenclawInviteOptions, ): OpenclawInviteResult { @@ -3781,5 +4011,48 @@ export const createOpenclawCommand = (): Command => { ), ); + relayCommand + .command("ws-test") + .description( + "Validate connector websocket connectivity for a paired relay peer", + ) + .option("--peer ", "Peer alias in local peers map") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "openclaw relay ws-test", + async (options: OpenclawRelayWebsocketTestOptions) => { + const result = await runOpenclawRelayWebsocketTest(options); + if (options.json) { + writeStdoutLine(JSON.stringify(result, null, 2)); + } else { + printRelayWebsocketTestResult(result); + if ( + result.preflight !== undefined && + result.preflight.status === "unhealthy" + ) { + writeStdoutLine("Preflight details:"); + for (const check of result.preflight.checks) { + if (check.status === "fail") { + writeStdoutLine(formatDoctorCheckLine(check)); + if (check.remediationHint) { + writeStdoutLine(`Fix: ${check.remediationHint}`); + } + } + } + } + } + + if (result.status === "failure") { + process.exitCode = 1; + } + }, + ), + ); + return openclawCommand; }; From 9ccd568cc9648b44a34e53b306111dfb6834b424 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 23:45:01 +0530 Subject: [PATCH 100/190] test(cli): make openclaw fixtures profile-aware --- apps/cli/src/commands/openclaw.test.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 91f8d6b..46a971e 
100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -8,6 +8,7 @@ import { import { tmpdir } from "node:os"; import { dirname, join } from "node:path"; import { describe, expect, it } from "vitest"; +import { getConfigDir } from "../config/manager.js"; import { createOpenclawCommand, createOpenclawInviteCode, @@ -68,7 +69,7 @@ function createSandbox(): OpenclawSandbox { } function resolveCliStateDir(homeDir: string): string { - return join(homeDir, ".clawdentity", "states", "prod"); + return getConfigDir({ homeDir }); } function seedLocalAgentCredentials(homeDir: string, agentName: string): void { From 48d5fdab5885a40cfe98e7cd51aa4369a245eacc Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 23:53:00 +0530 Subject: [PATCH 101/190] ci(deploy): simplify develop secret flow to internal service creds --- .github/AGENTS.md | 18 +-- .github/workflows/deploy-develop.yml | 162 ++------------------------- 2 files changed, 17 insertions(+), 163 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 1ab34dc..0a14021 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -23,12 +23,10 @@ - proxy (`apps/proxy`, env `dev`) after registry health passes - Install dependencies before any `pnpm exec wrangler ...` command so Wrangler is available on clean runners. - Regenerate Worker type bindings in CI (`wrangler types --env dev`) and fail on git diff drift for `worker-configuration.d.ts` to prevent stale runtime binding types from shipping. -- Resolve proxy internal-service credentials during deploy after registry health verification: - - Use `REGISTRY_ADMIN_API_KEY` if present. - - Otherwise bootstrap one-time admin using `REGISTRY_BOOTSTRAP_SECRET` (fresh DB only). - - Create or rotate internal service `proxy-dev` with scope `identity.read`. - - Sync resulting `REGISTRY_INTERNAL_SERVICE_ID` and `REGISTRY_INTERNAL_SERVICE_SECRET` into proxy Worker secrets before proxy deploy. 
- - Never print auth payload bodies from bootstrap/internal-service API failures; keep error logs code-only to avoid secret leakage. +- Sync proxy internal-service credentials from GitHub secrets on every deploy: + - `REGISTRY_INTERNAL_SERVICE_ID` + - `REGISTRY_INTERNAL_SERVICE_SECRET` + - Push both values into proxy Worker secrets before proxy deploy. - Add a Wrangler preflight dry-run for both workers before mutating remote state (migrations/deploy): - `wrangler deploy --env dev --dry-run --var APP_VERSION:` - Verify registry health at `https://dev.registry.clawdentity.com/health` and verify proxy health via deployed URL (workers.dev or explicit override) with expected `APP_VERSION`. @@ -46,9 +44,11 @@ - Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. ## Secrets and Permissions -- Required deploy secrets: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`, and one of: - - `REGISTRY_ADMIN_API_KEY` (recommended for steady-state deploys) - - `REGISTRY_BOOTSTRAP_SECRET` (required for first deploy on fresh DB) +- Required deploy secrets: + - `CLOUDFLARE_API_TOKEN` + - `CLOUDFLARE_ACCOUNT_ID` + - `REGISTRY_INTERNAL_SERVICE_ID` + - `REGISTRY_INTERNAL_SERVICE_SECRET` - Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. - Optional deploy secret: `PROXY_HEALTH_URL` (only needed if proxy workers.dev URL cannot be resolved in CI output). - Required publish secret: `NPM_TOKEN`. 
diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index d7a6ce5..a3f1277 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -23,7 +23,8 @@ jobs: CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} APP_VERSION: ${{ github.sha }} PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} - REGISTRY_ADMIN_API_KEY: ${{ secrets.REGISTRY_ADMIN_API_KEY }} + REGISTRY_INTERNAL_SERVICE_ID: ${{ secrets.REGISTRY_INTERNAL_SERVICE_ID }} + REGISTRY_INTERNAL_SERVICE_SECRET: ${{ secrets.REGISTRY_INTERNAL_SERVICE_SECRET }} steps: - name: Checkout uses: actions/checkout@v4 @@ -41,15 +42,11 @@ jobs: version: 10.23.0 - name: Validate required secrets - env: - REGISTRY_BOOTSTRAP_SECRET: ${{ secrets.REGISTRY_BOOTSTRAP_SECRET }} run: | test -n "${CLOUDFLARE_API_TOKEN}" test -n "${CLOUDFLARE_ACCOUNT_ID}" - if [ -z "${REGISTRY_ADMIN_API_KEY}" ] && [ -z "${REGISTRY_BOOTSTRAP_SECRET}" ]; then - echo "Either REGISTRY_ADMIN_API_KEY or REGISTRY_BOOTSTRAP_SECRET must be configured." 
>&2 - exit 1 - fi + test -n "${REGISTRY_INTERNAL_SERVICE_ID}" + test -n "${REGISTRY_INTERNAL_SERVICE_SECRET}" - name: Install dependencies run: pnpm install --frozen-lockfile @@ -159,155 +156,12 @@ jobs: time.sleep(delay_seconds) PY - - name: Resolve registry admin API key - env: - REGISTRY_BOOTSTRAP_SECRET: ${{ secrets.REGISTRY_BOOTSTRAP_SECRET }} - run: | - if [ -n "${REGISTRY_ADMIN_API_KEY}" ]; then - echo "::add-mask::${REGISTRY_ADMIN_API_KEY}" - fi - if [ -n "${REGISTRY_BOOTSTRAP_SECRET}" ]; then - echo "::add-mask::${REGISTRY_BOOTSTRAP_SECRET}" - fi - python3 - <<'PY' - import json, os, sys, urllib.request, urllib.error - - registry_url = "https://dev.registry.clawdentity.com" - admin_pat = os.environ.get("REGISTRY_ADMIN_API_KEY", "").strip() - bootstrap_secret = os.environ.get("REGISTRY_BOOTSTRAP_SECRET", "").strip() - github_env = os.environ["GITHUB_ENV"] - - if admin_pat: - with open(github_env, "a", encoding="utf-8") as f: - f.write(f"RESOLVED_REGISTRY_ADMIN_API_KEY={admin_pat}\n") - print("Using REGISTRY_ADMIN_API_KEY from GitHub secrets.") - sys.exit(0) - - if not bootstrap_secret: - raise SystemExit( - "REGISTRY_ADMIN_API_KEY is missing and REGISTRY_BOOTSTRAP_SECRET is not set." - ) - - payload = { - "displayName": "Deploy Admin", - "apiKeyName": "deploy-admin", - } - req = urllib.request.Request( - f"{registry_url}/v1/admin/bootstrap", - method="POST", - data=json.dumps(payload).encode("utf-8"), - headers={ - "Content-Type": "application/json", - "x-bootstrap-secret": bootstrap_secret, - "Accept": "application/json", - "User-Agent": "Clawdentity-CI/1.0", - }, - ) - - try: - with urllib.request.urlopen(req, timeout=15) as resp: - body = json.load(resp) - except urllib.error.HTTPError as exc: - try: - body = json.loads(exc.read().decode("utf-8")) - except Exception: - body = {} - - code = body.get("error", {}).get("code") - if code == "ADMIN_BOOTSTRAP_ALREADY_COMPLETED": - raise SystemExit( - "Admin bootstrap already completed. 
Configure REGISTRY_ADMIN_API_KEY in GitHub secrets." - ) - raise SystemExit(f"Admin bootstrap failed ({exc.code}).") - - token = body.get("apiKey", {}).get("token", "").strip() - if not token: - raise SystemExit("Bootstrap succeeded but response missing apiKey.token.") - - with open(github_env, "a", encoding="utf-8") as f: - f.write(f"RESOLVED_REGISTRY_ADMIN_API_KEY={token}\n") - print("Resolved deploy admin API key via bootstrap.") - PY - - - name: Provision proxy internal service credentials - run: | - python3 - <<'PY' - import json, os, urllib.request, urllib.error - - registry_url = "https://dev.registry.clawdentity.com" - admin_pat = os.environ.get("RESOLVED_REGISTRY_ADMIN_API_KEY", "").strip() - if not admin_pat: - raise SystemExit("RESOLVED_REGISTRY_ADMIN_API_KEY is not set.") - - headers = { - "Authorization": f"Bearer {admin_pat}", - "Content-Type": "application/json", - "Accept": "application/json", - "User-Agent": "Clawdentity-CI/1.0", - } - github_env = os.environ["GITHUB_ENV"] - service_name = "proxy-dev" - service_scopes = ["identity.read"] - - def request_json(path, method="GET", payload=None): - data = None - if payload is not None: - data = json.dumps(payload).encode("utf-8") - req = urllib.request.Request( - f"{registry_url}{path}", - method=method, - data=data, - headers=headers, - ) - try: - with urllib.request.urlopen(req, timeout=20) as resp: - return resp.status, json.load(resp) - except urllib.error.HTTPError as exc: - raise SystemExit( - f"Registry request failed ({method} {path}, {exc.code})." 
- ) - - _, list_body = request_json("/v1/admin/internal-services") - existing = None - for service in list_body.get("internalServices", []): - if service.get("name") == service_name: - existing = service - break - - if existing: - _, result = request_json( - f"/v1/admin/internal-services/{existing['id']}/rotate", - method="POST", - payload={"scopes": service_scopes}, - ) - action = "rotated" - else: - _, result = request_json( - "/v1/admin/internal-services", - method="POST", - payload={"name": service_name, "scopes": service_scopes}, - ) - action = "created" - - internal = result.get("internalService", {}) - service_id = (internal.get("id") or "").strip() - service_secret = (internal.get("secret") or "").strip() - if not service_id or not service_secret: - raise SystemExit(f"Internal service response missing id/secret: {result}") - - with open(github_env, "a", encoding="utf-8") as f: - f.write(f"RESOLVED_REGISTRY_INTERNAL_SERVICE_ID={service_id}\n") - f.write(f"RESOLVED_REGISTRY_INTERNAL_SERVICE_SECRET={service_secret}\n") - - print( - f"Successfully {action} internal service '{service_name}' (id={service_id})." 
- ) - PY - - name: Sync proxy internal service credentials run: | - printf "%s" "${RESOLVED_REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev - printf "%s" "${RESOLVED_REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev + echo "::add-mask::${REGISTRY_INTERNAL_SERVICE_ID}" + echo "::add-mask::${REGISTRY_INTERNAL_SERVICE_SECRET}" + printf "%s" "${REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev + printf "%s" "${REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev - name: Deploy proxy to dev environment run: | From 3cfc21f4fa997380e8ec03f48f061bbfca8d8762 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Thu, 19 Feb 2026 23:57:42 +0530 Subject: [PATCH 102/190] docs(readme): update root and cli readme --- README.md | 52 +++++++++++++++++++++++----------------------- apps/cli/README.md | 2 +- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/README.md b/README.md index 1bd2f8f..a83b52d 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@

Clawdentity

- Verified identity + instant revocation for AI agents — starting with OpenClaw. + Cryptographic identity layer for AI agent-to-agent trust — starting with OpenClaw.

@@ -19,24 +19,24 @@ ## The Problem -OpenClaw webhook auth uses a **single shared gateway token**. That works for transport, but breaks down for identity-aware agent systems: +OpenClaw lets agents talk to each other over webhooks, but every agent shares **one token**. That causes real problems: -- **Shared-secret blast radius** — if one token leaks, any caller can impersonate a trusted agent until rotation -- **No per-agent identity** — receivers cannot prove which exact agent sent a request or who owns it -- **Weak revocation** — disabling one compromised agent means rotating shared credentials across all integrations -- **No local trust policy** — gateway operators cannot enforce "who is allowed" per caller identity -- **Public exposure trade-off** — for agent-to-agent communication, you need a public endpoint; without a proxy layer, that means exposing OpenClaw directly or sharing the webhook token with every caller +- **One leak exposes everyone** — if the token gets out, anyone can impersonate any agent +- **No way to tell agents apart** — you can't prove which agent sent a request +- **Can't block just one agent** — disabling one means resetting the token for all of them +- **No access control** — you can't decide which agents are allowed to call yours +- **Your server is exposed** — without a proxy, OpenClaw has to be publicly reachable and every caller needs the token ## What Clawdentity Does -Clawdentity works **with** OpenClaw (not a fork) and adds the missing identity layer for agent-to-agent trust: +Clawdentity works **with** OpenClaw (not a fork) and adds the missing identity layer: -- **Per-agent identity** — each agent gets a unique DID and registry-signed passport (AIT) -- **Request-level signing** — every request is cryptographically signed with a proof-of-possession (PoP) header -- **Instant revocation** — revoke one agent via signed CRL without rotating any shared tokens -- **Proxy enforcement** — trust-pair policies, per-agent rate limits, and 
replay protection at the gateway boundary -- **OpenClaw stays private** — the proxy is the only public-facing endpoint; your OpenClaw instance stays on localhost and the webhook token is never shared -- **QR-code pairing** — one-scan first-contact trust approval between agents +- **Each agent gets its own identity** — a unique keypair and a registry-signed passport (DID + AIT) +- **Every request is signed** — the proxy can verify exactly who sent it and reject tampering +- **Revoke one agent without breaking the rest** — no shared token rotation needed +- **Per-agent access control** — trust policies, rate limits, and replay protection at the proxy +- **OpenClaw stays private** — only the proxy is public; your OpenClaw instance stays on localhost +- **QR-code pairing** — one scan to approve trust between two agents ## How It Works @@ -53,10 +53,10 @@ Clawdentity Proxy ← verifies identity, trust policy, rate limits OpenClaw Gateway ← localhost only, never exposed ``` -1. **Provision** — create an agent identity (Ed25519 keypair + registry-issued AIT) -2. **Sign** — SDK signs every outbound request with the agent's private key -3. **Verify** — proxy validates AIT + PoP + CRL + trust pair before forwarding -4. **Forward** — only verified requests reach OpenClaw on localhost; your instance is never directly reachable from the internet +1. **Create** — generate an agent identity (keypair + registry-issued passport) +2. **Sign** — every outbound request is signed with the agent's private key +3. **Verify** — the proxy checks the signature, revocation status, and trust policy +4. 
**Forward** — only verified requests reach OpenClaw on localhost ## Quick Start @@ -98,22 +98,22 @@ clawdentity openclaw doctor | Property | Shared Webhook Token | Clawdentity | |----------|---------------------|-------------| -| **Identity** | All callers look the same | Each agent has a unique DID and signed passport | +| **Identity** | All callers look the same | Each agent has its own signed identity | | **Blast radius** | One leak exposes everything | One compromised key only affects that agent | -| **Revocation** | Rotate shared token = break all integrations | Revoke one agent instantly via CRL, others unaffected | +| **Revocation** | Rotate token = break all integrations | Revoke one agent, others unaffected | | **Replay protection** | None | Timestamp + nonce + signature on every request | -| **Tamper detection** | None | Body hash + PoP signature = any modification is detectable | -| **Per-caller policy** | Not possible | Trust pairs by sender/recipient DID, rate limit per agent | +| **Tamper detection** | None | Signed body hash — any modification is detectable | +| **Access control** | Not possible | Per-agent trust policies and rate limits | | **Key exposure** | Token must be shared with every caller | Private key never leaves the agent's machine | -| **Network exposure** | OpenClaw must be reachable by callers; token shared with each | OpenClaw stays on localhost; only the proxy is public | +| **Network exposure** | OpenClaw must be public, token shared with each caller | OpenClaw stays on localhost; only the proxy is public | ## Security Highlights - **Private keys never leave your machine** — generated and stored locally, never transmitted -- **Ed25519 + EdDSA** — modern, fast elliptic-curve cryptography -- **Per-request proof-of-possession** — every HTTP call is signed with method, path, body hash, timestamp, and nonce +- **Ed25519 signatures** — fast, modern elliptic-curve cryptography +- **Every request is signed** — method, path, body hash, 
timestamp, and nonce are all covered - **Replay protection** — timestamp skew check + per-agent nonce cache -- **Instant revocation** — signed CRL propagation; proxy rejects revoked agents on next refresh +- **Revoke any agent instantly** — the proxy stops accepting it on the next refresh - **Trust pairs** — receiver operators control which agents are allowed, per-DID ## Self-Hosting diff --git a/apps/cli/README.md b/apps/cli/README.md index a6dd814..cd13af0 100644 --- a/apps/cli/README.md +++ b/apps/cli/README.md @@ -1,6 +1,6 @@ # clawdentity -CLI for Clawdentity — verified identity for AI agents. +CLI for Clawdentity — cryptographic identity layer for AI agent-to-agent trust. [![npm version](https://img.shields.io/npm/v/clawdentity.svg)](https://www.npmjs.com/package/clawdentity) [![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/vrknetha/clawdentity/blob/main/LICENSE) From 1204bf69d46308a5d7e7e3759e57444263a26f45 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 00:08:58 +0530 Subject: [PATCH 103/190] ci(deploy): make worker type generation deterministic --- .github/AGENTS.md | 2 +- .github/workflows/deploy-develop.yml | 4 ++-- apps/proxy/AGENTS.md | 2 +- apps/proxy/package.json | 1 + apps/proxy/worker-configuration.d.ts | 26 +++++++++++-------------- apps/registry/AGENTS.md | 2 +- apps/registry/package.json | 1 + apps/registry/worker-configuration.d.ts | 16 ++++++--------- 8 files changed, 24 insertions(+), 30 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 0a14021..7b15173 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -22,7 +22,7 @@ - registry (`apps/registry`, env `dev`) with D1 migration apply before deploy - proxy (`apps/proxy`, env `dev`) after registry health passes - Install dependencies before any `pnpm exec wrangler ...` command so Wrangler is available on clean runners. 
-- Regenerate Worker type bindings in CI (`wrangler types --env dev`) and fail on git diff drift for `worker-configuration.d.ts` to prevent stale runtime binding types from shipping. +- Regenerate Worker type bindings in CI with dotenv overlays disabled (`pnpm -F @clawdentity/registry run types:dev` and `pnpm -F @clawdentity/proxy run types:dev`) and fail on git diff drift for `worker-configuration.d.ts` to prevent stale runtime binding types from shipping. - Sync proxy internal-service credentials from GitHub secrets on every deploy: - `REGISTRY_INTERNAL_SERVICE_ID` - `REGISTRY_INTERNAL_SERVICE_SECRET` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index a3f1277..bf3c2d8 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -53,8 +53,8 @@ jobs: - name: Verify Worker type bindings are up to date run: | - pnpm exec wrangler --cwd apps/registry types --env dev - pnpm exec wrangler --cwd apps/proxy types --env dev + pnpm -F @clawdentity/registry run types:dev + pnpm -F @clawdentity/proxy run types:dev git diff --exit-code -- apps/registry/worker-configuration.d.ts apps/proxy/worker-configuration.d.ts - name: Lint diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 5acbfec..82514fe 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -10,7 +10,7 @@ - Duplicate Durable Object `bindings` and `migrations` inside each Wrangler env block; env sections do not inherit top-level DO config. - Keep deploy traceability explicit by passing `APP_VERSION` (or fallback `PROXY_VERSION`) via Worker bindings; `/health` must surface the resolved version. - Keep Wrangler observability logging enabled (`observability.enabled=true`, `logs.enabled=true`, `invocation_logs=true`) so relay/auth failures are visible in Cloudflare logs. -- Keep `worker-configuration.d.ts` committed and regenerate with `wrangler types --env dev` after `wrangler.jsonc` or binding changes. 
+- Keep `worker-configuration.d.ts` committed and regenerate with `CLOUDFLARE_LOAD_DEV_VARS_FROM_DOT_ENV=false wrangler types --env dev` (or `pnpm -F @clawdentity/proxy run types:dev`) after `wrangler.jsonc` or binding changes. - Parse config with a schema and fail fast with `CONFIG_VALIDATION_FAILED` before startup proceeds. - Keep defaults explicit for non-secret settings (`listenPort`, `openclawBaseUrl`, `registryUrl`, CRL timings, stale behavior). - Keep agent DID limiter defaults explicit in `src/config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60`, `AGENT_RATE_LIMIT_WINDOW_MS=60000`) unless explicitly overridden. diff --git a/apps/proxy/package.json b/apps/proxy/package.json index 4f7b6a4..f7e3ac0 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -25,6 +25,7 @@ }, "scripts": { "build": "tsup", + "types:dev": "CLOUDFLARE_LOAD_DEV_VARS_FROM_DOT_ENV=false wrangler types --env dev", "deploy:dev": "wrangler deploy --env dev", "deploy:production": "wrangler deploy --env production", "dev": "wrangler dev --env dev --port 8787", diff --git a/apps/proxy/worker-configuration.d.ts b/apps/proxy/worker-configuration.d.ts index a5cf30f..6df184b 100644 --- a/apps/proxy/worker-configuration.d.ts +++ b/apps/proxy/worker-configuration.d.ts @@ -1,5 +1,5 @@ /* eslint-disable */ -// Generated by Wrangler by running `wrangler --cwd apps/proxy types --env dev` (hash: dc5827228fb6484fcb5b0f01c54ab10d) +// Generated by Wrangler by running `wrangler types --env dev` (hash: 93cc9217e908c055f480d3531f3bb650) // Runtime types generated with workerd@1.20260219.0 2025-09-01 nodejs_compat declare namespace Cloudflare { interface GlobalProps { @@ -7,19 +7,15 @@ declare namespace Cloudflare { durableNamespaces: "AgentRelaySession" | "ProxyTrustState"; } interface Env { - ENVIRONMENT: string; - APP_VERSION: string; - REGISTRY_URL: string; - OPENCLAW_BASE_URL: string; - REGISTRY_INTERNAL_SERVICE_ID: string; - REGISTRY_INTERNAL_SERVICE_SECRET: string; - 
INJECT_IDENTITY_INTO_MESSAGE: string; - RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: string; - RELAY_QUEUE_TTL_SECONDS: string; - RELAY_RETRY_INITIAL_MS: string; - RELAY_RETRY_MAX_MS: string; - RELAY_RETRY_MAX_ATTEMPTS: string; - RELAY_RETRY_JITTER_RATIO: string; + ENVIRONMENT: "development"; + REGISTRY_URL: "https://dev.registry.clawdentity.com"; + INJECT_IDENTITY_INTO_MESSAGE: "true"; + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: "500"; + RELAY_QUEUE_TTL_SECONDS: "3600"; + RELAY_RETRY_INITIAL_MS: "1000"; + RELAY_RETRY_MAX_MS: "30000"; + RELAY_RETRY_MAX_ATTEMPTS: "25"; + RELAY_RETRY_JITTER_RATIO: "0.2"; AGENT_RELAY_SESSION: DurableObjectNamespace; PROXY_TRUST_STATE: DurableObjectNamespace; } @@ -29,7 +25,7 @@ type StringifyValues> = { [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string; }; declare namespace NodeJS { - interface ProcessEnv extends StringifyValues> {} + interface ProcessEnv extends StringifyValues> {} } // Begin runtime types diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 68220fb..97843bc 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -24,7 +24,7 @@ - Keep the worker entrypoint in `src/server.ts`; use `src/index.ts` only as the package export wrapper. - Keep environment variables non-secret in `wrangler.jsonc` and secret values out of git. - Keep Wrangler observability logging enabled (`observability.enabled=true`, `logs.enabled=true`, `invocation_logs=true`) so deploy/runtime failures are visible without ad-hoc debugging. -- Keep `worker-configuration.d.ts` committed and regenerate with `wrangler types --env dev` after `wrangler.jsonc` or binding changes. +- Keep `worker-configuration.d.ts` committed and regenerate with `CLOUDFLARE_LOAD_DEV_VARS_FROM_DOT_ENV=false wrangler types --env dev` (or `pnpm -F @clawdentity/registry run types:dev`) after `wrangler.jsonc` or binding changes. 
- Keep `.dev.vars` and `.env.example` synchronized when adding/changing runtime config fields (`ENVIRONMENT`, `APP_VERSION`, `PROXY_URL`, `EVENT_BUS_BACKEND`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). - Use memory event bus in `development` while no downstream consumers exist (`EVENT_BUS_BACKEND=memory`). - Keep production queue-backed (`EVENT_BUS_BACKEND=queue` + `EVENT_BUS_QUEUE`) until rollout policy changes. diff --git a/apps/registry/package.json b/apps/registry/package.json index f91b9b5..dbe2184 100644 --- a/apps/registry/package.json +++ b/apps/registry/package.json @@ -13,6 +13,7 @@ }, "scripts": { "build": "tsup", + "types:dev": "CLOUDFLARE_LOAD_DEV_VARS_FROM_DOT_ENV=false wrangler types --env dev", "format": "biome format .", "lint": "biome lint .", "dev": "wrangler dev --env dev --port 8788", diff --git a/apps/registry/worker-configuration.d.ts b/apps/registry/worker-configuration.d.ts index a4e0230..4f95e18 100644 --- a/apps/registry/worker-configuration.d.ts +++ b/apps/registry/worker-configuration.d.ts @@ -1,5 +1,5 @@ /* eslint-disable */ -// Generated by Wrangler by running `wrangler --cwd apps/registry types --env dev` (hash: 203d0da1e8a57c87486b2e73012c56e0) +// Generated by Wrangler by running `wrangler types --env dev` (hash: 3679fd6cc58d60e14c5cd0f4a75e49aa) // Runtime types generated with workerd@1.20260219.0 2025-09-01 nodejs_compat declare namespace Cloudflare { interface GlobalProps { @@ -7,14 +7,10 @@ declare namespace Cloudflare { } interface Env { DB: D1Database; - ENVIRONMENT: string; - APP_VERSION: string; - EVENT_BUS_BACKEND: string; - PROXY_URL: string; - REGISTRY_ISSUER_URL: string; - BOOTSTRAP_SECRET: string; - REGISTRY_SIGNING_KEY: string; - REGISTRY_SIGNING_KEYS: string; + ENVIRONMENT: "development"; + PROXY_URL: "https://dev.proxy.clawdentity.com"; + REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com"; + EVENT_BUS_BACKEND: "memory"; } } interface Env extends Cloudflare.Env {} @@ -22,7 +18,7 @@ 
type StringifyValues> = { [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string; }; declare namespace NodeJS { - interface ProcessEnv extends StringifyValues> {} + interface ProcessEnv extends StringifyValues> {} } // Begin runtime types From 44d2c8551a8c7783ef27e49a6819eb276b2f1148 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 00:33:33 +0530 Subject: [PATCH 104/190] ci(deploy): fallback to dev proxy domain for health check --- .github/AGENTS.md | 2 +- .github/workflows/deploy-develop.yml | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 7b15173..0607652 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -50,7 +50,7 @@ - `REGISTRY_INTERNAL_SERVICE_ID` - `REGISTRY_INTERNAL_SERVICE_SECRET` - Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. -- Optional deploy secret: `PROXY_HEALTH_URL` (only needed if proxy workers.dev URL cannot be resolved in CI output). +- Optional deploy secret: `PROXY_HEALTH_URL` (only needed when dev proxy health endpoint is not `https://dev.proxy.clawdentity.com`; CI now falls back to that URL if workers.dev output is unavailable). - Required publish secret: `NPM_TOKEN`. - Keep Cloudflare token scope minimal for current workflows: - `Workers Scripts:Edit` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index bf3c2d8..6f39575 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -176,11 +176,8 @@ jobs: PROXY_HEALTH_URL="${PROXY_WORKERS_DEV_URL}/health" elif [ -n "${PROXY_HEALTH_URL_OVERRIDE}" ]; then PROXY_HEALTH_URL="${PROXY_HEALTH_URL_OVERRIDE%/}/health" - fi - - if [ -z "${PROXY_HEALTH_URL}" ]; then - echo "Unable to resolve proxy health URL. Set optional PROXY_HEALTH_URL secret if workers.dev URL is unavailable." 
>&2 - exit 1 + else + PROXY_HEALTH_URL="https://dev.proxy.clawdentity.com/health" fi echo "PROXY_HEALTH_URL=${PROXY_HEALTH_URL}" >> "${GITHUB_ENV}" From b7580be8f6e923a3ba0daf024724c44352074123 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 00:34:07 +0530 Subject: [PATCH 105/190] ci(deploy): support registry health URL override --- .github/AGENTS.md | 4 +++- .github/workflows/deploy-develop.yml | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 0607652..f8bf3df 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -50,7 +50,9 @@ - `REGISTRY_INTERNAL_SERVICE_ID` - `REGISTRY_INTERNAL_SERVICE_SECRET` - Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. -- Optional deploy secret: `PROXY_HEALTH_URL` (only needed when dev proxy health endpoint is not `https://dev.proxy.clawdentity.com`; CI now falls back to that URL if workers.dev output is unavailable). +- Optional deploy secrets: + - `REGISTRY_HEALTH_URL` (only needed when dev registry health endpoint is not `https://dev.registry.clawdentity.com`; CI falls back to that URL by default). + - `PROXY_HEALTH_URL` (only needed when dev proxy health endpoint is not `https://dev.proxy.clawdentity.com`; CI now falls back to that URL if workers.dev output is unavailable). - Required publish secret: `NPM_TOKEN`. 
- Keep Cloudflare token scope minimal for current workflows: - `Workers Scripts:Edit` diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 6f39575..962669f 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -22,6 +22,7 @@ jobs: CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CF_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} APP_VERSION: ${{ github.sha }} + REGISTRY_HEALTH_URL_OVERRIDE: ${{ secrets.REGISTRY_HEALTH_URL }} PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} REGISTRY_INTERNAL_SERVICE_ID: ${{ secrets.REGISTRY_INTERNAL_SERVICE_ID }} REGISTRY_INTERNAL_SERVICE_SECRET: ${{ secrets.REGISTRY_INTERNAL_SERVICE_SECRET }} @@ -114,7 +115,10 @@ jobs: python3 - <<'PY' import json, os, sys, time, urllib.request, urllib.error - url = "https://dev.registry.clawdentity.com/health" + configured_url = os.environ.get("REGISTRY_HEALTH_URL_OVERRIDE", "").strip() + if configured_url and not configured_url.endswith("/health"): + configured_url = f"{configured_url.rstrip('/')}/health" + url = configured_url or "https://dev.registry.clawdentity.com/health" expected_version = os.environ.get("APP_VERSION", "") if not expected_version: raise SystemExit("APP_VERSION was not set in workflow environment") From e34d89840df914c4682c2b33e1fde651c7cd1f8c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 10:37:20 +0530 Subject: [PATCH 106/190] fix(pair): route confirm to issuer and persist peer proxy origin - Route pair confirm requests to the ticket issuer proxy origin instead of local resolved proxy URL. - Persist peer proxy URLs using profile proxyOrigin when available, with ticket issuer fallback. - Carry optional proxyOrigin through proxy pairing route/trust state models so status/confirm responses preserve responder origin. - Add CLI and proxy tests covering issuer-routed confirm and responder proxy URL persistence. - Update CLI/proxy AGENTS.md pairing rules to 
document proxyOrigin and issuer-routing requirements. --- apps/cli/src/commands/AGENTS.md | 2 + apps/cli/src/commands/pair.test.ts | 115 ++++++++++++------ apps/cli/src/commands/pair.ts | 143 ++++++++++++++++++++--- apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/pairing-route.test.ts | 12 +- apps/proxy/src/pairing-route.ts | 45 ++++++- apps/proxy/src/proxy-trust-state.test.ts | 10 +- apps/proxy/src/proxy-trust-state.ts | 29 ++++- apps/proxy/src/proxy-trust-store.ts | 1 + 9 files changed, 298 insertions(+), 60 deletions(-) diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 31ba0da..b1ab4ba 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -116,9 +116,11 @@ - `pair start --qr` must generate a one-time local PNG QR containing the returned ticket and print the filesystem path. - `pair start --qr` must sweep expired QR artifacts in `~/.clawdentity/pairing` before writing a new file. - `pair confirm ` must call proxy `/pair/confirm` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. +- `pair confirm ` must send `/pair/confirm` to the proxy origin embedded in the pairing ticket issuer (`iss`), not the local resolved proxy URL. - `pair confirm` must accept either `--qr-file ` (primary) or `--ticket ` (fallback), never both. - `pair confirm --qr-file` must delete the consumed QR file after successful confirm (best effort, non-fatal on cleanup failure). - `pair status --ticket ` must poll `/pair/status` and persist peers locally when status transitions to `confirmed`. +- Pair profile payloads/responses may include `proxyOrigin`; persistence must prefer the peer's `proxyOrigin` (when present) and only fall back to ticket issuer origin. 
- After peer persistence, pair flows must best-effort sync OpenClaw transform peer snapshot (`hooks/transforms/clawdentity-peers.json`) when `~/.clawdentity/openclaw-relay.json` provides `relayTransformPeersPath`, so relay delivery works without manual file copying. - `pair start --wait` should use `/pair/status` polling and auto-save the responder peer locally so reverse pairing is not required. - `pair` commands must resolve proxy URL automatically from CLI config/registry metadata, with `CLAWDENTITY_PROXY_URL` env override support. diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts index 042bb37..25ad1a0 100644 --- a/apps/cli/src/commands/pair.test.ts +++ b/apps/cli/src/commands/pair.test.ts @@ -304,46 +304,72 @@ describe("pair command helpers", () => { }); }); - it("fails confirm when ticket issuer does not match configured proxy URL", async () => { + it("routes confirm to ticket issuer proxy when local proxy origin differs", async () => { const fixture = await createPairFixture(); const ticket = `clwpair1_${Buffer.from( JSON.stringify({ iss: "https://alpha.proxy.example" }), ).toString("base64url")}`; + const fetchImpl = vi.fn(async (url: string, init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://beta.proxy.example", + }, + { status: 200 }, + ); + } - await expect( - confirmPairing( - "beta", - { - ticket, - }, + expect(url).toBe("https://alpha.proxy.example/pair/confirm"); + const requestBody = JSON.parse(String(init?.body ?? 
"{}")) as { + responderProfile?: { proxyOrigin?: string }; + }; + expect(requestBody.responderProfile?.proxyOrigin).toBe( + "https://beta.proxy.example", + ); + + return Response.json( { - fetchImpl: (async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://beta.proxy.example", - }, - { status: 200 }, - ); - } - return Response.json({}, { status: 200 }); - }) as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-confirm", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - humanName: RESPONDER_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, }, - ), - ).rejects.toMatchObject({ - code: "CLI_PAIR_TICKET_ISSUER_MISMATCH", + { status: 201 }, + ); }); + + const result = await confirmPairing( + "beta", + { + ticket, + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").chmod, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + 
+ expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); }); it("normalizes wrapped tickets before pair status request", async () => { @@ -646,7 +672,13 @@ describe("pair command helpers", () => { it("polls pair status until confirmed and persists peer for initiator", async () => { const fixture = await createPairFixture(); - const writeFileImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn( + async ( + _filePath: string, + _data: string | Uint8Array, + _encoding?: BufferEncoding, + ) => undefined, + ); const mkdirImpl = vi.fn(async () => undefined); const chmodImpl = vi.fn(async () => undefined); const sleepImpl = vi.fn(async () => undefined); @@ -665,7 +697,10 @@ describe("pair command helpers", () => { initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", initiatorProfile: INITIATOR_PROFILE, responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", - responderProfile: RESPONDER_PROFILE, + responderProfile: { + ...RESPONDER_PROFILE, + proxyOrigin: "https://beta.proxy.example", + }, expiresAt: "2026-02-18T00:00:00.000Z", confirmedAt: "2026-02-18T00:00:05.000Z", }, @@ -724,6 +759,18 @@ describe("pair command helpers", () => { expect(writeFileImpl).toHaveBeenCalledTimes(1); expect(mkdirImpl).toHaveBeenCalledTimes(1); expect(chmodImpl).toHaveBeenCalledTimes(1); + const peerWriteCall = writeFileImpl.mock.calls[0]; + const persistedPeers = JSON.parse(String(peerWriteCall?.[1] ?? 
"{}")) as { + peers: { + [key: string]: { + did: string; + proxyUrl: string; + }; + }; + }; + expect(persistedPeers.peers["peer-22222222"]?.proxyUrl).toBe( + "https://beta.proxy.example/hooks/agent", + ); }); }); diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 56c7ffe..8865e8b 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -143,6 +143,7 @@ type LocalAgentProofMaterial = { type PeerProfile = { agentName: string; humanName: string; + proxyOrigin?: string; }; const isRecord = (value: unknown): value is Record => { @@ -213,10 +214,26 @@ function parsePeerProfile(payload: unknown): PeerProfile { ); } - return { + const profile: PeerProfile = { agentName: parseProfileName(payload.agentName, "agentName"), humanName: parseProfileName(payload.humanName, "humanName"), }; + + const proxyOrigin = parseNonEmptyString(payload.proxyOrigin); + if (proxyOrigin.length > 0) { + let parsedProxyOrigin: string; + try { + parsedProxyOrigin = new URL(parseProxyUrl(proxyOrigin)).origin; + } catch { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + "proxyOrigin is invalid for pairing", + ); + } + profile.proxyOrigin = parsedProxyOrigin; + } + + return profile; } function parsePairingTicket(value: unknown): string { @@ -697,6 +714,7 @@ function parsePositiveIntegerOption(input: { function resolveLocalPairProfile(input: { config: CliConfig; agentName: string; + proxyUrl?: string; }): PeerProfile { const humanName = parseNonEmptyString(input.config.humanName); if (humanName.length === 0) { @@ -706,10 +724,91 @@ function resolveLocalPairProfile(input: { ); } - return { + const profile: PeerProfile = { agentName: parseProfileName(input.agentName, "agentName"), humanName: parseProfileName(humanName, "humanName"), }; + const proxyUrl = parseNonEmptyString(input.proxyUrl); + if (proxyUrl.length > 0) { + profile.proxyOrigin = new URL(parseProxyUrl(proxyUrl)).origin; + } + return profile; +} + +function 
normalizeProxyOrigin(candidate: string): string { + return new URL(parseProxyUrl(candidate)).origin; +} + +function resolvePeerProxyUrl(input: { + ticket: string; + peerProfile: PeerProfile; + peerProxyOrigin?: string; +}): string { + const configuredPeerOrigin = parseNonEmptyString(input.peerProxyOrigin); + const profilePeerOrigin = parseNonEmptyString(input.peerProfile.proxyOrigin); + const fallbackPeerOrigin = parsePairingTicketIssuerOrigin(input.ticket); + const peerOrigin = + configuredPeerOrigin.length > 0 + ? configuredPeerOrigin + : profilePeerOrigin.length > 0 + ? profilePeerOrigin + : fallbackPeerOrigin; + + return new URL( + "/hooks/agent", + `${normalizeProxyOrigin(peerOrigin)}/`, + ).toString(); +} + +function toIssuerProxyUrl(ticket: string): string { + return parseProxyUrl(parsePairingTicketIssuerOrigin(ticket)); +} + +function toIssuerProxyRequestUrl(ticket: string, path: string): string { + return toProxyRequestUrl(toIssuerProxyUrl(ticket), path); +} + +function toPeerProxyOriginFromStatus(input: { + callerAgentDid: string; + initiatorAgentDid: string; + responderAgentDid: string; + initiatorProfile: PeerProfile; + responderProfile?: PeerProfile; +}): string | undefined { + if (input.callerAgentDid === input.initiatorAgentDid) { + return input.responderProfile?.proxyOrigin; + } + + if (input.callerAgentDid === input.responderAgentDid) { + return input.initiatorProfile.proxyOrigin; + } + + return undefined; +} + +function toPeerProxyOriginFromConfirm(input: { + ticket: string; + initiatorProfile: PeerProfile; +}): string { + const initiatorOrigin = parseNonEmptyString( + input.initiatorProfile.proxyOrigin, + ); + if (initiatorOrigin.length > 0) { + return initiatorOrigin; + } + return parsePairingTicketIssuerOrigin(input.ticket); +} + +function toResponderProfile(input: { + config: CliConfig; + agentName: string; + localProxyUrl: string; +}): PeerProfile { + return resolveLocalPairProfile({ + config: input.config, + agentName: input.agentName, + 
proxyUrl: input.localProxyUrl, + }); } function parseProxyUrl(candidate: string): string { @@ -1337,6 +1436,7 @@ async function persistPairedPeer(input: { ticket: string; peerDid: string; peerProfile: PeerProfile; + peerProxyOrigin?: string; dependencies: PairRequestOptions; }): Promise { const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; @@ -1345,8 +1445,11 @@ async function persistPairedPeer(input: { const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; const chmodImpl = input.dependencies.chmodImpl ?? chmod; - const issuerOrigin = parsePairingTicketIssuerOrigin(input.ticket); - const peerProxyUrl = new URL("/hooks/agent", `${issuerOrigin}/`).toString(); + const peerProxyUrl = resolvePeerProxyUrl({ + ticket: input.ticket, + peerProfile: input.peerProfile, + peerProxyOrigin: input.peerProxyOrigin, + }); const peersConfig = await loadPeersConfig({ getConfigDirImpl, readFileImpl, @@ -1403,6 +1506,7 @@ export async function startPairing( const initiatorProfile = resolveLocalPairProfile({ config, agentName: normalizedAgentName, + proxyUrl, }); const { ait, secretKey } = await readAgentProofMaterial( @@ -1486,16 +1590,17 @@ export async function confirmPairing( const qrDecodeImpl = dependencies.qrDecodeImpl ?? 
decodeTicketFromPng; const config = await resolveConfigImpl(); const normalizedAgentName = assertValidAgentName(agentName); - const responderProfile = resolveLocalPairProfile({ + const localProxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); + const responderProfile = toResponderProfile({ config, agentName: normalizedAgentName, + localProxyUrl, }); const ticketSource = resolveConfirmTicketSource(options); - const proxyUrl = await resolveProxyUrl({ - config, - fetchImpl, - }); let ticket = ticketSource.ticket; if (ticketSource.source === "qr-file") { @@ -1524,18 +1629,14 @@ export async function confirmPairing( ticket = parsePairingTicket(qrDecodeImpl(new Uint8Array(imageBytes))); } ticket = parsePairingTicket(ticket); - assertTicketIssuerMatchesProxy({ - ticket, - proxyUrl, - context: "confirm", - }); + const proxyUrl = toIssuerProxyUrl(ticket); const { ait, secretKey } = await readAgentProofMaterial( normalizedAgentName, dependencies, ); - const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_CONFIRM_PATH); + const requestUrl = toIssuerProxyRequestUrl(ticket, PAIR_CONFIRM_PATH); const requestBody = JSON.stringify({ ticket, responderProfile, @@ -1577,10 +1678,15 @@ export async function confirmPairing( } const parsed = parsePairConfirmResponse(responseBody); + const peerProxyOrigin = toPeerProxyOriginFromConfirm({ + ticket, + initiatorProfile: parsed.initiatorProfile, + }); const peerAlias = await persistPairedPeer({ ticket, peerDid: parsed.initiatorAgentDid, peerProfile: parsed.initiatorProfile, + peerProxyOrigin, dependencies, }); @@ -1714,6 +1820,13 @@ async function getPairingStatusOnce( ticket, peerDid, peerProfile, + peerProxyOrigin: toPeerProxyOriginFromStatus({ + callerAgentDid, + initiatorAgentDid: parsed.initiatorAgentDid, + responderAgentDid, + initiatorProfile: parsed.initiatorProfile, + responderProfile: parsed.responderProfile, + }), dependencies, }); } diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index e3f39f7..a942481 
100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -49,6 +49,7 @@ - Keep pairing profile contract strict: - `/pair/start` requires `initiatorProfile.{agentName,humanName}` - `/pair/confirm` requires `responderProfile.{agentName,humanName}` + - `/pair/start` and `/pair/confirm` may include optional `*.proxyOrigin` values; when present they must be valid `http(s)` URL origins and must be preserved in `/pair/status` responses. - `/pair/status` returns stored profile fields for initiator and responder - Keep pairing tickets issuer-authenticated via local signature in `/pair/start`; `/pair/confirm` must consume only locally stored tickets in single-proxy mode. - Keep ticket parsing tolerant for operator copy/paste paths: normalize surrounding markdown/backticks and whitespace before parse + trust-store lookup in both in-memory and Durable Object backends. diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index 0daf2a6..9a05e61 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -17,6 +17,10 @@ const RESPONDER_PROFILE = { agentName: "beta", humanName: "Ira", }; +const RESPONDER_PROFILE_WITH_PROXY_ORIGIN = { + ...RESPONDER_PROFILE, + proxyOrigin: "https://beta.proxy.example", +}; vi.mock("./auth-middleware.js", async () => { const { createMiddleware } = await import("hono/factory"); @@ -291,7 +295,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }, body: JSON.stringify({ ticket: ticket.ticket, - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, }), }); @@ -315,7 +319,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { initiatorAgentDid: INITIATOR_AGENT_DID, initiatorProfile: INITIATOR_PROFILE, responderAgentDid: RESPONDER_AGENT_DID, - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, }); expect( @@ -401,7 +405,7 @@ describe(`POST ${PAIR_STATUS_PATH}`, () => { await 
trustStore.confirmPairingTicket({ ticket: ticket.ticket, responderAgentDid: RESPONDER_AGENT_DID, - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, nowMs: 1_700_000_000_200, }); @@ -436,7 +440,7 @@ describe(`POST ${PAIR_STATUS_PATH}`, () => { initiatorAgentDid: INITIATOR_AGENT_DID, initiatorProfile: INITIATOR_PROFILE, responderAgentDid: RESPONDER_AGENT_DID, - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, expiresAt: "2023-11-14T22:28:20.000Z", confirmedAt: "2023-11-14T22:13:20.000Z", }); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 08f5c00..6b4da23 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -184,11 +184,52 @@ function parsePeerProfile(value: unknown, label: string): PeerProfile { }); } - const payload = value as { agentName?: unknown; humanName?: unknown }; - return { + const payload = value as { + agentName?: unknown; + humanName?: unknown; + proxyOrigin?: unknown; + }; + const profile: PeerProfile = { agentName: parseProfileName(payload.agentName, `${label}.agentName`), humanName: parseProfileName(payload.humanName, `${label}.humanName`), }; + + if (payload.proxyOrigin !== undefined) { + if (typeof payload.proxyOrigin !== "string") { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: `${label}.proxyOrigin must be a valid URL origin`, + status: 400, + expose: true, + }); + } + + let parsedProxyOrigin: URL; + try { + parsedProxyOrigin = new URL(payload.proxyOrigin.trim()); + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: `${label}.proxyOrigin must be a valid URL origin`, + status: 400, + expose: true, + }); + } + if ( + parsedProxyOrigin.protocol !== "https:" && + parsedProxyOrigin.protocol !== "http:" + ) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: `${label}.proxyOrigin must be a valid URL origin`, + status: 400, + 
expose: true, + }); + } + profile.proxyOrigin = parsedProxyOrigin.origin; + } + + return profile; } async function parseJsonBody(c: PairingRouteContext): Promise { diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts index 0a29845..f6701b4 100644 --- a/apps/proxy/src/proxy-trust-state.test.ts +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -16,6 +16,10 @@ const RESPONDER_PROFILE = { agentName: "beta", humanName: "Ira", }; +const RESPONDER_PROFILE_WITH_PROXY_ORIGIN = { + ...RESPONDER_PROFILE, + proxyOrigin: "https://beta.proxy.example", +}; function tamperTicketNonce(ticket: string): string { const prefix = "clwpair1_"; @@ -143,7 +147,7 @@ describe("ProxyTrustState", () => { makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { ticket: ticketBody.ticket, responderAgentDid: "did:claw:agent:bob", - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, nowMs: 1_700_000_000_100, }), ); @@ -159,7 +163,7 @@ describe("ProxyTrustState", () => { initiatorAgentDid: "did:claw:agent:alice", initiatorProfile: INITIATOR_PROFILE, responderAgentDid: "did:claw:agent:bob", - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, issuerProxyUrl: "https://proxy-a.example.com", }); @@ -192,7 +196,7 @@ describe("ProxyTrustState", () => { initiatorAgentDid: "did:claw:agent:alice", initiatorProfile: INITIATOR_PROFILE, responderAgentDid: "did:claw:agent:bob", - responderProfile: RESPONDER_PROFILE, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, expiresAtMs: 1_700_000_060_000, confirmedAtMs: 1_700_000_000_000, }); diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index cd1bf5e..bf4cbdb 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -53,7 +53,11 @@ function parsePeerProfile(value: unknown): PeerProfile | undefined { return undefined; } - const entry = value as { agentName?: 
unknown; humanName?: unknown }; + const entry = value as { + agentName?: unknown; + humanName?: unknown; + proxyOrigin?: unknown; + }; if ( !isNonEmptyString(entry.agentName) || !isNonEmptyString(entry.humanName) @@ -61,10 +65,31 @@ function parsePeerProfile(value: unknown): PeerProfile | undefined { return undefined; } - return { + const profile: PeerProfile = { agentName: entry.agentName.trim(), humanName: entry.humanName.trim(), }; + if (entry.proxyOrigin !== undefined) { + if (!isNonEmptyString(entry.proxyOrigin)) { + return undefined; + } + + let parsedProxyOrigin: URL; + try { + parsedProxyOrigin = new URL(entry.proxyOrigin.trim()); + } catch { + return undefined; + } + if ( + parsedProxyOrigin.protocol !== "https:" && + parsedProxyOrigin.protocol !== "http:" + ) { + return undefined; + } + profile.proxyOrigin = parsedProxyOrigin.origin; + } + + return profile; } function addPeer( diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index cc0be12..805ffd6 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -67,6 +67,7 @@ export type PairingTicketStatusResult = export type PeerProfile = { agentName: string; humanName: string; + proxyOrigin?: string; }; export type PairingInput = { From 2d91b413c9534c56b0305cb9fa243efdbe3b2f42 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 11:01:37 +0530 Subject: [PATCH 107/190] feat(connector,proxy): harden websocket heartbeat and reconnect resilience --- apps/proxy/src/AGENTS.md | 6 +- apps/proxy/src/agent-relay-session.test.ts | 253 +++++++++++-- apps/proxy/src/agent-relay-session.ts | 215 +++++++++-- packages/connector/src/AGENTS.md | 7 + packages/connector/src/client.test.ts | 230 ++++++++++++ packages/connector/src/client.ts | 401 +++++++++++++++++++-- packages/connector/src/constants.ts | 2 + packages/connector/src/index.ts | 2 + packages/connector/src/runtime.ts | 32 ++ 9 files changed, 1064 insertions(+), 84 deletions(-) diff 
--git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index a942481..eb9dfc5 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -33,7 +33,6 @@ - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. - Keep trust/pairing state centralized in `proxy-trust-store.ts` and `proxy-trust-state.ts` (Durable Object backed). - Keep shared trust key/expiry helpers in `proxy-trust-keys.ts`; do not duplicate pair-key or expiry-normalization logic across store/state runtimes. -- Keep shared trust key/expiry helpers in `proxy-trust-keys.ts`; do not duplicate pair-key or expiry-normalization logic across store/state runtimes. - Keep pairing route logic isolated in `pairing-route.ts`; `server.ts` should compose it, not implement policy details. - Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided. - Keep server middleware composable and single-responsibility to reduce churn in later T27-T31 auth/forwarding work. @@ -69,6 +68,11 @@ - Keep relay delivery semantics asynchronous and durable: `/hooks/agent` accepts queued deliveries with `202` (`state=queued`) when recipient connector is offline. - Keep relay queue saturation explicit: reject new deliveries with `507 PROXY_RELAY_QUEUE_FULL`; do not evict queued messages implicitly. - Keep relay retries inside `agent-relay-session.ts` with bounded backoff (`RELAY_RETRY_*`) and per-agent queue caps/TTL (`RELAY_QUEUE_*`); do not add ad-hoc retry loops in route handlers. +- Keep relay websocket heartbeat liveness explicit in `agent-relay-session.ts`: track per-socket heartbeat ack time and enforce a 60s ack timeout before socket eviction. +- Keep stale connector cleanup proactive: evict stale sockets during alarm sweeps and before accepting a new reconnect socket. 
+- Keep connector session ownership deterministic: new reconnect sockets supersede older live sockets with a clean `1000` close code so delivery always targets one active socket. +- Keep reconnect recovery eager: drain durable queue immediately on reconnect instead of waiting for the next alarm tick. +- Keep close semantics strict for pending delivery promises: clean `1000` closes do not reject pending deliveries, but unclean closes reject when no sockets remain. - Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. - Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. - Index pairing tickets by ticket `kid` in both in-memory and Durable Object stores; persist the original full ticket string alongside each entry and require exact ticket match on confirm. diff --git a/apps/proxy/src/agent-relay-session.test.ts b/apps/proxy/src/agent-relay-session.test.ts index 48ff43d..1ab697b 100644 --- a/apps/proxy/src/agent-relay-session.test.ts +++ b/apps/proxy/src/agent-relay-session.test.ts @@ -19,6 +19,31 @@ function createMockSocket(): MockWebSocket { }; } +async function withMockWebSocketPair( + pairClient: MockWebSocket, + pairServer: MockWebSocket, + callback: () => Promise, +): Promise { + const originalWebSocketPair = (globalThis as { WebSocketPair?: unknown }) + .WebSocketPair; + + (globalThis as unknown as { WebSocketPair: unknown }).WebSocketPair = class { + 0 = pairClient as unknown as WebSocket; + 1 = pairServer as unknown as WebSocket; + }; + + try { + await callback(); + } finally { + if (originalWebSocketPair === undefined) { + delete (globalThis as { WebSocketPair?: unknown }).WebSocketPair; + } else { + (globalThis as { WebSocketPair?: unknown }).WebSocketPair = + originalWebSocketPair; + } + } +} + function createStateHarness() { const 
connectedSockets: WebSocket[] = []; const storageMap = new Map(); @@ -52,18 +77,9 @@ describe("AgentRelaySession", () => { const harness = createStateHarness(); const relaySession = new AgentRelaySession(harness.state); - const originalWebSocketPair = (globalThis as { WebSocketPair?: unknown }) - .WebSocketPair; const pairClient = createMockSocket(); const pairServer = createMockSocket(); - - (globalThis as unknown as { WebSocketPair: unknown }).WebSocketPair = - class { - 0 = pairClient as unknown as WebSocket; - 1 = pairServer as unknown as WebSocket; - }; - - try { + await withMockWebSocketPair(pairClient, pairServer, async () => { const request = new Request( `https://relay.example.test${RELAY_CONNECT_PATH}`, { @@ -87,7 +103,9 @@ describe("AgentRelaySession", () => { expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ "did:claw:agent:connector", ]); - expect(harness.storage.setAlarm).toHaveBeenCalledTimes(1); + expect(harness.storage.setAlarm.mock.calls.length).toBeGreaterThanOrEqual( + 1, + ); // Node's WHATWG Response may reject status 101 in tests; Workers runtime accepts it. 
if (connectResponse !== undefined) { @@ -95,14 +113,7 @@ describe("AgentRelaySession", () => { } else { expect(connectError).toBeInstanceOf(RangeError); } - } finally { - if (originalWebSocketPair === undefined) { - delete (globalThis as { WebSocketPair?: unknown }).WebSocketPair; - } else { - (globalThis as { WebSocketPair?: unknown }).WebSocketPair = - originalWebSocketPair; - } - } + }); }); it("returns 426 for non-websocket connect requests", async () => { @@ -196,7 +207,7 @@ describe("AgentRelaySession", () => { expect(persisted.deliveries[0]?.requestId).toBe("req-2"); }); - it("drains queued messages after connector reconnects", async () => { + it("drains queued messages immediately after connector reconnects", async () => { const harness = createStateHarness(); const relaySession = new AgentRelaySession(harness.state, { RELAY_RETRY_JITTER_RATIO: "0", @@ -210,11 +221,11 @@ describe("AgentRelaySession", () => { payload: { event: "agent.started" }, }); - const connectorSocket = createMockSocket(); - const ws = connectorSocket as unknown as WebSocket; - harness.connectedSockets.push(ws); + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + const ws = pairServer as unknown as WebSocket; - connectorSocket.send.mockImplementation((payload: unknown) => { + pairServer.send.mockImplementation((payload: unknown) => { const frame = parseFrame(payload); if (frame.type !== "deliver") { return; @@ -233,13 +244,33 @@ describe("AgentRelaySession", () => { ); }); - await new Promise((resolve) => setTimeout(resolve, 5)); - await relaySession.alarm(); + await withMockWebSocketPair(pairClient, pairServer, async () => { + let connectError: unknown; + try { + await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ); + } catch (error) { + connectError = error; + } + + if (connectError !== 
undefined) { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + + await new Promise((resolve) => setTimeout(resolve, 10)); - const sendFrames = connectorSocket.send.mock.calls + const sendFrames = pairServer.send.mock.calls .map((call) => parseFrame(call[0])) .filter((frame) => frame.type === "deliver"); - expect(sendFrames.length).toBe(1); + expect(sendFrames).toHaveLength(1); const dedupedResult = await relaySession.deliverToConnector({ requestId: "req-3", @@ -251,6 +282,172 @@ describe("AgentRelaySession", () => { expect(dedupedResult.queueDepth).toBe(0); }); + it("evicts stale sockets during alarm heartbeat sweep", async () => { + vi.useFakeTimers(); + const nowMs = Date.now(); + vi.setSystemTime(nowMs); + + try { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const staleSocket = createMockSocket(); + const ws = staleSocket as unknown as WebSocket; + staleSocket.close.mockImplementation(() => { + harness.connectedSockets.splice( + harness.connectedSockets.indexOf(ws), + 1, + ); + }); + harness.connectedSockets.push(ws); + + await relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "heartbeat_ack", + id: generateUlid(nowMs + 1), + ts: new Date(nowMs + 1).toISOString(), + ackId: generateUlid(nowMs + 2), + }), + ); + + vi.advanceTimersByTime(60_001); + await relaySession.alarm(); + + expect(staleSocket.close).toHaveBeenCalledWith( + 1011, + "heartbeat_ack_timeout", + ); + const outboundHeartbeats = staleSocket.send.mock.calls + .map((call) => parseFrame(call[0])) + .filter((frame) => frame.type === "heartbeat"); + expect(outboundHeartbeats).toHaveLength(0); + } finally { + vi.useRealTimers(); + } + }); + + it("supersedes an existing socket when a new connector session connects", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + const oldSocket = createMockSocket(); + const 
oldWs = oldSocket as unknown as WebSocket; + oldSocket.close.mockImplementation(() => { + harness.connectedSockets.splice( + harness.connectedSockets.indexOf(oldWs), + 1, + ); + }); + harness.connectedSockets.push(oldWs); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + + await withMockWebSocketPair(pairClient, pairServer, async () => { + let connectError: unknown; + try { + await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ); + } catch (error) { + connectError = error; + } + + if (connectError !== undefined) { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + + expect(oldSocket.close).toHaveBeenCalledWith( + 1000, + "superseded_by_new_connection", + ); + expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ + "did:claw:agent:connector", + ]); + expect(oldSocket.close.mock.invocationCallOrder[0]).toBeLessThan( + harness.state.acceptWebSocket.mock.invocationCallOrder[0], + ); + }); + + it("does not reject pending deliveries on clean close code 1000", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + const pendingDelivery = relaySession.deliverToConnector({ + requestId: "req-clean-close", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + await vi.waitFor(() => { + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + }); + + harness.connectedSockets.splice(harness.connectedSockets.indexOf(ws), 1); + await relaySession.webSocketClose(ws, 1000, "normal", true); + + const settleState = await Promise.race([ + 
pendingDelivery.then( + () => "settled", + () => "settled", + ), + new Promise<"pending">((resolve) => { + setTimeout(() => resolve("pending"), 5); + }), + ]); + expect(settleState).toBe("pending"); + + await relaySession.webSocketClose(ws, 1011, "unclean", false); + const queuedAfterUnclean = await pendingDelivery; + expect(queuedAfterUnclean.state).toBe("queued"); + expect(queuedAfterUnclean.queued).toBe(true); + }); + + it("rejects pending deliveries on unclean close when no sockets remain", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + const pendingDelivery = relaySession.deliverToConnector({ + requestId: "req-unclean-close", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + await vi.waitFor(() => { + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + }); + + harness.connectedSockets.splice(harness.connectedSockets.indexOf(ws), 1); + await relaySession.webSocketClose(ws, 1011, "socket_error", false); + + const settleState = await Promise.race([ + pendingDelivery.then((result) => result.state), + new Promise<"timeout">((resolve) => { + setTimeout(() => resolve("timeout"), 20); + }), + ]); + expect(settleState).toBe("queued"); + }); + it("supports fetch RPC delivery endpoint for compatibility", async () => { const harness = createStateHarness(); const relaySession = new AgentRelaySession(harness.state, { diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts index 6b1def8..f32283b 100644 --- a/apps/proxy/src/agent-relay-session.ts +++ b/apps/proxy/src/agent-relay-session.ts @@ -12,7 +12,10 @@ import { parseProxyConfig } from "./config.js"; const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; 
const RELAY_RPC_DELIVER_PATH = "/rpc/deliver-to-connector"; const RELAY_HEARTBEAT_INTERVAL_MS = 30_000; +const RELAY_HEARTBEAT_ACK_TIMEOUT_MS = 60_000; const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; +const RELAY_SOCKET_SUPERSEDED_CLOSE_CODE = 1000; +const RELAY_SOCKET_STALE_CLOSE_CODE = 1011; type DurableObjectStorageLike = { deleteAlarm?: () => Promise | void; @@ -118,21 +121,26 @@ type RelayDeliveryPolicy = { retryMaxMs: number; }; -function toHeartbeatFrame(): string { - return serializeFrame({ - v: CONNECTOR_FRAME_VERSION, - type: "heartbeat", - id: generateUlid(Date.now()), - ts: new Date().toISOString(), - }); +function toHeartbeatFrame(nowMs: number): { id: string; payload: string } { + const id = generateUlid(nowMs); + return { + id, + payload: serializeFrame({ + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat", + id, + ts: new Date(nowMs).toISOString(), + }), + }; } function toHeartbeatAckFrame(ackId: string): string { + const nowMs = Date.now(); const ackFrame: HeartbeatAckFrame = { v: CONNECTOR_FRAME_VERSION, type: "heartbeat_ack", - id: generateUlid(Date.now()), - ts: new Date().toISOString(), + id: generateUlid(nowMs), + ts: new Date(nowMs).toISOString(), ackId, }; @@ -248,7 +256,10 @@ export async function deliverToRelaySession( export class AgentRelaySession { private readonly deliveryPolicy: RelayDeliveryPolicy; + private readonly heartbeatAckSockets = new Map(); private readonly pendingDeliveries = new Map(); + private readonly socketLastAckAtMs = new Map(); + private readonly socketsPendingClose = new Set(); private readonly state: DurableObjectStateLike; private inMemoryQueueState: RelayQueueState = { deliveries: [], @@ -304,20 +315,11 @@ export class AgentRelaySession { async alarm(): Promise { const nowMs = Date.now(); - const sockets = this.state.getWebSockets(); + const sockets = this.getActiveSockets(nowMs); if (sockets.length > 0) { - const heartbeatFrame = toHeartbeatFrame(); for (const socket of sockets) { - try { - 
socket.send(heartbeatFrame); - } catch { - try { - socket.close(1011, "heartbeat_send_failed"); - } catch { - // Ignore close errors for already-closed sockets. - } - } + this.sendHeartbeatFrame(socket, nowMs); } } @@ -341,12 +343,12 @@ export class AgentRelaySession { return toRelayDeliveryResult({ deliveryId: existingReceipt.deliveryId, state: existingReceipt.state, - connectedSockets: this.state.getWebSockets().length, + connectedSockets: this.getActiveSockets(nowMs).length, queueDepth: queueState.deliveries.length, }); } - const sockets = this.state.getWebSockets(); + const sockets = this.getActiveSockets(nowMs); const deliveryId = generateUlid(nowMs); const deliveryTtlExpiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; let priorAttempts = 0; @@ -423,6 +425,7 @@ export class AgentRelaySession { ws: WebSocket, message: string | ArrayBuffer, ): Promise { + const nowMs = Date.now(); const frameResult = (() => { try { return parseFrame(message); @@ -439,12 +442,14 @@ export class AgentRelaySession { const frame = frameResult; if (frame.type === "heartbeat") { + this.touchSocketAck(ws, nowMs); ws.send(toHeartbeatAckFrame(frame.id)); await this.scheduleFromStorage(); return; } if (frame.type === "deliver_ack") { + this.touchSocketAck(ws, nowMs); const pending = this.pendingDeliveries.get(frame.ackId); if (pending) { clearTimeout(pending.timeoutHandle); @@ -456,6 +461,9 @@ export class AgentRelaySession { } if (frame.type === "heartbeat_ack") { + const ackedSocket = this.heartbeatAckSockets.get(frame.ackId); + this.heartbeatAckSockets.delete(frame.ackId); + this.touchSocketAck(ackedSocket ?? 
ws, nowMs); await this.scheduleFromStorage(); return; } @@ -463,17 +471,27 @@ export class AgentRelaySession { await this.scheduleFromStorage(); } - async webSocketClose(): Promise { - if (this.state.getWebSockets().length === 0) { + async webSocketClose( + ws?: WebSocket, + code?: number, + _reason?: string, + wasClean?: boolean, + ): Promise { + if (ws !== undefined) { + this.removeSocketTracking(ws); + this.socketsPendingClose.delete(ws); + } + + const gracefulClose = code === 1000 && (wasClean ?? true); + if (!gracefulClose && this.state.getWebSockets().length === 0) { this.rejectPendingDeliveries(new Error("Connector socket closed")); } await this.scheduleFromStorage(); } - async webSocketError(): Promise { - this.rejectPendingDeliveries(new Error("Connector socket error")); - await this.webSocketClose(); + async webSocketError(ws?: WebSocket): Promise { + await this.webSocketClose(ws, 1011, "connector_socket_error", false); } private async handleConnect(request: Request): Promise { @@ -488,12 +506,23 @@ export class AgentRelaySession { return new Response("Missing connector agent DID", { status: 400 }); } + const nowMs = Date.now(); + const activeSockets = this.getActiveSockets(nowMs); + for (const socket of activeSockets) { + this.closeSocket( + socket, + RELAY_SOCKET_SUPERSEDED_CLOSE_CODE, + "superseded_by_new_connection", + ); + } + const pair = new WebSocketPair(); const client = pair[0]; const server = pair[1]; this.state.acceptWebSocket(server, [connectorAgentDid]); - await this.scheduleFromStorage(); + this.touchSocketAck(server, nowMs); + await this.drainQueueOnReconnect(); return new Response(null, { status: 101, @@ -668,7 +697,7 @@ export class AgentRelaySession { return false; } - const sockets = this.state.getWebSockets(); + const sockets = this.getActiveSockets(nowMs); if (sockets.length === 0) { let mutated = false; for (const delivery of queueState.deliveries) { @@ -842,6 +871,132 @@ export class AgentRelaySession { } } + private 
getActiveSockets(nowMs: number): WebSocket[] { + const sockets = this.state.getWebSockets(); + this.pruneSocketTracking(sockets); + const activeSockets: WebSocket[] = []; + + for (const socket of sockets) { + if (this.socketsPendingClose.has(socket)) { + continue; + } + + const lastAckAtMs = this.resolveSocketLastAckAtMs(socket, nowMs); + if (nowMs - lastAckAtMs > RELAY_HEARTBEAT_ACK_TIMEOUT_MS) { + this.closeSocket( + socket, + RELAY_SOCKET_STALE_CLOSE_CODE, + "heartbeat_ack_timeout", + ); + continue; + } + + activeSockets.push(socket); + } + + return activeSockets; + } + + private resolveSocketLastAckAtMs(socket: WebSocket, nowMs: number): number { + const existing = this.socketLastAckAtMs.get(socket); + if (existing !== undefined) { + return existing; + } + + this.socketLastAckAtMs.set(socket, nowMs); + return nowMs; + } + + private touchSocketAck(socket: WebSocket, nowMs: number): void { + this.socketsPendingClose.delete(socket); + this.socketLastAckAtMs.set(socket, nowMs); + } + + private sendHeartbeatFrame(socket: WebSocket, nowMs: number): void { + const heartbeatFrame = toHeartbeatFrame(nowMs); + this.clearSocketHeartbeatAcks(socket); + this.heartbeatAckSockets.set(heartbeatFrame.id, socket); + + try { + socket.send(heartbeatFrame.payload); + } catch { + this.heartbeatAckSockets.delete(heartbeatFrame.id); + this.closeSocket( + socket, + RELAY_SOCKET_STALE_CLOSE_CODE, + "heartbeat_send_failed", + ); + } + } + + private clearSocketHeartbeatAcks(socket: WebSocket): void { + for (const [ackId, ackSocket] of this.heartbeatAckSockets) { + if (ackSocket === socket) { + this.heartbeatAckSockets.delete(ackId); + } + } + } + + private closeSocket(socket: WebSocket, code: number, reason: string): void { + this.socketsPendingClose.add(socket); + this.removeSocketTracking(socket); + try { + socket.close(code, reason); + } catch { + // Ignore close errors for already-closed sockets. 
+ } + } + + private removeSocketTracking(socket: WebSocket): void { + this.socketLastAckAtMs.delete(socket); + this.clearSocketHeartbeatAcks(socket); + } + + private pruneSocketTracking(activeSockets: WebSocket[]): void { + const activeSocketSet = new Set(activeSockets); + + for (const socket of this.socketLastAckAtMs.keys()) { + if (!activeSocketSet.has(socket)) { + this.socketLastAckAtMs.delete(socket); + } + } + + for (const socket of this.socketsPendingClose) { + if (!activeSocketSet.has(socket)) { + this.socketsPendingClose.delete(socket); + } + } + + for (const [ackId, socket] of this.heartbeatAckSockets.entries()) { + if (!activeSocketSet.has(socket)) { + this.heartbeatAckSockets.delete(ackId); + } + } + } + + private async drainQueueOnReconnect(): Promise { + const nowMs = Date.now(); + const queueState = await this.loadQueueState(nowMs); + let queueMutated = false; + + for (const delivery of queueState.deliveries) { + if (delivery.nextAttemptAtMs > nowMs) { + delivery.nextAttemptAtMs = nowMs; + queueMutated = true; + } + } + + if (await this.processQueueDeliveries(queueState, nowMs)) { + queueMutated = true; + } + + if (queueMutated) { + await this.saveQueueState(queueState); + } + + await this.scheduleNextAlarm(queueState, nowMs); + } + private async scheduleFromStorage(): Promise { const nowMs = Date.now(); const queueState = await this.loadQueueState(nowMs); @@ -859,7 +1014,7 @@ export class AgentRelaySession { candidates.push(queueWakeAtMs); } - if (this.state.getWebSockets().length > 0) { + if (this.getActiveSockets(nowMs).length > 0) { candidates.push(nowMs + RELAY_HEARTBEAT_INTERVAL_MS); } diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index ba7f72f..4044427 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -18,6 +18,13 @@ - `/v1/status` must include websocket state and inbound replay health (`pendingCount`, `oldestPendingAt`, replay activity/error, hook status). 
- On inbox/status read failures, return explicit structured errors instead of crashing runtime. +## WebSocket Resilience Rules +- Keep websocket reconnect behavior centralized in `client.ts` (single cleanup path for close/error/unexpected-response/timeout). +- Keep default websocket connect timeout at `DEFAULT_CONNECT_TIMEOUT_MS` (15000ms) and heartbeat ack timeout at `DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS` (60000ms). +- Track outbound heartbeat IDs and clear pending entries only when matching `heartbeat_ack` frames are received. +- If heartbeat ack timeout expires, disconnect and reconnect using the same reconnect policy used for other transport failures. +- Handle `unexpected-response` status codes from ws upgrade failures; for `401`, trigger `onAuthUpgradeRejected` and allow one immediate reconnect before normal backoff. + ## Testing Rules - `inbound-inbox.test.ts` must cover persistence, dedupe, cap enforcement, and replay bookkeeping transitions. - `client.test.ts` must cover both delivery modes: diff --git a/packages/connector/src/client.test.ts b/packages/connector/src/client.test.ts index 99f13e1..0aeafae 100644 --- a/packages/connector/src/client.test.ts +++ b/packages/connector/src/client.test.ts @@ -13,6 +13,7 @@ class MockWebSocket { message: new Set(), close: new Set(), error: new Set(), + "unexpected-response": new Set(), }; constructor(url: string) { @@ -62,6 +63,14 @@ class MockWebSocket { }); } + error(error: unknown): void { + this.emit("error", { error }); + } + + unexpectedResponse(status: number): void { + this.emit("unexpected-response", { status }); + } + private emit(type: string, event: unknown): void { for (const listener of this.listeners[type] ?? 
[]) { listener(event); @@ -345,6 +354,227 @@ describe("ConnectorClient", () => { client.disconnect(); }); + it("reconnects when heartbeat acknowledgement times out", async () => { + vi.useFakeTimers(); + + const sockets: MockWebSocket[] = []; + const disconnectedEvents: { code: number; reason: string }[] = []; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 10, + heartbeatAckTimeoutMs: 25, + reconnectMinDelayMs: 50, + reconnectMaxDelayMs: 50, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: (event) => { + disconnectedEvents.push({ code: event.code, reason: event.reason }); + }, + }, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + await vi.advanceTimersByTimeAsync(35); + expect(sockets).toHaveLength(1); + expect(disconnectedEvents).toHaveLength(1); + expect(disconnectedEvents[0]?.reason).toContain( + "Heartbeat acknowledgement", + ); + + await vi.advanceTimersByTimeAsync(50); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("does not reconnect when heartbeat acknowledgement arrives before timeout", async () => { + vi.useFakeTimers(); + + const sockets: MockWebSocket[] = []; + const disconnected = vi.fn(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 100, + heartbeatAckTimeoutMs: 40, + reconnectMinDelayMs: 20, + reconnectMaxDelayMs: 20, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: disconnected, + }, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + await vi.advanceTimersByTimeAsync(100); + const outboundHeartbeat = 
parseFrame(sockets[0].sent[0]); + expect(outboundHeartbeat.type).toBe("heartbeat"); + if (outboundHeartbeat.type !== "heartbeat") { + throw new Error("expected heartbeat frame"); + } + + sockets[0].message( + serializeFrame({ + v: 1, + type: "heartbeat_ack", + id: generateUlid(1700000000010), + ts: "2026-01-01T00:00:00.010Z", + ackId: outboundHeartbeat.id, + }), + ); + + await vi.advanceTimersByTimeAsync(80); + expect(disconnected).not.toHaveBeenCalled(); + expect(sockets).toHaveLength(1); + + client.disconnect(); + }); + + it("reconnects when websocket connection does not open before timeout", async () => { + vi.useFakeTimers(); + + const sockets: MockWebSocket[] = []; + const disconnected = vi.fn(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 30, + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 20, + reconnectMaxDelayMs: 20, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: disconnected, + }, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + await vi.advanceTimersByTimeAsync(29); + expect(sockets).toHaveLength(1); + await vi.advanceTimersByTimeAsync(1); + expect(disconnected).toHaveBeenCalledTimes(1); + await vi.advanceTimersByTimeAsync(20); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("reconnects after websocket error even when close event is missing", async () => { + vi.useFakeTimers(); + + const sockets: MockWebSocket[] = []; + const disconnected = vi.fn(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 40, + reconnectMaxDelayMs: 40, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: disconnected, + }, + 
webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + sockets[0].readyState = 3; + sockets[0].error(new Error("boom")); + + expect(disconnected).toHaveBeenCalledTimes(1); + await vi.advanceTimersByTimeAsync(39); + expect(sockets).toHaveLength(1); + await vi.advanceTimersByTimeAsync(1); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("retries websocket upgrade rejection with one immediate retry on 401", async () => { + vi.useFakeTimers(); + + const sockets: MockWebSocket[] = []; + const onAuthUpgradeRejected = + vi.fn<(event: { status: number; immediateRetry: boolean }) => void>(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 100, + reconnectMaxDelayMs: 100, + reconnectJitterRatio: 0, + hooks: { + onAuthUpgradeRejected, + }, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + sockets[0].unexpectedResponse(401); + await vi.runOnlyPendingTimersAsync(); + expect(sockets).toHaveLength(2); + expect(onAuthUpgradeRejected).toHaveBeenCalledTimes(1); + expect(onAuthUpgradeRejected).toHaveBeenNthCalledWith(1, { + status: 401, + immediateRetry: true, + }); + + sockets[1].unexpectedResponse(401); + await vi.advanceTimersByTimeAsync(99); + expect(sockets).toHaveLength(2); + await vi.advanceTimersByTimeAsync(1); + expect(sockets).toHaveLength(3); + expect(onAuthUpgradeRejected).toHaveBeenCalledTimes(2); + expect(onAuthUpgradeRejected).toHaveBeenNthCalledWith(2, { + status: 401, + immediateRetry: false, + }); + + client.disconnect(); + }); + it("reconnects after websocket closes", () => { vi.useFakeTimers(); diff --git 
a/packages/connector/src/client.ts b/packages/connector/src/client.ts index d617bb5..c550d64 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -2,6 +2,8 @@ import { generateUlid } from "@clawdentity/protocol"; import { createLogger, type Logger } from "@clawdentity/sdk"; import { CONNECTOR_FRAME_VERSION, + DEFAULT_CONNECT_TIMEOUT_MS, + DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS, DEFAULT_HEARTBEAT_INTERVAL_MS, DEFAULT_OPENCLAW_DELIVER_MAX_ATTEMPTS, DEFAULT_OPENCLAW_DELIVER_RETRY_BACKOFF_FACTOR, @@ -28,8 +30,14 @@ import { serializeFrame, } from "./frames.js"; -type ConnectorWebSocketEventType = "open" | "message" | "close" | "error"; +type ConnectorWebSocketEventType = + | "open" + | "message" + | "close" + | "error" + | "unexpected-response"; type ConnectorWebSocketListener = (event: unknown) => void; +const WS_READY_STATE_CONNECTING = 0; export type ConnectorWebSocket = { readonly readyState: number; @@ -48,6 +56,10 @@ export type ConnectorClientHooks = { reason: string; wasClean: boolean; }) => void; + onAuthUpgradeRejected?: (event: { + status: number; + immediateRetry: boolean; + }) => void | Promise; onFrame?: (frame: ConnectorFrame) => void; onDeliverSucceeded?: (frame: DeliverFrame) => void; onDeliverFailed?: (frame: DeliverFrame, error: unknown) => void; @@ -62,7 +74,9 @@ export type ConnectorClientOptions = { openclawBaseUrl: string; openclawHookToken?: string; openclawHookPath?: string; + connectTimeoutMs?: number; heartbeatIntervalMs?: number; + heartbeatAckTimeoutMs?: number; reconnectMinDelayMs?: number; reconnectMaxDelayMs?: number; reconnectBackoffFactor?: number; @@ -185,6 +199,40 @@ function readCloseEvent(event: unknown): { }; } +function readUnexpectedResponseStatus(event: unknown): number | undefined { + if (!isObject(event)) { + return undefined; + } + + if (typeof event.status === "number") { + return event.status; + } + + if (typeof event.statusCode === "number") { + return event.statusCode; + } + + const response 
= event.response; + if (isObject(response)) { + if (typeof response.status === "number") { + return response.status; + } + if (typeof response.statusCode === "number") { + return response.statusCode; + } + } + + return undefined; +} + +function readErrorEventReason(event: unknown): string { + if (!isObject(event) || !("error" in event)) { + return "WebSocket error"; + } + + return sanitizeErrorReason(event.error); +} + function normalizeConnectionHeaders( headers: Record | undefined, ): Record { @@ -213,7 +261,9 @@ export class ConnectorClient { | undefined; private readonly openclawHookUrl: string; private readonly openclawHookToken?: string; + private readonly connectTimeoutMs: number; private readonly heartbeatIntervalMs: number; + private readonly heartbeatAckTimeoutMs: number; private readonly reconnectMinDelayMs: number; private readonly reconnectMaxDelayMs: number; private readonly reconnectBackoffFactor: number; @@ -240,8 +290,12 @@ export class ConnectorClient { private socket: ConnectorWebSocket | undefined; private reconnectTimeout: ReturnType | undefined; + private connectTimeout: ReturnType | undefined; private heartbeatInterval: ReturnType | undefined; + private heartbeatAckTimeout: ReturnType | undefined; + private readonly pendingHeartbeatAcks = new Map(); private reconnectAttempt = 0; + private authUpgradeImmediateRetryUsed = false; private started = false; private readonly outboundQueue: EnqueueFrame[] = []; @@ -252,8 +306,18 @@ export class ConnectorClient { ); this.connectionHeadersProvider = options.connectionHeadersProvider; this.openclawHookToken = options.openclawHookToken; + this.connectTimeoutMs = Math.max( + 0, + Math.floor(options.connectTimeoutMs ?? DEFAULT_CONNECT_TIMEOUT_MS), + ); this.heartbeatIntervalMs = options.heartbeatIntervalMs ?? DEFAULT_HEARTBEAT_INTERVAL_MS; + this.heartbeatAckTimeoutMs = Math.max( + 0, + Math.floor( + options.heartbeatAckTimeoutMs ?? 
DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS, + ), + ); this.reconnectMinDelayMs = options.reconnectMinDelayMs ?? DEFAULT_RECONNECT_MIN_DELAY_MS; this.reconnectMaxDelayMs = @@ -326,12 +390,12 @@ export class ConnectorClient { disconnect(): void { this.started = false; this.clearReconnectTimeout(); - this.clearHeartbeatInterval(); + this.clearSocketState(); if (this.socket !== undefined) { const socket = this.socket; this.socket = undefined; - socket.close(1000, "client disconnect"); + this.closeSocketQuietly(socket, 1000, "client disconnect"); } } @@ -392,8 +456,18 @@ export class ConnectorClient { return; } - this.socket.addEventListener("open", () => { + const socket = this.socket; + this.startConnectTimeout(socket); + + socket.addEventListener("open", () => { + if (this.socket !== socket) { + return; + } + + this.clearConnectTimeout(); + this.clearHeartbeatTracking(); this.reconnectAttempt = 0; + this.authUpgradeImmediateRetryUsed = false; this.logger.info("connector.websocket.connected", { url: this.connectorUrl, }); @@ -402,13 +476,18 @@ export class ConnectorClient { this.hooks.onConnected?.(); }); - this.socket.addEventListener("message", (event) => { + socket.addEventListener("message", (event) => { + if (this.socket !== socket) { + return; + } + void this.handleIncomingMessage(readMessageEventData(event)); }); - this.socket.addEventListener("close", (event) => { - this.clearHeartbeatInterval(); - this.socket = undefined; + socket.addEventListener("close", (event) => { + if (!this.detachSocket(socket)) { + return; + } const closeEvent = readCloseEvent(event); @@ -429,29 +508,78 @@ export class ConnectorClient { } }); - this.socket.addEventListener("error", () => { + socket.addEventListener("error", (event) => { + if (this.socket !== socket) { + return; + } + + const readyState = socket.readyState; + const shouldForceReconnect = + readyState !== WS_READY_STATE_OPEN && + readyState !== WS_READY_STATE_CONNECTING; + if (!shouldForceReconnect) { + 
this.logger.warn("connector.websocket.error", { + url: this.connectorUrl, + reason: readErrorEventReason(event), + readyState, + }); + return; + } + + if (!this.detachSocket(socket)) { + return; + } + + const reason = readErrorEventReason(event); this.logger.warn("connector.websocket.error", { url: this.connectorUrl, + reason, + }); + this.closeSocketQuietly(socket, 1011, "websocket error"); + + this.hooks.onDisconnected?.({ + code: 1006, + reason, + wasClean: false, }); + + if (this.started) { + this.scheduleReconnect(); + } + }); + + socket.addEventListener("unexpected-response", (event) => { + void this.handleUnexpectedResponse(socket, event); }); } - private scheduleReconnect(): void { + private scheduleReconnect(options?: { + delayMs?: number; + incrementAttempt?: boolean; + }): void { if (!this.started) { return; } - const exponentialDelay = - this.reconnectMinDelayMs * - this.reconnectBackoffFactor ** this.reconnectAttempt; - const boundedDelay = Math.min(exponentialDelay, this.reconnectMaxDelayMs); + this.clearReconnectTimeout(); - const jitterRange = boundedDelay * this.reconnectJitterRatio; - const jitterOffset = - jitterRange === 0 ? 0 : (this.random() * 2 - 1) * jitterRange; + let delayMs: number; + if (options?.delayMs !== undefined) { + delayMs = Math.max(0, Math.floor(options.delayMs)); + } else { + const exponentialDelay = + this.reconnectMinDelayMs * + this.reconnectBackoffFactor ** this.reconnectAttempt; + const boundedDelay = Math.min(exponentialDelay, this.reconnectMaxDelayMs); + const jitterRange = boundedDelay * this.reconnectJitterRatio; + const jitterOffset = + jitterRange === 0 ? 0 : (this.random() * 2 - 1) * jitterRange; + delayMs = Math.max(0, Math.floor(boundedDelay + jitterOffset)); + } - const delayMs = Math.max(0, Math.floor(boundedDelay + jitterOffset)); - this.reconnectAttempt += 1; + if (options?.incrementAttempt ?? 
true) { + this.reconnectAttempt += 1; + } this.reconnectTimeout = setTimeout(() => { void this.connectSocket(); @@ -465,8 +593,148 @@ export class ConnectorClient { } } + private startConnectTimeout(socket: ConnectorWebSocket): void { + this.clearConnectTimeout(); + + if (this.connectTimeoutMs <= 0) { + return; + } + + this.connectTimeout = setTimeout(() => { + if (!this.detachSocket(socket)) { + return; + } + + this.logger.warn("connector.websocket.connect_timeout", { + timeoutMs: this.connectTimeoutMs, + url: this.connectorUrl, + }); + this.closeSocketQuietly(socket, 1000, "connect timeout"); + this.hooks.onDisconnected?.({ + code: 1006, + reason: "WebSocket connect timed out", + wasClean: false, + }); + if (this.started) { + this.scheduleReconnect(); + } + }, this.connectTimeoutMs); + } + + private clearConnectTimeout(): void { + if (this.connectTimeout !== undefined) { + clearTimeout(this.connectTimeout); + this.connectTimeout = undefined; + } + } + + private clearSocketState(): void { + this.clearConnectTimeout(); + this.clearHeartbeatTracking(); + } + + private clearHeartbeatTracking(): void { + if (this.heartbeatInterval !== undefined) { + clearInterval(this.heartbeatInterval); + this.heartbeatInterval = undefined; + } + if (this.heartbeatAckTimeout !== undefined) { + clearTimeout(this.heartbeatAckTimeout); + this.heartbeatAckTimeout = undefined; + } + this.pendingHeartbeatAcks.clear(); + } + + private detachSocket(socket: ConnectorWebSocket): boolean { + if (this.socket !== socket) { + return false; + } + + this.socket = undefined; + this.clearSocketState(); + return true; + } + + private closeSocketQuietly( + socket: ConnectorWebSocket, + code?: number, + reason?: string, + ): void { + try { + socket.close(code, reason); + } catch (error) { + this.logger.warn("connector.websocket.close_failed", { + reason: sanitizeErrorReason(error), + }); + } + } + + private async handleUnexpectedResponse( + socket: ConnectorWebSocket, + event: unknown, + ): Promise { + 
if (!this.detachSocket(socket)) { + return; + } + + const statusCode = readUnexpectedResponseStatus(event); + const isAuthRejected = statusCode === 401; + const immediateRetry = + isAuthRejected && !this.authUpgradeImmediateRetryUsed; + if (isAuthRejected) { + this.authUpgradeImmediateRetryUsed = true; + await this.invokeAuthUpgradeRejectedHook({ + status: 401, + immediateRetry, + }); + } + + const reason = + statusCode === undefined + ? "WebSocket upgrade rejected" + : `WebSocket upgrade rejected with status ${statusCode}`; + + this.logger.warn("connector.websocket.unexpected_response", { + statusCode, + immediateRetry, + url: this.connectorUrl, + }); + this.closeSocketQuietly(socket, 1000, reason); + this.hooks.onDisconnected?.({ + code: 1006, + reason, + wasClean: false, + }); + + if (this.started) { + this.scheduleReconnect( + immediateRetry ? { delayMs: 0, incrementAttempt: false } : undefined, + ); + } + } + + private async invokeAuthUpgradeRejectedHook(input: { + status: number; + immediateRetry: boolean; + }): Promise { + if (this.hooks.onAuthUpgradeRejected === undefined) { + return; + } + + try { + await this.hooks.onAuthUpgradeRejected(input); + } catch (error) { + this.logger.warn( + "connector.websocket.auth_upgrade_rejected_hook_failed", + { + reason: sanitizeErrorReason(error), + }, + ); + } + } + private startHeartbeatInterval(): void { - this.clearHeartbeatInterval(); + this.clearHeartbeatTracking(); if (this.heartbeatIntervalMs <= 0) { return; @@ -480,14 +748,92 @@ export class ConnectorClient { ts: this.makeTimestamp(), }; - this.sendFrame(frame); + if (this.sendFrame(frame)) { + this.trackHeartbeatAck(frame.id); + } }, this.heartbeatIntervalMs); } - private clearHeartbeatInterval(): void { - if (this.heartbeatInterval !== undefined) { - clearInterval(this.heartbeatInterval); - this.heartbeatInterval = undefined; + private trackHeartbeatAck(ackId: string): void { + if (this.heartbeatAckTimeoutMs <= 0) { + return; + } + + 
this.pendingHeartbeatAcks.set(ackId, this.now()); + this.scheduleHeartbeatAckTimeoutCheck(); + } + + private handleHeartbeatAckFrame(frame: HeartbeatAckFrame): void { + if (!this.pendingHeartbeatAcks.delete(frame.ackId)) { + return; + } + + this.scheduleHeartbeatAckTimeoutCheck(); + } + + private scheduleHeartbeatAckTimeoutCheck(): void { + if (this.heartbeatAckTimeout !== undefined) { + clearTimeout(this.heartbeatAckTimeout); + this.heartbeatAckTimeout = undefined; + } + + if ( + this.pendingHeartbeatAcks.size === 0 || + this.heartbeatAckTimeoutMs <= 0 + ) { + return; + } + + let oldestSentAt = Number.POSITIVE_INFINITY; + for (const sentAt of this.pendingHeartbeatAcks.values()) { + oldestSentAt = Math.min(oldestSentAt, sentAt); + } + + const elapsedMs = this.now() - oldestSentAt; + const delayMs = Math.max(0, this.heartbeatAckTimeoutMs - elapsedMs); + this.heartbeatAckTimeout = setTimeout(() => { + this.heartbeatAckTimeout = undefined; + this.handleHeartbeatAckTimeout(); + }, delayMs); + } + + private handleHeartbeatAckTimeout(): void { + const pendingCount = this.pendingHeartbeatAcks.size; + if (pendingCount === 0) { + return; + } + + let oldestSentAt = Number.POSITIVE_INFINITY; + for (const sentAt of this.pendingHeartbeatAcks.values()) { + oldestSentAt = Math.min(oldestSentAt, sentAt); + } + + const nowMs = this.now(); + const oldestPendingAgeMs = nowMs - oldestSentAt; + if (oldestPendingAgeMs < this.heartbeatAckTimeoutMs) { + this.scheduleHeartbeatAckTimeoutCheck(); + return; + } + + const socket = this.socket; + if (socket === undefined || !this.detachSocket(socket)) { + return; + } + + this.logger.warn("connector.websocket.heartbeat_ack_timeout", { + pendingCount, + oldestPendingAgeMs, + timeoutMs: this.heartbeatAckTimeoutMs, + }); + this.closeSocketQuietly(socket, 1000, "heartbeat ack timeout"); + this.hooks.onDisconnected?.({ + code: 1006, + reason: "Heartbeat acknowledgement timed out", + wasClean: false, + }); + + if (this.started) { + 
this.scheduleReconnect(); } } @@ -545,6 +891,11 @@ export class ConnectorClient { return; } + if (frame.type === "heartbeat_ack") { + this.handleHeartbeatAckFrame(frame); + return; + } + if (frame.type === "deliver") { await this.handleDeliverFrame(frame); return; diff --git a/packages/connector/src/constants.ts b/packages/connector/src/constants.ts index 2f3442c..3e40e56 100644 --- a/packages/connector/src/constants.ts +++ b/packages/connector/src/constants.ts @@ -10,7 +10,9 @@ export const DEFAULT_OPENCLAW_DELIVER_RETRY_INITIAL_DELAY_MS = 300; export const DEFAULT_OPENCLAW_DELIVER_RETRY_MAX_DELAY_MS = 2_000; export const DEFAULT_OPENCLAW_DELIVER_RETRY_BACKOFF_FACTOR = 2; +export const DEFAULT_CONNECT_TIMEOUT_MS = 15_000; export const DEFAULT_HEARTBEAT_INTERVAL_MS = 30_000; +export const DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS = 60_000; export const DEFAULT_RECONNECT_MIN_DELAY_MS = 1_000; export const DEFAULT_RECONNECT_MAX_DELAY_MS = 30_000; export const DEFAULT_RECONNECT_BACKOFF_FACTOR = 2; diff --git a/packages/connector/src/index.ts b/packages/connector/src/index.ts index 306b5ac..52122c2 100644 --- a/packages/connector/src/index.ts +++ b/packages/connector/src/index.ts @@ -9,6 +9,7 @@ export { AGENT_ACCESS_HEADER, CONNECTOR_FRAME_VERSION, CONNECTOR_VERSION, + DEFAULT_CONNECT_TIMEOUT_MS, DEFAULT_CONNECTOR_BASE_URL, DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, @@ -19,6 +20,7 @@ export { DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, DEFAULT_CONNECTOR_OUTBOUND_PATH, DEFAULT_CONNECTOR_STATUS_PATH, + DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS, DEFAULT_HEARTBEAT_INTERVAL_MS, DEFAULT_OPENCLAW_BASE_URL, DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index 560586b..5b3b316 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -472,6 +472,15 @@ function createWebSocketFactory(): ( return; } + if (type === "unexpected-response") { + 
socket.on("unexpected-response", (_request, response) => { + listener({ + status: response.statusCode, + }); + }); + return; + } + socket.on("error", (error) => listener({ error })); }, }; @@ -699,6 +708,10 @@ export async function startConnectorRuntime( return; } + await refreshCurrentAuth(); + }; + + const refreshCurrentAuth = async (): Promise => { currentAuth = await refreshAgentAuthWithClawProof({ registryUrl: input.registryUrl, ait: input.credentials.ait, @@ -835,6 +848,25 @@ export async function startConnectorRuntime( openclawHookToken, fetchImpl, logger, + hooks: { + onAuthUpgradeRejected: async ({ status, immediateRetry }) => { + logger.warn("connector.websocket.auth_upgrade_rejected", { + status, + immediateRetry, + }); + await syncAuthFromDisk(); + try { + await refreshCurrentAuth(); + } catch (error) { + logger.warn( + "connector.runtime.registry_auth_refresh_on_ws_upgrade_reject_failed", + { + reason: sanitizeErrorReason(error), + }, + ); + } + }, + }, inboundDeliverHandler: async (frame) => { const persisted = await inboundInbox.enqueue(frame); if (!persisted.accepted) { From 0deb90ac596c81182b9f7a38aca7efa89391eb90 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 11:09:39 +0530 Subject: [PATCH 108/190] Review websocket stability diff --- apps/proxy/src/AGENTS.md | 3 +- apps/proxy/src/agent-relay-session.test.ts | 138 ++++++++++++++++++++- apps/proxy/src/agent-relay-session.ts | 6 +- 3 files changed, 140 insertions(+), 7 deletions(-) diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index eb9dfc5..9fb4834 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -71,7 +71,8 @@ - Keep relay websocket heartbeat liveness explicit in `agent-relay-session.ts`: track per-socket heartbeat ack time and enforce a 60s ack timeout before socket eviction. - Keep stale connector cleanup proactive: evict stale sockets during alarm sweeps and before accepting a new reconnect socket. 
- Keep connector session ownership deterministic: new reconnect sockets supersede older live sockets with a clean `1000` close code so delivery always targets one active socket. -- Keep reconnect recovery eager: drain durable queue immediately on reconnect instead of waiting for the next alarm tick. +- Keep reconnect recovery eager but handshake-safe: trigger durable queue drain immediately after reconnect, but do not block websocket `101` upgrade responses on `deliver_ack` waits. +- Keep superseded socket state sticky until close cleanup: late frames from sockets marked in `socketsPendingClose` must not reactivate those sockets. - Keep close semantics strict for pending delivery promises: clean `1000` closes do not reject pending deliveries, but unclean closes reject when no sockets remain. - Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. - Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. 
diff --git a/apps/proxy/src/agent-relay-session.test.ts b/apps/proxy/src/agent-relay-session.test.ts index 1ab697b..de0ff88 100644 --- a/apps/proxy/src/agent-relay-session.test.ts +++ b/apps/proxy/src/agent-relay-session.test.ts @@ -19,11 +19,11 @@ function createMockSocket(): MockWebSocket { }; } -async function withMockWebSocketPair( +async function withMockWebSocketPair( pairClient: MockWebSocket, pairServer: MockWebSocket, - callback: () => Promise, -): Promise { + callback: () => Promise, +): Promise { const originalWebSocketPair = (globalThis as { WebSocketPair?: unknown }) .WebSocketPair; @@ -33,7 +33,7 @@ async function withMockWebSocketPair( }; try { - await callback(); + return await callback(); } finally { if (originalWebSocketPair === undefined) { delete (globalThis as { WebSocketPair?: unknown }).WebSocketPair; @@ -282,6 +282,54 @@ describe("AgentRelaySession", () => { expect(dedupedResult.queueDepth).toBe(0); }); + it("returns websocket upgrade quickly while reconnect drain runs in background", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + RELAY_RETRY_INITIAL_MS: "1", + }); + + await relaySession.deliverToConnector({ + requestId: "req-upgrade-fast", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + + const connectState = await withMockWebSocketPair( + pairClient, + pairServer, + async () => { + const connectAttempt = relaySession + .fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ) + .then( + () => "settled" as const, + () => "settled" as const, + ); + + return Promise.race([ + connectAttempt, + new Promise<"pending">((resolve) => { + 
setTimeout(() => resolve("pending"), 50); + }), + ]); + }, + ); + + expect(connectState).toBe("settled"); + }); + it("evicts stale sockets during alarm heartbeat sweep", async () => { vi.useFakeTimers(); const nowMs = Date.now(); @@ -378,6 +426,88 @@ describe("AgentRelaySession", () => { ); }); + it("keeps superseded sockets inactive even when late frames arrive", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const oldSocket = createMockSocket(); + const oldWs = oldSocket as unknown as WebSocket; + harness.connectedSockets.push(oldWs); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + const newWs = pairServer as unknown as WebSocket; + pairServer.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + newWs, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 3), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + await withMockWebSocketPair(pairClient, pairServer, async () => { + let connectError: unknown; + try { + await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ); + } catch (error) { + connectError = error; + } + + if (connectError !== undefined) { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + + await relaySession.webSocketMessage( + oldWs, + JSON.stringify({ + v: 1, + type: "heartbeat_ack", + id: generateUlid(Date.now() + 4), + ts: new Date().toISOString(), + ackId: generateUlid(Date.now() + 5), + }), + ); + + const deliveryState = await Promise.race([ + relaySession + .deliverToConnector({ + requestId: "req-superseded-socket", + senderAgentDid: 
SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }) + .then((result) => result.state), + new Promise<"pending">((resolve) => { + setTimeout(() => resolve("pending"), 50); + }), + ]); + + expect(deliveryState).toBe("delivered"); + expect(oldSocket.send).not.toHaveBeenCalled(); + expect(pairServer.send).toHaveBeenCalled(); + }); + it("does not reject pending deliveries on clean close code 1000", async () => { const harness = createStateHarness(); const relaySession = new AgentRelaySession(harness.state, { diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts index f32283b..269ef82 100644 --- a/apps/proxy/src/agent-relay-session.ts +++ b/apps/proxy/src/agent-relay-session.ts @@ -522,7 +522,7 @@ export class AgentRelaySession { this.state.acceptWebSocket(server, [connectorAgentDid]); this.touchSocketAck(server, nowMs); - await this.drainQueueOnReconnect(); + void this.drainQueueOnReconnect(); return new Response(null, { status: 101, @@ -908,7 +908,9 @@ export class AgentRelaySession { } private touchSocketAck(socket: WebSocket, nowMs: number): void { - this.socketsPendingClose.delete(socket); + if (this.socketsPendingClose.has(socket)) { + return; + } this.socketLastAckAtMs.set(socket, nowMs); } From aa95b16615030762892d597994dde5880a3a12d4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 11:27:13 +0530 Subject: [PATCH 109/190] Review and fix websocket stability --- apps/cli/src/AGENTS.md | 2 +- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/agent.test.ts | 12 +++++++++ apps/cli/src/commands/agent.ts | 6 +++-- apps/cli/src/commands/openclaw.ts | 10 +++---- apps/cli/src/commands/pair.ts | 21 ++++++++------- apps/cli/src/commands/verify.test.ts | 10 ++++++- apps/cli/src/commands/verify.ts | 5 ++-- apps/openclaw-skill/src/AGENTS.md | 1 + .../src/transforms/registry-auth.ts | 8 +++--- apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/agent-hook-route.test.ts | 4 
+-- apps/proxy/src/agent-hook-route.ts | 8 +++--- apps/proxy/src/agent-relay-session.ts | 24 +++++++++-------- apps/proxy/src/auth-middleware.test.ts | 2 +- apps/proxy/src/pairing-route.ts | 24 ++++++++--------- apps/proxy/src/proxy-trust-state.ts | 9 ++++--- apps/proxy/src/proxy-trust-store.ts | 9 ++++--- apps/registry/src/AGENTS.md | 2 +- apps/registry/src/agent-auth-lifecycle.ts | 7 ++--- apps/registry/src/agent-registration.ts | 6 +++-- apps/registry/src/auth/agent-claw-auth.ts | 3 ++- apps/registry/src/auth/service-auth.ts | 3 ++- apps/registry/src/invite-lifecycle.ts | 7 ++--- apps/registry/src/server.ts | 27 ++++++++++--------- packages/connector/src/AGENTS.md | 1 + packages/connector/src/client.ts | 4 +-- packages/connector/src/inbound-inbox.ts | 7 ++--- packages/connector/src/runtime.ts | 23 +++++++++------- packages/sdk/src/AGENTS.md | 6 +++++ packages/sdk/src/agent-auth-client.ts | 3 ++- packages/sdk/src/datetime.test.ts | 19 +++++++++++-- packages/sdk/src/datetime.ts | 14 +++++++--- packages/sdk/src/event-bus.ts | 4 +-- packages/sdk/src/index.ts | 2 +- packages/sdk/src/logging.ts | 6 ++--- packages/sdk/src/request-context.ts | 3 ++- packages/sdk/src/testing/ait-fixtures.ts | 3 ++- 38 files changed, 189 insertions(+), 118 deletions(-) create mode 100644 packages/sdk/src/AGENTS.md diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index ec3d3d6..2ba7706 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -7,7 +7,7 @@ - Keep each command implementation in `commands/.ts` with one exported factory (`createCommand`). - Reuse shared command wrappers (`withErrorHandling`) and IO helpers (`writeStdoutLine`, `writeStderrLine`) instead of inline process writes. - Prefer explicit error-to-reason mapping for operator-facing failures rather than generic stack traces. -- Prefer SDK shared primitives (`AppError`, `nowIso`) for new command error/date logic instead of ad-hoc equivalents. 
+- Prefer SDK shared primitives (`AppError`) for new command error logic instead of ad-hoc equivalents. - Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. - Admin bootstrap must print the one-time PAT before attempting to persist it and depend on `persistBootstrapConfig` so config write failures are surfaced via CLI errors while the operator still sees the PAT. - API-key lifecycle command logic should stay in `commands/api-key.ts`; keep create/list/revoke request mapping explicit and keep token exposure limited to create output only. diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index b1ab4ba..fdd9349 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -9,6 +9,7 @@ - Use `withErrorHandling` for command actions unless a command has a documented reason not to. - Route all user-facing messages through `writeStdoutLine`/`writeStderrLine`. - For new command-domain errors, use SDK `AppError` with stable `code` values. +- Keep command timestamps UTC and standardized through SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`) instead of direct `Date` calls. - Normalize Commander option keys at the command boundary when helper/runtime option names differ (for example `--peer` -> `peerAlias`) so flags are never silently ignored. 
## Config Command Rules diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts index df7fbe7..f36ae08 100644 --- a/apps/cli/src/commands/agent.test.ts +++ b/apps/cli/src/commands/agent.test.ts @@ -37,8 +37,12 @@ vi.mock("@clawdentity/sdk", () => ({ encodeEd25519SignatureBase64url: vi.fn(), encodeEd25519KeypairBase64url: vi.fn(), generateEd25519Keypair: vi.fn(), + nowUtcMs: vi.fn(() => 1_700_000_000_000), refreshAgentAuthWithClawProof: vi.fn(), signEd25519: vi.fn(), + toIso: vi.fn((value: Date | string | number) => + new Date(value).toISOString(), + ), })); import { @@ -47,8 +51,10 @@ import { encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, generateEd25519Keypair, + nowUtcMs, refreshAgentAuthWithClawProof, signEd25519, + toIso, } from "@clawdentity/sdk"; import { resolveConfig } from "../config/manager.js"; import { createAgentCommand } from "./agent.js"; @@ -62,6 +68,7 @@ const mockedUnlink = vi.mocked(unlink); const mockedWriteFile = vi.mocked(writeFile); const mockedResolveConfig = vi.mocked(resolveConfig); const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); +const mockedNowUtcMs = vi.mocked(nowUtcMs); const mockedRefreshAgentAuthWithClawProof = vi.mocked( refreshAgentAuthWithClawProof, ); @@ -73,6 +80,7 @@ const mockedEncodeEd25519KeypairBase64url = vi.mocked( encodeEd25519KeypairBase64url, ); const mockedDecodeAIT = vi.mocked(decodeAIT); +const mockedToIso = vi.mocked(toIso); const mockFetch = vi.fn(); @@ -159,6 +167,10 @@ describe("agent create command", () => { publicKey: Uint8Array.from({ length: 32 }, (_, index) => index + 1), secretKey: Uint8Array.from({ length: 32 }, (_, index) => 64 - index), }); + mockedNowUtcMs.mockReturnValue(1_700_000_000_000); + mockedToIso.mockImplementation((value: Date | string | number) => + new Date(value).toISOString(), + ); mockedEncodeEd25519KeypairBase64url.mockReturnValue({ publicKey: "public-key-b64url", diff --git a/apps/cli/src/commands/agent.ts 
b/apps/cli/src/commands/agent.ts index d37f2c1..dfa1ead 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -21,8 +21,10 @@ import { encodeEd25519KeypairBase64url, encodeEd25519SignatureBase64url, generateEd25519Keypair, + nowUtcMs, refreshAgentAuthWithClawProof, signEd25519, + toIso, } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; @@ -272,7 +274,7 @@ const parseAgentIdFromDid = (agentName: string, did: string): string => { }; const formatExpiresAt = (expires: number): string => { - return new Date(expires * 1000).toISOString(); + return toIso(expires * 1000); }; const resolveFramework = ( @@ -510,7 +512,7 @@ const writeSecureFileAtomically = async ( path: string, content: string, ): Promise => { - const tempPath = `${path}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + const tempPath = `${path}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; await writeFile(tempPath, content, "utf-8"); await chmod(tempPath, FILE_MODE); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 53402d8..3333965 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -10,7 +10,7 @@ import { encodeBase64url, parseDid, } from "@clawdentity/protocol"; -import { AppError, createLogger, nowIso } from "@clawdentity/sdk"; +import { AppError, createLogger, nowIso, nowUtcMs } from "@clawdentity/sdk"; import { Command } from "commander"; import { getConfigDir, resolveConfig } from "../config/manager.js"; import { writeStdoutLine } from "../io.js"; @@ -1345,13 +1345,13 @@ async function waitForConnectorConnected(input: { fetchImpl: typeof fetch; waitTimeoutSeconds: number; }): Promise { - const deadline = Date.now() + input.waitTimeoutSeconds * 1000; + const deadline = nowUtcMs() + input.waitTimeoutSeconds * 1000; let latest = await fetchConnectorHealthStatus({ connectorBaseUrl: 
input.connectorBaseUrl, fetchImpl: input.fetchImpl, }); - while (!latest.connected && Date.now() < deadline) { + while (!latest.connected && nowUtcMs() < deadline) { await new Promise((resolve) => { setTimeout(resolve, 1000); }); @@ -1395,7 +1395,7 @@ async function monitorConnectorStabilityWindow(input: { }); } - const deadline = Date.now() + input.durationSeconds * 1000; + const deadline = nowUtcMs() + input.durationSeconds * 1000; let latest = await fetchConnectorHealthStatus({ connectorBaseUrl: input.connectorBaseUrl, fetchImpl: input.fetchImpl, @@ -1404,7 +1404,7 @@ async function monitorConnectorStabilityWindow(input: { return latest; } - while (Date.now() < deadline) { + while (nowUtcMs() < deadline) { await sleepMilliseconds(input.pollIntervalMs); latest = await fetchConnectorHealthStatus({ connectorBaseUrl: input.connectorBaseUrl, diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 8865e8b..5c52d12 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -9,7 +9,12 @@ import { } from "node:fs/promises"; import { dirname, join, resolve } from "node:path"; import { decodeBase64url, parseDid } from "@clawdentity/protocol"; -import { AppError, createLogger, signHttpRequest } from "@clawdentity/sdk"; +import { + AppError, + createLogger, + nowUtcMs, + signHttpRequest, +} from "@clawdentity/sdk"; import { Command } from "commander"; import jsQR from "jsqr"; import { PNG } from "pngjs"; @@ -150,6 +155,8 @@ const isRecord = (value: unknown): value is Record => { return typeof value === "object" && value !== null; }; +const nowUnixSeconds = (): number => Math.floor(nowUtcMs() / 1000); + function createCliError(code: string, message: string): AppError { return new AppError({ code, @@ -1490,8 +1497,7 @@ export async function startPairing( ): Promise { const fetchImpl = dependencies.fetchImpl ?? fetch; const resolveConfigImpl = dependencies.resolveConfigImpl ?? 
resolveConfig; - const nowSecondsImpl = - dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; const nonceFactoryImpl = dependencies.nonceFactoryImpl ?? (() => randomBytes(NONCE_SIZE).toString("base64url")); @@ -1581,8 +1587,7 @@ export async function confirmPairing( ): Promise { const fetchImpl = dependencies.fetchImpl ?? fetch; const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const nowSecondsImpl = - dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; const nonceFactoryImpl = dependencies.nonceFactoryImpl ?? (() => randomBytes(NONCE_SIZE).toString("base64url")); @@ -1722,8 +1727,7 @@ async function getPairingStatusOnce( ): Promise { const fetchImpl = dependencies.fetchImpl ?? fetch; const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const nowSecondsImpl = - dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; const nonceFactoryImpl = dependencies.nonceFactoryImpl ?? (() => randomBytes(NONCE_SIZE).toString("base64url")); @@ -1845,8 +1849,7 @@ async function waitForPairingStatus(input: { pollIntervalSeconds: number; dependencies: PairRequestOptions; }): Promise { - const nowSecondsImpl = - input.dependencies.nowSecondsImpl ?? (() => Math.floor(Date.now() / 1000)); + const nowSecondsImpl = input.dependencies.nowSecondsImpl ?? nowUnixSeconds; const sleepImpl = input.dependencies.sleepImpl ?? 
(async (ms: number) => { diff --git a/apps/cli/src/commands/verify.test.ts b/apps/cli/src/commands/verify.test.ts index f2fecce..833d0c4 100644 --- a/apps/cli/src/commands/verify.test.ts +++ b/apps/cli/src/commands/verify.test.ts @@ -20,12 +20,18 @@ vi.mock("@clawdentity/sdk", () => ({ warn: vi.fn(), error: vi.fn(), })), + nowUtcMs: vi.fn(() => 1_700_000_000_000), parseRegistryConfig: vi.fn(), verifyAIT: vi.fn(), verifyCRL: vi.fn(), })); -import { parseRegistryConfig, verifyAIT, verifyCRL } from "@clawdentity/sdk"; +import { + nowUtcMs, + parseRegistryConfig, + verifyAIT, + verifyCRL, +} from "@clawdentity/sdk"; import { readCacheFile, resolveConfig, @@ -37,6 +43,7 @@ const mockedTokenReadFile = vi.mocked(readFile); const mockedResolveConfig = vi.mocked(resolveConfig); const mockedReadCacheFile = vi.mocked(readCacheFile); const mockedWriteCacheFile = vi.mocked(writeCacheFile); +const mockedNowUtcMs = vi.mocked(nowUtcMs); const mockedParseRegistryConfig = vi.mocked(parseRegistryConfig); const mockedVerifyAit = vi.mocked(verifyAIT); const mockedVerifyCrl = vi.mocked(verifyCRL); @@ -157,6 +164,7 @@ describe("verify command", () => { }); mockedReadCacheFile.mockResolvedValue(undefined); mockedWriteCacheFile.mockResolvedValue(undefined); + mockedNowUtcMs.mockImplementation(() => Date.now()); mockedParseRegistryConfig.mockReturnValue({ ENVIRONMENT: "test", diff --git a/apps/cli/src/commands/verify.ts b/apps/cli/src/commands/verify.ts index c0b27da..9d270e0 100644 --- a/apps/cli/src/commands/verify.ts +++ b/apps/cli/src/commands/verify.ts @@ -2,6 +2,7 @@ import { readFile } from "node:fs/promises"; import { parseCrlClaims } from "@clawdentity/protocol"; import { createLogger, + nowUtcMs, parseRegistryConfig, type RegistryConfig, verifyAIT, @@ -278,7 +279,7 @@ const fetchRegistryKeys = async ( const loadRegistryKeys = async ( registryUrl: string, ): Promise => { - const now = Date.now(); + const now = nowUtcMs(); const rawCache = await 
readCacheFile(REGISTRY_KEYS_CACHE_FILE); const cache = typeof rawCache === "string" ? parseRegistryKeysCache(rawCache) : undefined; @@ -356,7 +357,7 @@ const loadCrlClaims = async (input: { registryUrl: string; verificationKeys: VerificationKey[]; }): Promise => { - const now = Date.now(); + const now = nowUtcMs(); const rawCache = await readCacheFile(CRL_CLAIMS_CACHE_FILE); const cache = typeof rawCache === "string" ? parseCrlCache(rawCache) : undefined; diff --git a/apps/openclaw-skill/src/AGENTS.md b/apps/openclaw-skill/src/AGENTS.md index 50f3957..a9c521d 100644 --- a/apps/openclaw-skill/src/AGENTS.md +++ b/apps/openclaw-skill/src/AGENTS.md @@ -9,6 +9,7 @@ ## Safety Rules - Validate external input (`payload`, peer config JSON) before use. - Do not log relay payload contents or local connector credential material. +- Keep local auth/lock timestamps UTC and standardized via SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`) instead of direct `Date` calls. - Keep transform relay path as local connector handoff only, not direct peer HTTP calls. 
- Relay transform must prefer OpenClaw-local runtime artifacts in `hooks/transforms/`: - `clawdentity-relay.json` for connector endpoint candidates/path diff --git a/apps/openclaw-skill/src/transforms/registry-auth.ts b/apps/openclaw-skill/src/transforms/registry-auth.ts index e636e73..f537cd2 100644 --- a/apps/openclaw-skill/src/transforms/registry-auth.ts +++ b/apps/openclaw-skill/src/transforms/registry-auth.ts @@ -8,7 +8,7 @@ import { writeFile, } from "node:fs/promises"; import { join } from "node:path"; -import type { AgentAuthBundle } from "@clawdentity/sdk"; +import { type AgentAuthBundle, nowUtcMs } from "@clawdentity/sdk"; const CLAWDENTITY_DIR = ".clawdentity"; const AGENTS_DIR = "agents"; @@ -121,7 +121,7 @@ export async function writeAgentRegistryAuthAtomic(input: { auth: AgentAuthBundle; }): Promise { const registryAuthPath = resolveAgentRegistryAuthPath(input); - const tempPath = `${registryAuthPath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + const tempPath = `${registryAuthPath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; const content = `${JSON.stringify(input.auth, null, 2)}\n`; await writeFile(tempPath, content, "utf8"); @@ -152,7 +152,7 @@ export async function withAgentRegistryAuthLock(input: { for (let attempt = 0; attempt < LOCK_MAX_ATTEMPTS; attempt += 1) { try { const lockHandle = await open(lockPath, "wx", FILE_MODE); - await lockHandle.writeFile(`${Date.now()}`); + await lockHandle.writeFile(`${nowUtcMs()}`); await lockHandle.close(); lockAcquired = true; break; @@ -163,7 +163,7 @@ export async function withAgentRegistryAuthLock(input: { try { const lockStat = await stat(lockPath); - if (Date.now() - lockStat.mtimeMs > STALE_LOCK_AGE_MS) { + if (nowUtcMs() - lockStat.mtimeMs > STALE_LOCK_AGE_MS) { await unlink(lockPath); continue; } diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 9fb4834..752577b 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -68,6 +68,7 @@ - 
Keep relay delivery semantics asynchronous and durable: `/hooks/agent` accepts queued deliveries with `202` (`state=queued`) when recipient connector is offline. - Keep relay queue saturation explicit: reject new deliveries with `507 PROXY_RELAY_QUEUE_FULL`; do not evict queued messages implicitly. - Keep relay retries inside `agent-relay-session.ts` with bounded backoff (`RELAY_RETRY_*`) and per-agent queue caps/TTL (`RELAY_QUEUE_*`); do not add ad-hoc retry loops in route handlers. +- Keep relay-session timestamps UTC and standardized via shared SDK datetime helpers (`nowUtcMs`, `toIso`) rather than ad-hoc datetime formatting. - Keep relay websocket heartbeat liveness explicit in `agent-relay-session.ts`: track per-socket heartbeat ack time and enforce a 60s ack timeout before socket eviction. - Keep stale connector cleanup proactive: evict stale sockets during alarm sweeps and before accepting a new reconnect socket. - Keep connector session ownership deterministic: new reconnect sockets supersede older live sockets with a clean `1000` close code so delivery always targets one active socket. 
diff --git a/apps/proxy/src/agent-hook-route.test.ts b/apps/proxy/src/agent-hook-route.test.ts index da7718c..e415c45 100644 --- a/apps/proxy/src/agent-hook-route.test.ts +++ b/apps/proxy/src/agent-hook-route.test.ts @@ -119,7 +119,7 @@ function createRelayHarness(input?: { function createHookRouteApp(input: { relayNamespace?: AgentRelaySessionNamespace; injectIdentityIntoMessage?: boolean; - now?: () => Date; + now?: () => string; }) { const trustStore: ProxyTrustStore = { createPairingTicket: vi.fn(), @@ -148,7 +148,7 @@ function createHookRouteApp(input: { describe("POST /hooks/agent", () => { it("delivers hook payload to recipient relay session", async () => { const relayHarness = createRelayHarness(); - const now = new Date("2026-02-16T20:00:00.000Z"); + const now = "2026-02-16T20:00:00.000Z"; const app = createHookRouteApp({ relayNamespace: relayHarness.namespace, now: () => now, diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index d578653..3ceef2c 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -2,7 +2,7 @@ import { parseDid, RELAY_RECIPIENT_AGENT_DID_HEADER, } from "@clawdentity/protocol"; -import { AppError, type Logger } from "@clawdentity/sdk"; +import { AppError, type Logger, nowIso } from "@clawdentity/sdk"; import type { Context } from "hono"; import { type AgentRelaySessionNamespace, @@ -23,7 +23,7 @@ export { RELAY_RECIPIENT_AGENT_DID_HEADER } from "@clawdentity/protocol"; export type AgentHookRuntimeOptions = { injectIdentityIntoMessage?: boolean; - now?: () => Date; + now?: () => string; resolveSessionNamespace?: ( c: ProxyContext, ) => AgentRelaySessionNamespace | undefined; @@ -157,7 +157,7 @@ export function createAgentHookHandler( options: CreateAgentHookHandlerOptions, ): (c: ProxyContext) => Promise { const injectIdentityIntoMessage = options.injectIdentityIntoMessage ?? false; - const now = options.now ?? (() => new Date()); + const now = options.now ?? 
nowIso; const resolveSessionNamespace = options.resolveSessionNamespace ?? resolveDefaultSessionNamespace; @@ -277,7 +277,7 @@ export function createAgentHookHandler( queued, queueDepth, connectedSockets, - sentAt: now().toISOString(), + sentAt: now(), }); return c.json( diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts index 269ef82..81dcdc4 100644 --- a/apps/proxy/src/agent-relay-session.ts +++ b/apps/proxy/src/agent-relay-session.ts @@ -7,6 +7,7 @@ import { serializeFrame, } from "@clawdentity/connector"; import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { nowUtcMs, toIso } from "@clawdentity/sdk"; import { parseProxyConfig } from "./config.js"; const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; @@ -129,18 +130,18 @@ function toHeartbeatFrame(nowMs: number): { id: string; payload: string } { v: CONNECTOR_FRAME_VERSION, type: "heartbeat", id, - ts: new Date(nowMs).toISOString(), + ts: toIso(nowMs), }), }; } function toHeartbeatAckFrame(ackId: string): string { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const ackFrame: HeartbeatAckFrame = { v: CONNECTOR_FRAME_VERSION, type: "heartbeat_ack", id: generateUlid(nowMs), - ts: new Date(nowMs).toISOString(), + ts: toIso(nowMs), ackId, }; @@ -148,11 +149,12 @@ function toHeartbeatAckFrame(ackId: string): string { } function toDeliverFrame(input: RelayDeliveryInput): DeliverFrame { + const nowMs = nowUtcMs(); return { v: CONNECTOR_FRAME_VERSION, type: "deliver", - id: generateUlid(Date.now()), - ts: new Date().toISOString(), + id: generateUlid(nowMs), + ts: toIso(nowMs), fromAgentDid: input.senderAgentDid, toAgentDid: input.recipientAgentDid, payload: input.payload, @@ -314,7 +316,7 @@ export class AgentRelaySession { } async alarm(): Promise { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const sockets = this.getActiveSockets(nowMs); if (sockets.length > 0) { @@ -335,7 +337,7 @@ export class AgentRelaySession { 
async deliverToConnector( input: RelayDeliveryInput, ): Promise { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const queueState = await this.loadQueueState(nowMs); const existingReceipt = queueState.receipts[input.requestId]; @@ -425,7 +427,7 @@ export class AgentRelaySession { ws: WebSocket, message: string | ArrayBuffer, ): Promise { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const frameResult = (() => { try { return parseFrame(message); @@ -506,7 +508,7 @@ export class AgentRelaySession { return new Response("Missing connector agent DID", { status: 400 }); } - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const activeSockets = this.getActiveSockets(nowMs); for (const socket of activeSockets) { this.closeSocket( @@ -977,7 +979,7 @@ export class AgentRelaySession { } private async drainQueueOnReconnect(): Promise { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const queueState = await this.loadQueueState(nowMs); let queueMutated = false; @@ -1000,7 +1002,7 @@ export class AgentRelaySession { } private async scheduleFromStorage(): Promise { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const queueState = await this.loadQueueState(nowMs); await this.scheduleNextAlarm(queueState, nowMs); } diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts index 55a78ac..d0412ba 100644 --- a/apps/proxy/src/auth-middleware.test.ts +++ b/apps/proxy/src/auth-middleware.test.ts @@ -214,7 +214,7 @@ async function createAuthHarness( }, hooks: { resolveSessionNamespace: () => relayNamespace, - now: () => new Date(NOW_MS), + now: () => new Date(NOW_MS).toISOString(), }, relay: { resolveSessionNamespace: () => relayNamespace, diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 6b4da23..51c8754 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -2,6 +2,8 @@ import { AppError, createRegistryIdentityClient, type Logger, + nowUtcMs, + 
toIso, } from "@clawdentity/sdk"; import type { Context } from "hono"; import type { ProxyRequestVariables } from "./auth-middleware.js"; @@ -322,7 +324,7 @@ export function createPairStartHandler( options: CreatePairStartHandlerOptions, ): (c: PairingRouteContext) => Promise { const fetchImpl = options.fetchImpl ?? fetch; - const nowMs = options.nowMs ?? Date.now; + const nowMs = options.nowMs ?? nowUtcMs; const registryUrl = normalizeRegistryUrl(options.registryUrl); return async (c) => { @@ -407,7 +409,7 @@ export function createPairStartHandler( requestId: c.get("requestId"), initiatorAgentDid: auth.agentDid, issuerProxyUrl: pairingTicketResult.issuerProxyUrl, - expiresAt: new Date(pairingTicketResult.expiresAtMs).toISOString(), + expiresAt: toIso(pairingTicketResult.expiresAtMs), pkid: signingKey.pkid, }); @@ -415,7 +417,7 @@ export function createPairStartHandler( initiatorAgentDid: pairingTicketResult.initiatorAgentDid, initiatorProfile: pairingTicketResult.initiatorProfile, ticket: pairingTicketResult.ticket, - expiresAt: new Date(pairingTicketResult.expiresAtMs).toISOString(), + expiresAt: toIso(pairingTicketResult.expiresAtMs), }); }; } @@ -423,7 +425,7 @@ export function createPairStartHandler( export function createPairConfirmHandler( options: CreatePairConfirmHandlerOptions, ): (c: PairingRouteContext) => Promise { - const nowMs = options.nowMs ?? Date.now; + const nowMs = options.nowMs ?? nowUtcMs; return async (c) => { const auth = c.get("auth"); @@ -507,7 +509,7 @@ export function createPairConfirmHandler( export function createPairStatusHandler( options: CreatePairStatusHandlerOptions, ): (c: PairingRouteContext) => Promise { - const nowMs = options.nowMs ?? Date.now; + const nowMs = options.nowMs ?? nowUtcMs; return async (c) => { const auth = c.get("auth"); @@ -568,11 +570,9 @@ export function createPairStatusHandler( status.status === "confirmed" ? 
status.responderProfile.humanName : undefined, - expiresAt: new Date(status.expiresAtMs).toISOString(), + expiresAt: toIso(status.expiresAtMs), confirmedAt: - status.status === "confirmed" - ? new Date(status.confirmedAtMs).toISOString() - : undefined, + status.status === "confirmed" ? toIso(status.confirmedAtMs) : undefined, }); return c.json({ @@ -583,11 +583,9 @@ export function createPairStatusHandler( status.status === "confirmed" ? status.responderAgentDid : undefined, responderProfile: status.status === "confirmed" ? status.responderProfile : undefined, - expiresAt: new Date(status.expiresAtMs).toISOString(), + expiresAt: toIso(status.expiresAtMs), confirmedAt: - status.status === "confirmed" - ? new Date(status.confirmedAtMs).toISOString() - : undefined, + status.status === "confirmed" ? toIso(status.confirmedAtMs) : undefined, }); }; } diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index bf4cbdb..348aaac 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -1,3 +1,4 @@ +import { nowUtcMs } from "@clawdentity/sdk"; import { normalizePairingTicketText, PairingTicketParseError, @@ -168,7 +169,7 @@ export class ProxyTrustState { } async alarm(): Promise { - const nowMs = Date.now(); + const nowMs = nowUtcMs(); const expirableState = await this.loadExpirableState(); const mutated = this.removeExpiredEntries(expirableState, nowMs); if (mutated) { @@ -205,7 +206,7 @@ export class ProxyTrustState { }); } - const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); + const nowMs = typeof body.nowMs === "number" ? body.nowMs : nowUtcMs(); const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( body.expiresAtMs, ); @@ -309,7 +310,7 @@ export class ProxyTrustState { throw error; } - const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); + const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : nowUtcMs(); const expirableState = await this.loadExpirableState(); const stored = expirableState.pairingTickets[parsedTicket.kid]; @@ -392,7 +393,7 @@ export class ProxyTrustState { }); } - const nowMs = typeof body.nowMs === "number" ? body.nowMs : Date.now(); + const nowMs = typeof body.nowMs === "number" ? body.nowMs : nowUtcMs(); const ticket = normalizePairingTicketText(body.ticket); let parsedTicket: ReturnType; try { diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 805ffd6..02d5ec2 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -1,3 +1,4 @@ +import { nowUtcMs } from "@clawdentity/sdk"; import { PROXY_TRUST_DO_NAME } from "./pairing-constants.js"; import { normalizePairingTicketText, @@ -327,7 +328,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { ticketKid: string; expiresAtMs: number; } { - const nowMs = input.nowMs ?? Date.now(); + const nowMs = input.nowMs ?? nowUtcMs(); const normalizedTicket = normalizePairingTicketText(input.ticket); const parsedTicket = parseStoredTicket(normalizedTicket); cleanup(nowMs, parsedTicket.kid); @@ -374,7 +375,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { function resolveTicketStatus( input: PairingTicketStatusInput, ): PairingTicketStatusResult { - const nowMs = input.nowMs ?? Date.now(); + const nowMs = input.nowMs ?? nowUtcMs(); const normalizedTicket = normalizePairingTicketText(input.ticket); const parsedTicket = parseStoredTicket(normalizedTicket); cleanup(nowMs, parsedTicket.kid); @@ -441,7 +442,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { return { async createPairingTicket(input) { - const nowMs = input.nowMs ?? Date.now(); + const nowMs = input.nowMs ?? 
nowUtcMs(); cleanup(nowMs); const ticket = normalizePairingTicketText(input.ticket); @@ -490,7 +491,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { expiresAtMs, } = resolveConfirmablePairingTicket(input); const confirmedAtMs = normalizeExpiryToWholeSecond( - input.nowMs ?? Date.now(), + input.nowMs ?? nowUtcMs(), ); pairKeys.add( toPairKey( diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index b9a6ae5..e0b61d9 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -144,7 +144,7 @@ - Persist `agents.current_jti` and `agents.expires_at` on insert; generated AIT claims (`jti`, `exp`) must stay in sync with those persisted values. - Verify challenge ownership before signing AIT: challenge must exist for the caller, be unexpired, remain `pending`, and match the request public key + signature. - Consume challenge with guarded state transition (`pending` -> `used`) in the same mutation unit as agent insert; reject zero-row updates as replayed challenge. -- Use shared SDK datetime helpers (`nowIso`, `addSeconds`) for issuance/expiry math instead of ad-hoc `Date.now()` arithmetic in route logic. +- Use shared SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`, `addSeconds`) for issuance/expiry math and timestamp serialization in route logic. - Resolve signing material through a reusable signer helper (`registry-signer.ts`) that derives the public key from `REGISTRY_SIGNING_KEY` and matches it to an `active` `kid` in `REGISTRY_SIGNING_KEYS` before signing. - Keep AIT `iss` deterministic from environment mapping (`development`/`test` -> `https://dev.registry.clawdentity.com`, `production` -> `https://registry.clawdentity.com`) rather than request-origin inference. - Bootstrap agent auth refresh material in the same mutation unit as agent creation by inserting an active `agent_auth_sessions` row. 
diff --git a/apps/registry/src/agent-auth-lifecycle.ts b/apps/registry/src/agent-auth-lifecycle.ts index 257a135..f2c1eaa 100644 --- a/apps/registry/src/agent-auth-lifecycle.ts +++ b/apps/registry/src/agent-auth-lifecycle.ts @@ -3,6 +3,7 @@ import { AppError, addSeconds, nowIso, + nowUtcMs, type RegistryConfig, shouldExposeVerboseErrors, } from "@clawdentity/sdk"; @@ -119,7 +120,7 @@ export async function issueAgentAuth(options?: { accessTtlSeconds?: number; refreshTtlSeconds?: number; }): Promise { - const nowMs = options?.nowMs ?? Date.now(); + const nowMs = options?.nowMs ?? nowUtcMs(); const accessTtlSeconds = options?.accessTtlSeconds ?? DEFAULT_AGENT_ACCESS_TOKEN_TTL_SECONDS; const refreshTtlSeconds = @@ -134,8 +135,8 @@ export async function issueAgentAuth(options?: { const accessIssuedAt = nowIso(); const refreshIssuedAt = accessIssuedAt; - const accessExpiresAt = addSeconds(new Date(nowMs), accessTtlSeconds); - const refreshExpiresAt = addSeconds(new Date(nowMs), refreshTtlSeconds); + const accessExpiresAt = addSeconds(nowMs, accessTtlSeconds); + const refreshExpiresAt = addSeconds(nowMs, refreshTtlSeconds); const createdAt = accessIssuedAt; const updatedAt = accessIssuedAt; diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index 7b8eead..821adcb 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -13,8 +13,10 @@ import { AppError, addSeconds, nowIso, + nowUtcMs, type RegistryConfig, shouldExposeVerboseErrors, + toIso, verifyEd25519, } from "@clawdentity/sdk"; @@ -532,7 +534,7 @@ export async function verifyAgentRegistrationOwnershipProof(input: { } const expiresAtMs = Date.parse(input.challenge.expiresAt); - if (!Number.isFinite(expiresAtMs) || expiresAtMs <= Date.now()) { + if (!Number.isFinite(expiresAtMs) || expiresAtMs <= nowUtcMs()) { throw registrationProofError({ environment: input.environment, code: "AGENT_REGISTRATION_CHALLENGE_EXPIRED", @@ -703,7 
+705,7 @@ function resolveReissueExpiry(input: { ); return { - expiresAt: new Date(previousExpiryMs).toISOString(), + expiresAt: toIso(previousExpiryMs), exp: previousExpirySeconds, ttlDays, }; diff --git a/apps/registry/src/auth/agent-claw-auth.ts b/apps/registry/src/auth/agent-claw-auth.ts index 7c21245..f3caa22 100644 --- a/apps/registry/src/auth/agent-claw-auth.ts +++ b/apps/registry/src/auth/agent-claw-auth.ts @@ -1,6 +1,7 @@ import { type AitClaims, decodeBase64url } from "@clawdentity/protocol"; import { AppError, + nowUtcMs, type RegistryAitVerificationKey, type RegistryConfig, verifyAIT, @@ -87,7 +88,7 @@ export async function verifyAgentClawRequest(input: { nowMs?: number; maxTimestampSkewSeconds?: number; }): Promise { - const nowMs = input.nowMs ?? Date.now(); + const nowMs = input.nowMs ?? nowUtcMs(); const maxTimestampSkewSeconds = input.maxTimestampSkewSeconds ?? DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS; const token = parseClawAuthorizationHeader( diff --git a/apps/registry/src/auth/service-auth.ts b/apps/registry/src/auth/service-auth.ts index 0b4455f..415fe1b 100644 --- a/apps/registry/src/auth/service-auth.ts +++ b/apps/registry/src/auth/service-auth.ts @@ -3,6 +3,7 @@ import { AppError, INTERNAL_SERVICE_ID_HEADER, INTERNAL_SERVICE_SECRET_HEADER, + nowIso, } from "@clawdentity/sdk"; import { and, eq } from "drizzle-orm"; import { createMiddleware } from "hono/factory"; @@ -167,7 +168,7 @@ export function createServiceAuth(options?: { await db .update(internal_services) .set({ - last_used_at: new Date().toISOString(), + last_used_at: nowIso(), }) .where(eq(internal_services.id, row.id)); diff --git a/apps/registry/src/invite-lifecycle.ts b/apps/registry/src/invite-lifecycle.ts index b6d0075..0c9dd01 100644 --- a/apps/registry/src/invite-lifecycle.ts +++ b/apps/registry/src/invite-lifecycle.ts @@ -3,6 +3,7 @@ import { AppError, type RegistryConfig, shouldExposeVerboseErrors, + toIso, } from "@clawdentity/sdk"; const DEFAULT_INVITE_REDEEM_DISPLAY_NAME = 
"User"; @@ -87,7 +88,7 @@ function inviteRedeemInvalidError(options: { export function parseInviteCreatePayload(input: { payload: unknown; environment: RegistryConfig["ENVIRONMENT"]; - now: Date; + nowMs: number; }): InviteCreatePayload { if ( typeof input.payload !== "object" || @@ -125,10 +126,10 @@ export function parseInviteCreatePayload(input: { const expiresAtMillis = Date.parse(expiresAtInput); if (!Number.isFinite(expiresAtMillis)) { fieldErrors.expiresAt = ["expiresAt must be a valid ISO-8601 datetime"]; - } else if (expiresAtMillis <= input.now.getTime()) { + } else if (expiresAtMillis <= input.nowMs) { fieldErrors.expiresAt = ["expiresAt must be in the future"]; } else { - expiresAt = new Date(expiresAtMillis).toISOString(); + expiresAt = toIso(expiresAtMillis); } } } diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 79400b6..0cc04de 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -25,6 +25,7 @@ import { createRequestLoggingMiddleware, type EventBus, nowIso, + nowUtcMs, parseRegistryConfig, type QueuePublisher, type RegistryConfig, @@ -296,7 +297,7 @@ function buildCrlClaims(input: { }) { return { iss: input.issuer, - jti: generateUlid(Date.now()), + jti: generateUlid(nowUtcMs()), iat: input.nowSeconds, exp: input.nowSeconds + CRL_TTL_SECONDS, revocations: input.rows.map((row) => { @@ -794,7 +795,7 @@ async function insertAgentAuthEvent(input: { }): Promise { const createdAt = input.createdAt ?? 
nowIso(); await input.db.insert(agent_auth_events).values({ - id: generateUlid(Date.now()), + id: generateUlid(nowUtcMs()), agent_id: input.agentId, session_id: input.sessionId, event_type: input.eventType, @@ -1114,7 +1115,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const apiKeyToken = generateApiKeyToken(); const apiKeyHash = await hashApiKeyToken(apiKeyToken); const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); - const apiKeyId = generateUlid(Date.now() + 1); + const apiKeyId = generateUlid(nowUtcMs() + 1); const createdAt = nowIso(); const applyBootstrapMutation = async ( @@ -1242,7 +1243,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { } const signer = await resolveRegistrySigner(config); - const nowSeconds = Math.floor(Date.now() / 1000); + const nowSeconds = Math.floor(nowUtcMs() / 1000); const claims = buildCrlClaims({ rows, environment: config.ENVIRONMENT, @@ -1334,7 +1335,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const secretHash = await hashInternalServiceSecret(secret); const secretPrefix = deriveInternalServiceSecretPrefix(secret); const createdAt = nowIso(); - const serviceId = generateUlid(Date.now()); + const serviceId = generateUlid(nowUtcMs()); await db.insert(internal_services).values({ id: serviceId, name: parsed.name, @@ -1546,10 +1547,10 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const parsedPayload = parseInviteCreatePayload({ payload, environment: config.ENVIRONMENT, - now: new Date(), + nowMs: nowUtcMs(), }); - const inviteId = generateUlid(Date.now()); + const inviteId = generateUlid(nowUtcMs()); const inviteCode = generateInviteCode(); const createdAt = nowIso(); const db = createDb(c.env.DB); @@ -1610,7 +1611,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { throw inviteRedeemCodeInvalidError(); } - const nowMillis = Date.now(); + const nowMillis = nowUtcMs(); if (invite.redeemed_by !== null) 
{ throw inviteRedeemAlreadyUsedError(); } @@ -1782,7 +1783,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const apiKeyToken = generateApiKeyToken(); const apiKeyHash = await hashApiKeyToken(apiKeyToken); const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); - const apiKeyId = generateUlid(Date.now() + 1); + const apiKeyId = generateUlid(nowUtcMs() + 1); const createdAt = nowIso(); const db = createDb(c.env.DB); @@ -2271,7 +2272,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { request: c.req.raw, bodyBytes, }); - const nowMillis = Date.now(); + const nowMillis = nowUtcMs(); const db = createDb(c.env.DB); const existingAgent = await findOwnedAgentByDid({ db, @@ -2478,7 +2479,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { }); } - const nowMillis = Date.now(); + const nowMillis = nowUtcMs(); if (isIsoExpired(existingSession.access_expires_at, nowMillis)) { throw new AppError({ code: "AGENT_AUTH_VALIDATE_EXPIRED", @@ -2650,7 +2651,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { await executor .insert(revocations) .values({ - id: generateUlid(Date.now()), + id: generateUlid(nowUtcMs()), jti: currentJti, agent_id: existingAgent.id, reason: null, @@ -2786,7 +2787,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { await executor .insert(revocations) .values({ - id: generateUlid(Date.now()), + id: generateUlid(nowUtcMs()), jti: currentJti, agent_id: existingAgent.id, reason: "reissued", diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 4044427..850ffbc 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -17,6 +17,7 @@ - Keep replay configuration environment-driven via `CONNECTOR_INBOUND_*` vars with safe defaults from `constants.ts`. 
- `/v1/status` must include websocket state and inbound replay health (`pendingCount`, `oldestPendingAt`, replay activity/error, hook status). - On inbox/status read failures, return explicit structured errors instead of crashing runtime. +- Keep connector runtime/inbox timestamps standardized via shared SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`) instead of ad-hoc datetime formatting. ## WebSocket Resilience Rules - Keep websocket reconnect behavior centralized in `client.ts` (single cleanup path for close/error/unexpected-response/timeout). diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index c550d64..8ec3662 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -1,5 +1,5 @@ import { generateUlid } from "@clawdentity/protocol"; -import { createLogger, type Logger } from "@clawdentity/sdk"; +import { createLogger, type Logger, toIso } from "@clawdentity/sdk"; import { CONNECTOR_FRAME_VERSION, DEFAULT_CONNECT_TIMEOUT_MS, @@ -1092,6 +1092,6 @@ export class ConnectorClient { } private makeTimestamp(): string { - return new Date(this.now()).toISOString(); + return toIso(this.now()); } } diff --git a/packages/connector/src/inbound-inbox.ts b/packages/connector/src/inbound-inbox.ts index 47ea1f4..d3a7bc5 100644 --- a/packages/connector/src/inbound-inbox.ts +++ b/packages/connector/src/inbound-inbox.ts @@ -6,6 +6,7 @@ import { writeFile, } from "node:fs/promises"; import { dirname, join } from "node:path"; +import { nowIso, nowUtcMs } from "@clawdentity/sdk"; import type { DeliverFrame } from "./frames.js"; const INBOUND_INBOX_DIR_NAME = "inbound-inbox"; @@ -66,10 +67,6 @@ export type ConnectorInboundInboxOptions = { maxPendingMessages: number; }; -function nowIso(): string { - return new Date().toISOString(); -} - function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } @@ -480,7 +477,7 @@ export class ConnectorInboundInbox { updatedAt: 
nowIso(), } satisfies InboundInboxIndexFile; - const tmpPath = `${this.indexPath}.tmp-${Date.now()}`; + const tmpPath = `${this.indexPath}.tmp-${nowUtcMs()}`; await writeFile(tmpPath, `${JSON.stringify(payload, null, 2)}\n`, "utf8"); await rename(tmpPath, this.indexPath); } diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index 5b3b316..4312aa0 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -18,8 +18,11 @@ import { createLogger, executeWithAgentAuthRefreshRetry, type Logger, + nowIso, + nowUtcMs, refreshAgentAuthWithClawProof, signHttpRequest, + toIso, } from "@clawdentity/sdk"; import { WebSocket as NodeWebSocket } from "ws"; import { ConnectorClient, type ConnectorWebSocket } from "./client.js"; @@ -498,7 +501,7 @@ async function writeRegistryAuthAtomic(input: { input.agentName, REGISTRY_AUTH_FILENAME, ); - const tmpPath = `${targetPath}.tmp-${Date.now()}-${Math.random().toString(16).slice(2)}`; + const tmpPath = `${targetPath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; await mkdir(dirname(targetPath), { recursive: true }); await writeFile(tmpPath, `${JSON.stringify(input.auth, null, 2)}\n`, "utf8"); @@ -647,7 +650,7 @@ async function buildUpgradeHeaders(input: { wsUrl: URL; secretKey: Uint8Array; }): Promise> { - const timestamp = Math.floor(Date.now() / 1000).toString(); + const timestamp = Math.floor(nowUtcMs() / 1000).toString(); const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); const signed = await signHttpRequest({ method: "GET", @@ -704,7 +707,7 @@ export async function startConnectorRuntime( const refreshCurrentAuthIfNeeded = async (): Promise => { await syncAuthFromDisk(); - if (!shouldRefreshAccessToken(currentAuth, Date.now())) { + if (!shouldRefreshAccessToken(currentAuth, nowUtcMs())) { return; } @@ -768,11 +771,11 @@ export async function startConnectorRuntime( try { const dueItems = await inboundInbox.listDuePending({ - nowMs: Date.now(), + nowMs: 
nowUtcMs(), limit: inboundReplayPolicy.batchSize, }); for (const pending of dueItems) { - inboundReplayStatus.lastAttemptAt = new Date().toISOString(); + inboundReplayStatus.lastAttemptAt = nowIso(); try { await deliverToOpenclawHook({ fetchImpl, @@ -782,7 +785,7 @@ export async function startConnectorRuntime( payload: pending.payload, }); await inboundInbox.markDelivered(pending.requestId); - inboundReplayStatus.lastReplayAt = new Date().toISOString(); + inboundReplayStatus.lastReplayAt = nowIso(); inboundReplayStatus.lastReplayError = undefined; inboundReplayStatus.lastAttemptStatus = "ok"; logger.info("connector.inbound.replay_succeeded", { @@ -795,14 +798,14 @@ export async function startConnectorRuntime( error instanceof LocalOpenclawDeliveryError ? error.retryable : true; - const nextAttemptAt = new Date( - Date.now() + + const nextAttemptAt = toIso( + nowUtcMs() + computeReplayDelayMs({ attemptCount: pending.attemptCount + 1, policy: inboundReplayPolicy, }) * (retryable ? 1 : 10), - ).toISOString(); + ); await inboundInbox.markReplayFailure({ requestId: pending.requestId, errorMessage: reason, @@ -904,7 +907,7 @@ export async function startConnectorRuntime( const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; const performRelay = async (auth: AgentAuthBundle): Promise => { - const unixSeconds = Math.floor(Date.now() / 1000).toString(); + const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); const signed = await signHttpRequest({ method: "POST", diff --git a/packages/sdk/src/AGENTS.md b/packages/sdk/src/AGENTS.md new file mode 100644 index 0000000..9fdb94d --- /dev/null +++ b/packages/sdk/src/AGENTS.md @@ -0,0 +1,6 @@ +# AGENTS.md (packages/sdk/src) + +## Purpose +- Follow `packages/sdk/AGENTS.md` as the canonical SDK guidance. 
+- Keep datetime primitives centralized in `datetime.ts` and exported through `index.ts` (`nowUtcMs`, `toIso`, `nowIso`, `addSeconds`, `isExpired`). +- Keep helper tests focused and deterministic in `datetime.test.ts`. diff --git a/packages/sdk/src/agent-auth-client.ts b/packages/sdk/src/agent-auth-client.ts index a0fcf6e..079df33 100644 --- a/packages/sdk/src/agent-auth-client.ts +++ b/packages/sdk/src/agent-auth-client.ts @@ -2,6 +2,7 @@ import { AGENT_AUTH_REFRESH_PATH, encodeBase64url, } from "@clawdentity/protocol"; +import { nowUtcMs } from "./datetime.js"; import { AppError } from "./exceptions.js"; import { signHttpRequest } from "./http/sign.js"; @@ -239,7 +240,7 @@ export async function refreshAgentAuthWithClawProof(input: { const refreshBody = JSON.stringify({ refreshToken: input.refreshToken, }); - const nowMs = input.nowMs?.() ?? Date.now(); + const nowMs = input.nowMs?.() ?? nowUtcMs(); const timestamp = String(Math.floor(nowMs / 1000)); const nonce = encodeBase64url(crypto.getRandomValues(new Uint8Array(16))); const signed = await signHttpRequest({ diff --git a/packages/sdk/src/datetime.test.ts b/packages/sdk/src/datetime.test.ts index 40d9a95..f5b8124 100644 --- a/packages/sdk/src/datetime.test.ts +++ b/packages/sdk/src/datetime.test.ts @@ -1,7 +1,22 @@ -import { describe, expect, it } from "vitest"; -import { addSeconds, isExpired } from "./datetime.js"; +import { describe, expect, it, vi } from "vitest"; +import { addSeconds, isExpired, nowUtcMs, toIso } from "./datetime.js"; describe("datetime helpers", () => { + it("returns epoch milliseconds for the current instant", () => { + vi.useFakeTimers(); + vi.setSystemTime(new Date("2026-01-01T00:00:00.000Z")); + try { + expect(nowUtcMs()).toBe(1767225600000); + } finally { + vi.useRealTimers(); + } + }); + + it("formats valid datetime values as ISO-8601 UTC", () => { + expect(toIso("2026-01-01T00:00:00.000Z")).toBe("2026-01-01T00:00:00.000Z"); + 
expect(toIso(1767225600000)).toBe("2026-01-01T00:00:00.000Z"); + }); + it("adds seconds to a datetime", () => { expect(addSeconds("2026-01-01T00:00:00.000Z", 90)).toBe( "2026-01-01T00:01:30.000Z", diff --git a/packages/sdk/src/datetime.ts b/packages/sdk/src/datetime.ts index 8116aa8..a37e64d 100644 --- a/packages/sdk/src/datetime.ts +++ b/packages/sdk/src/datetime.ts @@ -7,16 +7,24 @@ function toDate(value: Date | string | number): Date { return parsed; } +export function nowUtcMs(): number { + return Date.now(); +} + +export function toIso(value: Date | string | number): string { + return toDate(value).toISOString(); +} + export function nowIso(): string { - return new Date().toISOString(); + return toIso(nowUtcMs()); } export function addSeconds( value: Date | string | number, seconds: number, ): string { - const next = new Date(toDate(value).getTime() + seconds * 1000); - return next.toISOString(); + const next = toDate(value).getTime() + seconds * 1000; + return toIso(next); } export function isExpired( diff --git a/packages/sdk/src/event-bus.ts b/packages/sdk/src/event-bus.ts index 4deb7d0..bbf6285 100644 --- a/packages/sdk/src/event-bus.ts +++ b/packages/sdk/src/event-bus.ts @@ -1,4 +1,4 @@ -import { nowIso } from "./datetime.js"; +import { nowIso, nowUtcMs } from "./datetime.js"; const DEFAULT_EVENT_VERSION = "v1"; @@ -45,7 +45,7 @@ function createEventId(): string { } const random = Math.random().toString(36).slice(2, 10); - return `${Date.now()}-${random}`; + return `${nowUtcMs()}-${random}`; } function normalizeRequiredString(value: string, fieldName: string): string { diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 6bc6e01..866d9ad 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -34,7 +34,7 @@ export { signEd25519, verifyEd25519, } from "./crypto/ed25519.js"; -export { addSeconds, isExpired, nowIso } from "./datetime.js"; +export { addSeconds, isExpired, nowIso, nowUtcMs, toIso } from "./datetime.js"; 
export type { EventBus, EventEnvelope, diff --git a/packages/sdk/src/logging.ts b/packages/sdk/src/logging.ts index 0af94ef..410983a 100644 --- a/packages/sdk/src/logging.ts +++ b/packages/sdk/src/logging.ts @@ -1,5 +1,5 @@ import { createMiddleware } from "hono/factory"; -import { nowIso } from "./datetime.js"; +import { nowIso, nowUtcMs } from "./datetime.js"; import { REQUEST_ID_HEADER, resolveRequestId } from "./request-context.js"; type LogLevel = "debug" | "info" | "warn" | "error"; @@ -63,7 +63,7 @@ export function createLogger(baseFields: LogFields = {}): Logger { export function createRequestLoggingMiddleware(logger: Logger) { return createMiddleware(async (c, next) => { - const startedAt = Date.now(); + const startedAt = nowUtcMs(); let caughtError: unknown; try { @@ -81,7 +81,7 @@ export function createRequestLoggingMiddleware(logger: Logger) { method: c.req.method, path: c.req.path, status: caughtError ? 500 : c.res.status, - durationMs: Date.now() - startedAt, + durationMs: nowUtcMs() - startedAt, }); } }); diff --git a/packages/sdk/src/request-context.ts b/packages/sdk/src/request-context.ts index a0de962..d7e8146 100644 --- a/packages/sdk/src/request-context.ts +++ b/packages/sdk/src/request-context.ts @@ -1,4 +1,5 @@ import { createMiddleware } from "hono/factory"; +import { nowUtcMs } from "./datetime.js"; export const REQUEST_ID_HEADER = "x-request-id"; const REQUEST_ID_PATTERN = /^[A-Za-z0-9._:-]{8,128}$/; @@ -12,7 +13,7 @@ function generateRequestId(): string { } const random = Math.random().toString(36).slice(2, 12); - return `${Date.now().toString(36)}-${random}`; + return `${nowUtcMs().toString(36)}-${random}`; } export function resolveRequestId(requestId?: string): string { diff --git a/packages/sdk/src/testing/ait-fixtures.ts b/packages/sdk/src/testing/ait-fixtures.ts index 077ed2b..e82e65a 100644 --- a/packages/sdk/src/testing/ait-fixtures.ts +++ b/packages/sdk/src/testing/ait-fixtures.ts @@ -4,6 +4,7 @@ import { makeAgentDid, 
makeHumanDid, } from "@clawdentity/protocol"; +import { nowUtcMs } from "../datetime.js"; export type BuildTestAitClaimsInput = { publicKeyX: string; @@ -27,7 +28,7 @@ const DEFAULT_TTL_SECONDS = 600; export function buildTestAitClaims(input: BuildTestAitClaimsInput): AitClaims { const seedMs = input.seedMs ?? DEFAULT_SEED_MS; const nowSeconds = - input.nowSeconds ?? Math.floor((input.seedMs ?? Date.now()) / 1000); + input.nowSeconds ?? Math.floor((input.seedMs ?? nowUtcMs()) / 1000); const ttlSeconds = input.ttlSeconds ?? DEFAULT_TTL_SECONDS; const nbfSkewSeconds = input.nbfSkewSeconds ?? 5; From fbfa2062c6d98255c50e87728a324911e1820728 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 12:01:15 +0530 Subject: [PATCH 110/190] Pull latest and execute plan --- AGENTS.md | 6 +- ARCHITECTURE.md | 1 - README.md | 1 - apps/cli/src/commands/openclaw.test.ts | 64 +- apps/cli/src/commands/openclaw.ts | 126 ++-- apps/proxy/src/AGENTS.md | 12 + apps/proxy/src/agent-hook-route.ts | 25 + apps/proxy/src/agent-relay-session.ts | 320 +++++++++- apps/proxy/src/auth-middleware.ts | 7 +- apps/proxy/src/config.test.ts | 18 + apps/proxy/src/config.ts | 23 + .../src/relay-delivery-receipt-route.test.ts | 313 ++++++++++ .../proxy/src/relay-delivery-receipt-route.ts | 211 +++++++ apps/proxy/src/server.ts | 19 + apps/proxy/src/worker.ts | 4 + packages/connector/src/AGENTS.md | 24 +- packages/connector/src/client.ts | 231 +++++++- packages/connector/src/constants.ts | 3 + packages/connector/src/inbound-inbox.test.ts | 154 +++-- packages/connector/src/inbound-inbox.ts | 560 +++++++++++++++--- packages/connector/src/index.ts | 5 + packages/connector/src/runtime.ts | 530 +++++++++++++++-- packages/protocol/AGENTS.md | 2 +- packages/protocol/src/endpoints.ts | 3 + packages/protocol/src/index.ts | 3 + 25 files changed, 2408 insertions(+), 257 deletions(-) create mode 100644 apps/proxy/src/relay-delivery-receipt-route.test.ts create mode 100644 
apps/proxy/src/relay-delivery-receipt-route.ts diff --git a/AGENTS.md b/AGENTS.md index 34425a9..b001cc3 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -36,8 +36,8 @@ ## Documentation Sync - `README.md` must reflect current execution model and links to issue governance. -- `PRD.md` must reflect current rollout order, deployment gating, and verification strategy. -- If backlog shape changes, update README + PRD + the relevant GitHub issue threads in the same change. +- Architecture and rollout docs (for example `ARCHITECTURE.md`) must reflect current deployment gating and verification strategy. +- If backlog shape changes, update README + architecture docs + the relevant GitHub issue threads in the same change. ## Validation Baseline - Run and pass: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build` for implementation changes. @@ -148,7 +148,7 @@ - `http://localhost:18789/` and `http://localhost:19001/` ## Scaffold Best Practices -- Start by reviewing README, PRD, and the active execution tracker issue so documentation mirrors the execution model. +- Start by reviewing README, ARCHITECTURE.md, and the active execution tracker issue so documentation mirrors the execution model. - Define the workspace layout now: `apps/registry`, `apps/proxy`, `apps/cli`, `packages/sdk`, and `packages/protocol` (with shared tooling such as `pnpm-workspace.yaml`, `tsconfig.base.json`, and `biome.json`) so downstream tickets have a known structure. - Declare placeholder scripts for lint/test/build (e.g., `pnpm -r lint`, `pnpm -r test`, `pnpm -r build`) and identify the expected toolchain (Biome, Vitest, tsup, etc.) so future work can fill implementations without duplication. - Document the CI entrypoints (GitHub Actions or another pipeline) that will run the above scripts, so deployment scaffolding can wire the baseline checks without guessing what belongs in initial setup. 
diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index d180e28..d77ad41 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -551,5 +551,4 @@ clawdentity/ ## Further Reading - **[README.md](./README.md)** — overview, quick start, and comparison -- **[PRD.md](./PRD.md)** — MVP product requirements and rollout strategy - **Execution and issue governance:** [GitHub issue tracker](https://github.com/vrknetha/clawdentity/issues/74) diff --git a/README.md b/README.md index a83b52d..99f1f1b 100644 --- a/README.md +++ b/README.md @@ -157,4 +157,3 @@ This repo uses a **deployment-first gate** tracked in [GitHub Issues](https://gi ## Deep Docs - **[ARCHITECTURE.md](./ARCHITECTURE.md)** — full protocol flows, verification pipeline, security architecture, deployment details -- **[PRD.md](./PRD.md)** — MVP product requirements and rollout strategy diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 46a971e..5927a42 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -96,15 +96,25 @@ function connectorReadyFetch(): typeof fetch { new Response( JSON.stringify({ status: "ok", - websocketConnected: true, - inboundInbox: { - pendingCount: 0, - pendingBytes: 0, - replayerActive: false, + websocket: { + connected: true, }, - openclawHook: { - url: "http://127.0.0.1:18789/hooks/agent", - lastAttemptStatus: "ok", + inbound: { + pending: { + pendingCount: 0, + pendingBytes: 0, + }, + deadLetter: { + deadLetterCount: 0, + deadLetterBytes: 0, + }, + replay: { + replayerActive: false, + }, + openclawHook: { + url: "http://127.0.0.1:18789/hooks/agent", + lastAttemptStatus: "ok", + }, }, }), { @@ -1503,19 +1513,29 @@ describe("openclaw command helpers", () => { new Response( JSON.stringify({ status: "ok", - websocketConnected: true, - inboundInbox: { - pendingCount: 2, - pendingBytes: 512, - oldestPendingAt: "2026-01-01T00:00:00.000Z", - lastReplayError: - "Local OpenClaw hook rejected payload with 
status 500", - replayerActive: false, + websocket: { + connected: true, }, - openclawHook: { - url: "http://127.0.0.1:18789/hooks/agent", - lastAttemptStatus: "failed", - lastAttemptAt: "2026-01-01T00:00:00.000Z", + inbound: { + pending: { + pendingCount: 2, + pendingBytes: 512, + oldestPendingAt: "2026-01-01T00:00:00.000Z", + }, + deadLetter: { + deadLetterCount: 0, + deadLetterBytes: 0, + }, + replay: { + lastReplayError: + "Local OpenClaw hook rejected payload with status 500", + replayerActive: false, + }, + openclawHook: { + url: "http://127.0.0.1:18789/hooks/agent", + lastAttemptStatus: "failed", + lastAttemptAt: "2026-01-01T00:00:00.000Z", + }, }, }), { @@ -1906,7 +1926,9 @@ describe("openclaw command helpers", () => { new Response( JSON.stringify({ status: "ok", - websocketConnected: false, + websocket: { + connected: false, + }, }), { status: 200, diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index 3333965..d9d673f 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -1190,6 +1190,9 @@ function resolveConnectorStatusUrl(connectorBaseUrl: string): string { type ConnectorHealthStatus = { connected: boolean; inboundInbox?: { + deadLetterBytes?: number; + deadLetterCount?: number; + oldestDeadLetterAt?: string; lastReplayAt?: string; lastReplayError?: string; nextAttemptAt?: string; @@ -1210,6 +1213,9 @@ type ConnectorHealthStatus = { function parseConnectorStatusPayload(payload: unknown): { inboundInbox?: { + deadLetterBytes?: number; + deadLetterCount?: number; + oldestDeadLetterAt?: string; lastReplayAt?: string; lastReplayError?: string; nextAttemptAt?: string; @@ -1225,61 +1231,93 @@ function parseConnectorStatusPayload(payload: unknown): { }; websocketConnected: boolean; } { - if (!isRecord(payload) || typeof payload.websocketConnected !== "boolean") { + if ( + !isRecord(payload) || + !isRecord(payload.websocket) || + typeof payload.websocket.connected !== "boolean" + ) { throw 
createCliError( "CLI_OPENCLAW_SETUP_CONNECTOR_STATUS_INVALID", "Connector status response is invalid", ); } + const inboundRoot = isRecord(payload.inbound) ? payload.inbound : undefined; + const pending = + inboundRoot && isRecord(inboundRoot.pending) + ? inboundRoot.pending + : undefined; + const deadLetter = + inboundRoot && isRecord(inboundRoot.deadLetter) + ? inboundRoot.deadLetter + : undefined; + const replay = + inboundRoot && isRecord(inboundRoot.replay) + ? inboundRoot.replay + : undefined; + const hook = + inboundRoot && isRecord(inboundRoot.openclawHook) + ? inboundRoot.openclawHook + : undefined; + return { - websocketConnected: payload.websocketConnected, - inboundInbox: isRecord(payload.inboundInbox) - ? { - pendingCount: - typeof payload.inboundInbox.pendingCount === "number" - ? payload.inboundInbox.pendingCount - : undefined, - pendingBytes: - typeof payload.inboundInbox.pendingBytes === "number" - ? payload.inboundInbox.pendingBytes - : undefined, - oldestPendingAt: - typeof payload.inboundInbox.oldestPendingAt === "string" - ? payload.inboundInbox.oldestPendingAt - : undefined, - nextAttemptAt: - typeof payload.inboundInbox.nextAttemptAt === "string" - ? payload.inboundInbox.nextAttemptAt - : undefined, - lastReplayAt: - typeof payload.inboundInbox.lastReplayAt === "string" - ? payload.inboundInbox.lastReplayAt - : undefined, - lastReplayError: - typeof payload.inboundInbox.lastReplayError === "string" - ? payload.inboundInbox.lastReplayError - : undefined, - replayerActive: - typeof payload.inboundInbox.replayerActive === "boolean" - ? payload.inboundInbox.replayerActive - : undefined, - } - : undefined, - openclawHook: isRecord(payload.openclawHook) + websocketConnected: payload.websocket.connected, + inboundInbox: + pending || deadLetter || replay + ? { + pendingCount: + pending && typeof pending.pendingCount === "number" + ? pending.pendingCount + : undefined, + pendingBytes: + pending && typeof pending.pendingBytes === "number" + ? 
pending.pendingBytes + : undefined, + oldestPendingAt: + pending && typeof pending.oldestPendingAt === "string" + ? pending.oldestPendingAt + : undefined, + nextAttemptAt: + pending && typeof pending.nextAttemptAt === "string" + ? pending.nextAttemptAt + : undefined, + lastReplayAt: + replay && typeof replay.lastReplayAt === "string" + ? replay.lastReplayAt + : undefined, + lastReplayError: + replay && typeof replay.lastReplayError === "string" + ? replay.lastReplayError + : undefined, + replayerActive: + replay && typeof replay.replayerActive === "boolean" + ? replay.replayerActive + : undefined, + deadLetterCount: + deadLetter && typeof deadLetter.deadLetterCount === "number" + ? deadLetter.deadLetterCount + : undefined, + deadLetterBytes: + deadLetter && typeof deadLetter.deadLetterBytes === "number" + ? deadLetter.deadLetterBytes + : undefined, + oldestDeadLetterAt: + deadLetter && typeof deadLetter.oldestDeadLetterAt === "string" + ? deadLetter.oldestDeadLetterAt + : undefined, + } + : undefined, + openclawHook: hook ? { - url: - typeof payload.openclawHook.url === "string" - ? payload.openclawHook.url - : undefined, + url: typeof hook.url === "string" ? hook.url : undefined, lastAttemptAt: - typeof payload.openclawHook.lastAttemptAt === "string" - ? payload.openclawHook.lastAttemptAt + typeof hook.lastAttemptAt === "string" + ? hook.lastAttemptAt : undefined, lastAttemptStatus: - payload.openclawHook.lastAttemptStatus === "ok" || - payload.openclawHook.lastAttemptStatus === "failed" - ? payload.openclawHook.lastAttemptStatus + hook.lastAttemptStatus === "ok" || + hook.lastAttemptStatus === "failed" + ? hook.lastAttemptStatus : undefined, } : undefined, diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 752577b..1d1bc99 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -39,6 +39,7 @@ - Keep `/hooks/agent` forwarding logic isolated in `agent-hook-route.ts`; `server.ts` should only compose middleware/routes. 
- Keep relay websocket connect handling isolated in `relay-connect-route.ts`; `server.ts` should only compose middleware/routes. - Keep DO runtime behavior in `agent-relay-session.ts` (websocket accept, heartbeat alarm, connector delivery RPC). +- Keep relay delivery-receipt HTTP handlers isolated in `relay-delivery-receipt-route.ts`; `server.ts` should only compose `POST/GET /v1/relay/delivery-receipts`. - Do not import Node-only startup helpers into `worker.ts`; Worker runtime must stay free of process/port startup concerns. - Keep worker runtime cache keys sensitive to deploy-time version bindings so `/health` reflects fresh `APP_VERSION` after deploy. - Keep auth failure semantics stable: auth-invalid requests map to `401`; verified-but-not-trusted requests map to `403`; registry keyset outages map to `503`; CRL outages map to `503` when stale behavior is `fail-closed`. @@ -70,12 +71,23 @@ - Keep relay retries inside `agent-relay-session.ts` with bounded backoff (`RELAY_RETRY_*`) and per-agent queue caps/TTL (`RELAY_QUEUE_*`); do not add ad-hoc retry loops in route handlers. - Keep relay-session timestamps UTC and standardized via shared SDK datetime helpers (`nowUtcMs`, `toIso`) rather than ad-hoc datetime formatting. - Keep relay websocket heartbeat liveness explicit in `agent-relay-session.ts`: track per-socket heartbeat ack time and enforce a 60s ack timeout before socket eviction. +- Keep relay frame-size enforcement explicit: + - reject oversized inbound websocket frames with close code `1009` + - reject oversized outbound deliver frames before socket send + - keep limits environment-driven via `RELAY_MAX_FRAME_BYTES` +- Keep connector backpressure explicit with a bounded in-flight window (`RELAY_MAX_IN_FLIGHT_DELIVERIES`); do not bypass this with ad-hoc parallel sends. - Keep stale connector cleanup proactive: evict stale sockets during alarm sweeps and before accepting a new reconnect socket. 
- Keep connector session ownership deterministic: new reconnect sockets supersede older live sockets with a clean `1000` close code so delivery always targets one active socket. - Keep reconnect recovery eager but handshake-safe: trigger durable queue drain immediately after reconnect, but do not block websocket `101` upgrade responses on `deliver_ack` waits. - Keep superseded socket state sticky until close cleanup: late frames from sockets marked in `socketsPendingClose` must not reactivate those sockets. - Keep close semantics strict for pending delivery promises: clean `1000` closes do not reject pending deliveries, but unclean closes reject when no sockets remain. - Keep identity message injection explicit and default-on (`INJECT_IDENTITY_INTO_MESSAGE=true`); operators can disable it when unchanged forwarding is required. +- Keep relay delivery receipt persistence in `agent-relay-session.ts` with explicit RPC routes: + - `/rpc/record-delivery-receipt` + - `/rpc/get-delivery-receipt` +- Receipt states must remain constrained to `processed_by_openclaw` and `dead_lettered`. +- Receipt reads/writes must verify authenticated/trusted sender-recipient pairs and enforce recipient DID ownership at the route layer. +- Keep `conversationId` and `replyTo` metadata flowing from `/hooks/agent` into relay queue/deliver frames for downstream ordering and callback semantics. - Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. - Index pairing tickets by ticket `kid` in both in-memory and Durable Object stores; persist the original full ticket string alongside each entry and require exact ticket match on confirm. - Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. 
diff --git a/apps/proxy/src/agent-hook-route.ts b/apps/proxy/src/agent-hook-route.ts index 3ceef2c..33fe6cf 100644 --- a/apps/proxy/src/agent-hook-route.ts +++ b/apps/proxy/src/agent-hook-route.ts @@ -1,5 +1,7 @@ import { parseDid, + RELAY_CONVERSATION_ID_HEADER, + RELAY_DELIVERY_RECEIPT_URL_HEADER, RELAY_RECIPIENT_AGENT_DID_HEADER, } from "@clawdentity/protocol"; import { AppError, type Logger, nowIso } from "@clawdentity/sdk"; @@ -147,6 +149,19 @@ function parseRecipientAgentDid(c: ProxyContext): string { return recipientDid; } +function parseOptionalHeaderValue( + c: ProxyContext, + headerName: string, +): string | undefined { + const value = c.req.header(headerName); + if (typeof value !== "string") { + return undefined; + } + + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + function resolveDefaultSessionNamespace( c: ProxyContext, ): AgentRelaySessionNamespace | undefined { @@ -213,11 +228,21 @@ export function createAgentHookHandler( } const requestId = c.get("requestId"); + const conversationId = parseOptionalHeaderValue( + c, + RELAY_CONVERSATION_ID_HEADER, + ); + const deliveryReceiptUrl = parseOptionalHeaderValue( + c, + RELAY_DELIVERY_RECEIPT_URL_HEADER, + ); const relayInput: RelayDeliveryInput = { requestId, senderAgentDid: auth.agentDid, recipientAgentDid, payload, + conversationId, + replyTo: deliveryReceiptUrl, }; const relaySession = sessionNamespace.get( diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts index 81dcdc4..a0a966d 100644 --- a/apps/proxy/src/agent-relay-session.ts +++ b/apps/proxy/src/agent-relay-session.ts @@ -12,6 +12,8 @@ import { parseProxyConfig } from "./config.js"; const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; const RELAY_RPC_DELIVER_PATH = "/rpc/deliver-to-connector"; +const RELAY_RPC_GET_RECEIPT_PATH = "/rpc/get-delivery-receipt"; +const RELAY_RPC_RECORD_RECEIPT_PATH = "/rpc/record-delivery-receipt"; const 
RELAY_HEARTBEAT_INTERVAL_MS = 30_000; const RELAY_HEARTBEAT_ACK_TIMEOUT_MS = 60_000; const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; @@ -32,13 +34,19 @@ type DurableObjectStateLike = { }; export type RelayDeliveryInput = { + conversationId?: string; payload: unknown; recipientAgentDid: string; + replyTo?: string; requestId: string; senderAgentDid: string; }; -export type RelayDeliveryState = "delivered" | "queued"; +export type RelayDeliveryState = + | "delivered" + | "queued" + | "processed_by_openclaw" + | "dead_lettered"; export type RelayDeliveryResult = { connectedSockets: number; @@ -49,6 +57,24 @@ export type RelayDeliveryResult = { state: RelayDeliveryState; }; +export type RelayReceiptRecordInput = { + reason?: string; + recipientAgentDid: string; + requestId: string; + senderAgentDid: string; + status: "processed_by_openclaw" | "dead_lettered"; +}; + +export type RelayReceiptLookupInput = { + requestId: string; + senderAgentDid: string; +}; + +export type RelayReceiptLookupResult = { + found: boolean; + receipt?: RelayDeliveryReceipt; +}; + export class RelaySessionDeliveryError extends Error { readonly code: string; readonly status: number; @@ -75,6 +101,10 @@ export type AgentRelaySessionStub = { deliverToConnector?: ( input: RelayDeliveryInput, ) => Promise; + getDeliveryReceipt?: ( + input: RelayReceiptLookupInput, + ) => Promise; + recordDeliveryReceipt?: (input: RelayReceiptRecordInput) => Promise; fetch: (request: Request) => Promise; }; @@ -97,14 +127,20 @@ type QueuedRelayDelivery = { nextAttemptAtMs: number; payload: unknown; recipientAgentDid: string; + replyTo?: string; requestId: string; senderAgentDid: string; + conversationId?: string; }; type RelayDeliveryReceipt = { deliveryId: string; expiresAtMs: number; + recipientAgentDid: string; + reason?: string; requestId: string; + senderAgentDid: string; + statusUpdatedAt: string; state: RelayDeliveryState; }; @@ -114,6 +150,8 @@ type RelayQueueState = { }; type RelayDeliveryPolicy = { + 
maxFrameBytes: number; + maxInFlightDeliveries: number; queueMaxMessagesPerAgent: number; queueTtlMs: number; retryInitialMs: number; @@ -158,9 +196,19 @@ function toDeliverFrame(input: RelayDeliveryInput): DeliverFrame { fromAgentDid: input.senderAgentDid, toAgentDid: input.recipientAgentDid, payload: input.payload, + conversationId: input.conversationId, + replyTo: input.replyTo, }; } +function getWebSocketMessageBytes(message: string | ArrayBuffer): number { + if (typeof message === "string") { + return new TextEncoder().encode(message).byteLength; + } + + return message.byteLength; +} + function parseDeliveryInput(value: unknown): RelayDeliveryInput { if (typeof value !== "object" || value === null) { throw new TypeError("Relay delivery input must be an object"); @@ -175,11 +223,91 @@ function parseDeliveryInput(value: unknown): RelayDeliveryInput { throw new TypeError("Relay delivery input is invalid"); } + if ( + input.replyTo !== undefined && + (typeof input.replyTo !== "string" || input.replyTo.trim().length === 0) + ) { + throw new TypeError("Relay delivery input is invalid"); + } + if (typeof input.replyTo === "string") { + try { + new URL(input.replyTo); + } catch { + throw new TypeError("Relay delivery input is invalid"); + } + } + return { requestId: input.requestId, senderAgentDid: input.senderAgentDid, recipientAgentDid: input.recipientAgentDid, payload: input.payload, + conversationId: + typeof input.conversationId === "string" && + input.conversationId.trim().length > 0 + ? input.conversationId.trim() + : undefined, + replyTo: + typeof input.replyTo === "string" && input.replyTo.trim().length > 0 + ? 
input.replyTo.trim() + : undefined, + }; +} + +function parseReceiptRecordInput(value: unknown): RelayReceiptRecordInput { + if (typeof value !== "object" || value === null) { + throw new TypeError("Relay receipt input must be an object"); + } + + const input = value as Partial; + if ( + typeof input.requestId !== "string" || + input.requestId.trim().length === 0 || + typeof input.senderAgentDid !== "string" || + input.senderAgentDid.trim().length === 0 || + typeof input.recipientAgentDid !== "string" || + input.recipientAgentDid.trim().length === 0 + ) { + throw new TypeError("Relay receipt input is invalid"); + } + + if ( + input.status !== "processed_by_openclaw" && + input.status !== "dead_lettered" + ) { + throw new TypeError("Relay receipt input is invalid"); + } + + return { + requestId: input.requestId.trim(), + senderAgentDid: input.senderAgentDid.trim(), + recipientAgentDid: input.recipientAgentDid.trim(), + status: input.status, + reason: + typeof input.reason === "string" && input.reason.trim().length > 0 + ? 
input.reason.trim() + : undefined, + }; +} + +function parseReceiptLookupInput(value: unknown): RelayReceiptLookupInput { + if (typeof value !== "object" || value === null) { + throw new TypeError("Relay receipt lookup input must be an object"); + } + + const input = value as Partial; + if ( + typeof input.requestId !== "string" || + input.requestId.trim().length === 0 || + typeof input.senderAgentDid !== "string" || + input.senderAgentDid.trim().length === 0 + ) { + throw new TypeError("Relay receipt lookup input is invalid"); + } + + return { + requestId: input.requestId.trim(), + senderAgentDid: input.senderAgentDid.trim(), }; } @@ -256,6 +384,54 @@ export async function deliverToRelaySession( return (await response.json()) as RelayDeliveryResult; } +export async function recordRelayDeliveryReceipt( + relaySession: AgentRelaySessionStub, + input: RelayReceiptRecordInput, +): Promise { + const response = await relaySession.fetch( + new Request(`https://agent-relay-session${RELAY_RPC_RECORD_RECEIPT_PATH}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(input), + }), + ); + + if (!response.ok) { + throw new RelaySessionDeliveryError({ + code: "PROXY_RELAY_RECEIPT_WRITE_FAILED", + message: "Relay delivery receipt write RPC failed", + status: response.status, + }); + } +} + +export async function getRelayDeliveryReceipt( + relaySession: AgentRelaySessionStub, + input: RelayReceiptLookupInput, +): Promise { + const response = await relaySession.fetch( + new Request(`https://agent-relay-session${RELAY_RPC_GET_RECEIPT_PATH}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(input), + }), + ); + + if (!response.ok) { + throw new RelaySessionDeliveryError({ + code: "PROXY_RELAY_RECEIPT_READ_FAILED", + message: "Relay delivery receipt read RPC failed", + status: response.status, + }); + } + + return (await response.json()) as RelayReceiptLookupResult; +} + export class 
AgentRelaySession { private readonly deliveryPolicy: RelayDeliveryPolicy; private readonly heartbeatAckSockets = new Map(); @@ -272,6 +448,8 @@ export class AgentRelaySession { this.state = state; const config = parseProxyConfig(env ?? {}); this.deliveryPolicy = { + maxFrameBytes: config.relayMaxFrameBytes, + maxInFlightDeliveries: config.relayMaxInFlightDeliveries, queueMaxMessagesPerAgent: config.relayQueueMaxMessagesPerAgent, queueTtlMs: config.relayQueueTtlSeconds * 1000, retryInitialMs: config.relayRetryInitialMs, @@ -312,6 +490,38 @@ export class AgentRelaySession { } } + if ( + request.method === "POST" && + url.pathname === RELAY_RPC_RECORD_RECEIPT_PATH + ) { + let input: RelayReceiptRecordInput; + try { + input = parseReceiptRecordInput(await request.json()); + } catch { + return new Response("Invalid relay receipt input", { status: 400 }); + } + + await this.recordDeliveryReceipt(input); + return Response.json({ accepted: true }, { status: 202 }); + } + + if ( + request.method === "POST" && + url.pathname === RELAY_RPC_GET_RECEIPT_PATH + ) { + let input: RelayReceiptLookupInput; + try { + input = parseReceiptLookupInput(await request.json()); + } catch { + return new Response("Invalid relay receipt lookup input", { + status: 400, + }); + } + + const receipt = await this.getDeliveryReceipt(input); + return Response.json(receipt, { status: 200 }); + } + return new Response("Not found", { status: 404 }); } @@ -341,7 +551,12 @@ export class AgentRelaySession { const queueState = await this.loadQueueState(nowMs); const existingReceipt = queueState.receipts[input.requestId]; - if (existingReceipt !== undefined && existingReceipt.expiresAtMs > nowMs) { + if ( + existingReceipt !== undefined && + existingReceipt.expiresAtMs > nowMs && + existingReceipt.senderAgentDid === input.senderAgentDid && + existingReceipt.recipientAgentDid === input.recipientAgentDid + ) { return toRelayDeliveryResult({ deliveryId: existingReceipt.deliveryId, state: existingReceipt.state, 
@@ -355,7 +570,10 @@ export class AgentRelaySession { const deliveryTtlExpiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; let priorAttempts = 0; - if (sockets.length > 0) { + if ( + sockets.length > 0 && + this.pendingDeliveries.size < this.deliveryPolicy.maxInFlightDeliveries + ) { priorAttempts = 1; try { const accepted = await this.sendDeliverFrame(sockets[0], input); @@ -365,6 +583,9 @@ export class AgentRelaySession { deliveryId, state: "delivered", expiresAtMs: deliveryTtlExpiresAtMs, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + statusUpdatedAt: toIso(nowMs), }); await this.saveQueueState(queueState); await this.scheduleNextAlarm(queueState, nowMs); @@ -397,6 +618,8 @@ export class AgentRelaySession { requestId: input.requestId, senderAgentDid: input.senderAgentDid, recipientAgentDid: input.recipientAgentDid, + conversationId: input.conversationId, + replyTo: input.replyTo, payload: input.payload, createdAtMs: nowMs, attemptCount: priorAttempts, @@ -410,6 +633,9 @@ export class AgentRelaySession { deliveryId: queuedDelivery.deliveryId, state: "queued", expiresAtMs: queuedDelivery.expiresAtMs, + senderAgentDid: queuedDelivery.senderAgentDid, + recipientAgentDid: queuedDelivery.recipientAgentDid, + statusUpdatedAt: toIso(nowMs), }); await this.saveQueueState(queueState); @@ -423,10 +649,59 @@ export class AgentRelaySession { }); } + async recordDeliveryReceipt(input: RelayReceiptRecordInput): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + const existing = queueState.receipts[input.requestId]; + if (existing === undefined) { + return; + } + + if ( + existing.senderAgentDid !== input.senderAgentDid || + existing.recipientAgentDid !== input.recipientAgentDid + ) { + return; + } + + existing.state = input.status; + existing.reason = input.reason; + existing.expiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; + existing.statusUpdatedAt = toIso(nowMs); + await 
this.saveQueueState(queueState); + await this.scheduleNextAlarm(queueState, nowMs); + } + + async getDeliveryReceipt( + input: RelayReceiptLookupInput, + ): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + const existing = queueState.receipts[input.requestId]; + if ( + existing === undefined || + existing.senderAgentDid !== input.senderAgentDid + ) { + return { found: false }; + } + + return { + found: true, + receipt: existing, + }; + } + async webSocketMessage( ws: WebSocket, message: string | ArrayBuffer, ): Promise { + const frameBytes = getWebSocketMessageBytes(message); + if (frameBytes > this.deliveryPolicy.maxFrameBytes) { + this.closeSocket(ws, 1009, "frame_too_large"); + await this.scheduleFromStorage(); + return; + } + const nowMs = nowUtcMs(); const frameResult = (() => { try { @@ -581,6 +856,10 @@ export class AgentRelaySession { typeof candidate.requestId === "string" && typeof candidate.senderAgentDid === "string" && typeof candidate.recipientAgentDid === "string" && + (candidate.conversationId === undefined || + typeof candidate.conversationId === "string") && + (candidate.replyTo === undefined || + typeof candidate.replyTo === "string") && typeof candidate.createdAtMs === "number" && Number.isFinite(candidate.createdAtMs) && typeof candidate.attemptCount === "number" && @@ -613,9 +892,17 @@ export class AgentRelaySession { typeof receipt.requestId !== "string" || receipt.requestId !== key || typeof receipt.deliveryId !== "string" || + typeof receipt.senderAgentDid !== "string" || + typeof receipt.recipientAgentDid !== "string" || typeof receipt.expiresAtMs !== "number" || !Number.isFinite(receipt.expiresAtMs) || - (receipt.state !== "queued" && receipt.state !== "delivered") + typeof receipt.statusUpdatedAt !== "string" || + !( + receipt.state === "queued" || + receipt.state === "delivered" || + receipt.state === "processed_by_openclaw" || + receipt.state === "dead_lettered" + ) ) { continue; } @@ 
-624,7 +911,11 @@ export class AgentRelaySession { requestId: receipt.requestId, deliveryId: receipt.deliveryId, expiresAtMs: receipt.expiresAtMs, + senderAgentDid: receipt.senderAgentDid, + recipientAgentDid: receipt.recipientAgentDid, state: receipt.state, + reason: typeof receipt.reason === "string" ? receipt.reason : undefined, + statusUpdatedAt: receipt.statusUpdatedAt, }; } @@ -725,6 +1016,12 @@ export class AgentRelaySession { const socket = sockets[0]; for (let index = 0; index < queueState.deliveries.length; ) { + if ( + this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries + ) { + break; + } + const delivery = queueState.deliveries[index]; if (delivery.expiresAtMs <= nowMs) { @@ -761,6 +1058,8 @@ export class AgentRelaySession { requestId: delivery.requestId, senderAgentDid: delivery.senderAgentDid, recipientAgentDid: delivery.recipientAgentDid, + conversationId: delivery.conversationId, + replyTo: delivery.replyTo, payload: delivery.payload, }); } catch { @@ -774,6 +1073,9 @@ export class AgentRelaySession { deliveryId: delivery.deliveryId, state: "delivered", expiresAtMs: nowMs + this.deliveryPolicy.queueTtlMs, + senderAgentDid: delivery.senderAgentDid, + recipientAgentDid: delivery.recipientAgentDid, + statusUpdatedAt: toIso(nowMs), }); mutated = true; continue; @@ -840,8 +1142,18 @@ export class AgentRelaySession { socket: WebSocket, input: RelayDeliveryInput, ): Promise { + if ( + this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries + ) { + throw new Error("Relay connector in-flight window is full"); + } + const frame = toDeliverFrame(input); const framePayload = serializeFrame(frame); + const frameBytes = new TextEncoder().encode(framePayload).byteLength; + if (frameBytes > this.deliveryPolicy.maxFrameBytes) { + throw new Error("Relay connector frame exceeds max allowed size"); + } return new Promise((resolve, reject) => { const timeoutHandle = setTimeout(() => { diff --git a/apps/proxy/src/auth-middleware.ts 
b/apps/proxy/src/auth-middleware.ts index 9e51f1a..fdf1290 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -2,6 +2,7 @@ import { AGENT_AUTH_VALIDATE_PATH, decodeBase64url, RELAY_CONNECT_PATH, + RELAY_DELIVERY_RECEIPTS_PATH, } from "@clawdentity/protocol"; import { AitJwtError, @@ -602,7 +603,11 @@ export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { }); } - if (c.req.path === "/hooks/agent" || c.req.path === RELAY_CONNECT_PATH) { + if ( + c.req.path === "/hooks/agent" || + c.req.path === RELAY_CONNECT_PATH || + c.req.path === RELAY_DELIVERY_RECEIPTS_PATH + ) { const accessToken = parseAgentAccessHeader( c.req.header("x-claw-agent-access"), ); diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 01eb7e1..8980de0 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -12,6 +12,8 @@ import { DEFAULT_PROXY_ENVIRONMENT, DEFAULT_PROXY_LISTEN_PORT, DEFAULT_REGISTRY_URL, + DEFAULT_RELAY_MAX_FRAME_BYTES, + DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES, DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, DEFAULT_RELAY_QUEUE_TTL_SECONDS, DEFAULT_RELAY_RETRY_INITIAL_MS, @@ -45,6 +47,8 @@ describe("proxy config", () => { relayRetryMaxMs: DEFAULT_RELAY_RETRY_MAX_MS, relayRetryMaxAttempts: DEFAULT_RELAY_RETRY_MAX_ATTEMPTS, relayRetryJitterRatio: DEFAULT_RELAY_RETRY_JITTER_RATIO, + relayMaxInFlightDeliveries: DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES, + relayMaxFrameBytes: DEFAULT_RELAY_MAX_FRAME_BYTES, }); }); @@ -66,6 +70,8 @@ describe("proxy config", () => { RELAY_RETRY_MAX_MS: "15000", RELAY_RETRY_MAX_ATTEMPTS: "7", RELAY_RETRY_JITTER_RATIO: "0.4", + RELAY_MAX_IN_FLIGHT_DELIVERIES: "8", + RELAY_MAX_FRAME_BYTES: "2048", }); expect(config.listenPort).toBe(4100); @@ -85,6 +91,8 @@ describe("proxy config", () => { expect(config.relayRetryMaxMs).toBe(15000); expect(config.relayRetryMaxAttempts).toBe(7); expect(config.relayRetryJitterRatio).toBe(0.4); + 
expect(config.relayMaxInFlightDeliveries).toBe(8); + expect(config.relayMaxFrameBytes).toBe(2048); }); it("allows disabling identity injection via env override", () => { @@ -149,6 +157,16 @@ describe("proxy config", () => { RELAY_RETRY_JITTER_RATIO: "1.1", }), ).toThrow(ProxyConfigError); + expect(() => + parseProxyConfig({ + RELAY_MAX_IN_FLIGHT_DELIVERIES: "0", + }), + ).toThrow(ProxyConfigError); + expect(() => + parseProxyConfig({ + RELAY_MAX_FRAME_BYTES: "0", + }), + ).toThrow(ProxyConfigError); }); it("throws when only one internal service credential is provided", () => { diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 450caa2..25f9d1e 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -34,6 +34,8 @@ export const DEFAULT_RELAY_RETRY_INITIAL_MS = 1000; export const DEFAULT_RELAY_RETRY_MAX_MS = 30_000; export const DEFAULT_RELAY_RETRY_MAX_ATTEMPTS = 25; export const DEFAULT_RELAY_RETRY_JITTER_RATIO = 0.2; +export const DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES = 5; +export const DEFAULT_RELAY_MAX_FRAME_BYTES = 1024 * 1024; export class ProxyConfigError extends Error { readonly code = "CONFIG_VALIDATION_FAILED"; @@ -146,6 +148,16 @@ const proxyRuntimeEnvSchema = z.object({ .min(0) .max(1) .default(DEFAULT_RELAY_RETRY_JITTER_RATIO), + RELAY_MAX_IN_FLIGHT_DELIVERIES: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES), + RELAY_MAX_FRAME_BYTES: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_MAX_FRAME_BYTES), }); export const proxyConfigSchema = z.object({ @@ -167,6 +179,8 @@ export const proxyConfigSchema = z.object({ relayRetryMaxMs: z.number().int().positive(), relayRetryMaxAttempts: z.number().int().positive(), relayRetryJitterRatio: z.number().min(0).max(1), + relayMaxInFlightDeliveries: z.number().int().positive(), + relayMaxFrameBytes: z.number().int().positive(), }); export type ProxyConfig = z.infer; @@ -196,6 +210,8 @@ type RuntimeEnvInput = { 
RELAY_RETRY_MAX_MS?: unknown; RELAY_RETRY_MAX_ATTEMPTS?: unknown; RELAY_RETRY_JITTER_RATIO?: unknown; + RELAY_MAX_IN_FLIGHT_DELIVERIES?: unknown; + RELAY_MAX_FRAME_BYTES?: unknown; OPENCLAW_STATE_DIR?: unknown; HOME?: unknown; USERPROFILE?: unknown; @@ -496,6 +512,10 @@ function normalizeRuntimeEnv(input: unknown): Record { RELAY_RETRY_MAX_MS: firstNonEmpty(env, ["RELAY_RETRY_MAX_MS"]), RELAY_RETRY_MAX_ATTEMPTS: firstNonEmpty(env, ["RELAY_RETRY_MAX_ATTEMPTS"]), RELAY_RETRY_JITTER_RATIO: firstNonEmpty(env, ["RELAY_RETRY_JITTER_RATIO"]), + RELAY_MAX_IN_FLIGHT_DELIVERIES: firstNonEmpty(env, [ + "RELAY_MAX_IN_FLIGHT_DELIVERIES", + ]), + RELAY_MAX_FRAME_BYTES: firstNonEmpty(env, ["RELAY_MAX_FRAME_BYTES"]), }; } @@ -617,6 +637,9 @@ export function parseProxyConfig( relayRetryMaxMs: parsedRuntimeEnv.data.RELAY_RETRY_MAX_MS, relayRetryMaxAttempts: parsedRuntimeEnv.data.RELAY_RETRY_MAX_ATTEMPTS, relayRetryJitterRatio: parsedRuntimeEnv.data.RELAY_RETRY_JITTER_RATIO, + relayMaxInFlightDeliveries: + parsedRuntimeEnv.data.RELAY_MAX_IN_FLIGHT_DELIVERIES, + relayMaxFrameBytes: parsedRuntimeEnv.data.RELAY_MAX_FRAME_BYTES, }; if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID !== undefined) { candidateConfig.registryInternalServiceId = diff --git a/apps/proxy/src/relay-delivery-receipt-route.test.ts b/apps/proxy/src/relay-delivery-receipt-route.test.ts new file mode 100644 index 0000000..c0e0ab2 --- /dev/null +++ b/apps/proxy/src/relay-delivery-receipt-route.test.ts @@ -0,0 +1,313 @@ +import { describe, expect, it, vi } from "vitest"; + +vi.mock("./auth-middleware.js", async () => { + const { createMiddleware } = await import("hono/factory"); + + return { + createProxyAuthMiddleware: () => + createMiddleware(async (c, next) => { + if (c.req.header("x-test-missing-auth") !== "1") { + c.set("auth", { + agentDid: + c.req.header("x-test-auth-agent-did") ?? 
"did:claw:agent:alpha", + ownerDid: "did:claw:owner:alpha", + issuer: "https://registry.example.com", + aitJti: "ait-jti-alpha", + cnfPublicKey: "test-public-key", + }); + } + + await next(); + }), + }; +}); + +import type { + AgentRelaySessionNamespace, + AgentRelaySessionStub, + RelayReceiptLookupInput, + RelayReceiptRecordInput, +} from "./agent-relay-session.js"; +import { parseProxyConfig } from "./config.js"; +import type { ProxyTrustStore } from "./proxy-trust-store.js"; +import { RELAY_DELIVERY_RECEIPTS_PATH } from "./relay-delivery-receipt-route.js"; +import { createProxyApp } from "./server.js"; + +function createRelayReceiptHarness(input?: { + lookupFound?: boolean; + lookupReceiptState?: + | "queued" + | "delivered" + | "processed_by_openclaw" + | "dead_lettered"; + recordStatus?: number; + lookupStatus?: number; +}) { + const recordInputs: RelayReceiptRecordInput[] = []; + const lookupInputs: RelayReceiptLookupInput[] = []; + + const relayStub: AgentRelaySessionStub = { + fetch: vi.fn(async (request: Request) => { + const url = new URL(request.url); + if ( + request.method === "POST" && + url.pathname === "/rpc/record-delivery-receipt" + ) { + const payload = (await request.json()) as RelayReceiptRecordInput; + recordInputs.push(payload); + + const status = input?.recordStatus ?? 202; + if (status >= 400) { + return new Response("record failed", { status }); + } + + return Response.json({ accepted: true }, { status }); + } + + if ( + request.method === "POST" && + url.pathname === "/rpc/get-delivery-receipt" + ) { + const payload = (await request.json()) as RelayReceiptLookupInput; + lookupInputs.push(payload); + + const status = input?.lookupStatus ?? 200; + if (status >= 400) { + return new Response("lookup failed", { status }); + } + + const found = input?.lookupFound ?? true; + return Response.json( + found + ? { + found: true, + receipt: { + deliveryId: "dlv_1", + requestId: payload.requestId, + state: input?.lookupReceiptState ?? 
"processed_by_openclaw", + senderAgentDid: payload.senderAgentDid, + recipientAgentDid: "did:claw:agent:beta", + statusUpdatedAt: "2026-02-20T00:00:00.000Z", + expiresAtMs: Date.now() + 60_000, + }, + } + : { found: false }, + { status }, + ); + } + + return new Response("not found", { status: 404 }); + }), + }; + + const doId = { toString: () => "relay-do" } as unknown as DurableObjectId; + + return { + recordInputs, + lookupInputs, + namespace: { + idFromName: vi.fn((_name: string) => doId), + get: vi.fn((_id: DurableObjectId) => relayStub), + } satisfies AgentRelaySessionNamespace, + }; +} + +function createApp(input: { + allowedPairs: Array<{ initiator: string; responder: string }>; +}) { + const trustStore: ProxyTrustStore = { + createPairingTicket: vi.fn(), + confirmPairingTicket: vi.fn(), + getPairingTicketStatus: vi.fn(), + isAgentKnown: vi.fn(async () => true), + isPairAllowed: vi.fn(async (pair) => + input.allowedPairs.some( + (allowed) => + allowed.initiator === pair.initiatorAgentDid && + allowed.responder === pair.responderAgentDid, + ), + ), + upsertPair: vi.fn(async () => {}), + }; + + return createProxyApp({ + config: parseProxyConfig({}), + trustStore, + }); +} + +describe("relay delivery receipt route", () => { + it("accepts POST receipt updates for authenticated recipient", async () => { + const relayHarness = createRelayReceiptHarness(); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:beta", + responder: "did:claw:agent:alpha", + }, + ], + }); + + const response = await app.request( + RELAY_DELIVERY_RECEIPTS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": "token", + }, + body: JSON.stringify({ + requestId: "req-1", + senderAgentDid: "did:claw:agent:beta", + recipientAgentDid: "did:claw:agent:alpha", + status: "processed_by_openclaw", + }), + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(202); + expect(await 
response.json()).toEqual({ accepted: true }); + expect(relayHarness.recordInputs).toHaveLength(1); + expect(relayHarness.recordInputs[0]?.requestId).toBe("req-1"); + expect(relayHarness.recordInputs[0]?.status).toBe("processed_by_openclaw"); + }); + + it("rejects POST when recipient differs from authenticated agent DID", async () => { + const relayHarness = createRelayReceiptHarness(); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:beta", + responder: "did:claw:agent:alpha", + }, + ], + }); + + const response = await app.request( + RELAY_DELIVERY_RECEIPTS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": "token", + }, + body: JSON.stringify({ + requestId: "req-2", + senderAgentDid: "did:claw:agent:beta", + recipientAgentDid: "did:claw:agent:gamma", + status: "dead_lettered", + }), + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_RECEIPT_FORBIDDEN"); + expect(relayHarness.recordInputs).toHaveLength(0); + }); + + it("returns receipt on GET when trusted pair exists", async () => { + const relayHarness = createRelayReceiptHarness(); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:alpha", + responder: "did:claw:agent:beta", + }, + ], + }); + + const response = await app.request( + `${RELAY_DELIVERY_RECEIPTS_PATH}?requestId=req-3&recipientAgentDid=did:claw:agent:beta`, + { + method: "GET", + headers: { + "x-claw-agent-access": "token", + }, + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + found: boolean; + receipt?: { requestId: string; state: string }; + }; + expect(body.found).toBe(true); + expect(body.receipt?.requestId).toBe("req-3"); + 
expect(body.receipt?.state).toBe("processed_by_openclaw"); + expect(relayHarness.lookupInputs).toHaveLength(1); + expect(relayHarness.lookupInputs[0]?.senderAgentDid).toBe( + "did:claw:agent:alpha", + ); + }); + + it("returns 404 on GET when receipt is not found", async () => { + const relayHarness = createRelayReceiptHarness({ + lookupFound: false, + }); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:alpha", + responder: "did:claw:agent:beta", + }, + ], + }); + + const response = await app.request( + `${RELAY_DELIVERY_RECEIPTS_PATH}?requestId=req-4&recipientAgentDid=did:claw:agent:beta`, + { + method: "GET", + headers: { + "x-claw-agent-access": "token", + }, + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(404); + expect(await response.json()).toEqual({ found: false }); + }); + + it("returns 502 on GET when relay receipt lookup RPC fails", async () => { + const relayHarness = createRelayReceiptHarness({ + lookupStatus: 500, + }); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:alpha", + responder: "did:claw:agent:beta", + }, + ], + }); + + const response = await app.request( + `${RELAY_DELIVERY_RECEIPTS_PATH}?requestId=req-5&recipientAgentDid=did:claw:agent:beta`, + { + method: "GET", + headers: { + "x-claw-agent-access": "token", + }, + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(502); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_RECEIPT_READ_FAILED"); + }); +}); diff --git a/apps/proxy/src/relay-delivery-receipt-route.ts b/apps/proxy/src/relay-delivery-receipt-route.ts new file mode 100644 index 0000000..c4a6c86 --- /dev/null +++ b/apps/proxy/src/relay-delivery-receipt-route.ts @@ -0,0 +1,211 @@ +import { AppError, type Logger } from "@clawdentity/sdk"; +import type { Context } from "hono"; +import { + type 
AgentRelaySessionNamespace, + getRelayDeliveryReceipt, + type RelayReceiptRecordInput, + RelaySessionDeliveryError, + recordRelayDeliveryReceipt, +} from "./agent-relay-session.js"; +import type { ProxyRequestVariables } from "./auth-middleware.js"; +import type { ProxyTrustStore } from "./proxy-trust-store.js"; +import { assertTrustedPair } from "./trust-policy.js"; + +export { RELAY_DELIVERY_RECEIPTS_PATH } from "@clawdentity/protocol"; + +type ProxyContext = Context<{ + Variables: ProxyRequestVariables; + Bindings: { + AGENT_RELAY_SESSION?: AgentRelaySessionNamespace; + }; +}>; + +type CreateRelayDeliveryReceiptHandlersInput = { + logger: Logger; + trustStore: ProxyTrustStore; +}; + +function parseRecordInput(payload: unknown): RelayReceiptRecordInput { + if (typeof payload !== "object" || payload === null) { + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_INVALID_INPUT", + message: "Relay delivery receipt payload is invalid", + status: 400, + expose: true, + }); + } + + const input = payload as Partial; + if ( + typeof input.requestId !== "string" || + typeof input.senderAgentDid !== "string" || + typeof input.recipientAgentDid !== "string" || + (input.status !== "processed_by_openclaw" && + input.status !== "dead_lettered") + ) { + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_INVALID_INPUT", + message: "Relay delivery receipt payload is invalid", + status: 400, + expose: true, + }); + } + + return { + requestId: input.requestId, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + status: input.status, + reason: + typeof input.reason === "string" && input.reason.trim().length > 0 + ? 
input.reason.trim() + : undefined, + }; +} + +function parseRequiredQuery(value: string | undefined, field: string): string { + if (typeof value !== "string" || value.trim().length === 0) { + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_INVALID_QUERY", + message: `Missing query parameter: ${field}`, + status: 400, + expose: true, + }); + } + + return value.trim(); +} + +function resolveSessionNamespace(c: ProxyContext): AgentRelaySessionNamespace { + const namespace = c.env.AGENT_RELAY_SESSION; + if (namespace === undefined) { + throw new AppError({ + code: "PROXY_RELAY_UNAVAILABLE", + message: "Relay session namespace is unavailable", + status: 503, + }); + } + + return namespace; +} + +export function createRelayDeliveryReceiptPostHandler( + input: CreateRelayDeliveryReceiptHandlersInput, +): (c: ProxyContext) => Promise { + return async (c) => { + const auth = c.get("auth"); + if (!auth) { + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const payload = parseRecordInput(await c.req.json()); + if (payload.recipientAgentDid !== auth.agentDid) { + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_FORBIDDEN", + message: "Recipient DID does not match authenticated agent", + status: 403, + expose: true, + }); + } + + await assertTrustedPair({ + trustStore: input.trustStore, + initiatorAgentDid: payload.senderAgentDid, + responderAgentDid: payload.recipientAgentDid, + }); + + const sessionNamespace = resolveSessionNamespace(c); + const relaySession = sessionNamespace.get( + sessionNamespace.idFromName(payload.recipientAgentDid), + ); + + try { + await recordRelayDeliveryReceipt(relaySession, payload); + } catch (error) { + if (error instanceof RelaySessionDeliveryError) { + input.logger.warn("proxy.relay.receipt_record_failed", { + code: error.code, + status: error.status, + }); + } + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_WRITE_FAILED", + message: 
"Failed to record relay delivery receipt", + status: 502, + }); + } + + return c.json({ accepted: true }, 202); + }; +} + +export function createRelayDeliveryReceiptGetHandler( + input: CreateRelayDeliveryReceiptHandlersInput, +): (c: ProxyContext) => Promise { + return async (c) => { + const auth = c.get("auth"); + if (!auth) { + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_AUTH_CONTEXT_MISSING", + message: "Verified auth context is required", + status: 500, + }); + } + + const requestId = parseRequiredQuery(c.req.query("requestId"), "requestId"); + const recipientAgentDid = parseRequiredQuery( + c.req.query("recipientAgentDid"), + "recipientAgentDid", + ); + + await assertTrustedPair({ + trustStore: input.trustStore, + initiatorAgentDid: auth.agentDid, + responderAgentDid: recipientAgentDid, + }); + + const sessionNamespace = resolveSessionNamespace(c); + const relaySession = sessionNamespace.get( + sessionNamespace.idFromName(recipientAgentDid), + ); + + try { + const lookup = await getRelayDeliveryReceipt(relaySession, { + requestId, + senderAgentDid: auth.agentDid, + }); + if (!lookup.found || lookup.receipt === undefined) { + return c.json( + { + found: false, + }, + 404, + ); + } + + return c.json( + { + found: true, + receipt: lookup.receipt, + }, + 200, + ); + } catch (error) { + if (error instanceof RelaySessionDeliveryError) { + input.logger.warn("proxy.relay.receipt_lookup_failed", { + code: error.code, + status: error.status, + }); + } + throw new AppError({ + code: "PROXY_RELAY_RECEIPT_READ_FAILED", + message: "Failed to read relay delivery receipt", + status: 502, + }); + } + }; +} diff --git a/apps/proxy/src/server.ts b/apps/proxy/src/server.ts index 421ce0c..b0f2673 100644 --- a/apps/proxy/src/server.ts +++ b/apps/proxy/src/server.ts @@ -47,6 +47,11 @@ import { createRelayConnectHandler, type RelayConnectRuntimeOptions, } from "./relay-connect-route.js"; +import { + createRelayDeliveryReceiptGetHandler, + createRelayDeliveryReceiptPostHandler, 
+ RELAY_DELIVERY_RECEIPTS_PATH, +} from "./relay-delivery-receipt-route.js"; type ProxyAuthRuntimeOptions = { fetchImpl?: typeof fetch; @@ -185,6 +190,20 @@ export function createProxyApp(options: CreateProxyAppOptions): ProxyApp { ...options.relay, }), ); + app.post( + RELAY_DELIVERY_RECEIPTS_PATH, + createRelayDeliveryReceiptPostHandler({ + logger, + trustStore, + }), + ); + app.get( + RELAY_DELIVERY_RECEIPTS_PATH, + createRelayDeliveryReceiptGetHandler({ + logger, + trustStore, + }), + ); options.registerRoutes?.(app); return app; diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index fec844a..2c74849 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -38,6 +38,8 @@ export type ProxyWorkerBindings = { RELAY_RETRY_MAX_MS?: string; RELAY_RETRY_MAX_ATTEMPTS?: string; RELAY_RETRY_JITTER_RATIO?: string; + RELAY_MAX_IN_FLIGHT_DELIVERIES?: string; + RELAY_MAX_FRAME_BYTES?: string; APP_VERSION?: string; PROXY_VERSION?: string; [key: string]: unknown; @@ -74,6 +76,8 @@ function toCacheKey(env: ProxyWorkerBindings): string { env.RELAY_RETRY_MAX_MS, env.RELAY_RETRY_MAX_ATTEMPTS, env.RELAY_RETRY_JITTER_RATIO, + env.RELAY_MAX_IN_FLIGHT_DELIVERIES, + env.RELAY_MAX_FRAME_BYTES, env.APP_VERSION, env.PROXY_VERSION, ]; diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 850ffbc..35a7241 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -9,15 +9,32 @@ ## Inbound Durability Rules - Connector must persist inbound relay payloads before sending `deliver_ack accepted=true`. - Persist connector inbox state as atomic JSON index + append-only JSONL events under `agents//inbound-inbox/`. +- Inbound inbox index schema is `version: 2` with explicit `pendingByRequestId` + `deadLetterByRequestId`; do not add backward-compat parsing paths for older index versions. - Inbox dedupe key is request/frame id; duplicates must not create extra pending entries. 
- Replay must continue after runtime restarts by loading pending entries from inbox index at startup. - Do not drop pending entries on transient replay failures; reschedule with bounded backoff. +- Non-retryable replay failures must move to dead-letter after `CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS`. +- Dead-letter operations (`listDeadLetter`, `replayDeadLetter`, `purgeDeadLetter`) must update bytes/count accounting atomically with index writes. +- Keep index writes guarded by the local advisory lock file (`index.lock`) to avoid concurrent writer corruption across processes. +- Keep event log growth bounded via rotation (`eventsMaxBytes`, `eventsMaxFiles`) rather than unbounded `events.jsonl` growth. +- Preserve inbound `conversationId` and `replyTo` metadata through inbox persistence and replay delivery. ## Replay/Health Rules - Keep replay configuration environment-driven via `CONNECTOR_INBOUND_*` vars with safe defaults from `constants.ts`. -- `/v1/status` must include websocket state and inbound replay health (`pendingCount`, `oldestPendingAt`, replay activity/error, hook status). +- `/v1/status` must use the nested contract: + - `websocket.{connected,connectAttempts,reconnectCount,uptimeMs,lastConnectedAt}` + - `inbound.pending` + - `inbound.deadLetter` + - `inbound.replay` + - `inbound.openclawHook` + - `metrics.{heartbeat,inboundDelivery,outboundQueue}` - On inbox/status read failures, return explicit structured errors instead of crashing runtime. - Keep connector runtime/inbox timestamps standardized via shared SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`) instead of ad-hoc datetime formatting. +- Keep dead-letter operational endpoints stable: + - `GET /v1/inbound/dead-letter` + - `POST /v1/inbound/dead-letter/replay` + - `POST /v1/inbound/dead-letter/purge` +- For replay delivery callbacks, post signed receipts to peer proxies using `replyTo` with statuses `processed_by_openclaw` and `dead_lettered`. 
## WebSocket Resilience Rules - Keep websocket reconnect behavior centralized in `client.ts` (single cleanup path for close/error/unexpected-response/timeout). @@ -25,9 +42,12 @@ - Track outbound heartbeat IDs and clear pending entries only when matching `heartbeat_ack` frames are received. - If heartbeat ack timeout expires, disconnect and reconnect using the same reconnect policy used for other transport failures. - Handle `unexpected-response` status codes from ws upgrade failures; for `401`, trigger `onAuthUpgradeRejected` and allow one immediate reconnect before normal backoff. +- Keep outbound enqueue buffering durable when configured via `outboundQueuePersistence`; load once before replaying queued frames and persist on enqueue/dequeue transitions. +- Keep websocket/client metrics in `ConnectorClient` (`getMetricsSnapshot`) so runtime health does not recompute transport stats ad hoc. ## Testing Rules -- `inbound-inbox.test.ts` must cover persistence, dedupe, cap enforcement, and replay bookkeeping transitions. +- `inbound-inbox.test.ts` must cover persistence, dedupe, cap enforcement, replay bookkeeping, dead-letter thresholding, dead-letter replay, and dead-letter purge transitions. - `client.test.ts` must cover both delivery modes: - direct local OpenClaw delivery fallback - injected inbound persistence handler ack path +- `client.test.ts` must keep websocket lifecycle expectations compatible with non-persistent and persistent queue modes. 
diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index 8ec3662..096e9cf 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -65,6 +65,40 @@ export type ConnectorClientHooks = { onDeliverFailed?: (frame: DeliverFrame, error: unknown) => void; }; +export type ConnectorOutboundQueuePersistence = { + load: () => Promise; + save: (frames: EnqueueFrame[]) => Promise; +}; + +export type ConnectorClientMetricsSnapshot = { + connection: { + connectAttempts: number; + connected: boolean; + reconnectCount: number; + uptimeMs: number; + lastConnectedAt?: string; + }; + heartbeat: { + avgRttMs?: number; + maxRttMs?: number; + lastRttMs?: number; + pendingAckCount: number; + sampleCount: number; + }; + inboundDelivery: { + avgAckLatencyMs?: number; + maxAckLatencyMs?: number; + lastAckLatencyMs?: number; + sampleCount: number; + }; + outboundQueue: { + currentDepth: number; + loadedFromPersistence: boolean; + maxDepth: number; + persistenceEnabled: boolean; + }; +}; + export type ConnectorClientOptions = { connectorUrl: string; connectionHeaders?: Record; @@ -94,6 +128,7 @@ export type ConnectorClientOptions = { fetchImpl?: typeof fetch; logger?: Logger; hooks?: ConnectorClientHooks; + outboundQueuePersistence?: ConnectorOutboundQueuePersistence; inboundDeliverHandler?: | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) | undefined; @@ -281,6 +316,9 @@ export class ConnectorClient { private readonly fetchImpl: typeof fetch; private readonly logger: Logger; private readonly hooks: ConnectorClientHooks; + private readonly outboundQueuePersistence: + | ConnectorOutboundQueuePersistence + | undefined; private readonly inboundDeliverHandler: | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) | undefined; @@ -295,6 +333,23 @@ export class ConnectorClient { private heartbeatAckTimeout: ReturnType | undefined; private readonly pendingHeartbeatAcks = new Map(); private 
reconnectAttempt = 0; + private reconnectCount = 0; + private connectAttempts = 0; + private connectedSinceMs: number | undefined; + private accumulatedConnectedMs = 0; + private lastConnectedAtIso: string | undefined; + private heartbeatRttSampleCount = 0; + private heartbeatRttTotalMs = 0; + private heartbeatRttMaxMs = 0; + private heartbeatRttLastMs: number | undefined; + private inboundAckLatencySampleCount = 0; + private inboundAckLatencyTotalMs = 0; + private inboundAckLatencyMaxMs = 0; + private inboundAckLatencyLastMs: number | undefined; + private maxObservedOutboundQueueDepth = 0; + private outboundQueueLoaded = false; + private outboundQueueLoadPromise: Promise | undefined; + private outboundQueueSaveChain: Promise = Promise.resolve(); private authUpgradeImmediateRetryUsed = false; private started = false; private readonly outboundQueue: EnqueueFrame[] = []; @@ -367,6 +422,7 @@ export class ConnectorClient { options.logger ?? createLogger({ service: "connector", module: "client" }); this.hooks = options.hooks ?? {}; + this.outboundQueuePersistence = options.outboundQueuePersistence; this.inboundDeliverHandler = options.inboundDeliverHandler; this.now = options.now ?? Date.now; this.random = options.random ?? Math.random; @@ -384,6 +440,9 @@ export class ConnectorClient { } this.started = true; + if (this.outboundQueuePersistence !== undefined) { + void this.ensureOutboundQueueLoaded(); + } void this.connectSocket(); } @@ -407,6 +466,57 @@ export class ConnectorClient { return this.outboundQueue.length; } + getMetricsSnapshot(): ConnectorClientMetricsSnapshot { + const nowMs = this.now(); + const uptimeMs = + this.accumulatedConnectedMs + + (this.connectedSinceMs === undefined ? 
0 : nowMs - this.connectedSinceMs); + + return { + connection: { + connectAttempts: this.connectAttempts, + connected: this.isConnected(), + reconnectCount: this.reconnectCount, + uptimeMs: Math.max(0, uptimeMs), + lastConnectedAt: this.lastConnectedAtIso, + }, + heartbeat: { + pendingAckCount: this.pendingHeartbeatAcks.size, + sampleCount: this.heartbeatRttSampleCount, + lastRttMs: this.heartbeatRttLastMs, + maxRttMs: + this.heartbeatRttSampleCount > 0 ? this.heartbeatRttMaxMs : undefined, + avgRttMs: + this.heartbeatRttSampleCount > 0 + ? Math.floor( + this.heartbeatRttTotalMs / this.heartbeatRttSampleCount, + ) + : undefined, + }, + inboundDelivery: { + sampleCount: this.inboundAckLatencySampleCount, + lastAckLatencyMs: this.inboundAckLatencyLastMs, + maxAckLatencyMs: + this.inboundAckLatencySampleCount > 0 + ? this.inboundAckLatencyMaxMs + : undefined, + avgAckLatencyMs: + this.inboundAckLatencySampleCount > 0 + ? Math.floor( + this.inboundAckLatencyTotalMs / + this.inboundAckLatencySampleCount, + ) + : undefined, + }, + outboundQueue: { + currentDepth: this.outboundQueue.length, + maxDepth: this.maxObservedOutboundQueueDepth, + loadedFromPersistence: this.outboundQueueLoaded, + persistenceEnabled: this.outboundQueuePersistence !== undefined, + }, + }; + } + enqueueOutbound(input: ConnectorOutboundEnqueueInput): EnqueueFrame { const frame = enqueueFrameSchema.parse({ v: CONNECTOR_FRAME_VERSION, @@ -420,12 +530,18 @@ export class ConnectorClient { }); this.outboundQueue.push(frame); + this.recordOutboundQueueDepth(); + this.persistOutboundQueue(); this.flushOutboundQueue(); return frame; } private async connectSocket(): Promise { this.clearReconnectTimeout(); + this.connectAttempts += 1; + if (this.outboundQueuePersistence !== undefined) { + await this.ensureOutboundQueueLoaded(); + } let connectionHeaders = this.connectionHeaders; if (this.connectionHeadersProvider) { @@ -468,6 +584,8 @@ export class ConnectorClient { this.clearHeartbeatTracking(); 
this.reconnectAttempt = 0; this.authUpgradeImmediateRetryUsed = false; + this.connectedSinceMs = this.now(); + this.lastConnectedAtIso = this.makeTimestamp(); this.logger.info("connector.websocket.connected", { url: this.connectorUrl, }); @@ -580,6 +698,7 @@ export class ConnectorClient { if (options?.incrementAttempt ?? true) { this.reconnectAttempt += 1; } + this.reconnectCount += 1; this.reconnectTimeout = setTimeout(() => { void this.connectSocket(); @@ -651,6 +770,13 @@ export class ConnectorClient { } this.socket = undefined; + if (this.connectedSinceMs !== undefined) { + this.accumulatedConnectedMs += Math.max( + 0, + this.now() - this.connectedSinceMs, + ); + this.connectedSinceMs = undefined; + } this.clearSocketState(); return true; } @@ -764,9 +890,16 @@ export class ConnectorClient { } private handleHeartbeatAckFrame(frame: HeartbeatAckFrame): void { - if (!this.pendingHeartbeatAcks.delete(frame.ackId)) { + const sentAtMs = this.pendingHeartbeatAcks.get(frame.ackId); + if (sentAtMs === undefined) { return; } + this.pendingHeartbeatAcks.delete(frame.ackId); + const rttMs = Math.max(0, this.now() - sentAtMs); + this.heartbeatRttSampleCount += 1; + this.heartbeatRttTotalMs += rttMs; + this.heartbeatRttMaxMs = Math.max(this.heartbeatRttMaxMs, rttMs); + this.heartbeatRttLastMs = rttMs; this.scheduleHeartbeatAckTimeoutCheck(); } @@ -849,9 +982,89 @@ export class ConnectorClient { return; } this.outboundQueue.shift(); + this.persistOutboundQueue(); } } + private recordOutboundQueueDepth(): void { + this.maxObservedOutboundQueueDepth = Math.max( + this.maxObservedOutboundQueueDepth, + this.outboundQueue.length, + ); + } + + private persistOutboundQueue(): void { + if (this.outboundQueuePersistence === undefined) { + return; + } + + this.outboundQueueSaveChain = this.outboundQueueSaveChain + .then(async () => { + await this.ensureOutboundQueueLoaded(); + await this.outboundQueuePersistence?.save([...this.outboundQueue]); + }) + .catch((error) => { + 
this.logger.warn("connector.outbound.persistence_save_failed", { + reason: sanitizeErrorReason(error), + }); + }); + } + + private async ensureOutboundQueueLoaded(): Promise { + if (this.outboundQueueLoaded) { + return; + } + + if (this.outboundQueuePersistence === undefined) { + this.outboundQueueLoaded = true; + return; + } + + if (this.outboundQueueLoadPromise !== undefined) { + await this.outboundQueueLoadPromise; + return; + } + + this.outboundQueueLoadPromise = (async () => { + try { + const loadedFrames = await this.outboundQueuePersistence?.load(); + if (!loadedFrames || loadedFrames.length === 0) { + return; + } + + const existingIds = new Set(this.outboundQueue.map((item) => item.id)); + const validLoadedFrames: EnqueueFrame[] = []; + for (const candidate of loadedFrames) { + const parsed = enqueueFrameSchema.safeParse(candidate); + if (!parsed.success) { + continue; + } + if (existingIds.has(parsed.data.id)) { + continue; + } + validLoadedFrames.push(parsed.data); + existingIds.add(parsed.data.id); + } + + if (validLoadedFrames.length === 0) { + return; + } + + this.outboundQueue.unshift(...validLoadedFrames); + this.recordOutboundQueueDepth(); + } catch (error) { + this.logger.warn("connector.outbound.persistence_load_failed", { + reason: sanitizeErrorReason(error), + }); + } finally { + this.outboundQueueLoaded = true; + } + })(); + + await this.outboundQueueLoadPromise; + this.flushOutboundQueue(); + } + private sendFrame(frame: ConnectorFrame): boolean { const socket = this.socket; if (socket === undefined || socket.readyState !== WS_READY_STATE_OPEN) { @@ -915,6 +1128,7 @@ export class ConnectorClient { } private async handleDeliverFrame(frame: DeliverFrame): Promise { + const startedAtMs = this.now(); if (this.inboundDeliverHandler !== undefined) { try { const result = await this.inboundDeliverHandler(frame); @@ -940,6 +1154,7 @@ export class ConnectorClient { ), ); } + this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); } catch (error) 
{ const ackFrame: DeliverAckFrame = { v: CONNECTOR_FRAME_VERSION, @@ -952,6 +1167,7 @@ export class ConnectorClient { }; this.sendFrame(ackFrame); this.hooks.onDeliverFailed?.(frame, error); + this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); } return; } @@ -969,6 +1185,7 @@ export class ConnectorClient { this.sendFrame(ackFrame); this.hooks.onDeliverSucceeded?.(frame); + this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); } catch (error) { const ackFrame: DeliverAckFrame = { v: CONNECTOR_FRAME_VERSION, @@ -982,9 +1199,21 @@ export class ConnectorClient { this.sendFrame(ackFrame); this.hooks.onDeliverFailed?.(frame, error); + this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); } } + private recordInboundDeliveryAckLatency(durationMs: number): void { + const latencyMs = Math.max(0, Math.floor(durationMs)); + this.inboundAckLatencySampleCount += 1; + this.inboundAckLatencyTotalMs += latencyMs; + this.inboundAckLatencyMaxMs = Math.max( + this.inboundAckLatencyMaxMs, + latencyMs, + ); + this.inboundAckLatencyLastMs = latencyMs; + } + private async deliverToLocalOpenclaw(frame: DeliverFrame): Promise { const controller = new AbortController(); const timeout = setTimeout(() => { diff --git a/packages/connector/src/constants.ts b/packages/connector/src/constants.ts index 3e40e56..9efacc4 100644 --- a/packages/connector/src/constants.ts +++ b/packages/connector/src/constants.ts @@ -31,6 +31,9 @@ export const DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE = 25; export const DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS = 1_000; export const DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS = 60_000; export const DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR = 2; +export const DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES = 10 * 1024 * 1024; +export const DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES = 5; +export const DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS = 5; export const AGENT_ACCESS_HEADER = "x-claw-agent-access"; 
diff --git a/packages/connector/src/inbound-inbox.test.ts b/packages/connector/src/inbound-inbox.test.ts index 2379f47..7d4a577 100644 --- a/packages/connector/src/inbound-inbox.test.ts +++ b/packages/connector/src/inbound-inbox.test.ts @@ -1,7 +1,7 @@ import { mkdirSync, mkdtempSync, readFileSync, rmSync } from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; -import { afterEach, describe, expect, it } from "vitest"; +import { describe, expect, it } from "vitest"; import { createConnectorInboundInbox, resolveConnectorInboundInboxDir, @@ -17,21 +17,23 @@ function createSandbox(): { cleanup: () => void; rootDir: string } { }; } -afterEach(() => { - // no-op hook for symmetry and future timer cleanup -}); +function createInbox(rootDir: string, agentName = "alpha") { + return createConnectorInboundInbox({ + configDir: rootDir, + agentName, + maxPendingMessages: 100, + maxPendingBytes: 1024 * 1024, + eventsMaxBytes: 1024 * 1024, + eventsMaxFiles: 5, + }); +} describe("ConnectorInboundInbox", () => { it("persists and deduplicates inbound frames", async () => { const sandbox = createSandbox(); try { - const inbox = createConnectorInboundInbox({ - configDir: sandbox.rootDir, - agentName: "alpha", - maxPendingMessages: 100, - maxPendingBytes: 1024 * 1024, - }); + const inbox = createInbox(sandbox.rootDir); const first = await inbox.enqueue({ v: 1, @@ -59,8 +61,9 @@ describe("ConnectorInboundInbox", () => { expect(second.pendingCount).toBe(1); const snapshot = await inbox.getSnapshot(); - expect(snapshot.pendingCount).toBe(1); - expect(snapshot.pendingBytes).toBeGreaterThan(0); + expect(snapshot.pending.pendingCount).toBe(1); + expect(snapshot.pending.pendingBytes).toBeGreaterThan(0); + expect(snapshot.deadLetter.deadLetterCount).toBe(0); const inboxDir = resolveConnectorInboundInboxDir({ configDir: sandbox.rootDir, @@ -70,7 +73,10 @@ describe("ConnectorInboundInbox", () => { const eventsPath = join(inboxDir, "events.jsonl"); const indexRaw = 
readFileSync(indexPath, "utf8"); + expect(indexRaw).toContain('"version": 2'); expect(indexRaw).toContain("pendingByRequestId"); + expect(indexRaw).toContain("deadLetterByRequestId"); + const eventsRaw = readFileSync(eventsPath, "utf8"); expect(eventsRaw).toContain("inbound_persisted"); expect(eventsRaw).toContain("inbound_duplicate"); @@ -88,6 +94,8 @@ describe("ConnectorInboundInbox", () => { agentName: "alpha", maxPendingMessages: 1, maxPendingBytes: 64, + eventsMaxBytes: 1024 * 1024, + eventsMaxFiles: 5, }); const accepted = await inbox.enqueue({ @@ -120,7 +128,10 @@ describe("ConnectorInboundInbox", () => { agentName: "beta", maxPendingMessages: 100, maxPendingBytes: 8, + eventsMaxBytes: 1024 * 1024, + eventsMaxFiles: 5, }); + const rejectedByBytes = await byteCapped.enqueue({ v: 1, type: "deliver", @@ -140,16 +151,11 @@ describe("ConnectorInboundInbox", () => { } }); - it("replays bookkeeping updates pending entries", async () => { + it("moves non-retryable replay failures to dead-letter after threshold", async () => { const sandbox = createSandbox(); try { - const inbox = createConnectorInboundInbox({ - configDir: sandbox.rootDir, - agentName: "alpha", - maxPendingMessages: 100, - maxPendingBytes: 1024 * 1024, - }); + const inbox = createInbox(sandbox.rootDir); await inbox.enqueue({ v: 1, @@ -161,28 +167,98 @@ describe("ConnectorInboundInbox", () => { payload: { message: "hello" }, }); - const dueNow = await inbox.listDuePending({ - nowMs: Date.now(), - limit: 10, + const firstFailure = await inbox.markReplayFailure({ + requestId: "01HXYZTESTDELIVER000000000004", + errorMessage: "validation failed", + nextAttemptAt: new Date(Date.now() + 60_000).toISOString(), + retryable: false, + maxNonRetryableAttempts: 2, }); - expect(dueNow).toHaveLength(1); - expect(dueNow[0]?.requestId).toBe("01HXYZTESTDELIVER000000000004"); + expect(firstFailure.movedToDeadLetter).toBe(false); - await inbox.markReplayFailure({ + const secondFailure = await inbox.markReplayFailure({ 
requestId: "01HXYZTESTDELIVER000000000004", - errorMessage: "hook unavailable", + errorMessage: "validation failed", + nextAttemptAt: new Date(Date.now() + 120_000).toISOString(), + retryable: false, + maxNonRetryableAttempts: 2, + }); + expect(secondFailure.movedToDeadLetter).toBe(true); + + const snapshot = await inbox.getSnapshot(); + expect(snapshot.pending.pendingCount).toBe(0); + expect(snapshot.deadLetter.deadLetterCount).toBe(1); + + const deadLetter = await inbox.listDeadLetter(); + expect(deadLetter).toHaveLength(1); + expect(deadLetter[0]?.requestId).toBe("01HXYZTESTDELIVER000000000004"); + expect(deadLetter[0]?.deadLetterReason).toContain("validation failed"); + } finally { + sandbox.cleanup(); + } + }); + + it("supports dead-letter replay and purge", async () => { + const sandbox = createSandbox(); + + try { + const inbox = createInbox(sandbox.rootDir); + const firstId = "01HXYZTESTDELIVER000000000005"; + const secondId = "01HXYZTESTDELIVER000000000006"; + + await inbox.enqueue({ + v: 1, + type: "deliver", + id: firstId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "first" }, + }); + await inbox.enqueue({ + v: 1, + type: "deliver", + id: secondId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: "did:claw:agent:sender", + toAgentDid: "did:claw:agent:receiver", + payload: { message: "second" }, + }); + + await inbox.markReplayFailure({ + requestId: firstId, + errorMessage: "hard failure", + nextAttemptAt: new Date(Date.now() + 60_000).toISOString(), + retryable: false, + maxNonRetryableAttempts: 1, + }); + await inbox.markReplayFailure({ + requestId: secondId, + errorMessage: "hard failure", nextAttemptAt: new Date(Date.now() + 60_000).toISOString(), + retryable: false, + maxNonRetryableAttempts: 1, }); - const dueLater = await inbox.listDuePending({ - nowMs: Date.now(), - limit: 10, + const replayResult = await inbox.replayDeadLetter({ + requestIds: [firstId], 
}); - expect(dueLater).toHaveLength(0); + expect(replayResult.replayedCount).toBe(1); + + const purgeResult = await inbox.purgeDeadLetter({ + requestIds: [secondId], + }); + expect(purgeResult.purgedCount).toBe(1); - await inbox.markDelivered("01HXYZTESTDELIVER000000000004"); const snapshot = await inbox.getSnapshot(); - expect(snapshot.pendingCount).toBe(0); + expect(snapshot.pending.pendingCount).toBe(1); + expect(snapshot.deadLetter.deadLetterCount).toBe(0); + + const dueNow = await inbox.listDuePending({ + nowMs: Date.now(), + limit: 10, + }); + expect(dueNow.map((item) => item.requestId)).toContain(firstId); } finally { sandbox.cleanup(); } @@ -198,16 +274,12 @@ describe("ConnectorInboundInbox", () => { }); mkdirSync(inboxDir, { recursive: true }); - const inbox = createConnectorInboundInbox({ - configDir: sandbox.rootDir, - agentName: "alpha", - maxPendingMessages: 100, - maxPendingBytes: 1024 * 1024, - }); - + const inbox = createInbox(sandbox.rootDir); const snapshot = await inbox.getSnapshot(); - expect(snapshot.pendingCount).toBe(0); - expect(snapshot.pendingBytes).toBe(0); + expect(snapshot.pending.pendingCount).toBe(0); + expect(snapshot.pending.pendingBytes).toBe(0); + expect(snapshot.deadLetter.deadLetterCount).toBe(0); + expect(snapshot.deadLetter.deadLetterBytes).toBe(0); } finally { sandbox.cleanup(); } diff --git a/packages/connector/src/inbound-inbox.ts b/packages/connector/src/inbound-inbox.ts index d3a7bc5..aa8daa0 100644 --- a/packages/connector/src/inbound-inbox.ts +++ b/packages/connector/src/inbound-inbox.ts @@ -3,6 +3,8 @@ import { mkdir, readFile, rename, + stat, + unlink, writeFile, } from "node:fs/promises"; import { dirname, join } from "node:path"; @@ -11,14 +13,42 @@ import type { DeliverFrame } from "./frames.js"; const INBOUND_INBOX_DIR_NAME = "inbound-inbox"; const INBOUND_INBOX_INDEX_FILE_NAME = "index.json"; +const INBOUND_INBOX_INDEX_LOCK_FILE_NAME = "index.lock"; const INBOUND_INBOX_EVENTS_FILE_NAME = "events.jsonl"; -const 
INBOUND_INBOX_SCHEMA_VERSION = 1; +const INBOUND_INBOX_SCHEMA_VERSION = 2; + +const DEFAULT_INDEX_LOCK_TIMEOUT_MS = 5_000; +const DEFAULT_INDEX_LOCK_STALE_MS = 30_000; +const DEFAULT_INDEX_LOCK_RETRY_MS = 50; + +export type ConnectorInboundInboxItem = { + attemptCount: number; + conversationId?: string; + fromAgentDid: string; + id: string; + lastAttemptAt?: string; + lastError?: string; + nextAttemptAt: string; + payload: unknown; + payloadBytes: number; + receivedAt: string; + replyTo?: string; + requestId: string; + toAgentDid: string; +}; + +export type ConnectorInboundDeadLetterItem = ConnectorInboundInboxItem & { + deadLetterReason: string; + deadLetteredAt: string; +}; type InboundInboxIndexFile = { - version: number; + deadLetterByRequestId: Record; + deadLetterBytes: number; pendingBytes: number; pendingByRequestId: Record; updatedAt: string; + version: number; }; type InboundInboxEvent = { @@ -29,30 +59,30 @@ type InboundInboxEvent = { | "inbound_duplicate" | "replay_succeeded" | "replay_failed" + | "dead_letter_moved" + | "dead_letter_replayed" + | "dead_letter_purged" | "inbox_pruned"; }; -export type ConnectorInboundInboxItem = { - attemptCount: number; - fromAgentDid: string; - id: string; - lastAttemptAt?: string; - lastError?: string; - nextAttemptAt: string; - payload: unknown; - payloadBytes: number; - receivedAt: string; - requestId: string; - toAgentDid: string; -}; - -export type ConnectorInboundInboxSnapshot = { +export type ConnectorInboundInboxPendingSnapshot = { nextAttemptAt?: string; oldestPendingAt?: string; pendingBytes: number; pendingCount: number; }; +export type ConnectorInboundInboxDeadLetterSnapshot = { + deadLetterBytes: number; + deadLetterCount: number; + oldestDeadLetterAt?: string; +}; + +export type ConnectorInboundInboxSnapshot = { + deadLetter: ConnectorInboundInboxDeadLetterSnapshot; + pending: ConnectorInboundInboxPendingSnapshot; +}; + export type ConnectorInboundInboxEnqueueResult = { accepted: boolean; duplicate: 
boolean; @@ -60,17 +90,33 @@ export type ConnectorInboundInboxEnqueueResult = { reason?: string; }; +export type ConnectorInboundInboxMarkFailureResult = { + movedToDeadLetter: boolean; +}; + export type ConnectorInboundInboxOptions = { agentName: string; configDir: string; + eventsMaxBytes: number; + eventsMaxFiles: number; maxPendingBytes: number; maxPendingMessages: number; }; +type ReleaseLock = () => Promise; + function isRecord(value: unknown): value is Record { return typeof value === "object" && value !== null; } +function parseOptionalNonEmptyString(value: unknown): string | undefined { + if (typeof value !== "string") { + return undefined; + } + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + function parsePendingItem( value: unknown, ): ConnectorInboundInboxItem | undefined { @@ -78,17 +124,12 @@ function parsePendingItem( return undefined; } - const id = typeof value.id === "string" ? value.id.trim() : ""; - const requestId = - typeof value.requestId === "string" ? value.requestId.trim() : ""; - const fromAgentDid = - typeof value.fromAgentDid === "string" ? value.fromAgentDid.trim() : ""; - const toAgentDid = - typeof value.toAgentDid === "string" ? value.toAgentDid.trim() : ""; - const receivedAt = - typeof value.receivedAt === "string" ? value.receivedAt.trim() : ""; - const nextAttemptAt = - typeof value.nextAttemptAt === "string" ? value.nextAttemptAt.trim() : ""; + const id = parseOptionalNonEmptyString(value.id) ?? ""; + const requestId = parseOptionalNonEmptyString(value.requestId) ?? ""; + const fromAgentDid = parseOptionalNonEmptyString(value.fromAgentDid) ?? ""; + const toAgentDid = parseOptionalNonEmptyString(value.toAgentDid) ?? ""; + const receivedAt = parseOptionalNonEmptyString(value.receivedAt) ?? ""; + const nextAttemptAt = parseOptionalNonEmptyString(value.nextAttemptAt) ?? 
""; const attemptCount = typeof value.attemptCount === "number" && Number.isInteger(value.attemptCount) @@ -115,11 +156,6 @@ function parsePendingItem( return undefined; } - const lastError = - typeof value.lastError === "string" ? value.lastError : undefined; - const lastAttemptAt = - typeof value.lastAttemptAt === "string" ? value.lastAttemptAt : undefined; - return { id, requestId, @@ -130,8 +166,37 @@ function parsePendingItem( receivedAt, nextAttemptAt, attemptCount, - lastError, - lastAttemptAt, + lastError: parseOptionalNonEmptyString(value.lastError), + lastAttemptAt: parseOptionalNonEmptyString(value.lastAttemptAt), + conversationId: parseOptionalNonEmptyString(value.conversationId), + replyTo: parseOptionalNonEmptyString(value.replyTo), + }; +} + +function parseDeadLetterItem( + value: unknown, +): ConnectorInboundDeadLetterItem | undefined { + const pending = parsePendingItem(value); + if (!pending) { + return undefined; + } + + if (!isRecord(value)) { + return undefined; + } + + const deadLetteredAt = + parseOptionalNonEmptyString(value.deadLetteredAt) ?? ""; + const deadLetterReason = + parseOptionalNonEmptyString(value.deadLetterReason) ?? 
""; + if (deadLetteredAt.length === 0 || deadLetterReason.length === 0) { + return undefined; + } + + return { + ...pending, + deadLetteredAt, + deadLetterReason, }; } @@ -139,7 +204,9 @@ function toDefaultIndexFile(): InboundInboxIndexFile { return { version: INBOUND_INBOX_SCHEMA_VERSION, pendingBytes: 0, + deadLetterBytes: 0, pendingByRequestId: {}, + deadLetterByRequestId: {}, updatedAt: nowIso(), }; } @@ -149,33 +216,55 @@ function normalizeIndexFile(raw: unknown): InboundInboxIndexFile { throw new Error("Inbound inbox index root must be an object"); } + if (raw.version !== INBOUND_INBOX_SCHEMA_VERSION) { + throw new Error( + `Inbound inbox index schema version ${String(raw.version)} is unsupported`, + ); + } + const pendingByRequestIdRaw = raw.pendingByRequestId; + const deadLetterByRequestIdRaw = raw.deadLetterByRequestId; if (!isRecord(pendingByRequestIdRaw)) { throw new Error("Inbound inbox index pendingByRequestId must be an object"); } + if (!isRecord(deadLetterByRequestIdRaw)) { + throw new Error( + "Inbound inbox index deadLetterByRequestId must be an object", + ); + } const pendingByRequestId: Record = {}; let pendingBytes = 0; for (const [requestId, candidate] of Object.entries(pendingByRequestIdRaw)) { const entry = parsePendingItem(candidate); - if (entry === undefined || entry.requestId !== requestId) { + if (!entry || entry.requestId !== requestId) { continue; } pendingByRequestId[requestId] = entry; pendingBytes += entry.payloadBytes; } + const deadLetterByRequestId: Record = + {}; + let deadLetterBytes = 0; + for (const [requestId, candidate] of Object.entries( + deadLetterByRequestIdRaw, + )) { + const entry = parseDeadLetterItem(candidate); + if (!entry || entry.requestId !== requestId) { + continue; + } + deadLetterByRequestId[requestId] = entry; + deadLetterBytes += entry.payloadBytes; + } + return { - version: - typeof raw.version === "number" && Number.isFinite(raw.version) - ? 
raw.version - : INBOUND_INBOX_SCHEMA_VERSION, - pendingBytes, + version: INBOUND_INBOX_SCHEMA_VERSION, pendingByRequestId, - updatedAt: - typeof raw.updatedAt === "string" && raw.updatedAt.trim().length > 0 - ? raw.updatedAt - : nowIso(), + deadLetterByRequestId, + pendingBytes, + deadLetterBytes, + updatedAt: parseOptionalNonEmptyString(raw.updatedAt) ?? nowIso(), }; } @@ -190,11 +279,14 @@ function toComparableTimeMs(value: string): number { export class ConnectorInboundInbox { private readonly agentName: string; + private readonly eventsMaxBytes: number; + private readonly eventsMaxFiles: number; private readonly eventsPath: string; + private readonly inboxDir: string; private readonly indexPath: string; + private readonly indexLockPath: string; private readonly maxPendingBytes: number; private readonly maxPendingMessages: number; - private readonly inboxDir: string; private writeChain: Promise = Promise.resolve(); @@ -207,9 +299,15 @@ export class ConnectorInboundInbox { INBOUND_INBOX_DIR_NAME, ); this.indexPath = join(this.inboxDir, INBOUND_INBOX_INDEX_FILE_NAME); + this.indexLockPath = join( + this.inboxDir, + INBOUND_INBOX_INDEX_LOCK_FILE_NAME, + ); this.eventsPath = join(this.inboxDir, INBOUND_INBOX_EVENTS_FILE_NAME); this.maxPendingBytes = options.maxPendingBytes; this.maxPendingMessages = options.maxPendingMessages; + this.eventsMaxBytes = Math.max(0, options.eventsMaxBytes); + this.eventsMaxFiles = Math.max(0, options.eventsMaxFiles); } async enqueue( @@ -217,8 +315,10 @@ export class ConnectorInboundInbox { ): Promise { return await this.withWriteLock(async () => { const index = await this.loadIndex(); - const existing = index.pendingByRequestId[frame.id]; - if (existing !== undefined) { + if ( + index.pendingByRequestId[frame.id] !== undefined || + index.deadLetterByRequestId[frame.id] !== undefined + ) { await this.appendEvent({ type: "inbound_duplicate", requestId: frame.id, @@ -264,6 +364,8 @@ export class ConnectorInboundInbox { receivedAt: 
nowIso(), nextAttemptAt: nowIso(), attemptCount: 0, + conversationId: parseOptionalNonEmptyString(frame.conversationId), + replyTo: parseOptionalNonEmptyString(frame.replyTo), }; index.pendingByRequestId[pendingItem.requestId] = pendingItem; @@ -277,6 +379,8 @@ export class ConnectorInboundInbox { payloadBytes, fromAgentDid: pendingItem.fromAgentDid, toAgentDid: pendingItem.toAgentDid, + conversationId: pendingItem.conversationId, + replyTo: pendingItem.replyTo, }, }); @@ -332,19 +436,53 @@ export class ConnectorInboundInbox { async markReplayFailure(input: { errorMessage: string; + maxNonRetryableAttempts: number; nextAttemptAt: string; requestId: string; - }): Promise { - await this.withWriteLock(async () => { + retryable: boolean; + }): Promise { + return await this.withWriteLock(async () => { const index = await this.loadIndex(); const entry = index.pendingByRequestId[input.requestId]; if (entry === undefined) { - return; + return { movedToDeadLetter: false }; } entry.attemptCount += 1; entry.lastError = input.errorMessage; entry.lastAttemptAt = nowIso(); + + const shouldMoveToDeadLetter = + !input.retryable && + entry.attemptCount >= Math.max(1, input.maxNonRetryableAttempts); + + if (shouldMoveToDeadLetter) { + const deadLetterEntry: ConnectorInboundDeadLetterItem = { + ...entry, + deadLetteredAt: nowIso(), + deadLetterReason: input.errorMessage, + }; + delete index.pendingByRequestId[input.requestId]; + index.pendingBytes = Math.max( + 0, + index.pendingBytes - entry.payloadBytes, + ); + index.deadLetterByRequestId[input.requestId] = deadLetterEntry; + index.deadLetterBytes += deadLetterEntry.payloadBytes; + index.updatedAt = nowIso(); + await this.saveIndex(index); + await this.appendEvent({ + type: "dead_letter_moved", + requestId: input.requestId, + details: { + attemptCount: deadLetterEntry.attemptCount, + retryable: input.retryable, + errorMessage: input.errorMessage, + }, + }); + return { movedToDeadLetter: true }; + } + entry.nextAttemptAt = 
input.nextAttemptAt; index.updatedAt = nowIso(); await this.saveIndex(index); @@ -354,21 +492,148 @@ export class ConnectorInboundInbox { details: { attemptCount: entry.attemptCount, nextAttemptAt: input.nextAttemptAt, + retryable: input.retryable, errorMessage: input.errorMessage, }, }); + return { movedToDeadLetter: false }; + }); + } + + async listDeadLetter(input?: { + limit?: number; + }): Promise { + const index = await this.loadIndex(); + const entries = Object.values(index.deadLetterByRequestId).sort( + (left, right) => { + const leftDeadAt = toComparableTimeMs(left.deadLetteredAt); + const rightDeadAt = toComparableTimeMs(right.deadLetteredAt); + if (leftDeadAt !== rightDeadAt) { + return leftDeadAt - rightDeadAt; + } + + return ( + toComparableTimeMs(left.receivedAt) - + toComparableTimeMs(right.receivedAt) + ); + }, + ); + + const limit = Math.max(1, input?.limit ?? (entries.length || 1)); + return entries.slice(0, limit); + } + + async replayDeadLetter(input?: { + requestIds?: string[]; + }): Promise<{ replayedCount: number }> { + return await this.withWriteLock(async () => { + const index = await this.loadIndex(); + const requestIds = + input?.requestIds && input.requestIds.length > 0 + ? 
Array.from(new Set(input.requestIds.map((item) => item.trim()))) + : Object.keys(index.deadLetterByRequestId); + + let replayedCount = 0; + for (const requestId of requestIds) { + if (requestId.length === 0) { + continue; + } + + const dead = index.deadLetterByRequestId[requestId]; + if (!dead) { + continue; + } + + delete index.deadLetterByRequestId[requestId]; + index.deadLetterBytes = Math.max( + 0, + index.deadLetterBytes - dead.payloadBytes, + ); + + index.pendingByRequestId[requestId] = { + ...dead, + nextAttemptAt: nowIso(), + lastError: dead.deadLetterReason, + }; + index.pendingBytes += dead.payloadBytes; + replayedCount += 1; + await this.appendEvent({ + type: "dead_letter_replayed", + requestId, + details: { + deadLetteredAt: dead.deadLetteredAt, + deadLetterReason: dead.deadLetterReason, + }, + }); + } + + if (replayedCount > 0) { + index.updatedAt = nowIso(); + await this.saveIndex(index); + } + + return { replayedCount }; + }); + } + + async purgeDeadLetter(input?: { + requestIds?: string[]; + }): Promise<{ purgedCount: number }> { + return await this.withWriteLock(async () => { + const index = await this.loadIndex(); + const requestIds = + input?.requestIds && input.requestIds.length > 0 + ? 
Array.from(new Set(input.requestIds.map((item) => item.trim()))) + : Object.keys(index.deadLetterByRequestId); + + let purgedCount = 0; + for (const requestId of requestIds) { + if (requestId.length === 0) { + continue; + } + + const dead = index.deadLetterByRequestId[requestId]; + if (!dead) { + continue; + } + + delete index.deadLetterByRequestId[requestId]; + index.deadLetterBytes = Math.max( + 0, + index.deadLetterBytes - dead.payloadBytes, + ); + purgedCount += 1; + await this.appendEvent({ + type: "dead_letter_purged", + requestId, + details: { + deadLetteredAt: dead.deadLetteredAt, + deadLetterReason: dead.deadLetterReason, + }, + }); + } + + if (purgedCount > 0) { + index.updatedAt = nowIso(); + await this.saveIndex(index); + } + + return { purgedCount }; }); } async pruneDelivered(): Promise { await this.withWriteLock(async () => { const index = await this.loadIndex(); - const beforeCount = Object.keys(index.pendingByRequestId).length; - if (beforeCount === 0) { + const beforePendingCount = Object.keys(index.pendingByRequestId).length; + const beforeDeadLetterCount = Object.keys( + index.deadLetterByRequestId, + ).length; + if (beforePendingCount === 0 && beforeDeadLetterCount === 0) { return; } - const after: Record = {}; + const nextPending: Record = {}; let pendingBytes = 0; for (const [requestId, entry] of Object.entries( index.pendingByRequestId, @@ -376,20 +641,35 @@ export class ConnectorInboundInbox { if (entry.attemptCount < 0) { continue; } - - after[requestId] = entry; + nextPending[requestId] = entry; pendingBytes += entry.payloadBytes; } - index.pendingByRequestId = after; + const nextDead: Record = {}; + let deadLetterBytes = 0; + for (const [requestId, entry] of Object.entries( + index.deadLetterByRequestId, + )) { + if (entry.attemptCount < 0) { + continue; + } + nextDead[requestId] = entry; + deadLetterBytes += entry.payloadBytes; + } + + index.pendingByRequestId = nextPending; index.pendingBytes = pendingBytes; + 
index.deadLetterByRequestId = nextDead; + index.deadLetterBytes = deadLetterBytes; index.updatedAt = nowIso(); await this.saveIndex(index); await this.appendEvent({ type: "inbox_pruned", details: { - beforeCount, - afterCount: Object.keys(after).length, + beforePendingCount, + afterPendingCount: Object.keys(nextPending).length, + beforeDeadLetterCount, + afterDeadLetterCount: Object.keys(nextDead).length, }, }); }); @@ -397,32 +677,35 @@ export class ConnectorInboundInbox { async getSnapshot(): Promise { const index = await this.loadIndex(); - const entries = Object.values(index.pendingByRequestId); - if (entries.length === 0) { - return { - pendingCount: 0, - pendingBytes: index.pendingBytes, - }; - } - - entries.sort((left, right) => { - return ( + const pendingEntries = Object.values(index.pendingByRequestId).sort( + (left, right) => toComparableTimeMs(left.receivedAt) - - toComparableTimeMs(right.receivedAt) - ); - }); + toComparableTimeMs(right.receivedAt), + ); + const deadEntries = Object.values(index.deadLetterByRequestId).sort( + (left, right) => + toComparableTimeMs(left.deadLetteredAt) - + toComparableTimeMs(right.deadLetteredAt), + ); - const nextAttemptAt = entries + const nextAttemptAt = pendingEntries .map((entry) => entry.nextAttemptAt) .sort( (left, right) => toComparableTimeMs(left) - toComparableTimeMs(right), )[0]; return { - pendingCount: entries.length, - pendingBytes: index.pendingBytes, - oldestPendingAt: entries[0]?.receivedAt, - nextAttemptAt, + pending: { + pendingCount: pendingEntries.length, + pendingBytes: index.pendingBytes, + oldestPendingAt: pendingEntries[0]?.receivedAt, + nextAttemptAt, + }, + deadLetter: { + deadLetterCount: deadEntries.length, + deadLetterBytes: index.deadLetterBytes, + oldestDeadLetterAt: deadEntries[0]?.deadLetteredAt, + }, }; } @@ -434,13 +717,82 @@ export class ConnectorInboundInbox { }); await previous; + const releaseFileLock = await this.acquireIndexFileLock(); try { return await fn(); } finally { + await 
releaseFileLock(); release?.(); } } + private async acquireIndexFileLock(): Promise { + const startedAt = nowUtcMs(); + await mkdir(this.inboxDir, { recursive: true }); + + while (true) { + try { + await writeFile( + this.indexLockPath, + `${JSON.stringify({ pid: process.pid, createdAt: nowIso() })}\n`, + { + encoding: "utf8", + flag: "wx", + }, + ); + + let released = false; + return async () => { + if (released) { + return; + } + released = true; + try { + await unlink(this.indexLockPath); + } catch { + // ignore + } + }; + } catch (error) { + const code = + error && typeof error === "object" && "code" in error + ? (error as { code?: string }).code + : undefined; + if (code !== "EEXIST") { + throw error; + } + + const lockStats = await this.readLockStats(); + if ( + lockStats !== undefined && + nowUtcMs() - lockStats.mtimeMs > DEFAULT_INDEX_LOCK_STALE_MS + ) { + try { + await unlink(this.indexLockPath); + } catch { + // ignore stale lock unlink race + } + continue; + } + + if (nowUtcMs() - startedAt >= DEFAULT_INDEX_LOCK_TIMEOUT_MS) { + throw new Error("Timed out waiting for inbound inbox index lock"); + } + + await this.sleep(DEFAULT_INDEX_LOCK_RETRY_MS); + } + } + } + + private async readLockStats(): Promise<{ mtimeMs: number } | undefined> { + try { + const lockStat = await stat(this.indexLockPath); + return { mtimeMs: lockStat.mtimeMs }; + } catch { + return undefined; + } + } + private async loadIndex(): Promise { await mkdir(this.inboxDir, { recursive: true }); @@ -489,6 +841,60 @@ export class ConnectorInboundInbox { `${JSON.stringify({ ...event, at: nowIso() })}\n`, "utf8", ); + await this.rotateEventsIfNeeded(); + } + + private async rotateEventsIfNeeded(): Promise { + if (this.eventsMaxBytes <= 0 || this.eventsMaxFiles <= 0) { + return; + } + + let currentSize: number; + try { + const current = await stat(this.eventsPath); + currentSize = current.size; + } catch { + return; + } + + if (currentSize <= this.eventsMaxBytes) { + return; + } + + for (let 
index = this.eventsMaxFiles; index >= 1; index -= 1) { + const fromPath = + index === 1 ? this.eventsPath : `${this.eventsPath}.${index - 1}`; + const toPath = `${this.eventsPath}.${index}`; + + const fromExists = await this.pathExists(fromPath); + if (!fromExists) { + continue; + } + + const toExists = await this.pathExists(toPath); + if (toExists) { + await unlink(toPath); + } + + await rename(fromPath, toPath); + } + + await writeFile(this.eventsPath, "", "utf8"); + } + + private async pathExists(pathValue: string): Promise { + try { + await stat(pathValue); + return true; + } catch { + return false; + } + } + + private async sleep(durationMs: number): Promise { + await new Promise((resolve) => { + setTimeout(resolve, durationMs); + }); } } diff --git a/packages/connector/src/index.ts b/packages/connector/src/index.ts index 52122c2..5cf2144 100644 --- a/packages/connector/src/index.ts +++ b/packages/connector/src/index.ts @@ -1,7 +1,9 @@ export type { ConnectorClientHooks, + ConnectorClientMetricsSnapshot, ConnectorClientOptions, ConnectorOutboundEnqueueInput, + ConnectorOutboundQueuePersistence, ConnectorWebSocket, } from "./client.js"; export { ConnectorClient } from "./client.js"; @@ -11,6 +13,9 @@ export { CONNECTOR_VERSION, DEFAULT_CONNECT_TIMEOUT_MS, DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index 4312aa0..76205fc 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -10,6 +10,9 @@ import { decodeBase64url, encodeBase64url, RELAY_CONNECT_PATH, + RELAY_CONVERSATION_ID_HEADER, + RELAY_DELIVERY_RECEIPT_URL_HEADER, + RELAY_DELIVERY_RECEIPTS_PATH, 
RELAY_RECIPIENT_AGENT_DID_HEADER, } from "@clawdentity/protocol"; import { @@ -25,10 +28,17 @@ import { toIso, } from "@clawdentity/sdk"; import { WebSocket as NodeWebSocket } from "ws"; -import { ConnectorClient, type ConnectorWebSocket } from "./client.js"; +import { + ConnectorClient, + type ConnectorOutboundQueuePersistence, + type ConnectorWebSocket, +} from "./client.js"; import { AGENT_ACCESS_HEADER, DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, @@ -42,6 +52,7 @@ import { DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, DEFAULT_OPENCLAW_HOOK_PATH, } from "./constants.js"; +import { type EnqueueFrame, enqueueFrameSchema } from "./frames.js"; import { type ConnectorInboundInboxSnapshot, createConnectorInboundInbox, @@ -81,14 +92,20 @@ export type ConnectorRuntimeHandle = { }; type OutboundRelayRequest = { + conversationId?: string; payload: unknown; peer: string; peerDid: string; peerProxyUrl: string; + replyTo?: string; }; +type OutboundDeliveryReceiptStatus = "processed_by_openclaw" | "dead_lettered"; + const REGISTRY_AUTH_FILENAME = "registry-auth.json"; const AGENTS_DIR_NAME = "agents"; +const OUTBOUND_QUEUE_DIR_NAME = "outbound-queue"; +const OUTBOUND_QUEUE_FILENAME = "queue.json"; const REFRESH_SINGLE_FLIGHT_PREFIX = "connector-runtime"; const NONCE_SIZE = 16; const MAX_OUTBOUND_BODY_BYTES = 1024 * 1024; @@ -110,6 +127,15 @@ function parseRequiredString(value: unknown, field: string): string { return value.trim(); } +function parseOptionalString(value: unknown): string | undefined { + if (typeof value !== "string") { + return undefined; + } + + const trimmed = value.trim(); + return trimmed.length > 0 ? 
trimmed : undefined; +} + function normalizeOutboundBaseUrl(baseUrlInput: string | undefined): URL { const raw = baseUrlInput?.trim() || DEFAULT_CONNECTOR_BASE_URL; let parsed: URL; @@ -194,6 +220,17 @@ function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { return new URL(normalizedHookPath, normalizedBase).toString(); } +function toHttpOriginFromWebSocketUrl(value: URL): string { + const normalized = new URL(value.toString()); + if (normalized.protocol === "wss:") { + normalized.protocol = "https:"; + } else if (normalized.protocol === "ws:") { + normalized.protocol = "http:"; + } + + return normalized.origin; +} + function parsePositiveIntEnv( key: string, fallback: number, @@ -232,6 +269,9 @@ class LocalOpenclawDeliveryError extends Error { type InboundReplayPolicy = { batchSize: number; + deadLetterNonRetryableMaxAttempts: number; + eventsMaxBytes: number; + eventsMaxFiles: number; inboxMaxBytes: number; inboxMaxMessages: number; replayIntervalMs: number; @@ -251,7 +291,7 @@ type InboundReplayStatus = { type InboundReplayView = { lastReplayAt?: string; lastReplayError?: string; - pending: ConnectorInboundInboxSnapshot; + snapshot: ConnectorInboundInboxSnapshot; replayerActive: boolean; openclawHook: { lastAttemptAt?: string; @@ -266,6 +306,18 @@ function loadInboundReplayPolicy(): InboundReplayPolicy { ); return { + deadLetterNonRetryableMaxAttempts: parsePositiveIntEnv( + "CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS", + DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, + ), + eventsMaxBytes: parsePositiveIntEnv( + "CONNECTOR_INBOUND_EVENTS_MAX_BYTES", + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, + ), + eventsMaxFiles: parsePositiveIntEnv( + "CONNECTOR_INBOUND_EVENTS_MAX_FILES", + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, + ), inboxMaxMessages: parsePositiveIntEnv( "CONNECTOR_INBOUND_INBOX_MAX_MESSAGES", DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, @@ -416,11 +468,27 @@ function 
parseOutboundRelayRequest(payload: unknown): OutboundRelayRequest { }); } + const replyTo = parseOptionalString(payload.replyTo); + if (replyTo !== undefined) { + try { + new URL(replyTo); + } catch { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_INVALID_REQUEST", + message: "Outbound relay replyTo must be a valid URL", + status: 400, + expose: true, + }); + } + } + return { peer: parseRequiredString(payload.peer, "peer"), peerDid: parseRequiredString(payload.peerDid, "peerDid"), peerProxyUrl: parseRequiredString(payload.peerProxyUrl, "peerProxyUrl"), payload: payload.payload, + conversationId: parseOptionalString(payload.conversationId), + replyTo, }; } @@ -591,6 +659,89 @@ async function readRegistryAuthFromDisk(input: { return auth; } +function resolveOutboundQueuePath(input: { + agentName: string; + configDir: string; +}): string { + return join( + input.configDir, + AGENTS_DIR_NAME, + input.agentName, + OUTBOUND_QUEUE_DIR_NAME, + OUTBOUND_QUEUE_FILENAME, + ); +} + +function createOutboundQueuePersistence(input: { + agentName: string; + configDir: string; + logger: Logger; +}): ConnectorOutboundQueuePersistence { + const queuePath = resolveOutboundQueuePath({ + configDir: input.configDir, + agentName: input.agentName, + }); + + const load = async (): Promise => { + let raw: string; + try { + raw = await readFile(queuePath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return []; + } + + input.logger.warn("connector.outbound.persistence_read_failed", { + queuePath, + reason: sanitizeErrorReason(error), + }); + return []; + } + + if (raw.trim().length === 0) { + return []; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch (error) { + input.logger.warn("connector.outbound.persistence_invalid_json", { + queuePath, + reason: sanitizeErrorReason(error), + }); + return []; + } + + if (!Array.isArray(parsed)) { + return []; + } + + 
const frames: EnqueueFrame[] = []; + for (const candidate of parsed) { + const parsedFrame = enqueueFrameSchema.safeParse(candidate); + if (parsedFrame.success) { + frames.push(parsedFrame.data); + } + } + return frames; + }; + + const save = async (frames: EnqueueFrame[]): Promise => { + await mkdir(dirname(queuePath), { recursive: true }); + const tmpPath = `${queuePath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; + await writeFile(tmpPath, `${JSON.stringify(frames, null, 2)}\n`, "utf8"); + await rename(tmpPath, queuePath); + }; + + return { load, save }; +} + async function readRequestJson(req: IncomingMessage): Promise { const chunks: Buffer[] = []; let totalBytes = 0; @@ -626,6 +777,18 @@ async function readRequestJson(req: IncomingMessage): Promise { } } +function parseRequestIds(value: unknown): string[] | undefined { + if (!Array.isArray(value)) { + return undefined; + } + + const requestIds = value + .map((item) => (typeof item === "string" ? item.trim() : "")) + .filter((item) => item.length > 0); + + return requestIds.length > 0 ? 
Array.from(new Set(requestIds)) : undefined; +} + function writeJson( res: ServerResponse, status: number, @@ -733,6 +896,10 @@ export async function startConnectorRuntime( const wsUrl = normalizeWebSocketUrl(input.proxyWebsocketUrl); const wsParsed = new URL(wsUrl); + const defaultReceiptCallbackUrl = new URL( + RELAY_DELIVERY_RECEIPTS_PATH.slice(1), + `${toHttpOriginFromWebSocketUrl(wsParsed)}/`, + ).toString(); const openclawBaseUrl = resolveOpenclawBaseUrl(input.openclawBaseUrl); const openclawHookPath = resolveOpenclawHookPath(input.openclawHookPath); const openclawHookToken = resolveOpenclawHookToken(input.openclawHookToken); @@ -741,6 +908,8 @@ export async function startConnectorRuntime( const inboundInbox = createConnectorInboundInbox({ configDir: input.configDir, agentName: input.agentName, + eventsMaxBytes: inboundReplayPolicy.eventsMaxBytes, + eventsMaxFiles: inboundReplayPolicy.eventsMaxFiles, maxPendingMessages: inboundReplayPolicy.inboxMaxMessages, maxPendingBytes: inboundReplayPolicy.inboxMaxBytes, }); @@ -774,54 +943,118 @@ export async function startConnectorRuntime( nowMs: nowUtcMs(), limit: inboundReplayPolicy.batchSize, }); + if (dueItems.length === 0) { + return; + } + + const laneByKey = new Map(); for (const pending of dueItems) { - inboundReplayStatus.lastAttemptAt = nowIso(); - try { - await deliverToOpenclawHook({ - fetchImpl, - openclawHookUrl, - openclawHookToken, - requestId: pending.requestId, - payload: pending.payload, - }); - await inboundInbox.markDelivered(pending.requestId); - inboundReplayStatus.lastReplayAt = nowIso(); - inboundReplayStatus.lastReplayError = undefined; - inboundReplayStatus.lastAttemptStatus = "ok"; - logger.info("connector.inbound.replay_succeeded", { - requestId: pending.requestId, - attemptCount: pending.attemptCount + 1, - }); - } catch (error) { - const reason = sanitizeErrorReason(error); - const retryable = - error instanceof LocalOpenclawDeliveryError - ? 
error.retryable - : true; - const nextAttemptAt = toIso( - nowUtcMs() + - computeReplayDelayMs({ - attemptCount: pending.attemptCount + 1, - policy: inboundReplayPolicy, - }) * - (retryable ? 1 : 10), - ); - await inboundInbox.markReplayFailure({ - requestId: pending.requestId, - errorMessage: reason, - nextAttemptAt, - }); - inboundReplayStatus.lastReplayError = reason; - inboundReplayStatus.lastAttemptStatus = "failed"; - logger.warn("connector.inbound.replay_failed", { - requestId: pending.requestId, - attemptCount: pending.attemptCount + 1, - retryable, - nextAttemptAt, - reason, - }); + const laneKey = + pending.conversationId !== undefined + ? `conversation:${pending.conversationId}` + : "legacy-best-effort"; + const lane = laneByKey.get(laneKey); + if (lane) { + lane.push(pending); + } else { + laneByKey.set(laneKey, [pending]); } } + + await Promise.all( + Array.from(laneByKey.values()).map(async (laneItems) => { + for (const pending of laneItems) { + inboundReplayStatus.lastAttemptAt = nowIso(); + try { + await deliverToOpenclawHook({ + fetchImpl, + openclawHookUrl, + openclawHookToken, + requestId: pending.requestId, + payload: pending.payload, + }); + await inboundInbox.markDelivered(pending.requestId); + inboundReplayStatus.lastReplayAt = nowIso(); + inboundReplayStatus.lastReplayError = undefined; + inboundReplayStatus.lastAttemptStatus = "ok"; + logger.info("connector.inbound.replay_succeeded", { + requestId: pending.requestId, + attemptCount: pending.attemptCount + 1, + conversationId: pending.conversationId, + }); + + if (pending.replyTo) { + try { + await postDeliveryReceipt({ + requestId: pending.requestId, + senderAgentDid: pending.fromAgentDid, + recipientAgentDid: pending.toAgentDid, + replyTo: pending.replyTo, + status: "processed_by_openclaw", + }); + } catch (error) { + logger.warn("connector.inbound.delivery_receipt_failed", { + requestId: pending.requestId, + reason: sanitizeErrorReason(error), + status: "processed_by_openclaw", + }); + } 
+ } + } catch (error) { + const reason = sanitizeErrorReason(error); + const retryable = + error instanceof LocalOpenclawDeliveryError + ? error.retryable + : true; + const nextAttemptAt = toIso( + nowUtcMs() + + computeReplayDelayMs({ + attemptCount: pending.attemptCount + 1, + policy: inboundReplayPolicy, + }) * + (retryable ? 1 : 10), + ); + const markResult = await inboundInbox.markReplayFailure({ + requestId: pending.requestId, + errorMessage: reason, + nextAttemptAt, + retryable, + maxNonRetryableAttempts: + inboundReplayPolicy.deadLetterNonRetryableMaxAttempts, + }); + inboundReplayStatus.lastReplayError = reason; + inboundReplayStatus.lastAttemptStatus = "failed"; + logger.warn("connector.inbound.replay_failed", { + requestId: pending.requestId, + attemptCount: pending.attemptCount + 1, + retryable, + nextAttemptAt, + movedToDeadLetter: markResult.movedToDeadLetter, + reason, + }); + + if (markResult.movedToDeadLetter && pending.replyTo) { + try { + await postDeliveryReceipt({ + requestId: pending.requestId, + senderAgentDid: pending.fromAgentDid, + recipientAgentDid: pending.toAgentDid, + replyTo: pending.replyTo, + status: "dead_lettered", + reason, + }); + } catch (receiptError) { + logger.warn("connector.inbound.delivery_receipt_failed", { + requestId: pending.requestId, + reason: sanitizeErrorReason(receiptError), + status: "dead_lettered", + }); + } + } + } + } + }), + ); } finally { replayInFlight = false; inboundReplayStatus.replayerActive = false; @@ -829,9 +1062,9 @@ export async function startConnectorRuntime( }; const readInboundReplayView = async (): Promise => { - const pending = await inboundInbox.getSnapshot(); + const snapshot = await inboundInbox.getSnapshot(); return { - pending, + snapshot, replayerActive: inboundReplayStatus.replayerActive || replayInFlight, lastReplayAt: inboundReplayStatus.lastReplayAt, lastReplayError: inboundReplayStatus.lastReplayError, @@ -843,6 +1076,12 @@ export async function startConnectorRuntime( }; }; + 
const outboundQueuePersistence = createOutboundQueuePersistence({ + configDir: input.configDir, + agentName: input.agentName, + logger, + }); + const connectorClient = new ConnectorClient({ connectorUrl: wsParsed.toString(), connectionHeadersProvider: resolveUpgradeHeaders, @@ -870,6 +1109,7 @@ export async function startConnectorRuntime( } }, }, + outboundQueuePersistence, inboundDeliverHandler: async (frame) => { const persisted = await inboundInbox.enqueue(frame); if (!persisted.accepted) { @@ -898,6 +1138,9 @@ export async function startConnectorRuntime( const outboundBaseUrl = normalizeOutboundBaseUrl(input.outboundBaseUrl); const outboundPath = normalizeOutboundPath(input.outboundPath); const statusPath = DEFAULT_CONNECTOR_STATUS_PATH; + const deadLetterPath = "/v1/inbound/dead-letter"; + const deadLetterReplayPath = "/v1/inbound/dead-letter/replay"; + const deadLetterPurgePath = "/v1/inbound/dead-letter/purge"; const outboundUrl = new URL(outboundPath, outboundBaseUrl).toString(); const relayToPeer = async (request: OutboundRelayRequest): Promise => { @@ -907,6 +1150,7 @@ export async function startConnectorRuntime( const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; const performRelay = async (auth: AgentAuthBundle): Promise => { + const replyTo = request.replyTo ?? defaultReceiptCallbackUrl; const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); const signed = await signHttpRequest({ @@ -925,6 +1169,10 @@ export async function startConnectorRuntime( "Content-Type": "application/json", [AGENT_ACCESS_HEADER]: auth.accessToken, [RELAY_RECIPIENT_AGENT_DID_HEADER]: request.peerDid, + ...(request.conversationId + ? 
{ [RELAY_CONVERSATION_ID_HEADER]: request.conversationId } + : {}), + [RELAY_DELIVERY_RECEIPT_URL_HEADER]: replyTo, ...signed.headers, }, body, @@ -975,6 +1223,95 @@ export async function startConnectorRuntime( }); }; + const postDeliveryReceipt = async (inputReceipt: { + reason?: string; + recipientAgentDid: string; + replyTo: string; + requestId: string; + senderAgentDid: string; + status: OutboundDeliveryReceiptStatus; + }): Promise => { + await syncAuthFromDisk(); + const receiptUrl = new URL(inputReceipt.replyTo); + const body = JSON.stringify({ + requestId: inputReceipt.requestId, + senderAgentDid: inputReceipt.senderAgentDid, + recipientAgentDid: inputReceipt.recipientAgentDid, + status: inputReceipt.status, + reason: inputReceipt.reason, + processedAt: nowIso(), + }); + const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}:delivery-receipt`; + + const performReceipt = async (auth: AgentAuthBundle): Promise => { + const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); + const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(receiptUrl), + timestamp: unixSeconds, + nonce, + body: new TextEncoder().encode(body), + secretKey, + }); + + const response = await fetchImpl(receiptUrl.toString(), { + method: "POST", + headers: { + Authorization: `Claw ${input.credentials.ait}`, + "Content-Type": "application/json", + [AGENT_ACCESS_HEADER]: auth.accessToken, + ...signed.headers, + }, + body, + }); + + if (!response.ok) { + if (response.status === 401) { + throw new AppError({ + code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", + message: + "Delivery receipt callback rejected agent auth credentials", + status: 401, + expose: true, + }); + } + + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_FAILED", + message: "Delivery receipt callback request failed", + status: 502, + }); + } + }; + + await executeWithAgentAuthRefreshRetry({ + 
key: refreshKey, + shouldRetry: isRetryableRelayAuthError, + getAuth: async () => { + await syncAuthFromDisk(); + return currentAuth; + }, + persistAuth: async (nextAuth) => { + currentAuth = nextAuth; + await writeRegistryAuthAtomic({ + configDir: input.configDir, + agentName: input.agentName, + auth: nextAuth, + }); + }, + refreshAuth: async (auth) => + refreshAgentAuthWithClawProof({ + registryUrl: input.registryUrl, + ait: input.credentials.ait, + secretKey, + refreshToken: auth.refreshToken, + fetchImpl, + }), + perform: performReceipt, + }); + }; + const server = createServer(async (req, res) => { const requestPath = req.url ? new URL(req.url, outboundBaseUrl).pathname @@ -1003,25 +1340,98 @@ export async function startConnectorRuntime( }, outboundUrl, websocketUrl: wsUrl, - websocketConnected: connectorClient.isConnected(), + websocket: { + connected: connectorClient.isConnected(), + }, }); return; } + const clientMetrics = connectorClient.getMetricsSnapshot(); writeJson(res, 200, { status: "ok", outboundUrl, websocketUrl: wsUrl, - websocketConnected: connectorClient.isConnected(), - inboundInbox: { - pendingCount: inboundReplayView.pending.pendingCount, - pendingBytes: inboundReplayView.pending.pendingBytes, - oldestPendingAt: inboundReplayView.pending.oldestPendingAt, - nextAttemptAt: inboundReplayView.pending.nextAttemptAt, - replayerActive: inboundReplayView.replayerActive, - lastReplayAt: inboundReplayView.lastReplayAt, - lastReplayError: inboundReplayView.lastReplayError, + websocket: { + ...clientMetrics.connection, + }, + inbound: { + pending: inboundReplayView.snapshot.pending, + deadLetter: inboundReplayView.snapshot.deadLetter, + replay: { + replayerActive: inboundReplayView.replayerActive, + lastReplayAt: inboundReplayView.lastReplayAt, + lastReplayError: inboundReplayView.lastReplayError, + }, + openclawHook: inboundReplayView.openclawHook, + }, + outbound: { + queue: { + pendingCount: connectorClient.getQueuedOutboundCount(), + }, + }, + 
metrics: { + heartbeat: clientMetrics.heartbeat, + inboundDelivery: clientMetrics.inboundDelivery, + outboundQueue: clientMetrics.outboundQueue, }, - openclawHook: inboundReplayView.openclawHook, + }); + return; + } + + if (requestPath === deadLetterPath) { + if (req.method !== "GET") { + res.statusCode = 405; + res.setHeader("allow", "GET"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + const deadLetterItems = await inboundInbox.listDeadLetter(); + writeJson(res, 200, { + status: "ok", + count: deadLetterItems.length, + items: deadLetterItems, + }); + return; + } + + if (requestPath === deadLetterReplayPath) { + if (req.method !== "POST") { + res.statusCode = 405; + res.setHeader("allow", "POST"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + const body = await readRequestJson(req); + const requestIds = isRecord(body) + ? parseRequestIds(body.requestIds) + : undefined; + const replayResult = await inboundInbox.replayDeadLetter({ requestIds }); + void replayPendingInboundMessages(); + writeJson(res, 200, { + status: "ok", + replayedCount: replayResult.replayedCount, + }); + return; + } + + if (requestPath === deadLetterPurgePath) { + if (req.method !== "POST") { + res.statusCode = 405; + res.setHeader("allow", "POST"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + const body = await readRequestJson(req); + const requestIds = isRecord(body) + ? parseRequestIds(body.requestIds) + : undefined; + const purgeResult = await inboundInbox.purgeDeadLetter({ requestIds }); + writeJson(res, 200, { + status: "ok", + purgedCount: purgeResult.purgedCount, }); return; } diff --git a/packages/protocol/AGENTS.md b/packages/protocol/AGENTS.md index b10ca06..b8f732b 100644 --- a/packages/protocol/AGENTS.md +++ b/packages/protocol/AGENTS.md @@ -12,7 +12,7 @@ - Validate risky identity fields (`name`, `description`) with explicit allowlists/length caps; never pass through raw control characters. 
- Enforce `cnf.jwk.x` semantics for AIT parsing: value must be base64url and decode to exactly 32 bytes for Ed25519 (`kty=OKP`, `crv=Ed25519`). - Reuse existing protocol validators/parsers (`parseDid`, `parseUlid`, base64url helpers) instead of duplicating claim validation logic. -- Keep HTTP signing canonical strings deterministic: canonicalize method, normalized path (path + query), timestamp, nonce, and body hash exactly as `README.md`, `PRD.md`, and the policy docs describe (see `CLAW-PROOF-V1\n\n\n\n\n`). +- Keep HTTP signing canonical strings deterministic: canonicalize method, normalized path (path + query), timestamp, nonce, and body hash exactly as `README.md`, `ARCHITECTURE.md`, and the policy docs describe (see `CLAW-PROOF-V1\n\n\n\n\n`). - Mirror the AIT guardrails for CRL payloads: `crl.ts` keeps `.strict()` definitions, requires at least one revocation entry, enforces `agentDid` is a `did:claw:agent`, `revocation.jti` is a ULID, `exp > iat`, and surfaces `INVALID_CRL_CLAIMS` via `ProtocolParseError`. - Reuse cross-module helpers (e.g., `text.ts`’s `hasControlChars`) so control-character checks stay consistent across AIT and CRL validation. - Share header names/values via protocol exports so SDK/Proxy layers import a single source of truth (e.g., `X-Claw-Timestamp`, `X-Claw-Nonce`, `X-Claw-Body-SHA256`, and `X-Claw-Proof`). 
diff --git a/packages/protocol/src/endpoints.ts b/packages/protocol/src/endpoints.ts index a2cdebd..28e2809 100644 --- a/packages/protocol/src/endpoints.ts +++ b/packages/protocol/src/endpoints.ts @@ -10,4 +10,7 @@ export const REGISTRY_METADATA_PATH = "/v1/metadata"; export const INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH = "/internal/v1/identity/agent-ownership"; export const RELAY_CONNECT_PATH = "/v1/relay/connect"; +export const RELAY_DELIVERY_RECEIPTS_PATH = "/v1/relay/delivery-receipts"; +export const RELAY_CONVERSATION_ID_HEADER = "x-claw-conversation-id"; +export const RELAY_DELIVERY_RECEIPT_URL_HEADER = "x-claw-delivery-receipt-url"; export const RELAY_RECIPIENT_AGENT_DID_HEADER = "x-claw-recipient-agent-did"; diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 6e16e42..98eaa2f 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -32,6 +32,9 @@ export { ME_API_KEYS_PATH, REGISTRY_METADATA_PATH, RELAY_CONNECT_PATH, + RELAY_CONVERSATION_ID_HEADER, + RELAY_DELIVERY_RECEIPT_URL_HEADER, + RELAY_DELIVERY_RECEIPTS_PATH, RELAY_RECIPIENT_AGENT_DID_HEADER, } from "./endpoints.js"; export type { ProtocolParseErrorCode } from "./errors.js"; From df759b0e0c48ab6753fc91a9092507cf9f78f302 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 12:34:10 +0530 Subject: [PATCH 111/190] Review feature/issue-144 changes --- apps/proxy/src/AGENTS.md | 1 + .../src/relay-delivery-receipt-route.test.ts | 111 ++++++++++ .../proxy/src/relay-delivery-receipt-route.ts | 42 ++-- packages/connector/src/AGENTS.md | 3 +- packages/connector/src/inbound-inbox.test.ts | 10 + packages/connector/src/inbound-inbox.ts | 20 +- packages/connector/src/runtime.ts | 194 +++++++++++++++++- 7 files changed, 355 insertions(+), 26 deletions(-) diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 1d1bc99..21bb710 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -86,6 +86,7 @@ - 
`/rpc/record-delivery-receipt` - `/rpc/get-delivery-receipt` - Receipt states must remain constrained to `processed_by_openclaw` and `dead_lettered`. +- Reject blank/whitespace `requestId`, `senderAgentDid`, and `recipientAgentDid` in `relay-delivery-receipt-route.ts` so invalid receipt payloads fail as `400` client errors before DO RPC. - Receipt reads/writes must verify authenticated/trusted sender-recipient pairs and enforce recipient DID ownership at the route layer. - Keep `conversationId` and `replyTo` metadata flowing from `/hooks/agent` into relay queue/deliver frames for downstream ordering and callback semantics. - Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. diff --git a/apps/proxy/src/relay-delivery-receipt-route.test.ts b/apps/proxy/src/relay-delivery-receipt-route.test.ts index c0e0ab2..3e35703 100644 --- a/apps/proxy/src/relay-delivery-receipt-route.test.ts +++ b/apps/proxy/src/relay-delivery-receipt-route.test.ts @@ -212,6 +212,117 @@ describe("relay delivery receipt route", () => { expect(relayHarness.recordInputs).toHaveLength(0); }); + it("rejects POST when requestId is whitespace only", async () => { + const relayHarness = createRelayReceiptHarness(); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:beta", + responder: "did:claw:agent:alpha", + }, + ], + }); + + const response = await app.request( + RELAY_DELIVERY_RECEIPTS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": "token", + }, + body: JSON.stringify({ + requestId: " ", + senderAgentDid: "did:claw:agent:beta", + recipientAgentDid: "did:claw:agent:alpha", + status: "processed_by_openclaw", + }), + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + 
expect(body.error.code).toBe("PROXY_RELAY_RECEIPT_INVALID_INPUT"); + expect(relayHarness.recordInputs).toHaveLength(0); + }); + + it("rejects POST when senderAgentDid is whitespace only", async () => { + const relayHarness = createRelayReceiptHarness(); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:beta", + responder: "did:claw:agent:alpha", + }, + ], + }); + + const response = await app.request( + RELAY_DELIVERY_RECEIPTS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": "token", + }, + body: JSON.stringify({ + requestId: "req-6", + senderAgentDid: "\n\t", + recipientAgentDid: "did:claw:agent:alpha", + status: "processed_by_openclaw", + }), + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_RECEIPT_INVALID_INPUT"); + expect(relayHarness.recordInputs).toHaveLength(0); + }); + + it("rejects POST when recipientAgentDid is whitespace only", async () => { + const relayHarness = createRelayReceiptHarness(); + const app = createApp({ + allowedPairs: [ + { + initiator: "did:claw:agent:beta", + responder: "did:claw:agent:alpha", + }, + ], + }); + + const response = await app.request( + RELAY_DELIVERY_RECEIPTS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": "token", + }, + body: JSON.stringify({ + requestId: "req-7", + senderAgentDid: "did:claw:agent:beta", + recipientAgentDid: " ", + status: "processed_by_openclaw", + }), + }, + { + AGENT_RELAY_SESSION: relayHarness.namespace, + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_RELAY_RECEIPT_INVALID_INPUT"); + expect(relayHarness.recordInputs).toHaveLength(0); + }); + it("returns receipt on GET when trusted pair 
exists", async () => { const relayHarness = createRelayReceiptHarness(); const app = createApp({ diff --git a/apps/proxy/src/relay-delivery-receipt-route.ts b/apps/proxy/src/relay-delivery-receipt-route.ts index c4a6c86..75a4db5 100644 --- a/apps/proxy/src/relay-delivery-receipt-route.ts +++ b/apps/proxy/src/relay-delivery-receipt-route.ts @@ -25,14 +25,18 @@ type CreateRelayDeliveryReceiptHandlersInput = { trustStore: ProxyTrustStore; }; +function createRelayReceiptInvalidInputError(): AppError { + return new AppError({ + code: "PROXY_RELAY_RECEIPT_INVALID_INPUT", + message: "Relay delivery receipt payload is invalid", + status: 400, + expose: true, + }); +} + function parseRecordInput(payload: unknown): RelayReceiptRecordInput { if (typeof payload !== "object" || payload === null) { - throw new AppError({ - code: "PROXY_RELAY_RECEIPT_INVALID_INPUT", - message: "Relay delivery receipt payload is invalid", - status: 400, - expose: true, - }); + throw createRelayReceiptInvalidInputError(); } const input = payload as Partial; @@ -43,18 +47,17 @@ function parseRecordInput(payload: unknown): RelayReceiptRecordInput { (input.status !== "processed_by_openclaw" && input.status !== "dead_lettered") ) { - throw new AppError({ - code: "PROXY_RELAY_RECEIPT_INVALID_INPUT", - message: "Relay delivery receipt payload is invalid", - status: 400, - expose: true, - }); + throw createRelayReceiptInvalidInputError(); } + const requestId = ensureNonBlank(input.requestId); + const senderAgentDid = ensureNonBlank(input.senderAgentDid); + const recipientAgentDid = ensureNonBlank(input.recipientAgentDid); + return { - requestId: input.requestId, - senderAgentDid: input.senderAgentDid, - recipientAgentDid: input.recipientAgentDid, + requestId, + senderAgentDid, + recipientAgentDid, status: input.status, reason: typeof input.reason === "string" && input.reason.trim().length > 0 @@ -63,6 +66,15 @@ function parseRecordInput(payload: unknown): RelayReceiptRecordInput { }; } +function 
ensureNonBlank(value: string): string { + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw createRelayReceiptInvalidInputError(); + } + + return trimmed; +} + function parseRequiredQuery(value: string | undefined, field: string): string { if (typeof value !== "string" || value.trim().length === 0) { throw new AppError({ diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 35a7241..6739fc9 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -34,7 +34,8 @@ - `GET /v1/inbound/dead-letter` - `POST /v1/inbound/dead-letter/replay` - `POST /v1/inbound/dead-letter/purge` -- For replay delivery callbacks, post signed receipts to peer proxies using `replyTo` with statuses `processed_by_openclaw` and `dead_lettered`. +- For dead-letter replay/purge targeting, treat omitted `requestIds` as "all", but treat `requestIds: []` (or empty after sanitization) as a no-op. +- For replay delivery callbacks, post signed receipts to peer proxies using `replyTo` with statuses `processed_by_openclaw` and `dead_lettered`, but only when `replyTo` points to trusted peer proxy origins and the relay receipt path. ## WebSocket Resilience Rules - Keep websocket reconnect behavior centralized in `client.ts` (single cleanup path for close/error/unexpected-response/timeout). 
diff --git a/packages/connector/src/inbound-inbox.test.ts b/packages/connector/src/inbound-inbox.test.ts index 7d4a577..e01523d 100644 --- a/packages/connector/src/inbound-inbox.test.ts +++ b/packages/connector/src/inbound-inbox.test.ts @@ -245,11 +245,21 @@ describe("ConnectorInboundInbox", () => { }); expect(replayResult.replayedCount).toBe(1); + const replayNoOpResult = await inbox.replayDeadLetter({ + requestIds: [], + }); + expect(replayNoOpResult.replayedCount).toBe(0); + const purgeResult = await inbox.purgeDeadLetter({ requestIds: [secondId], }); expect(purgeResult.purgedCount).toBe(1); + const purgeNoOpResult = await inbox.purgeDeadLetter({ + requestIds: [], + }); + expect(purgeNoOpResult.purgedCount).toBe(0); + const snapshot = await inbox.getSnapshot(); expect(snapshot.pending.pendingCount).toBe(1); expect(snapshot.deadLetter.deadLetterCount).toBe(0); diff --git a/packages/connector/src/inbound-inbox.ts b/packages/connector/src/inbound-inbox.ts index aa8daa0..963084e 100644 --- a/packages/connector/src/inbound-inbox.ts +++ b/packages/connector/src/inbound-inbox.ts @@ -529,8 +529,14 @@ export class ConnectorInboundInbox { return await this.withWriteLock(async () => { const index = await this.loadIndex(); const requestIds = - input?.requestIds && input.requestIds.length > 0 - ? Array.from(new Set(input.requestIds.map((item) => item.trim()))) + input?.requestIds !== undefined + ? Array.from( + new Set( + input.requestIds + .map((item) => item.trim()) + .filter((item) => item.length > 0), + ), + ) : Object.keys(index.deadLetterByRequestId); let replayedCount = 0; @@ -582,8 +588,14 @@ export class ConnectorInboundInbox { return await this.withWriteLock(async () => { const index = await this.loadIndex(); const requestIds = - input?.requestIds && input.requestIds.length > 0 - ? Array.from(new Set(input.requestIds.map((item) => item.trim()))) + input?.requestIds !== undefined + ? 
Array.from( + new Set( + input.requestIds + .map((item) => item.trim()) + .filter((item) => item.length > 0), + ), + ) : Object.keys(index.deadLetterByRequestId); let purgedCount = 0; diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index 76205fc..824c0d2 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -5,7 +5,7 @@ import { type IncomingMessage, type ServerResponse, } from "node:http"; -import { dirname, join } from "node:path"; +import { dirname, isAbsolute, join } from "node:path"; import { decodeBase64url, encodeBase64url, @@ -102,7 +102,13 @@ type OutboundRelayRequest = { type OutboundDeliveryReceiptStatus = "processed_by_openclaw" | "dead_lettered"; +type TrustedReceiptTargets = { + byAgentDid: Map; + origins: Set; +}; + const REGISTRY_AUTH_FILENAME = "registry-auth.json"; +const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; const AGENTS_DIR_NAME = "agents"; const OUTBOUND_QUEUE_DIR_NAME = "outbound-queue"; const OUTBOUND_QUEUE_FILENAME = "queue.json"; @@ -136,6 +142,18 @@ function parseOptionalString(value: unknown): string | undefined { return trimmed.length > 0 ? trimmed : undefined; } +function parseOptionalProxyOrigin(value: unknown): string | undefined { + if (typeof value !== "string" || value.trim().length === 0) { + return undefined; + } + + try { + return new URL(value.trim()).origin; + } catch { + return undefined; + } +} + function normalizeOutboundBaseUrl(baseUrlInput: string | undefined): URL { const raw = baseUrlInput?.trim() || DEFAULT_CONNECTOR_BASE_URL; let parsed: URL; @@ -778,15 +796,21 @@ async function readRequestJson(req: IncomingMessage): Promise { } function parseRequestIds(value: unknown): string[] | undefined { - if (!Array.isArray(value)) { + if (value === undefined) { return undefined; } - const requestIds = value - .map((item) => (typeof item === "string" ? 
item.trim() : "")) - .filter((item) => item.length > 0); + if (!Array.isArray(value)) { + return []; + } - return requestIds.length > 0 ? Array.from(new Set(requestIds)) : undefined; + return Array.from( + new Set( + value + .map((item) => (typeof item === "string" ? item.trim() : "")) + .filter((item) => item.length > 0), + ), + ); } function writeJson( @@ -830,6 +854,125 @@ async function buildUpgradeHeaders(input: { }; } +async function loadTrustedReceiptTargets(input: { + configDir: string; + logger: Logger; +}): Promise { + const trustedReceiptTargets: TrustedReceiptTargets = { + origins: new Set(), + byAgentDid: new Map(), + }; + + const relayRuntimeConfigPath = join( + input.configDir, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + ); + let relayRuntimeRaw: string; + try { + relayRuntimeRaw = await readFile(relayRuntimeConfigPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return trustedReceiptTargets; + } + + input.logger.warn("connector.delivery_receipt.runtime_config_read_failed", { + relayRuntimeConfigPath, + reason: sanitizeErrorReason(error), + }); + return trustedReceiptTargets; + } + + let relayRuntimeParsed: unknown; + try { + relayRuntimeParsed = JSON.parse(relayRuntimeRaw); + } catch (error) { + input.logger.warn( + "connector.delivery_receipt.runtime_config_invalid_json", + { + relayRuntimeConfigPath, + reason: sanitizeErrorReason(error), + }, + ); + return trustedReceiptTargets; + } + + if (!isRecord(relayRuntimeParsed)) { + return trustedReceiptTargets; + } + + const relayTransformPeersPathRaw = + typeof relayRuntimeParsed.relayTransformPeersPath === "string" && + relayRuntimeParsed.relayTransformPeersPath.trim().length > 0 + ? relayRuntimeParsed.relayTransformPeersPath.trim() + : undefined; + if (!relayTransformPeersPathRaw) { + return trustedReceiptTargets; + } + + const relayTransformPeersPath = isAbsolute(relayTransformPeersPathRaw) + ? 
relayTransformPeersPathRaw + : join(input.configDir, relayTransformPeersPathRaw); + + let relayTransformPeersRaw: string; + try { + relayTransformPeersRaw = await readFile(relayTransformPeersPath, "utf8"); + } catch (error) { + input.logger.warn("connector.delivery_receipt.peers_snapshot_read_failed", { + relayTransformPeersPath, + reason: sanitizeErrorReason(error), + }); + return trustedReceiptTargets; + } + + let relayTransformPeersParsed: unknown; + try { + relayTransformPeersParsed = JSON.parse(relayTransformPeersRaw); + } catch (error) { + input.logger.warn( + "connector.delivery_receipt.peers_snapshot_invalid_json", + { + relayTransformPeersPath, + reason: sanitizeErrorReason(error), + }, + ); + return trustedReceiptTargets; + } + + if (!isRecord(relayTransformPeersParsed)) { + return trustedReceiptTargets; + } + + const peersValue = relayTransformPeersParsed.peers; + if (!isRecord(peersValue)) { + return trustedReceiptTargets; + } + + for (const peerValue of Object.values(peersValue)) { + if (!isRecord(peerValue)) { + continue; + } + + const agentDid = + typeof peerValue.did === "string" && peerValue.did.trim().length > 0 + ? 
peerValue.did.trim() + : undefined; + const origin = parseOptionalProxyOrigin(peerValue.proxyUrl); + if (!agentDid || !origin) { + continue; + } + + trustedReceiptTargets.origins.add(origin); + trustedReceiptTargets.byAgentDid.set(agentDid, origin); + } + + return trustedReceiptTargets; +} + export async function startConnectorRuntime( input: StartConnectorRuntimeInput, ): Promise { @@ -900,11 +1043,18 @@ export async function startConnectorRuntime( RELAY_DELIVERY_RECEIPTS_PATH.slice(1), `${toHttpOriginFromWebSocketUrl(wsParsed)}/`, ).toString(); + const defaultReceiptCallbackOrigin = new URL(defaultReceiptCallbackUrl) + .origin; const openclawBaseUrl = resolveOpenclawBaseUrl(input.openclawBaseUrl); const openclawHookPath = resolveOpenclawHookPath(input.openclawHookPath); const openclawHookToken = resolveOpenclawHookToken(input.openclawHookToken); const openclawHookUrl = toOpenclawHookUrl(openclawBaseUrl, openclawHookPath); const inboundReplayPolicy = loadInboundReplayPolicy(); + const trustedReceiptTargets = await loadTrustedReceiptTargets({ + configDir: input.configDir, + logger, + }); + trustedReceiptTargets.origins.add(defaultReceiptCallbackOrigin); const inboundInbox = createConnectorInboundInbox({ configDir: input.configDir, agentName: input.agentName, @@ -1146,6 +1296,8 @@ export async function startConnectorRuntime( const relayToPeer = async (request: OutboundRelayRequest): Promise => { await syncAuthFromDisk(); const peerUrl = new URL(request.peerProxyUrl); + trustedReceiptTargets.origins.add(peerUrl.origin); + trustedReceiptTargets.byAgentDid.set(request.peerDid, peerUrl.origin); const body = JSON.stringify(request.payload ?? 
{}); const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; @@ -1233,6 +1385,36 @@ export async function startConnectorRuntime( }): Promise => { await syncAuthFromDisk(); const receiptUrl = new URL(inputReceipt.replyTo); + if (receiptUrl.pathname !== RELAY_DELIVERY_RECEIPTS_PATH) { + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_INVALID_TARGET", + message: "Delivery receipt callback target is invalid", + status: 400, + }); + } + const expectedSenderOrigin = trustedReceiptTargets.byAgentDid.get( + inputReceipt.senderAgentDid, + ); + if ( + expectedSenderOrigin !== undefined && + receiptUrl.origin !== expectedSenderOrigin + ) { + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_UNTRUSTED_TARGET", + message: "Delivery receipt callback target is untrusted", + status: 400, + }); + } + if ( + expectedSenderOrigin === undefined && + !trustedReceiptTargets.origins.has(receiptUrl.origin) + ) { + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_UNTRUSTED_TARGET", + message: "Delivery receipt callback target is untrusted", + status: 400, + }); + } const body = JSON.stringify({ requestId: inputReceipt.requestId, senderAgentDid: inputReceipt.senderAgentDid, From f6662d0e3de0d5029dab6f937ae9859fd2825153 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 13:14:54 +0530 Subject: [PATCH 112/190] feat: add worktree-safe env sync for Codex worktrees --- .codex/environments/AGENTS.md | 21 +++ .codex/environments/environment.toml | 39 ++++++ .env.example | 55 ++++++++ AGENTS.md | 2 + README.md | 22 ++++ apps/cli/.env.example | 24 ++++ apps/openclaw-skill/.env.example | 7 + apps/proxy/.env.example | 4 +- apps/proxy/AGENTS.md | 1 + apps/registry/.env.example | 6 +- apps/registry/AGENTS.md | 1 + package.json | 3 +- scripts/env/sync-worktree-env.sh | 187 +++++++++++++++++++++++++++ 13 files changed, 367 insertions(+), 5 deletions(-) create mode 100644 .codex/environments/AGENTS.md create mode 100644 
.codex/environments/environment.toml create mode 100644 .env.example create mode 100644 apps/cli/.env.example create mode 100644 apps/openclaw-skill/.env.example create mode 100755 scripts/env/sync-worktree-env.sh diff --git a/.codex/environments/AGENTS.md b/.codex/environments/AGENTS.md new file mode 100644 index 0000000..e151619 --- /dev/null +++ b/.codex/environments/AGENTS.md @@ -0,0 +1,21 @@ +# AGENTS.md (.codex/environments) + +## Purpose +- Define local Codex environment setup for deterministic worktree onboarding. +- Keep environment bootstrap reproducible without committing secrets. + +## Rules +- Keep setup script idempotent and fail-fast when required shared env keys are missing. +- Keep `environment.toml` setup/actions aligned with workspace scripts in `package.json`. +- Use `scripts/env/sync-worktree-env.sh` as the single generator for local `.env` files. +- Do not commit secret-bearing `.env` files; only commit templates (`.env.example`). +- If env contract keys change, update these together in one change: + - `scripts/env/sync-worktree-env.sh` + - `.env.example` + - `apps/*/.env.example` + - `README.md` + - repository/app `AGENTS.md` files with env guidance + +## Validation +- `pnpm env:sync` should fail with a clear error when shared source is missing. +- `pnpm env:sync` should produce deterministic output for root/app env files. 
diff --git a/.codex/environments/environment.toml b/.codex/environments/environment.toml new file mode 100644 index 0000000..1451e12 --- /dev/null +++ b/.codex/environments/environment.toml @@ -0,0 +1,39 @@ +[setup_scripts] +default_script = "bash ./scripts/env/sync-worktree-env.sh" +default_script_macos = "bash ./scripts/env/sync-worktree-env.sh" +default_script_windows = "powershell -NoProfile -ExecutionPolicy Bypass -Command \"bash ./scripts/env/sync-worktree-env.sh\"" + +[[actions]] +name = "Sync Env" +icon = "gear" +script = "pnpm env:sync" +script_macos = "pnpm env:sync" +script_windows = "pnpm env:sync" + +[[actions]] +name = "Registry Local" +icon = "play" +script = "pnpm dev:registry:local" +script_macos = "pnpm dev:registry:local" +script_windows = "pnpm dev:registry:local" + +[[actions]] +name = "Proxy Local" +icon = "play" +script = "pnpm dev:proxy:local" +script_macos = "pnpm dev:proxy:local" +script_windows = "pnpm dev:proxy:local" + +[[actions]] +name = "Typecheck" +icon = "check" +script = "pnpm -r typecheck" +script_macos = "pnpm -r typecheck" +script_windows = "pnpm -r typecheck" + +[[actions]] +name = "Tests" +icon = "check" +script = "pnpm -r test" +script_macos = "pnpm -r test" +script_windows = "pnpm -r test" diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..198e4fa --- /dev/null +++ b/.env.example @@ -0,0 +1,55 @@ +# Clawdentity shared local env template +# +# Copy this file to ~/.clawdentity/worktree.env and set real values. 
+# Then run: pnpm env:sync +# +# Optional override for non-default source path: +# export CLAWDENTITY_SHARED_ENV_FILE=/absolute/path/to/worktree.env + +# Required keys +CLAWDENTITY_REGISTRY_URL=http://127.0.0.1:8788 +CLAWDENTITY_PROXY_URL=http://127.0.0.1:8787 +BOOTSTRAP_SECRET=replace-with-random-secret +REGISTRY_SIGNING_KEY=replace-with-base64url-ed25519-private-key +REGISTRY_SIGNING_KEYS=[{"kid":"reg-dev-key-1","alg":"EdDSA","crv":"Ed25519","x":"replace-with-base64url-ed25519-public-key","status":"active"}] +REGISTRY_INTERNAL_SERVICE_ID=replace-with-internal-service-id +REGISTRY_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret + +# Optional keys +APP_VERSION=local-dev +EVENT_BUS_BACKEND=memory +OPENCLAW_BASE_URL=http://127.0.0.1:18789 +INJECT_IDENTITY_INTO_MESSAGE=true + +# CLI/user profile overrides +# CLAWDENTITY_API_KEY=clw_pat_replace_me +# CLAWDENTITY_HUMAN_NAME=Your Name + +# Connector/OpenClaw optional overrides +# CLAWDENTITY_PROXY_WS_URL=ws://127.0.0.1:8787/v1/relay/connect +# CLAWDENTITY_CONNECTOR_BASE_URL=http://127.0.0.1:19400 +# CLAWDENTITY_CONNECTOR_OUTBOUND_PATH=/v1/outbound +# OPENCLAW_HOOK_PATH=/hooks/agent +# OPENCLAW_HOOK_TOKEN=replace-with-random-token +# OPENCLAW_GATEWAY_TOKEN=replace-with-openclaw-gateway-token +# OPENCLAW_HOME=~/.openclaw +# OPENCLAW_STATE_DIR=~/.openclaw +# OPENCLAW_CONFIG_PATH=~/.openclaw/openclaw.json +# CLAWDBOT_STATE_DIR=~/.clawdbot +# CLAWDBOT_CONFIG_PATH=~/.clawdbot/clawdbot.json +# OPENCLAW_GATEWAY_APPROVAL_COMMAND=openclaw + +# Proxy runtime tuning optional overrides +# CRL_REFRESH_INTERVAL_MS=300000 +# CRL_MAX_AGE_MS=900000 +# CRL_STALE_BEHAVIOR=fail-open +# AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE=60 +# AGENT_RATE_LIMIT_WINDOW_MS=60000 +# RELAY_QUEUE_MAX_MESSAGES_PER_AGENT=500 +# RELAY_QUEUE_TTL_SECONDS=3600 +# RELAY_RETRY_INITIAL_MS=1000 +# RELAY_RETRY_MAX_MS=30000 +# RELAY_RETRY_MAX_ATTEMPTS=25 +# RELAY_RETRY_JITTER_RATIO=0.2 +# RELAY_MAX_IN_FLIGHT_DELIVERIES=5 +# RELAY_MAX_FRAME_BYTES=1048576 diff 
--git a/AGENTS.md b/AGENTS.md index b001cc3..0f57ee3 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -50,6 +50,8 @@ - `--env dev` for development (Worker: `clawdentity-registry-dev`, D1: `clawdentity-db-dev`) - `--env production` for production (Worker: `clawdentity-registry`, D1: `clawdentity-db`) - **Local dev** uses `wrangler dev --env dev` with local SQLite. Override vars via per-worker `.env` files (for example `apps/registry/.env`). +- Worktree-safe local env bootstrap must use `scripts/env/sync-worktree-env.sh` with shared source `~/.clawdentity/worktree.env` (override with `CLAWDENTITY_SHARED_ENV_FILE`). +- Run `pnpm env:sync` after cloning or creating a worktree to generate root/app `.env` files deterministically. - Use `pnpm -F @clawdentity/registry run dev:local` (or root alias `pnpm dev:registry:local`) to apply local D1 migrations before starting dev server. - **One-touch deploy** scripts in `apps/registry/package.json`: - `deploy:dev` — migrates remote dev D1 + deploys dev Worker diff --git a/README.md b/README.md index 99f1f1b..fa23972 100644 --- a/README.md +++ b/README.md @@ -94,6 +94,28 @@ clawdentity openclaw doctor +## Worktree-Safe Env Setup + +Clawdentity supports deterministic env bootstrapping for Codex worktrees. + +1. Copy `/Users/dev/Workdir/clawdentity/.env.example` to `~/.clawdentity/worktree.env`. +2. Fill required keys in `~/.clawdentity/worktree.env`. +3. Run `pnpm env:sync` from repo root. + +`pnpm env:sync` generates local env files for root + apps using +`scripts/env/sync-worktree-env.sh`: + +- `/Users/dev/Workdir/clawdentity/.env` +- `/Users/dev/Workdir/clawdentity/apps/registry/.env` +- `/Users/dev/Workdir/clawdentity/apps/proxy/.env` +- `/Users/dev/Workdir/clawdentity/apps/cli/.env` +- `/Users/dev/Workdir/clawdentity/apps/openclaw-skill/.env` + +`pnpm env:sync` is authoritative and overwrites those generated files. 
+ +Codex app worktree setup is configured in +`/Users/dev/Workdir/clawdentity/.codex/environments/environment.toml` and runs the same sync script automatically. + ## Shared Tokens vs Clawdentity | Property | Shared Webhook Token | Clawdentity | diff --git a/apps/cli/.env.example b/apps/cli/.env.example new file mode 100644 index 0000000..4767d2c --- /dev/null +++ b/apps/cli/.env.example @@ -0,0 +1,24 @@ +# CLI local template +# Generated values are written by: scripts/env/sync-worktree-env.sh + +CLAWDENTITY_REGISTRY_URL=http://127.0.0.1:8788 +CLAWDENTITY_PROXY_URL=http://127.0.0.1:8787 + +# Optional profile/auth +# CLAWDENTITY_API_KEY=clw_pat_replace_me +# CLAWDENTITY_HUMAN_NAME=Your Name + +# Optional connector and OpenClaw runtime overrides +# CLAWDENTITY_PROXY_WS_URL=ws://127.0.0.1:8787/v1/relay/connect +# CLAWDENTITY_CONNECTOR_BASE_URL=http://127.0.0.1:19400 +# CLAWDENTITY_CONNECTOR_OUTBOUND_PATH=/v1/outbound +# OPENCLAW_BASE_URL=http://127.0.0.1:18789 +# OPENCLAW_HOOK_PATH=/hooks/agent +# OPENCLAW_HOOK_TOKEN=replace-with-random-token +# OPENCLAW_GATEWAY_TOKEN=replace-with-openclaw-gateway-token +# OPENCLAW_HOME=~/.openclaw +# OPENCLAW_STATE_DIR=~/.openclaw +# OPENCLAW_CONFIG_PATH=~/.openclaw/openclaw.json +# CLAWDBOT_STATE_DIR=~/.clawdbot +# CLAWDBOT_CONFIG_PATH=~/.clawdbot/clawdbot.json +# OPENCLAW_GATEWAY_APPROVAL_COMMAND=openclaw diff --git a/apps/openclaw-skill/.env.example b/apps/openclaw-skill/.env.example new file mode 100644 index 0000000..d86483d --- /dev/null +++ b/apps/openclaw-skill/.env.example @@ -0,0 +1,7 @@ +# OpenClaw skill local template +# Generated values are written by: scripts/env/sync-worktree-env.sh + +# Optional relay transform overrides +# CLAWDENTITY_CONNECTOR_BASE_URL=http://127.0.0.1:19400 +# CLAWDENTITY_CONNECTOR_OUTBOUND_PATH=/v1/outbound +# OPENCLAW_BASE_URL=http://127.0.0.1:18789 diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index 5f67499..b5595ff 100644 --- a/apps/proxy/.env.example +++ 
b/apps/proxy/.env.example @@ -1,5 +1,5 @@ # Proxy local/development template -# For local Wrangler development, copy values into apps/proxy/.env. +# Generated values are written by: scripts/env/sync-worktree-env.sh # OPENCLAW_BASE_URL is optional for relay-mode proxy operation. # OPENCLAW_BASE_URL=http://127.0.0.1:18789 @@ -29,3 +29,5 @@ INJECT_IDENTITY_INTO_MESSAGE=true # RELAY_RETRY_MAX_MS=30000 # RELAY_RETRY_MAX_ATTEMPTS=25 # RELAY_RETRY_JITTER_RATIO=0.2 +# RELAY_MAX_IN_FLIGHT_DELIVERIES=5 +# RELAY_MAX_FRAME_BYTES=1048576 diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 82514fe..29bcc32 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -23,6 +23,7 @@ - Keep `INJECT_IDENTITY_INTO_MESSAGE` explicit and default-on (`true`); disable only when operators need unchanged webhook `message` forwarding. - Keep OpenClaw base URL input (`OPENCLAW_BASE_URL`) optional for relay-mode startup. - Keep `.dev.vars` and `.env.example` synchronized when adding/changing proxy config fields (registry URL, optional OpenClaw base URL, and policy/rate-limit vars). +- Generate local `apps/proxy/.env` via `pnpm env:sync` (source `~/.clawdentity/worktree.env`) instead of manual edits. - Load env files with OpenClaw precedence and no overrides: - first `./.env` from the proxy working directory - then `$OPENCLAW_STATE_DIR/.env` (or default state dir: `~/.openclaw`) diff --git a/apps/registry/.env.example b/apps/registry/.env.example index 2bbbd2d..4c5b32a 100644 --- a/apps/registry/.env.example +++ b/apps/registry/.env.example @@ -1,6 +1,6 @@ # Registry local/development template -# For local Wrangler development, copy values into apps/registry/.env. 
-# For cloud deploys, set secrets with: +# Generated values are written by: scripts/env/sync-worktree-env.sh +# For cloud deploys, keep secrets in Wrangler: # wrangler secret put BOOTSTRAP_SECRET --env # wrangler secret put REGISTRY_SIGNING_KEY --env # wrangler secret put REGISTRY_SIGNING_KEYS --env @@ -12,7 +12,7 @@ EVENT_BUS_BACKEND=memory PROXY_URL=https://dev.proxy.clawdentity.com REGISTRY_ISSUER_URL=https://dev.registry.clawdentity.com -# Secrets (required at startup) +# Secrets (required at startup for non-test environments) BOOTSTRAP_SECRET=replace-with-random-secret REGISTRY_SIGNING_KEY=replace-with-base64url-ed25519-private-key REGISTRY_SIGNING_KEYS=[{"kid":"reg-key-1","alg":"EdDSA","crv":"Ed25519","x":"replace-with-base64url-ed25519-public-key","status":"active"}] diff --git a/apps/registry/AGENTS.md b/apps/registry/AGENTS.md index 97843bc..d82f600 100644 --- a/apps/registry/AGENTS.md +++ b/apps/registry/AGENTS.md @@ -26,6 +26,7 @@ - Keep Wrangler observability logging enabled (`observability.enabled=true`, `logs.enabled=true`, `invocation_logs=true`) so deploy/runtime failures are visible without ad-hoc debugging. - Keep `worker-configuration.d.ts` committed and regenerate with `CLOUDFLARE_LOAD_DEV_VARS_FROM_DOT_ENV=false wrangler types --env dev` (or `pnpm -F @clawdentity/registry run types:dev`) after `wrangler.jsonc` or binding changes. - Keep `.dev.vars` and `.env.example` synchronized when adding/changing runtime config fields (`ENVIRONMENT`, `APP_VERSION`, `PROXY_URL`, `EVENT_BUS_BACKEND`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). +- Generate local `apps/registry/.env` via `pnpm env:sync` (source `~/.clawdentity/worktree.env`) instead of manual edits. - Use memory event bus in `development` while no downstream consumers exist (`EVENT_BUS_BACKEND=memory`). - Keep production queue-backed (`EVENT_BUS_BACKEND=queue` + `EVENT_BUS_QUEUE`) until rollout policy changes. 
diff --git a/package.json b/package.json index d35bb4a..7ecbb65 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,8 @@ "dev:proxy": "pnpm -F @clawdentity/proxy run dev", "dev:proxy:dev": "pnpm -F @clawdentity/proxy run dev:dev", "dev:proxy:local": "pnpm -F @clawdentity/proxy run dev:local", - "dev:proxy:fresh": "pnpm -F @clawdentity/proxy run dev:fresh" + "dev:proxy:fresh": "pnpm -F @clawdentity/proxy run dev:fresh", + "env:sync": "bash ./scripts/env/sync-worktree-env.sh" }, "devDependencies": { "@biomejs/biome": "^2.3.14", diff --git a/scripts/env/sync-worktree-env.sh b/scripts/env/sync-worktree-env.sh new file mode 100755 index 0000000..fb38813 --- /dev/null +++ b/scripts/env/sync-worktree-env.sh @@ -0,0 +1,187 @@ +#!/usr/bin/env bash +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +SHARED_ENV_FILE="${CLAWDENTITY_SHARED_ENV_FILE:-$HOME/.clawdentity/worktree.env}" +if [[ "$SHARED_ENV_FILE" == ~/* ]]; then + SHARED_ENV_FILE="$HOME/${SHARED_ENV_FILE#~/}" +fi + +fail() { + printf '[env:sync] error: %s\n' "$*" >&2 + exit 1 +} + +trim() { + local value="$1" + value="${value#"${value%%[![:space:]]*}"}" + value="${value%"${value##*[![:space:]]}"}" + printf '%s' "$value" +} + +append_if_set() { + local file_path="$1" + local key="$2" + local value="${3:-}" + if [[ -n "$(trim "$value")" ]]; then + printf '%s=%s\n' "$key" "$value" >> "$file_path" + fi +} + +write_header() { + local file_path="$1" + cat > "$file_path" < 0 )); then + fail "missing required keys in $SHARED_ENV_FILE: ${missing_keys[*]}" +fi + +APP_VERSION="${APP_VERSION:-local-dev}" +EVENT_BUS_BACKEND="${EVENT_BUS_BACKEND:-memory}" +OPENCLAW_BASE_URL="${OPENCLAW_BASE_URL:-http://127.0.0.1:18789}" +INJECT_IDENTITY_INTO_MESSAGE="${INJECT_IDENTITY_INTO_MESSAGE:-true}" + +ROOT_ENV_PATH="$REPO_ROOT/.env" +REGISTRY_ENV_PATH="$REPO_ROOT/apps/registry/.env" +PROXY_ENV_PATH="$REPO_ROOT/apps/proxy/.env" 
+CLI_ENV_PATH="$REPO_ROOT/apps/cli/.env" +SKILL_ENV_PATH="$REPO_ROOT/apps/openclaw-skill/.env" + +write_header "$ROOT_ENV_PATH" +root_keys=( + "CLAWDENTITY_REGISTRY_URL" + "CLAWDENTITY_PROXY_URL" + "CLAWDENTITY_API_KEY" + "CLAWDENTITY_HUMAN_NAME" + "CLAWDENTITY_PROXY_WS_URL" + "CLAWDENTITY_CONNECTOR_BASE_URL" + "CLAWDENTITY_CONNECTOR_OUTBOUND_PATH" + "OPENCLAW_BASE_URL" + "OPENCLAW_HOOK_PATH" + "OPENCLAW_HOOK_TOKEN" + "OPENCLAW_GATEWAY_TOKEN" + "OPENCLAW_HOME" + "OPENCLAW_STATE_DIR" + "OPENCLAW_CONFIG_PATH" + "CLAWDBOT_STATE_DIR" + "CLAWDBOT_CONFIG_PATH" + "OPENCLAW_GATEWAY_APPROVAL_COMMAND" +) +for key in "${root_keys[@]}"; do + append_if_set "$ROOT_ENV_PATH" "$key" "${!key:-}" +done + +write_header "$REGISTRY_ENV_PATH" +append_if_set "$REGISTRY_ENV_PATH" "ENVIRONMENT" "development" +append_if_set "$REGISTRY_ENV_PATH" "APP_VERSION" "$APP_VERSION" +append_if_set "$REGISTRY_ENV_PATH" "EVENT_BUS_BACKEND" "$EVENT_BUS_BACKEND" +append_if_set "$REGISTRY_ENV_PATH" "PROXY_URL" "$CLAWDENTITY_PROXY_URL" +append_if_set "$REGISTRY_ENV_PATH" "REGISTRY_ISSUER_URL" "$CLAWDENTITY_REGISTRY_URL" +append_if_set "$REGISTRY_ENV_PATH" "BOOTSTRAP_SECRET" "$BOOTSTRAP_SECRET" +append_if_set "$REGISTRY_ENV_PATH" "REGISTRY_SIGNING_KEY" "$REGISTRY_SIGNING_KEY" +append_if_set "$REGISTRY_ENV_PATH" "REGISTRY_SIGNING_KEYS" "$REGISTRY_SIGNING_KEYS" + +write_header "$PROXY_ENV_PATH" +append_if_set "$PROXY_ENV_PATH" "ENVIRONMENT" "development" +append_if_set "$PROXY_ENV_PATH" "APP_VERSION" "$APP_VERSION" +append_if_set "$PROXY_ENV_PATH" "REGISTRY_URL" "$CLAWDENTITY_REGISTRY_URL" +append_if_set "$PROXY_ENV_PATH" "OPENCLAW_BASE_URL" "$OPENCLAW_BASE_URL" +append_if_set "$PROXY_ENV_PATH" "REGISTRY_INTERNAL_SERVICE_ID" "$REGISTRY_INTERNAL_SERVICE_ID" +append_if_set "$PROXY_ENV_PATH" "REGISTRY_INTERNAL_SERVICE_SECRET" "$REGISTRY_INTERNAL_SERVICE_SECRET" +append_if_set "$PROXY_ENV_PATH" "INJECT_IDENTITY_INTO_MESSAGE" "$INJECT_IDENTITY_INTO_MESSAGE" + +proxy_optional_keys=( + "CRL_REFRESH_INTERVAL_MS" + 
"CRL_MAX_AGE_MS" + "CRL_STALE_BEHAVIOR" + "AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE" + "AGENT_RATE_LIMIT_WINDOW_MS" + "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT" + "RELAY_QUEUE_TTL_SECONDS" + "RELAY_RETRY_INITIAL_MS" + "RELAY_RETRY_MAX_MS" + "RELAY_RETRY_MAX_ATTEMPTS" + "RELAY_RETRY_JITTER_RATIO" + "RELAY_MAX_IN_FLIGHT_DELIVERIES" + "RELAY_MAX_FRAME_BYTES" +) +for key in "${proxy_optional_keys[@]}"; do + append_if_set "$PROXY_ENV_PATH" "$key" "${!key:-}" +done + +write_header "$CLI_ENV_PATH" +cli_keys=( + "CLAWDENTITY_REGISTRY_URL" + "CLAWDENTITY_PROXY_URL" + "CLAWDENTITY_API_KEY" + "CLAWDENTITY_HUMAN_NAME" + "CLAWDENTITY_PROXY_WS_URL" + "CLAWDENTITY_CONNECTOR_BASE_URL" + "CLAWDENTITY_CONNECTOR_OUTBOUND_PATH" + "OPENCLAW_BASE_URL" + "OPENCLAW_HOOK_PATH" + "OPENCLAW_HOOK_TOKEN" + "OPENCLAW_GATEWAY_TOKEN" + "OPENCLAW_HOME" + "OPENCLAW_STATE_DIR" + "OPENCLAW_CONFIG_PATH" + "CLAWDBOT_STATE_DIR" + "CLAWDBOT_CONFIG_PATH" + "OPENCLAW_GATEWAY_APPROVAL_COMMAND" +) +for key in "${cli_keys[@]}"; do + append_if_set "$CLI_ENV_PATH" "$key" "${!key:-}" +done + +write_header "$SKILL_ENV_PATH" +skill_keys=( + "CLAWDENTITY_CONNECTOR_BASE_URL" + "CLAWDENTITY_CONNECTOR_OUTBOUND_PATH" + "OPENCLAW_BASE_URL" +) +for key in "${skill_keys[@]}"; do + append_if_set "$SKILL_ENV_PATH" "$key" "${!key:-}" +done + +chmod 600 "$ROOT_ENV_PATH" "$REGISTRY_ENV_PATH" "$PROXY_ENV_PATH" "$CLI_ENV_PATH" "$SKILL_ENV_PATH" + +printf '[env:sync] updated:\n' +printf ' - %s\n' "$ROOT_ENV_PATH" +printf ' - %s\n' "$REGISTRY_ENV_PATH" +printf ' - %s\n' "$PROXY_ENV_PATH" +printf ' - %s\n' "$CLI_ENV_PATH" +printf ' - %s\n' "$SKILL_ENV_PATH" From 4cdf4a0693f13c2cc691a729cfc5f8058243409e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 13:17:25 +0530 Subject: [PATCH 113/190] feat(connector): harden openclaw gateway liveness and token rotation --- ARCHITECTURE.md | 18 + README.md | 1 + packages/connector/AGENTS.md | 5 + packages/connector/src/AGENTS.md | 6 + packages/connector/src/client.test.ts | 60 ++++ 
packages/connector/src/client.ts | 5 + packages/connector/src/constants.ts | 6 + packages/connector/src/index.ts | 6 + packages/connector/src/runtime.test.ts | 479 +++++++++++++++++++++++++ packages/connector/src/runtime.ts | 389 +++++++++++++++++++- 10 files changed, 961 insertions(+), 14 deletions(-) create mode 100644 packages/connector/src/runtime.test.ts diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index d77ad41..6712abf 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -382,6 +382,24 @@ Bob's OpenClaw relay-to-peer.ts Alice's Proxy Alice's Ope - `clawdentity connector service uninstall ` to remove connector autostart service. - `clawdentity skill install` to install/update OpenClaw relay skill artifacts under `~/.openclaw`. +#### Connector Local OpenClaw Resilience + +- Runtime probes local OpenClaw base URL reachability on an interval: + - `CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS` (default `10000`) + - `CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS` (default `3000`) +- While probe state is down, inbound replay skips direct hook delivery attempts and keeps messages pending in the connector inbox. +- Runtime replay retries OpenClaw hook delivery with bounded backoff: + - `CONNECTOR_RUNTIME_REPLAY_MAX_ATTEMPTS` (default `3`) + - `CONNECTOR_RUNTIME_REPLAY_RETRY_INITIAL_DELAY_MS` (default `2000`) + - `CONNECTOR_RUNTIME_REPLAY_RETRY_MAX_DELAY_MS` (default `8000`) + - `CONNECTOR_RUNTIME_REPLAY_RETRY_BACKOFF_FACTOR` (default `2`) +- Hook `401/403` responses are treated as auth-rotation signals: connector re-reads `~/.clawdentity/openclaw-relay.json` and retries. +- Connector forwards structured identity headers to local OpenClaw hooks: + - `x-clawdentity-agent-did` + - `x-clawdentity-to-agent-did` + - `x-clawdentity-verified` +- Connector `/v1/status` now surfaces `inbound.openclawGateway` alongside `inbound.openclawHook`. 
+ ### 5) Onboarding and Control Model - Handled by: `apps/registry`, `apps/cli` diff --git a/README.md b/README.md index 99f1f1b..1c17fe8 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,7 @@ Clawdentity works **with** OpenClaw (not a fork) and adds the missing identity l - **Per-agent access control** — trust policies, rate limits, and replay protection at the proxy - **OpenClaw stays private** — only the proxy is public; your OpenClaw instance stays on localhost - **QR-code pairing** — one scan to approve trust between two agents +- **Resilient local relay delivery** — connector probes local OpenClaw liveness and recovers from hook-token rotation without dropping inbound messages ## How It Works diff --git a/packages/connector/AGENTS.md b/packages/connector/AGENTS.md index 691843e..d650daa 100644 --- a/packages/connector/AGENTS.md +++ b/packages/connector/AGENTS.md @@ -13,10 +13,15 @@ - Keep local inbox storage portable and inspectable (`index.json` + `events.jsonl`) with atomic index writes (`.tmp` + rename); do not introduce runtime-specific persistence dependencies for connector inbox state. - Keep replay behavior restart-safe: on runtime boot, replay pending inbox entries in background before relying on new WebSocket traffic. - Keep local OpenClaw replay backoff bounded and deterministic (`CONNECTOR_INBOUND_RETRY_*` / `CONNECTOR_INBOUND_REPLAY_*`) with structured logging for replay success/failure. +- Keep local OpenClaw replay delivery liveness-aware: probe `OPENCLAW_BASE_URL` on a fixed interval (`CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS` / `CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS`) and skip replay delivery attempts while the gateway is known down. +- Keep runtime replay delivery retries explicit and bounded (`CONNECTOR_RUNTIME_REPLAY_*`) and apply retries only for retryable OpenClaw errors. 
- Refresh agent access credentials at runtime startup when cached access tokens are missing or near expiry before attempting relay WebSocket connection, while persisting refreshed auth atomically to `registry-auth.json`. - Sync `registry-auth.json` from disk before runtime auth refresh/retry decisions so external `agent auth refresh` updates are picked up without requiring a connector restart. - Accept base proxy websocket URLs (`ws://host:port` / `wss://host`) and normalize them to relay connect path (`/v1/relay/connect`) before connector dial; avoid requiring callers to know the relay path details. - Regenerate relay WebSocket auth headers (timestamp/nonce/signature) on every reconnect attempt; never reuse a previously-signed header set across retries. +- Keep OpenClaw hook token rotation resilient: re-read `openclaw-relay.json` before replay batches and treat OpenClaw hook `401/403` as retryable auth-rejection signals that trigger token refresh + retry. +- Keep connector-to-OpenClaw metadata explicit by forwarding structured identity headers (`x-clawdentity-agent-did`, `x-clawdentity-to-agent-did`, `x-clawdentity-verified`) alongside the payload. +- Keep connector shutdown fast and deterministic: abort in-flight OpenClaw hook requests on runtime stop instead of waiting for full request timeout. ## Testing Rules - `src/frames.test.ts` must cover roundtrip serialization and explicit invalid-frame failures. diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 6739fc9..3ecf424 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -21,11 +21,14 @@ ## Replay/Health Rules - Keep replay configuration environment-driven via `CONNECTOR_INBOUND_*` vars with safe defaults from `constants.ts`. +- Keep OpenClaw liveness probing environment-driven via `CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS` and `CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS`; replay should skip direct hook delivery while probe state is down. 
+- Keep runtime replay retry bounds environment-driven via `CONNECTOR_RUNTIME_REPLAY_*`; only retry retryable OpenClaw hook failures. - `/v1/status` must use the nested contract: - `websocket.{connected,connectAttempts,reconnectCount,uptimeMs,lastConnectedAt}` - `inbound.pending` - `inbound.deadLetter` - `inbound.replay` + - `inbound.openclawGateway` - `inbound.openclawHook` - `metrics.{heartbeat,inboundDelivery,outboundQueue}` - On inbox/status read failures, return explicit structured errors instead of crashing runtime. @@ -45,6 +48,9 @@ - Handle `unexpected-response` status codes from ws upgrade failures; for `401`, trigger `onAuthUpgradeRejected` and allow one immediate reconnect before normal backoff. - Keep outbound enqueue buffering durable when configured via `outboundQueuePersistence`; load once before replaying queued frames and persist on enqueue/dequeue transitions. - Keep websocket/client metrics in `ConnectorClient` (`getMetricsSnapshot`) so runtime health does not recompute transport stats ad hoc. +- Keep local OpenClaw hook auth rejection (`401/403`) retryable in connector delivery paths so token rotation windows do not permanently fail deliveries. +- Keep structured identity headers on connector hook delivery requests (`x-clawdentity-agent-did`, `x-clawdentity-to-agent-did`, `x-clawdentity-verified`) in both runtime replay and direct client-delivery modes. +- Keep runtime stop behavior fail-fast by aborting in-flight local OpenClaw hook requests via shared runtime shutdown signals. ## Testing Rules - `inbound-inbox.test.ts` must cover persistence, dedupe, cap enforcement, replay bookkeeping, dead-letter thresholding, dead-letter replay, and dead-letter purge transitions. 
diff --git a/packages/connector/src/client.test.ts b/packages/connector/src/client.test.ts index 0aeafae..7d89b91 100644 --- a/packages/connector/src/client.test.ts +++ b/packages/connector/src/client.test.ts @@ -178,6 +178,9 @@ describe("ConnectorClient", () => { expect(requestInit?.method).toBe("POST"); expect(requestInit?.headers).toMatchObject({ "content-type": "application/json", + "x-clawdentity-agent-did": expect.stringMatching(/^did:claw:agent:/), + "x-clawdentity-to-agent-did": expect.stringMatching(/^did:claw:agent:/), + "x-clawdentity-verified": "true", "x-openclaw-token": "hook-secret", "x-request-id": deliverId, }); @@ -354,6 +357,63 @@ describe("ConnectorClient", () => { client.disconnect(); }); + it("retries when local openclaw hook auth rejects with 401", async () => { + const sockets: MockWebSocket[] = []; + const fetchMock = vi + .fn() + .mockResolvedValueOnce(new Response("unauthorized", { status: 401 })) + .mockResolvedValueOnce(new Response("ok", { status: 200 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + openclawDeliverTimeoutMs: 100, + openclawDeliverRetryInitialDelayMs: 1, + openclawDeliverRetryMaxDelayMs: 2, + openclawDeliverRetryBudgetMs: 500, + webSocketFactory: (url) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const ack = 
parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + it("reconnects when heartbeat acknowledgement times out", async () => { vi.useFakeTimers(); diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index 096e9cf..c53a973 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -1222,6 +1222,9 @@ export class ConnectorClient { const headers: Record = { "content-type": "application/json", + "x-clawdentity-agent-did": frame.fromAgentDid, + "x-clawdentity-to-agent-did": frame.toAgentDid, + "x-clawdentity-verified": "true", "x-request-id": frame.id, }; @@ -1241,6 +1244,8 @@ export class ConnectorClient { throw new LocalOpenclawDeliveryError({ message: `Local OpenClaw hook rejected payload with status ${response.status}`, retryable: + response.status === 401 || + response.status === 403 || response.status >= 500 || response.status === 404 || response.status === 429, diff --git a/packages/connector/src/constants.ts b/packages/connector/src/constants.ts index 9efacc4..96c91af 100644 --- a/packages/connector/src/constants.ts +++ b/packages/connector/src/constants.ts @@ -34,6 +34,12 @@ export const DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR = 2; export const DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES = 10 * 1024 * 1024; export const DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES = 5; export const DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS = 5; +export const DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = 10_000; +export const DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS = 3_000; +export const DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS = 3; +export const DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS = 2_000; +export const 
DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS = 8_000; +export const DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR = 2; export const AGENT_ACCESS_HEADER = "x-claw-agent-access"; diff --git a/packages/connector/src/index.ts b/packages/connector/src/index.ts index 5cf2144..9871422 100644 --- a/packages/connector/src/index.ts +++ b/packages/connector/src/index.ts @@ -23,7 +23,13 @@ export { DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, + DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, + DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, DEFAULT_CONNECTOR_STATUS_PATH, DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS, DEFAULT_HEARTBEAT_INTERVAL_MS, diff --git a/packages/connector/src/runtime.test.ts b/packages/connector/src/runtime.test.ts new file mode 100644 index 0000000..a2e9f55 --- /dev/null +++ b/packages/connector/src/runtime.test.ts @@ -0,0 +1,479 @@ +import { randomBytes } from "node:crypto"; +import { mkdirSync, mkdtempSync, rmSync } from "node:fs"; +import { writeFile } from "node:fs/promises"; +import { createServer } from "node:http"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { afterEach, describe, expect, it, vi } from "vitest"; +import { WebSocketServer } from "ws"; +import { parseFrame, serializeFrame } from "./frames.js"; +import { startConnectorRuntime } from "./runtime.js"; + +type Sandbox = { + cleanup: () => void; + rootDir: string; +}; + +type WsHarness = { + cleanup: () => Promise; + sendDeliverFrame: (input: { + payload: unknown; + requestId: string; + 
fromAgentDid?: string; + toAgentDid?: string; + }) => Promise; + waitForDeliverAck: (requestId: string) => Promise; + wsUrl: string; +}; + +const ENV_KEYS = [ + "CONNECTOR_INBOUND_REPLAY_INTERVAL_MS", + "CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS", + "CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS", + "CONNECTOR_RUNTIME_REPLAY_MAX_ATTEMPTS", + "CONNECTOR_RUNTIME_REPLAY_RETRY_INITIAL_DELAY_MS", + "CONNECTOR_RUNTIME_REPLAY_RETRY_MAX_DELAY_MS", +] as const; + +function createSandbox(): Sandbox { + const rootDir = mkdtempSync(join(tmpdir(), "clawdentity-connector-runtime-")); + mkdirSync(join(rootDir, "agents", "alpha"), { recursive: true }); + + return { + rootDir, + cleanup: () => { + rmSync(rootDir, { force: true, recursive: true }); + }, + }; +} + +async function findAvailablePort(): Promise { + return await new Promise((resolve, reject) => { + const server = createServer(); + server.once("error", reject); + server.listen(0, "127.0.0.1", () => { + const address = server.address(); + if (address === null || typeof address === "string") { + reject(new Error("Unable to allocate test port")); + return; + } + + server.close((error) => { + if (error) { + reject(error); + return; + } + resolve(address.port); + }); + }); + }); +} + +async function createWsHarness(port: number): Promise { + const wss = new WebSocketServer({ + host: "127.0.0.1", + path: "/v1/relay/connect", + port, + }); + + const frames: unknown[] = []; + let socket: import("ws").WebSocket | undefined; + + const connectedPromise = new Promise((resolve) => { + wss.on("connection", (ws) => { + socket = ws; + ws.on("message", (payload) => { + const text = payload.toString(); + frames.push(parseFrame(text)); + }); + resolve(); + }); + }); + + const waitForDeliverAck = async (requestId: string): Promise => { + await vi.waitFor(() => { + expect( + frames.some((frame) => { + if (!frame || typeof frame !== "object") { + return false; + } + const typed = frame as { + ackId?: string; + accepted?: boolean; + type?: string; + }; + 
return ( + typed.type === "deliver_ack" && + typed.ackId === requestId && + typed.accepted === true + ); + }), + ).toBe(true); + }); + }; + + const sendDeliverFrame = async (input: { + payload: unknown; + requestId: string; + fromAgentDid?: string; + toAgentDid?: string; + }): Promise => { + await connectedPromise; + if (socket === undefined) { + throw new Error("WebSocket connection was not established"); + } + + socket.send( + serializeFrame({ + v: 1, + type: "deliver", + id: input.requestId, + ts: "2026-02-20T00:00:00.000Z", + fromAgentDid: input.fromAgentDid ?? makeAgentDid(generateUlid(1)), + toAgentDid: input.toAgentDid ?? makeAgentDid(generateUlid(2)), + payload: input.payload, + }), + ); + }; + + return { + wsUrl: `ws://127.0.0.1:${port}/v1/relay/connect`, + sendDeliverFrame, + waitForDeliverAck, + cleanup: async () => { + await new Promise((resolve) => { + wss.close(() => resolve()); + }); + }, + }; +} + +function createRuntimeCredentials() { + return { + agentDid: makeAgentDid(generateUlid(100)), + ait: "test-ait", + secretKey: Buffer.from(randomBytes(32)).toString("base64url"), + accessToken: "access-token", + accessExpiresAt: "2100-01-01T00:00:00.000Z", + refreshToken: "refresh-token", + refreshExpiresAt: "2100-01-01T00:00:00.000Z", + tokenType: "Bearer" as const, + }; +} + +async function writeRelayRuntimeConfig(configDir: string, token: string) { + await writeFile( + join(configDir, "openclaw-relay.json"), + `${JSON.stringify({ openclawHookToken: token }, null, 2)}\n`, + "utf8", + ); +} + +async function readConnectorStatus(outboundUrl: string): Promise { + const statusUrl = new URL("/v1/status", outboundUrl).toString(); + const response = await fetch(statusUrl); + expect(response.status).toBe(200); + return await response.json(); +} + +afterEach(() => { + vi.restoreAllMocks(); + for (const key of ENV_KEYS) { + delete process.env[key]; + } +}); + +describe("startConnectorRuntime", () => { + it("skips replay while gateway probe is down and resumes 
after recovery", async () => { + process.env.CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = "20"; + process.env.CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = "25"; + process.env.CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS = "20"; + + const sandbox = createSandbox(); + const wsPort = await findAvailablePort(); + const wsHarness = await createWsHarness(wsPort); + const outboundPort = await findAvailablePort(); + const openclawBaseUrl = "http://127.0.0.1:39101"; + const openclawHookUrl = `${openclawBaseUrl}/hooks/agent`; + let probeReachable = false; + let hookPostCount = 0; + + const fetchMock = vi.fn(async (input, init) => { + const url = input instanceof URL ? input.toString() : String(input); + const method = init?.method ?? "GET"; + + if (method === "GET" && url === openclawBaseUrl) { + if (!probeReachable) { + throw new Error("connect ECONNREFUSED"); + } + return new Response("ok", { status: 200 }); + } + + if (method === "POST" && url === openclawHookUrl) { + hookPostCount += 1; + return new Response("ok", { status: 200 }); + } + + throw new Error(`Unexpected fetch call: ${method} ${url}`); + }); + + const runtime = await startConnectorRuntime({ + agentName: "alpha", + configDir: sandbox.rootDir, + credentials: createRuntimeCredentials(), + fetchImpl: fetchMock, + openclawBaseUrl, + outboundBaseUrl: `http://127.0.0.1:${outboundPort}`, + proxyWebsocketUrl: wsHarness.wsUrl, + registryUrl: "https://registry.example.test", + }); + + try { + const requestId = generateUlid(200); + await wsHarness.sendDeliverFrame({ + requestId, + payload: { message: "queued while gateway down" }, + }); + await wsHarness.waitForDeliverAck(requestId); + + await vi.waitFor(async () => { + const status = (await readConnectorStatus(runtime.outboundUrl)) as { + inbound?: { + openclawGateway?: { reachable?: boolean }; + pending?: { pendingCount?: number }; + }; + }; + expect(status.inbound?.openclawGateway?.reachable).toBe(false); + expect(status.inbound?.pending?.pendingCount).toBe(1); + }); + + 
expect(hookPostCount).toBe(0); + + probeReachable = true; + await vi.waitFor(async () => { + const status = (await readConnectorStatus(runtime.outboundUrl)) as { + inbound?: { + openclawGateway?: { reachable?: boolean }; + pending?: { pendingCount?: number }; + }; + }; + expect(status.inbound?.openclawGateway?.reachable).toBe(true); + expect(status.inbound?.pending?.pendingCount).toBe(0); + }); + expect(hookPostCount).toBe(1); + } finally { + await runtime.stop(); + await wsHarness.cleanup(); + sandbox.cleanup(); + } + }); + + it("refreshes hook token from relay runtime config after hook 401", async () => { + process.env.CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = "20"; + process.env.CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = "25"; + process.env.CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS = "20"; + + const sandbox = createSandbox(); + await writeRelayRuntimeConfig(sandbox.rootDir, "token-a"); + const wsPort = await findAvailablePort(); + const wsHarness = await createWsHarness(wsPort); + const outboundPort = await findAvailablePort(); + const openclawBaseUrl = "http://127.0.0.1:39102"; + const openclawHookUrl = `${openclawBaseUrl}/hooks/agent`; + const postTokens: string[] = []; + + const fetchMock = vi.fn(async (input, init) => { + const url = input instanceof URL ? input.toString() : String(input); + const method = init?.method ?? "GET"; + + if (method === "GET" && url === openclawBaseUrl) { + return new Response("ok", { status: 200 }); + } + + if (method === "POST" && url === openclawHookUrl) { + const headers = new Headers(init?.headers); + const token = headers.get("x-openclaw-token") ?? 
""; + postTokens.push(token); + if (postTokens.length === 1) { + await writeRelayRuntimeConfig(sandbox.rootDir, "token-b"); + return new Response("unauthorized", { status: 401 }); + } + return new Response("ok", { status: 200 }); + } + + throw new Error(`Unexpected fetch call: ${method} ${url}`); + }); + + const runtime = await startConnectorRuntime({ + agentName: "alpha", + configDir: sandbox.rootDir, + credentials: createRuntimeCredentials(), + fetchImpl: fetchMock, + openclawBaseUrl, + outboundBaseUrl: `http://127.0.0.1:${outboundPort}`, + proxyWebsocketUrl: wsHarness.wsUrl, + registryUrl: "https://registry.example.test", + }); + + try { + const requestId = generateUlid(201); + await wsHarness.sendDeliverFrame({ + requestId, + payload: { message: "token rotation flow" }, + }); + await wsHarness.waitForDeliverAck(requestId); + + await vi.waitFor(async () => { + const status = (await readConnectorStatus(runtime.outboundUrl)) as { + inbound?: { pending?: { pendingCount?: number } }; + }; + expect(status.inbound?.pending?.pendingCount).toBe(0); + }); + expect(postTokens).toEqual(["token-a", "token-b"]); + } finally { + await runtime.stop(); + await wsHarness.cleanup(); + sandbox.cleanup(); + } + }); + + it("retries replay delivery for transient hook failures", async () => { + process.env.CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = "20"; + process.env.CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = "25"; + process.env.CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS = "20"; + process.env.CONNECTOR_RUNTIME_REPLAY_MAX_ATTEMPTS = "3"; + process.env.CONNECTOR_RUNTIME_REPLAY_RETRY_INITIAL_DELAY_MS = "5"; + process.env.CONNECTOR_RUNTIME_REPLAY_RETRY_MAX_DELAY_MS = "5"; + + const sandbox = createSandbox(); + const wsPort = await findAvailablePort(); + const wsHarness = await createWsHarness(wsPort); + const outboundPort = await findAvailablePort(); + const openclawBaseUrl = "http://127.0.0.1:39103"; + const openclawHookUrl = `${openclawBaseUrl}/hooks/agent`; + let hookPostCount = 0; + + const 
fetchMock = vi.fn(async (input, init) => { + const url = input instanceof URL ? input.toString() : String(input); + const method = init?.method ?? "GET"; + + if (method === "GET" && url === openclawBaseUrl) { + return new Response("ok", { status: 200 }); + } + + if (method === "POST" && url === openclawHookUrl) { + hookPostCount += 1; + if (hookPostCount < 3) { + return new Response("temporary failure", { status: 500 }); + } + return new Response("ok", { status: 200 }); + } + + throw new Error(`Unexpected fetch call: ${method} ${url}`); + }); + + const runtime = await startConnectorRuntime({ + agentName: "alpha", + configDir: sandbox.rootDir, + credentials: createRuntimeCredentials(), + fetchImpl: fetchMock, + openclawBaseUrl, + outboundBaseUrl: `http://127.0.0.1:${outboundPort}`, + proxyWebsocketUrl: wsHarness.wsUrl, + registryUrl: "https://registry.example.test", + }); + + try { + const requestId = generateUlid(202); + await wsHarness.sendDeliverFrame({ + requestId, + payload: { message: "retry flow" }, + }); + await wsHarness.waitForDeliverAck(requestId); + + await vi.waitFor(async () => { + const status = (await readConnectorStatus(runtime.outboundUrl)) as { + inbound?: { pending?: { pendingCount?: number } }; + }; + expect(status.inbound?.pending?.pendingCount).toBe(0); + }); + expect(hookPostCount).toBe(3); + } finally { + await runtime.stop(); + await wsHarness.cleanup(); + sandbox.cleanup(); + } + }); + + it("aborts in-flight hook delivery when runtime stops", async () => { + process.env.CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = "20"; + process.env.CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = "25"; + process.env.CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS = "20"; + + const sandbox = createSandbox(); + const wsPort = await findAvailablePort(); + const wsHarness = await createWsHarness(wsPort); + const outboundPort = await findAvailablePort(); + const openclawBaseUrl = "http://127.0.0.1:39104"; + const openclawHookUrl = `${openclawBaseUrl}/hooks/agent`; + let 
hookPostStartedResolve: (() => void) | undefined; + const hookPostStarted = new Promise((resolve) => { + hookPostStartedResolve = resolve; + }); + + const fetchMock = vi.fn(async (input, init) => { + const url = input instanceof URL ? input.toString() : String(input); + const method = init?.method ?? "GET"; + + if (method === "GET" && url === openclawBaseUrl) { + return new Response("ok", { status: 200 }); + } + + if (method === "POST" && url === openclawHookUrl) { + hookPostStartedResolve?.(); + const signal = init?.signal; + return await new Promise((_resolve, reject) => { + signal?.addEventListener( + "abort", + () => { + reject(new DOMException("Aborted", "AbortError")); + }, + { once: true }, + ); + }); + } + + throw new Error(`Unexpected fetch call: ${method} ${url}`); + }); + + const runtime = await startConnectorRuntime({ + agentName: "alpha", + configDir: sandbox.rootDir, + credentials: createRuntimeCredentials(), + fetchImpl: fetchMock, + openclawBaseUrl, + outboundBaseUrl: `http://127.0.0.1:${outboundPort}`, + proxyWebsocketUrl: wsHarness.wsUrl, + registryUrl: "https://registry.example.test", + }); + + try { + const requestId = generateUlid(203); + await wsHarness.sendDeliverFrame({ + requestId, + payload: { message: "shutdown flow" }, + }); + await wsHarness.waitForDeliverAck(requestId); + + await hookPostStarted; + const startedAt = Date.now(); + await runtime.stop(); + const elapsedMs = Date.now() - startedAt; + expect(elapsedMs).toBeLessThan(3_000); + } finally { + await wsHarness.cleanup(); + sandbox.cleanup(); + } + }); +}); diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index 824c0d2..3749dcc 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -46,7 +46,13 @@ import { DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, + DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, + 
DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, DEFAULT_CONNECTOR_STATUS_PATH, DEFAULT_OPENCLAW_BASE_URL, DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, @@ -276,11 +282,17 @@ function sanitizeErrorReason(error: unknown): string { } class LocalOpenclawDeliveryError extends Error { + readonly code?: "HOOK_AUTH_REJECTED" | "RUNTIME_STOPPING"; readonly retryable: boolean; - constructor(input: { message: string; retryable: boolean }) { + constructor(input: { + code?: "HOOK_AUTH_REJECTED" | "RUNTIME_STOPPING"; + message: string; + retryable: boolean; + }) { super(input.message); this.name = "LocalOpenclawDeliveryError"; + this.code = input.code; this.retryable = input.retryable; } } @@ -296,6 +308,15 @@ type InboundReplayPolicy = { retryBackoffFactor: number; retryInitialDelayMs: number; retryMaxDelayMs: number; + runtimeReplayMaxAttempts: number; + runtimeReplayRetryBackoffFactor: number; + runtimeReplayRetryInitialDelayMs: number; + runtimeReplayRetryMaxDelayMs: number; +}; + +type OpenclawProbePolicy = { + intervalMs: number; + timeoutMs: number; }; type InboundReplayStatus = { @@ -311,6 +332,13 @@ type InboundReplayView = { lastReplayError?: string; snapshot: ConnectorInboundInboxSnapshot; replayerActive: boolean; + openclawGateway: { + lastCheckedAt?: string; + lastFailureReason?: string; + lastSuccessAt?: string; + reachable: boolean; + url: string; + }; openclawHook: { lastAttemptAt?: string; lastAttemptStatus?: "ok" | "failed"; @@ -318,10 +346,20 @@ type InboundReplayView = { }; }; +type OpenclawGatewayProbeStatus = { + lastCheckedAt?: string; + lastFailureReason?: string; + lastSuccessAt?: string; + reachable: boolean; +}; + function loadInboundReplayPolicy(): InboundReplayPolicy { const 
retryBackoffFactor = Number.parseFloat( process.env.CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR ?? "", ); + const runtimeReplayRetryBackoffFactor = Number.parseFloat( + process.env.CONNECTOR_RUNTIME_REPLAY_RETRY_BACKOFF_FACTOR ?? "", + ); return { deadLetterNonRetryableMaxAttempts: parsePositiveIntEnv( @@ -364,6 +402,36 @@ function loadInboundReplayPolicy(): InboundReplayPolicy { Number.isFinite(retryBackoffFactor) && retryBackoffFactor >= 1 ? retryBackoffFactor : DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, + runtimeReplayMaxAttempts: parsePositiveIntEnv( + "CONNECTOR_RUNTIME_REPLAY_MAX_ATTEMPTS", + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, + ), + runtimeReplayRetryInitialDelayMs: parsePositiveIntEnv( + "CONNECTOR_RUNTIME_REPLAY_RETRY_INITIAL_DELAY_MS", + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, + ), + runtimeReplayRetryMaxDelayMs: parsePositiveIntEnv( + "CONNECTOR_RUNTIME_REPLAY_RETRY_MAX_DELAY_MS", + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, + ), + runtimeReplayRetryBackoffFactor: + Number.isFinite(runtimeReplayRetryBackoffFactor) && + runtimeReplayRetryBackoffFactor >= 1 + ? 
runtimeReplayRetryBackoffFactor + : DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, + }; +} + +function loadOpenclawProbePolicy(): OpenclawProbePolicy { + return { + intervalMs: parsePositiveIntEnv( + "CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS", + DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, + ), + timeoutMs: parsePositiveIntEnv( + "CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS", + DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, + ), }; } @@ -382,20 +450,126 @@ function computeReplayDelayMs(input: { return Math.max(1, delay); } +function computeRuntimeReplayRetryDelayMs(input: { + attemptCount: number; + policy: InboundReplayPolicy; +}): number { + const exponent = Math.max(0, input.attemptCount - 1); + const delay = Math.min( + input.policy.runtimeReplayRetryMaxDelayMs, + Math.floor( + input.policy.runtimeReplayRetryInitialDelayMs * + input.policy.runtimeReplayRetryBackoffFactor ** exponent, + ), + ); + return Math.max(1, delay); +} + +async function waitWithAbort(input: { + delayMs: number; + signal: AbortSignal; +}): Promise { + if (input.signal.aborted) { + throw new LocalOpenclawDeliveryError({ + code: "RUNTIME_STOPPING", + message: "Connector runtime is stopping", + retryable: false, + }); + } + + await new Promise((resolve, reject) => { + const timeoutHandle = setTimeout(() => { + input.signal.removeEventListener("abort", onAbort); + resolve(); + }, input.delayMs); + + const onAbort = () => { + clearTimeout(timeoutHandle); + input.signal.removeEventListener("abort", onAbort); + reject( + new LocalOpenclawDeliveryError({ + code: "RUNTIME_STOPPING", + message: "Connector runtime is stopping", + retryable: false, + }), + ); + }; + + input.signal.addEventListener("abort", onAbort, { once: true }); + }); +} + +async function readOpenclawHookTokenFromRelayRuntimeConfig(input: { + configDir: string; + logger: Logger; +}): Promise { + const runtimeConfigPath = join( + input.configDir, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + ); + let raw: string; + try { + raw = await 
readFile(runtimeConfigPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return undefined; + } + + input.logger.warn("connector.runtime.openclaw_relay_config_read_failed", { + runtimeConfigPath, + reason: sanitizeErrorReason(error), + }); + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + input.logger.warn("connector.runtime.openclaw_relay_config_invalid_json", { + runtimeConfigPath, + }); + return undefined; + } + + if (!isRecord(parsed)) { + return undefined; + } + + const tokenValue = parsed.openclawHookToken; + if (typeof tokenValue !== "string") { + return undefined; + } + + const trimmed = tokenValue.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + async function deliverToOpenclawHook(input: { fetchImpl: typeof fetch; + fromAgentDid: string; openclawHookToken?: string; openclawHookUrl: string; payload: unknown; requestId: string; + shutdownSignal: AbortSignal; + toAgentDid: string; }): Promise { - const controller = new AbortController(); - const timeoutHandle = setTimeout(() => { - controller.abort(); - }, DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS); + const timeoutSignal = AbortSignal.timeout( + DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, + ); + const signal = AbortSignal.any([input.shutdownSignal, timeoutSignal]); const headers: Record = { "content-type": "application/json", + "x-clawdentity-agent-did": input.fromAgentDid, + "x-clawdentity-to-agent-did": input.toAgentDid, + "x-clawdentity-verified": "true", "x-request-id": input.requestId, }; if (input.openclawHookToken !== undefined) { @@ -407,19 +581,32 @@ async function deliverToOpenclawHook(input: { method: "POST", headers, body: JSON.stringify(input.payload), - signal: controller.signal, + signal, }); if (!response.ok) { throw new LocalOpenclawDeliveryError({ message: `Local OpenClaw hook rejected payload with status ${response.status}`, 
retryable: + response.status === 401 || + response.status === 403 || response.status >= 500 || response.status === 404 || response.status === 429, + code: + response.status === 401 || response.status === 403 + ? "HOOK_AUTH_REJECTED" + : undefined, }); } } catch (error) { if (error instanceof Error && error.name === "AbortError") { + if (input.shutdownSignal.aborted) { + throw new LocalOpenclawDeliveryError({ + code: "RUNTIME_STOPPING", + message: "Connector runtime is stopping", + retryable: false, + }); + } throw new LocalOpenclawDeliveryError({ message: "Local OpenClaw hook request timed out", retryable: true, @@ -432,8 +619,6 @@ async function deliverToOpenclawHook(input: { message: sanitizeErrorReason(error), retryable: true, }); - } finally { - clearTimeout(timeoutHandle); } } @@ -1046,10 +1231,15 @@ export async function startConnectorRuntime( const defaultReceiptCallbackOrigin = new URL(defaultReceiptCallbackUrl) .origin; const openclawBaseUrl = resolveOpenclawBaseUrl(input.openclawBaseUrl); + const openclawProbeUrl = openclawBaseUrl; const openclawHookPath = resolveOpenclawHookPath(input.openclawHookPath); - const openclawHookToken = resolveOpenclawHookToken(input.openclawHookToken); + const fallbackOpenclawHookToken = resolveOpenclawHookToken( + input.openclawHookToken, + ); + let currentOpenclawHookToken = fallbackOpenclawHookToken; const openclawHookUrl = toOpenclawHookUrl(openclawBaseUrl, openclawHookPath); const inboundReplayPolicy = loadInboundReplayPolicy(); + const openclawProbePolicy = loadOpenclawProbePolicy(); const trustedReceiptTargets = await loadTrustedReceiptTargets({ configDir: input.configDir, logger, @@ -1066,9 +1256,15 @@ export async function startConnectorRuntime( const inboundReplayStatus: InboundReplayStatus = { replayerActive: false, }; + const openclawGatewayProbeStatus: OpenclawGatewayProbeStatus = { + reachable: true, + }; + let openclawProbeInFlight = false; let runtimeStopping = false; let replayInFlight = false; let 
replayIntervalHandle: ReturnType | undefined; + let openclawProbeIntervalHandle: ReturnType | undefined; + const runtimeShutdownController = new AbortController(); const resolveUpgradeHeaders = async (): Promise> => { await refreshCurrentAuthIfNeeded(); @@ -1080,6 +1276,136 @@ export async function startConnectorRuntime( }); }; + const syncOpenclawHookToken = async (reason: "auth_rejected" | "batch") => { + const diskToken = await readOpenclawHookTokenFromRelayRuntimeConfig({ + configDir: input.configDir, + logger, + }); + const nextToken = diskToken ?? fallbackOpenclawHookToken; + if (nextToken === currentOpenclawHookToken) { + return; + } + + currentOpenclawHookToken = nextToken; + logger.info("connector.runtime.openclaw_hook_token_synced", { + reason, + source: diskToken !== undefined ? "openclaw-relay.json" : "fallback", + hasToken: currentOpenclawHookToken !== undefined, + }); + }; + + const probeOpenclawGateway = async (): Promise => { + if (runtimeStopping || openclawProbeInFlight) { + return; + } + openclawProbeInFlight = true; + + const checkedAt = nowIso(); + try { + const timeoutSignal = AbortSignal.timeout(openclawProbePolicy.timeoutMs); + const signal = AbortSignal.any([ + runtimeShutdownController.signal, + timeoutSignal, + ]); + await fetchImpl(openclawProbeUrl, { + method: "GET", + signal, + }); + openclawGatewayProbeStatus.reachable = true; + openclawGatewayProbeStatus.lastCheckedAt = checkedAt; + openclawGatewayProbeStatus.lastSuccessAt = checkedAt; + openclawGatewayProbeStatus.lastFailureReason = undefined; + } catch (error) { + if (runtimeShutdownController.signal.aborted) { + return; + } + openclawGatewayProbeStatus.reachable = false; + openclawGatewayProbeStatus.lastCheckedAt = checkedAt; + openclawGatewayProbeStatus.lastFailureReason = sanitizeErrorReason(error); + } finally { + openclawProbeInFlight = false; + } + }; + + const deliverToOpenclawHookWithRetry = async (inputReplay: { + fromAgentDid: string; + payload: unknown; + requestId: 
string; + toAgentDid: string; + }): Promise => { + let attempt = 1; + + while (true) { + try { + await deliverToOpenclawHook({ + fetchImpl, + fromAgentDid: inputReplay.fromAgentDid, + openclawHookUrl, + openclawHookToken: currentOpenclawHookToken, + payload: inputReplay.payload, + requestId: inputReplay.requestId, + shutdownSignal: runtimeShutdownController.signal, + toAgentDid: inputReplay.toAgentDid, + }); + return; + } catch (error) { + if ( + error instanceof LocalOpenclawDeliveryError && + error.code === "RUNTIME_STOPPING" + ) { + throw error; + } + + const retryable = + error instanceof LocalOpenclawDeliveryError ? error.retryable : true; + const authRejected = + error instanceof LocalOpenclawDeliveryError && + error.code === "HOOK_AUTH_REJECTED"; + + if (authRejected) { + const previousToken = currentOpenclawHookToken; + await syncOpenclawHookToken("auth_rejected"); + const tokenChanged = currentOpenclawHookToken !== previousToken; + const attemptsRemaining = + attempt < inboundReplayPolicy.runtimeReplayMaxAttempts; + if (tokenChanged && !runtimeStopping && attemptsRemaining) { + logger.warn( + "connector.inbound.replay_hook_auth_rejected_retrying", + { + requestId: inputReplay.requestId, + attempt, + }, + ); + attempt += 1; + continue; + } + } + + const attemptsRemaining = + attempt < inboundReplayPolicy.runtimeReplayMaxAttempts; + if (!retryable || !attemptsRemaining || runtimeStopping) { + throw error; + } + + const retryDelayMs = computeRuntimeReplayRetryDelayMs({ + attemptCount: attempt, + policy: inboundReplayPolicy, + }); + logger.warn("connector.inbound.replay_retry_scheduled", { + requestId: inputReplay.requestId, + attempt, + retryDelayMs, + reason: sanitizeErrorReason(error), + }); + await waitWithAbort({ + delayMs: retryDelayMs, + signal: runtimeShutdownController.signal, + }); + attempt += 1; + } + } + }; + const replayPendingInboundMessages = async (): Promise => { if (runtimeStopping || replayInFlight) { return; @@ -1096,6 +1422,15 @@ export 
async function startConnectorRuntime( if (dueItems.length === 0) { return; } + await syncOpenclawHookToken("batch"); + if (!openclawGatewayProbeStatus.reachable) { + logger.info("connector.inbound.replay_skipped_gateway_unreachable", { + pendingCount: dueItems.length, + openclawBaseUrl: openclawProbeUrl, + lastFailureReason: openclawGatewayProbeStatus.lastFailureReason, + }); + return; + } const laneByKey = new Map(); for (const pending of dueItems) { @@ -1116,12 +1451,11 @@ export async function startConnectorRuntime( for (const pending of laneItems) { inboundReplayStatus.lastAttemptAt = nowIso(); try { - await deliverToOpenclawHook({ - fetchImpl, - openclawHookUrl, - openclawHookToken, + await deliverToOpenclawHookWithRetry({ + fromAgentDid: pending.fromAgentDid, requestId: pending.requestId, payload: pending.payload, + toAgentDid: pending.toAgentDid, }); await inboundInbox.markDelivered(pending.requestId); inboundReplayStatus.lastReplayAt = nowIso(); @@ -1151,6 +1485,15 @@ export async function startConnectorRuntime( } } } catch (error) { + if ( + error instanceof LocalOpenclawDeliveryError && + error.code === "RUNTIME_STOPPING" + ) { + logger.info("connector.inbound.replay_stopped", { + requestId: pending.requestId, + }); + return; + } const reason = sanitizeErrorReason(error); const retryable = error instanceof LocalOpenclawDeliveryError @@ -1218,6 +1561,13 @@ export async function startConnectorRuntime( replayerActive: inboundReplayStatus.replayerActive || replayInFlight, lastReplayAt: inboundReplayStatus.lastReplayAt, lastReplayError: inboundReplayStatus.lastReplayError, + openclawGateway: { + url: openclawProbeUrl, + reachable: openclawGatewayProbeStatus.reachable, + lastCheckedAt: openclawGatewayProbeStatus.lastCheckedAt, + lastSuccessAt: openclawGatewayProbeStatus.lastSuccessAt, + lastFailureReason: openclawGatewayProbeStatus.lastFailureReason, + }, openclawHook: { url: openclawHookUrl, lastAttemptAt: inboundReplayStatus.lastAttemptAt, @@ -1237,7 +1587,7 
@@ export async function startConnectorRuntime( connectionHeadersProvider: resolveUpgradeHeaders, openclawBaseUrl, openclawHookPath, - openclawHookToken, + openclawHookToken: currentOpenclawHookToken, fetchImpl, logger, hooks: { @@ -1544,6 +1894,7 @@ export async function startConnectorRuntime( lastReplayAt: inboundReplayView.lastReplayAt, lastReplayError: inboundReplayView.lastReplayError, }, + openclawGateway: inboundReplayView.openclawGateway, openclawHook: inboundReplayView.openclawHook, }, outbound: { @@ -1670,10 +2021,15 @@ export async function startConnectorRuntime( const stop = async (): Promise => { runtimeStopping = true; + runtimeShutdownController.abort(); if (replayIntervalHandle !== undefined) { clearInterval(replayIntervalHandle); replayIntervalHandle = undefined; } + if (openclawProbeIntervalHandle !== undefined) { + clearInterval(openclawProbeIntervalHandle); + openclawProbeIntervalHandle = undefined; + } connectorClient.disconnect(); await new Promise((resolve, reject) => { server.close((error) => { @@ -1699,12 +2055,17 @@ export async function startConnectorRuntime( ); }); + await syncOpenclawHookToken("batch"); + await probeOpenclawGateway(); connectorClient.connect(); await inboundInbox.pruneDelivered(); void replayPendingInboundMessages(); replayIntervalHandle = setInterval(() => { void replayPendingInboundMessages(); }, inboundReplayPolicy.replayIntervalMs); + openclawProbeIntervalHandle = setInterval(() => { + void probeOpenclawGateway(); + }, openclawProbePolicy.intervalMs); logger.info("connector.runtime.started", { outboundUrl, From 461fd6230d6d6fa32f9ba0ca1a265f517cf7c854 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 13:36:44 +0530 Subject: [PATCH 114/190] Enforce explicit OpenClaw token --- packages/connector/src/AGENTS.md | 1 + packages/connector/src/runtime.test.ts | 65 ++++++++++++++++++++++++++ packages/connector/src/runtime.ts | 13 ++++-- 3 files changed, 75 insertions(+), 4 deletions(-) diff --git 
a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 3ecf424..108eced 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -23,6 +23,7 @@ - Keep replay configuration environment-driven via `CONNECTOR_INBOUND_*` vars with safe defaults from `constants.ts`. - Keep OpenClaw liveness probing environment-driven via `CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS` and `CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS`; replay should skip direct hook delivery while probe state is down. - Keep runtime replay retry bounds environment-driven via `CONNECTOR_RUNTIME_REPLAY_*`; only retry retryable OpenClaw hook failures. +- Keep OpenClaw hook-token precedence deterministic: explicit connector token input (`--openclaw-hook-token` / `OPENCLAW_HOOK_TOKEN`) must override `openclaw-relay.json`, and runtime disk sync applies only when explicit token input is absent. - `/v1/status` must use the nested contract: - `websocket.{connected,connectAttempts,reconnectCount,uptimeMs,lastConnectedAt}` - `inbound.pending` diff --git a/packages/connector/src/runtime.test.ts b/packages/connector/src/runtime.test.ts index a2e9f55..abad332 100644 --- a/packages/connector/src/runtime.test.ts +++ b/packages/connector/src/runtime.test.ts @@ -338,6 +338,71 @@ describe("startConnectorRuntime", () => { } }); + it("preserves explicit hook token over relay runtime config token", async () => { + process.env.CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = "20"; + process.env.CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = "25"; + process.env.CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS = "20"; + + const sandbox = createSandbox(); + await writeRelayRuntimeConfig(sandbox.rootDir, "token-from-relay-config"); + const wsPort = await findAvailablePort(); + const wsHarness = await createWsHarness(wsPort); + const outboundPort = await findAvailablePort(); + const openclawBaseUrl = "http://127.0.0.1:39105"; + const openclawHookUrl = `${openclawBaseUrl}/hooks/agent`; + const postTokens: string[] = []; + + 
const fetchMock = vi.fn(async (input, init) => { + const url = input instanceof URL ? input.toString() : String(input); + const method = init?.method ?? "GET"; + + if (method === "GET" && url === openclawBaseUrl) { + return new Response("ok", { status: 200 }); + } + + if (method === "POST" && url === openclawHookUrl) { + const headers = new Headers(init?.headers); + postTokens.push(headers.get("x-openclaw-token") ?? ""); + return new Response("ok", { status: 200 }); + } + + throw new Error(`Unexpected fetch call: ${method} ${url}`); + }); + + const runtime = await startConnectorRuntime({ + agentName: "alpha", + configDir: sandbox.rootDir, + credentials: createRuntimeCredentials(), + fetchImpl: fetchMock, + openclawBaseUrl, + openclawHookToken: "token-from-cli", + outboundBaseUrl: `http://127.0.0.1:${outboundPort}`, + proxyWebsocketUrl: wsHarness.wsUrl, + registryUrl: "https://registry.example.test", + }); + + try { + const requestId = generateUlid(204); + await wsHarness.sendDeliverFrame({ + requestId, + payload: { message: "explicit token precedence" }, + }); + await wsHarness.waitForDeliverAck(requestId); + + await vi.waitFor(async () => { + const status = (await readConnectorStatus(runtime.outboundUrl)) as { + inbound?: { pending?: { pendingCount?: number } }; + }; + expect(status.inbound?.pending?.pendingCount).toBe(0); + }); + expect(postTokens).toEqual(["token-from-cli"]); + } finally { + await runtime.stop(); + await wsHarness.cleanup(); + sandbox.cleanup(); + } + }); + it("retries replay delivery for transient hook failures", async () => { process.env.CONNECTOR_INBOUND_REPLAY_INTERVAL_MS = "20"; process.env.CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS = "25"; diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index 3749dcc..a1a2815 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -1233,10 +1233,11 @@ export async function startConnectorRuntime( const openclawBaseUrl = 
resolveOpenclawBaseUrl(input.openclawBaseUrl); const openclawProbeUrl = openclawBaseUrl; const openclawHookPath = resolveOpenclawHookPath(input.openclawHookPath); - const fallbackOpenclawHookToken = resolveOpenclawHookToken( + const explicitOpenclawHookToken = resolveOpenclawHookToken( input.openclawHookToken, ); - let currentOpenclawHookToken = fallbackOpenclawHookToken; + const hasExplicitOpenclawHookToken = explicitOpenclawHookToken !== undefined; + let currentOpenclawHookToken = explicitOpenclawHookToken; const openclawHookUrl = toOpenclawHookUrl(openclawBaseUrl, openclawHookPath); const inboundReplayPolicy = loadInboundReplayPolicy(); const openclawProbePolicy = loadOpenclawProbePolicy(); @@ -1277,11 +1278,15 @@ export async function startConnectorRuntime( }; const syncOpenclawHookToken = async (reason: "auth_rejected" | "batch") => { + if (hasExplicitOpenclawHookToken) { + return; + } + const diskToken = await readOpenclawHookTokenFromRelayRuntimeConfig({ configDir: input.configDir, logger, }); - const nextToken = diskToken ?? fallbackOpenclawHookToken; + const nextToken = diskToken; if (nextToken === currentOpenclawHookToken) { return; } @@ -1289,7 +1294,7 @@ export async function startConnectorRuntime( currentOpenclawHookToken = nextToken; logger.info("connector.runtime.openclaw_hook_token_synced", { reason, - source: diskToken !== undefined ? "openclaw-relay.json" : "fallback", + source: diskToken !== undefined ? 
"openclaw-relay.json" : "unset", hasToken: currentOpenclawHookToken !== undefined, }); }; From ff341e430d8846a74afc9176d0c60cf1ee169daa Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 14:22:30 +0530 Subject: [PATCH 115/190] fix(cli,proxy): harden pairing polling, recovery, and ticket security --- apps/cli/README.md | 11 + apps/cli/src/AGENTS.md | 4 + apps/cli/src/commands/openclaw.test.ts | 165 +++++- apps/cli/src/commands/openclaw.ts | 204 +++++++ apps/cli/src/commands/pair.test.ts | 434 +++++++++++++- apps/cli/src/commands/pair.ts | 694 +++++++++++++++++++++-- apps/openclaw-skill/skill/SKILL.md | 14 +- apps/proxy/src/AGENTS.md | 9 + apps/proxy/src/pairing-route.test.ts | 359 +++++++++++- apps/proxy/src/pairing-route.ts | 140 +++++ apps/proxy/src/proxy-trust-state.test.ts | 109 +++- apps/proxy/src/proxy-trust-state.ts | 130 ++++- apps/proxy/src/proxy-trust-store.test.ts | 21 +- apps/proxy/src/proxy-trust-store.ts | 137 ++++- 14 files changed, 2342 insertions(+), 89 deletions(-) diff --git a/apps/cli/README.md b/apps/cli/README.md index cd13af0..4403962 100644 --- a/apps/cli/README.md +++ b/apps/cli/README.md @@ -68,6 +68,7 @@ clawdentity openclaw doctor | `pair start ` | Initiate QR pairing | | `pair confirm ` | Confirm peer pairing | | `pair status ` | Poll pairing status | +| `pair recover ` | Recover pending pairing without re-entering ticket | | `skill install` | Install skill artifacts | | `connector start ` | Start connector runtime | | `connector service install ` | Auto-start service at login | @@ -88,6 +89,16 @@ Config files are stored in `~/.clawdentity/`. Environment variables override values in the config file. +## Pairing Recovery + +When using `pair start --wait` or `pair status --wait`, the CLI stores pending +pairing tickets per agent under the local state directory. 
If wait times out or +is cancelled, recover later with: + +```bash +clawdentity pair recover +``` + ## Requirements - Node >= 22 diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 2ba7706..e85dd80 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -19,7 +19,11 @@ - Agent auth refresh state is stored per-agent at `~/.clawdentity/agents//registry-auth.json` and must be written with secure file permissions. - `agent auth refresh` must use `Authorization: Claw ` + PoP headers from local agent keys and must not require PAT config. - `pair` command logic should stay in `commands/pair.ts`; keep proxy pairing bootstrap (`/pair/start`, `/pair/confirm`) CLI-driven with local AIT + PoP proof headers and one-time ticket QR support (`--qr`, `--qr-file`). +- Pair wait flows (`pair start --wait`, `pair status --wait`) must be resilient: retry transient proxy/network errors, use adaptive poll intervals, and emit periodic progress updates instead of silent waits. +- Pair wait flows must persist per-agent pending ticket state under CLI state config paths so timeout/cancel recovery does not depend on manually copied ticket strings. +- `pair recover ` must use persisted pending ticket state and clear it only after confirmed peer persistence succeeds. - `pair start`/`pair confirm` must send profile metadata (`initiatorProfile`/`responderProfile`) with both `agentName` and `humanName`. +- `pair start` may optionally constrain confirms to a single responder DID (`--allow-responder`) and register optional completion callbacks (`--callback-url`) without changing default open pairing behavior. - Pairing must fail fast with `CLI_PAIR_HUMAN_NAME_MISSING` when local config does not include `humanName`. - Pairing ticket parsing must normalize pasted input (trim, remove markdown backticks, collapse whitespace) before confirm/status requests so wrapped terminal/UI copies do not fail at proxy. 
- `pair confirm`/`pair status` must fail fast on local issuer mismatch: ticket `iss` must match configured proxy origin, with explicit remediation in the CLI error. diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts index 5927a42..c7d8514 100644 --- a/apps/cli/src/commands/openclaw.test.ts +++ b/apps/cli/src/commands/openclaw.test.ts @@ -75,8 +75,22 @@ function resolveCliStateDir(homeDir: string): string { function seedLocalAgentCredentials(homeDir: string, agentName: string): void { const agentDir = join(resolveCliStateDir(homeDir), "agents", agentName); mkdirSync(agentDir, { recursive: true }); - writeFileSync(join(agentDir, "secret.key"), "secret-key-value", "utf8"); - writeFileSync(join(agentDir, "ait.jwt"), "mock.ait.jwt", "utf8"); + writeFileSync( + join(agentDir, "secret.key"), + Buffer.alloc(32, 7).toString("base64url"), + "utf8", + ); + const header = Buffer.from(JSON.stringify({ alg: "EdDSA", typ: "JWT" })) + .toString("base64url") + .trim(); + const payload = Buffer.from( + JSON.stringify({ + sub: "did:claw:agent:01HAAA11111111111111111111", + }), + ) + .toString("base64url") + .trim(); + writeFileSync(join(agentDir, "ait.jwt"), `${header}.${payload}.sig`, "utf8"); } function seedPeersConfig( @@ -1147,6 +1161,153 @@ describe("openclaw command helpers", () => { } }); + it("flags half-paired state when proxy confirms but local peer is missing", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const pendingPath = join( + resolveCliStateDir(sandbox.homeDir), + "pairing", + "pending", + "alpha.json", + ); + mkdirSync(dirname(pendingPath), { recursive: true }); + 
writeFileSync( + pendingPath, + JSON.stringify( + { + agentName: "alpha", + ticket, + proxyUrl: "https://alpha.proxy.example/", + createdAt: "2026-02-18T00:00:00.000Z", + }, + null, + 2, + ), + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + includeConnectorRuntimeCheck: false, + fetchImpl: (async (requestUrl: string) => { + if (requestUrl.endsWith("/pair/status")) { + return Response.json( + { + status: "confirmed", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: { agentName: "alpha", humanName: "Ravi" }, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: { agentName: "beta", humanName: "Ira" }, + expiresAt: "2026-02-18T00:00:00.000Z", + confirmedAt: "2026-02-18T00:00:05.000Z", + }, + { status: 200 }, + ); + } + return Response.json({ status: "ok" }, { status: 200 }); + }) as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect( + result.checks.some( + (check) => + check.id === "state.pairingConsistency" && + check.status === "fail" && + check.message.includes("proxy pairing is confirmed"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("keeps pairing consistency check passing when pending ticket is still pending", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const pendingPath = join( + resolveCliStateDir(sandbox.homeDir), + "pairing", + "pending", + "alpha.json", + ); + mkdirSync(dirname(pendingPath), { 
recursive: true }); + writeFileSync( + pendingPath, + JSON.stringify( + { + agentName: "alpha", + ticket, + proxyUrl: "https://alpha.proxy.example/", + createdAt: "2026-02-18T00:00:00.000Z", + }, + null, + 2, + ), + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + includeConnectorRuntimeCheck: false, + fetchImpl: (async (requestUrl: string) => { + if (requestUrl.endsWith("/pair/status")) { + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: { agentName: "alpha", humanName: "Ravi" }, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + } + return Response.json({ status: "ok" }, { status: 200 }); + }) as unknown as typeof fetch, + resolveConfigImpl: async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }), + }); + + expect( + result.checks.some( + (check) => + check.id === "state.pairingConsistency" && check.status === "pass", + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + it("reports missing peer alias in doctor output", async () => { const sandbox = createSandbox(); seedLocalAgentCredentials(sandbox.homeDir, "alpha"); diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index d9d673f..53007dd 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -17,6 +17,7 @@ import { writeStdoutLine } from "../io.js"; import { assertValidAgentName } from "./agent-name.js"; import { installConnectorServiceForAgent } from "./connector.js"; import { withErrorHandling } from "./helpers.js"; +import { getPairingStatusSnapshot, loadPendingPairingTicket } from "./pair.js"; const logger = createLogger({ service: "cli", module: "openclaw" }); @@ -255,6 +256,7 @@ type OpenclawDoctorCheckId = | "state.selectedAgent" | "state.credentials" | "state.peers" + | 
"state.pairingConsistency" | "state.transform" | "state.hookMapping" | "state.hookToken" @@ -893,6 +895,59 @@ async function ensureLocalAgentCredentials( } } +async function resolveLocalAgentDid( + homeDir: string, + agentName: string, +): Promise { + const aitPath = join( + resolveAgentDirectory(homeDir, agentName), + AIT_FILE_NAME, + ); + let rawAit: string; + try { + rawAit = (await readFile(aitPath, "utf8")).trim(); + } catch { + return undefined; + } + + const segments = rawAit.split("."); + if (segments.length < 2) { + return undefined; + } + + let payloadRaw: string; + try { + payloadRaw = textDecoder.decode(decodeBase64url(segments[1] ?? "")); + } catch { + return undefined; + } + + let payload: unknown; + try { + payload = JSON.parse(payloadRaw); + } catch { + return undefined; + } + if (typeof payload !== "object" || payload === null) { + return undefined; + } + + const subject = (payload as { sub?: unknown }).sub; + if (typeof subject !== "string" || subject.trim().length === 0) { + return undefined; + } + try { + const parsed = parseDid(subject.trim()); + if (parsed.kind !== "agent") { + return undefined; + } + } catch { + return undefined; + } + + return subject.trim(); +} + function encodeInvitePayload(payload: OpenclawInvitePayload): string { const encoded = encodeBase64url(textEncoder.encode(JSON.stringify(payload))); return `${INVITE_CODE_PREFIX}${encoded}`; @@ -2514,6 +2569,155 @@ export async function runOpenclawDoctor( ); } + if (selectedAgentName === undefined) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "fail", + message: "cannot validate pairing consistency without selected agent", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + } else if (peersConfig === undefined) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "fail", + message: + "cannot validate pairing consistency due to invalid peers config", + 
remediationHint: `Fix JSON in ${peersPath} or rerun openclaw setup`, + }), + ); + } else { + const getConfigDirForHome = () => getConfigDir({ homeDir }); + let pendingPair: Awaited>; + try { + pendingPair = await loadPendingPairingTicket(selectedAgentName, { + getConfigDirImpl: getConfigDirForHome, + readFileImpl: readFile, + }); + } catch (error) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "fail", + message: + error instanceof Error && error.message.length > 0 + ? `invalid pending pairing state: ${error.message}` + : "invalid pending pairing state", + remediationHint: `Run: clawdentity pair recover ${selectedAgentName}`, + }), + ); + pendingPair = undefined; + } + if (pendingPair === undefined) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "pass", + message: "no pending pairing recovery state", + }), + ); + } else { + const localAgentDid = await resolveLocalAgentDid( + homeDir, + selectedAgentName, + ); + if (localAgentDid === undefined) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "fail", + message: "unable to resolve local agent DID from ait.jwt", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + } else { + try { + const pairingStatus = await getPairingStatusSnapshot( + selectedAgentName, + { + ticket: pendingPair.ticket, + proxyUrl: pendingPair.proxyUrl, + }, + { + fetchImpl: options.fetchImpl, + resolveConfigImpl: options.resolveConfigImpl, + getConfigDirImpl: getConfigDirForHome, + readFileImpl: readFile, + }, + ); + + if (pairingStatus.status === "pending") { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "pass", + message: + "pending pairing recovery exists but is not confirmed yet", + remediationHint: `Run: clawdentity pair recover ${selectedAgentName}`, + }), + ); + } else { + const 
peerDid = + localAgentDid === pairingStatus.initiatorAgentDid + ? pairingStatus.responderAgentDid + : localAgentDid === pairingStatus.responderAgentDid + ? pairingStatus.initiatorAgentDid + : undefined; + const hasPeer = + peerDid !== undefined && + Object.values(peersConfig.peers).some( + (entry) => entry.did === peerDid, + ); + + if (!peerDid || !hasPeer) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "fail", + message: + "proxy pairing is confirmed but local peers.json is missing the paired peer", + remediationHint: `Run: clawdentity pair recover ${selectedAgentName}`, + }), + ); + } else { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "pass", + message: + "pending pairing state and local peers are consistent", + }), + ); + } + } + } catch (error) { + checks.push( + toDoctorCheck({ + id: "state.pairingConsistency", + label: "Pairing consistency", + status: "fail", + message: + error instanceof Error && error.message.length > 0 + ? 
`unable to validate pending pairing: ${error.message}` + : "unable to validate pending pairing", + remediationHint: `Run: clawdentity pair recover ${selectedAgentName}`, + }), + ); + } + } + } + } + const transformTargetPath = resolveTransformTargetPath(openclawDir); const relayTransformRuntimePath = resolveTransformRuntimePath(openclawDir); const relayTransformPeersPath = resolveTransformPeersPath(openclawDir); diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts index 25ad1a0..fd95b34 100644 --- a/apps/cli/src/commands/pair.test.ts +++ b/apps/cli/src/commands/pair.test.ts @@ -9,6 +9,7 @@ import { confirmPairing, createPairCommand, getPairingStatus, + recoverPairing, startPairing, } from "./pair.js"; @@ -129,6 +130,8 @@ describe("pair command helpers", () => { "alpha", { ttlSeconds: "900", + allowResponder: "did:claw:agent:01HBBB22222222222222222222", + callbackUrl: "https://callbacks.example.com/pairing", qr: true, }, { @@ -176,6 +179,8 @@ describe("pair command helpers", () => { expect(headers.get("x-claw-nonce")).toBe("nonce-start"); expect(String(init?.body ?? "")).toContain("ttlSeconds"); expect(String(init?.body ?? "")).toContain("initiatorProfile"); + expect(String(init?.body ?? "")).toContain("allowResponderAgentDid"); + expect(String(init?.body ?? 
"")).toContain("callbackUrl"); }); it("uses CLAWDENTITY_PROXY_URL when no proxy override options are present", async () => { @@ -756,10 +761,12 @@ describe("pair command helpers", () => { expect(result.status).toBe("confirmed"); expect(result.peerAlias).toBe("peer-22222222"); expect(sleepImpl).toHaveBeenCalledTimes(1); - expect(writeFileImpl).toHaveBeenCalledTimes(1); - expect(mkdirImpl).toHaveBeenCalledTimes(1); - expect(chmodImpl).toHaveBeenCalledTimes(1); - const peerWriteCall = writeFileImpl.mock.calls[0]; + expect(writeFileImpl.mock.calls.length).toBeGreaterThanOrEqual(2); + expect(mkdirImpl.mock.calls.length).toBeGreaterThanOrEqual(2); + expect(chmodImpl.mock.calls.length).toBeGreaterThanOrEqual(2); + const peerWriteCall = writeFileImpl.mock.calls.find(([filePath]) => + String(filePath).endsWith("/peers.json"), + ); const persistedPeers = JSON.parse(String(peerWriteCall?.[1] ?? "{}")) as { peers: { [key: string]: { @@ -772,6 +779,360 @@ describe("pair command helpers", () => { "https://beta.proxy.example/hooks/agent", ); }); + + it("retries transient polling failures and resolves metadata only once", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const mkdirImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const unlinkImpl = vi.fn(async () => undefined); + const sleepImpl = vi.fn(async () => undefined); + let statusAttempt = 0; + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { status: "ok", proxyUrl: "https://alpha.proxy.example" }, + { status: 200 }, + ); + } + + statusAttempt += 1; + if (statusAttempt <= 2) { + return Response.json( + { error: { code: "PROXY_PAIR_STATE_UNAVAILABLE", message: "busy" } }, + { status: 503 }, + ); + } + if (statusAttempt === 3) { 
+ return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + } + return Response.json( + { + status: "confirmed", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + confirmedAt: "2026-02-18T00:00:05.000Z", + }, + { status: 200 }, + ); + }); + + const result = await getPairingStatus( + "alpha", + { + ticket, + wait: true, + waitSeconds: "120", + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: (() => { + let now = 1_700_000_000; + return () => now++; + })(), + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + unlinkImpl: + unlinkImpl as unknown as typeof import("node:fs/promises").unlink, + getConfigDirImpl: () => "/tmp/.clawdentity", + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + sleepImpl, + }, + ); + + expect(result.status).toBe("confirmed"); + expect( + fetchImpl.mock.calls.filter(([url]) => + String(url).endsWith("/v1/metadata"), + ).length, + ).toBe(1); + expect(sleepImpl).toHaveBeenCalled(); + expect(unlinkImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/pending/alpha.json", + ); + }); + + it("persists pending ticket and prints recovery guidance on timeout", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + 
JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const mkdirImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const unlinkImpl = vi.fn(async () => undefined); + const sleepImpl = vi.fn(async () => undefined); + const writeStdoutLineImpl = vi.fn(); + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { status: "ok", proxyUrl: "https://alpha.proxy.example" }, + { status: 200 }, + ); + } + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + const nowSequence = [1000, 1000, 1001, 1002]; + + await expect( + getPairingStatus( + "alpha", + { + ticket, + wait: true, + waitSeconds: "2", + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => nowSequence.shift() ?? 
1002, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + unlinkImpl: + unlinkImpl as unknown as typeof import("node:fs/promises").unlink, + getConfigDirImpl: () => "/tmp/.clawdentity", + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + sleepImpl, + writeStdoutLineImpl, + }, + ), + ).rejects.toMatchObject({ + code: "CLI_PAIR_STATUS_WAIT_TIMEOUT", + }); + + expect(writeFileImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/pending/alpha.json", + expect.any(String), + "utf8", + ); + expect(unlinkImpl).not.toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/pending/alpha.json", + ); + expect( + writeStdoutLineImpl.mock.calls.some(([message]) => + String(message).includes("clawdentity pair recover alpha"), + ), + ).toBe(true); + }); + + it("handles SIGINT cancellation with recovery hint and keeps pending ticket", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const writeFileImpl = vi.fn(async () => undefined); + const unlinkImpl = vi.fn(async () => undefined); + const writeStdoutLineImpl = vi.fn(); + let sigintHandler: (() => void) | undefined; + const registerSigintHandlerImpl = vi.fn((handler: () => void) => { + sigintHandler = handler; + }); + const unregisterSigintHandlerImpl = vi.fn(() => undefined); + const sleepImpl = vi.fn(async () => { + sigintHandler?.(); + }); + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { status: "ok", proxyUrl: 
"https://alpha.proxy.example" }, + { status: 200 }, + ); + } + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + await expect( + getPairingStatus( + "alpha", + { + ticket, + wait: true, + waitSeconds: "30", + }, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: (() => { + let now = 1_700_000_000; + return () => now++; + })(), + nonceFactoryImpl: () => "nonce-status", + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").chmod, + unlinkImpl: + unlinkImpl as unknown as typeof import("node:fs/promises").unlink, + getConfigDirImpl: () => "/tmp/.clawdentity", + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + sleepImpl, + writeStdoutLineImpl, + registerSigintHandlerImpl, + unregisterSigintHandlerImpl, + }, + ), + ).rejects.toMatchObject({ + code: "CLI_PAIR_STATUS_WAIT_CANCELLED", + }); + + expect(registerSigintHandlerImpl).toHaveBeenCalledTimes(1); + expect(unregisterSigintHandlerImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).not.toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/pending/alpha.json", + ); + expect( + writeStdoutLineImpl.mock.calls.some(([message]) => + String(message).includes("Pairing wait cancelled"), + ), + ).toBe(true); + }); + + it("recovers confirmed pending pairing and clears pending ticket state", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" 
}), + ).toString("base64url")}`; + const readFileImpl = vi.fn( + async (filePath: string, _encoding?: BufferEncoding) => { + if (filePath.endsWith("/ait.jwt")) { + return fixture.ait; + } + if (filePath.endsWith("/secret.key")) { + return fixture.secretKeyBase64url; + } + if (filePath.endsWith("/pairing/pending/alpha.json")) { + return JSON.stringify({ + agentName: "alpha", + ticket, + proxyUrl: "https://alpha.proxy.example/", + createdAt: "2026-02-18T00:00:00.000Z", + }); + } + throw buildErrnoError("ENOENT"); + }, + ); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const unlinkImpl = vi.fn(async () => undefined); + const fetchImpl = vi.fn(async () => + Response.json( + { + status: "confirmed", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + confirmedAt: "2026-02-18T00:00:05.000Z", + }, + { status: 200 }, + ), + ); + + const result = await recoverPairing( + "alpha", + {}, + { + fetchImpl: fetchImpl as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: + readFileImpl as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: + writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, + mkdirImpl: + mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: + chmodImpl as unknown as typeof import("node:fs/promises").chmod, + unlinkImpl: + unlinkImpl as unknown as typeof import("node:fs/promises").unlink, + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ); + + expect(result.status).toBe("confirmed"); + expect(unlinkImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/pending/alpha.json", + ); + }); + + it("fails recover when no 
pending pairing exists", async () => { + const fixture = await createPairFixture(); + await expect( + recoverPairing( + "alpha", + {}, + { + readFileImpl: createReadFileMock( + fixture, + ) as unknown as typeof import("node:fs/promises").readFile, + getConfigDirImpl: () => "/tmp/.clawdentity", + }, + ), + ).rejects.toMatchObject({ + code: "CLI_PAIR_RECOVER_NOT_FOUND", + }); + }); }); const runPairCommand = async ( @@ -1002,4 +1363,69 @@ describe("pair command output", () => { "Initiator Agent DID: did:claw:agent:01HAAA11111111111111111111", ); }); + + it("prints recovered output from pair recover", async () => { + const fixture = await createPairFixture(); + const ticket = `clwpair1_${Buffer.from( + JSON.stringify({ iss: "https://alpha.proxy.example" }), + ).toString("base64url")}`; + const readFileImpl = vi.fn( + async (filePath: string, _encoding?: BufferEncoding) => { + if (filePath.endsWith("/ait.jwt")) { + return fixture.ait; + } + if (filePath.endsWith("/secret.key")) { + return fixture.secretKeyBase64url; + } + if (filePath.endsWith("/pairing/pending/alpha.json")) { + return JSON.stringify({ + agentName: "alpha", + ticket, + proxyUrl: "https://alpha.proxy.example/", + createdAt: "2026-02-18T00:00:00.000Z", + }); + } + throw buildErrnoError("ENOENT"); + }, + ); + + const command = createPairCommand({ + fetchImpl: (async () => + Response.json( + { + status: "confirmed", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + confirmedAt: "2026-02-18T00:00:05.000Z", + }, + { status: 200 }, + )) as unknown as typeof fetch, + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: + readFileImpl as unknown as typeof import("node:fs/promises").readFile, + writeFileImpl: vi.fn( + async () => undefined, + ) as unknown as typeof 
import("node:fs/promises").writeFile, + mkdirImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").mkdir, + chmodImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").chmod, + unlinkImpl: vi.fn( + async () => undefined, + ) as unknown as typeof import("node:fs/promises").unlink, + getConfigDirImpl: () => "/tmp/.clawdentity", + }); + + const result = await runPairCommand(["recover", "alpha"], command); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Pairing recovered and saved"); + expect(result.stdout).toContain("Status: confirmed"); + }); }); diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 5c52d12..f754e39 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -35,6 +35,7 @@ const AGENTS_DIR_NAME = "agents"; const AIT_FILE_NAME = "ait.jwt"; const SECRET_KEY_FILE_NAME = "secret.key"; const PAIRING_QR_DIR_NAME = "pairing"; +const PAIRING_PENDING_DIR_NAME = "pairing/pending"; const PEERS_FILE_NAME = "peers.json"; const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; @@ -49,10 +50,14 @@ const FILE_MODE = 0o600; const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; const DEFAULT_STATUS_WAIT_SECONDS = 300; const DEFAULT_STATUS_POLL_INTERVAL_SECONDS = 3; +const DEFAULT_PROGRESS_UPDATE_SECONDS = 30; +const MAX_CONSECUTIVE_TRANSIENT_POLL_FAILURES = 5; const MAX_PROFILE_NAME_LENGTH = 64; export type PairStartOptions = { ttlSeconds?: string; + allowResponder?: string; + callbackUrl?: string; qr?: boolean; qrOutput?: string; wait?: boolean; @@ -87,6 +92,9 @@ type PairRequestOptions = { resolveConfigImpl?: () => Promise; qrEncodeImpl?: (ticket: string) => Promise; qrDecodeImpl?: (imageBytes: Uint8Array) => string; + registerSigintHandlerImpl?: (handler: () => void) => void; + unregisterSigintHandlerImpl?: (handler: () => void) => void; + writeStdoutLineImpl?: (message: string) => void; }; type 
PairCommandDependencies = PairRequestOptions; @@ -122,6 +130,14 @@ type PairStatusResult = { peerAlias?: string; }; +type PendingPairingTicket = { + agentName: string; + ticket: string; + proxyUrl: string; + createdAt: string; + expiresAt?: string; +}; + type RegistryErrorEnvelope = { error?: { code?: string; @@ -466,6 +482,138 @@ function resolvePeersConfigPath(getConfigDirImpl: typeof getConfigDir): string { return join(getConfigDirImpl(), PEERS_FILE_NAME); } +function resolvePendingPairingTicketPath(input: { + getConfigDirImpl: typeof getConfigDir; + agentName: string; +}): string { + return join( + input.getConfigDirImpl(), + PAIRING_PENDING_DIR_NAME, + `${input.agentName}.json`, + ); +} + +function parsePendingPairingTicket( + payload: unknown, + expectedAgentName: string, +): PendingPairingTicket { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_PENDING_TICKET_INVALID", + "Pending pairing state is invalid", + ); + } + + const agentName = parseNonEmptyString(payload.agentName); + const ticket = parsePairingTicket(payload.ticket); + const proxyUrl = parseProxyUrl(parseNonEmptyString(payload.proxyUrl)); + const createdAt = parseNonEmptyString(payload.createdAt); + const expiresAtRaw = parseNonEmptyString(payload.expiresAt); + if (agentName.length === 0 || createdAt.length === 0) { + throw createCliError( + "CLI_PAIR_PENDING_TICKET_INVALID", + "Pending pairing state is invalid", + ); + } + if (agentName !== expectedAgentName) { + throw createCliError( + "CLI_PAIR_PENDING_TICKET_INVALID", + "Pending pairing state does not match requested agent", + ); + } + + return { + agentName, + ticket, + proxyUrl, + createdAt, + expiresAt: expiresAtRaw.length > 0 ? expiresAtRaw : undefined, + }; +} + +export async function loadPendingPairingTicket( + agentName: string, + dependencies: Pick< + PairRequestOptions, + "getConfigDirImpl" | "readFileImpl" + > = {}, +): Promise { + const getConfigDirImpl = dependencies.getConfigDirImpl ?? 
getConfigDir; + const readFileImpl = dependencies.readFileImpl ?? readFile; + const normalizedAgentName = assertValidAgentName(agentName); + const pendingPath = resolvePendingPairingTicketPath({ + getConfigDirImpl, + agentName: normalizedAgentName, + }); + + let raw: string; + try { + raw = await readFileImpl(pendingPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return undefined; + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + throw createCliError( + "CLI_PAIR_PENDING_TICKET_INVALID", + `Pending pairing file is invalid JSON: ${pendingPath}`, + ); + } + + return parsePendingPairingTicket(parsed, normalizedAgentName); +} + +async function savePendingPairingTicket(input: { + record: PendingPairingTicket; + dependencies: Pick< + PairRequestOptions, + "getConfigDirImpl" | "mkdirImpl" | "writeFileImpl" | "chmodImpl" + >; +}): Promise { + const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; + const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; + const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; + const chmodImpl = input.dependencies.chmodImpl ?? chmod; + const pendingPath = resolvePendingPairingTicketPath({ + getConfigDirImpl, + agentName: input.record.agentName, + }); + + await mkdirImpl(dirname(pendingPath), { recursive: true }); + await writeFileImpl( + pendingPath, + `${JSON.stringify(input.record, null, 2)}\n`, + "utf8", + ); + await chmodImpl(pendingPath, FILE_MODE); +} + +async function clearPendingPairingTicket(input: { + agentName: string; + dependencies: Pick; +}): Promise { + const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; + const unlinkImpl = input.dependencies.unlinkImpl ?? 
unlink; + const pendingPath = resolvePendingPairingTicketPath({ + getConfigDirImpl, + agentName: input.agentName, + }); + await unlinkImpl(pendingPath).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + throw error; + }); +} + function parsePeerEntry(value: unknown): PeerEntry { if (!isRecord(value)) { throw createCliError( @@ -697,6 +845,38 @@ function parseTtlSeconds(value: string | undefined): number | undefined { return parsed; } +function parseAllowResponderAgentDid( + value: string | undefined, +): string | undefined { + const raw = parseNonEmptyString(value); + if (raw.length === 0) { + return undefined; + } + + try { + const parsed = parseDid(raw); + if (parsed.kind !== "agent") { + throw new Error("invalid kind"); + } + } catch { + throw createCliError( + "CLI_PAIR_START_INVALID_ALLOW_RESPONDER", + "allowResponder must be a valid agent DID", + ); + } + + return raw; +} + +function parseCallbackUrl(value: string | undefined): string | undefined { + const raw = parseNonEmptyString(value); + if (raw.length === 0) { + return undefined; + } + + return parseProxyUrl(raw); +} + function parsePositiveIntegerOption(input: { value: string | undefined; optionName: string; @@ -953,6 +1133,16 @@ function mapConfirmPairError(status: number, payload: unknown): string { const code = extractErrorCode(payload); const message = extractErrorMessage(payload); + if (code === "PROXY_PAIR_TICKET_ALREADY_CONFIRMED" || status === 409) { + return "Pairing ticket has already been confirmed"; + } + + if (code === "PROXY_PAIR_RESPONDER_FORBIDDEN") { + return message + ? 
`Pair confirm is forbidden for this responder: ${message}` + : "Pair confirm is forbidden for this responder."; + } + if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { return "Pairing ticket is invalid or expired"; } @@ -969,7 +1159,8 @@ function mapConfirmPairError(status: number, payload: unknown): string { if ( code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || - code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" + code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" || + code === "PROXY_PAIR_TICKET_INVALID_SIGNATURE" ) { return message ? `Pair confirm request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` @@ -1503,6 +1694,10 @@ export async function startPairing( (() => randomBytes(NONCE_SIZE).toString("base64url")); const ttlSeconds = parseTtlSeconds(options.ttlSeconds); + const allowResponderAgentDid = parseAllowResponderAgentDid( + options.allowResponder, + ); + const callbackUrl = parseCallbackUrl(options.callbackUrl); const config = await resolveConfigImpl(); const proxyUrl = await resolveProxyUrl({ config, @@ -1524,6 +1719,8 @@ export async function startPairing( const requestBody = JSON.stringify({ ttlSeconds, initiatorProfile, + allowResponderAgentDid, + callbackUrl, }); const bodyBytes = new TextEncoder().encode(requestBody); @@ -1720,37 +1917,125 @@ export async function confirmPairing( }; } -async function getPairingStatusOnce( - agentName: string, - options: { ticket: string }, - dependencies: PairRequestOptions = {}, -): Promise { - const fetchImpl = dependencies.fetchImpl ?? fetch; - const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; - const nonceFactoryImpl = - dependencies.nonceFactoryImpl ?? 
- (() => randomBytes(NONCE_SIZE).toString("base64url")); - const config = await resolveConfigImpl(); - const proxyUrl = await resolveProxyUrl({ - config, - fetchImpl, - }); +type PreparedPairingStatusContext = { + normalizedAgentName: string; + ticket: string; + proxyUrl: string; + ait: string; + secretKey: Uint8Array; + callerAgentDid: string; +}; + +function resolveWriteStdoutLine( + dependencies: PairRequestOptions, +): (message: string) => void { + return dependencies.writeStdoutLineImpl ?? writeStdoutLine; +} + +function isTransientPollError(error: unknown): boolean { + if (!(error instanceof AppError)) { + return false; + } + + return ( + error.code === "CLI_PAIR_REQUEST_FAILED" || + error.code === "CLI_PAIR_STATUS_TRANSIENT_FAILED" + ); +} + +function computePollIntervalSeconds(input: { + elapsedSeconds: number; + baseIntervalSeconds: number; +}): number { + if (input.elapsedSeconds < 30) { + return Math.max(2, Math.min(3, input.baseIntervalSeconds)); + } + if (input.elapsedSeconds < 60) { + return 5; + } + return 12; +} + +function printPairingRecoveryHint(input: { + agentName: string; + ticket: string; + reason: "timeout" | "cancelled" | "poll-failed"; + waitSeconds: number; + writeLine: (message: string) => void; +}): void { + if (input.reason === "timeout") { + input.writeLine(`Pairing wait timed out (${input.waitSeconds}s).`); + } else if (input.reason === "cancelled") { + input.writeLine("Pairing wait cancelled."); + } else { + input.writeLine( + "Pairing wait failed after repeated transient poll errors.", + ); + } + input.writeLine("The responder may still confirm."); + input.writeLine("To recover later:"); + input.writeLine( + ` clawdentity pair status ${input.agentName} --ticket ${input.ticket}`, + ); + input.writeLine(` clawdentity pair recover ${input.agentName}`); +} - const ticket = parsePairingTicket(options.ticket); +async function preparePairingStatusContext(input: { + agentName: string; + ticket: string; + dependencies: PairRequestOptions; + 
proxyUrl?: string; +}): Promise { + const fetchImpl = input.dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = + input.dependencies.resolveConfigImpl ?? resolveConfig; + const normalizedAgentName = assertValidAgentName(input.agentName); + const ticket = parsePairingTicket(input.ticket); + const proxyUrl = + input.proxyUrl ?? + (await (async () => { + const config = await resolveConfigImpl(); + return resolveProxyUrl({ + config, + fetchImpl, + }); + })()); assertTicketIssuerMatchesProxy({ ticket, proxyUrl, context: "status", }); const { ait, secretKey } = await readAgentProofMaterial( - agentName, - dependencies, + normalizedAgentName, + input.dependencies, ); - const callerAgentDid = parseAitAgentDid(ait); - const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_STATUS_PATH); - const requestBody = JSON.stringify({ ticket }); + return { + normalizedAgentName, + ticket, + proxyUrl, + ait, + secretKey, + callerAgentDid: parseAitAgentDid(ait), + }; +} + +async function getPairingStatusWithContext(input: { + context: PreparedPairingStatusContext; + dependencies: PairRequestOptions; + persistPeer: boolean; +}): Promise { + const fetchImpl = input.dependencies.fetchImpl ?? fetch; + const nowSecondsImpl = input.dependencies.nowSecondsImpl ?? nowUnixSeconds; + const nonceFactoryImpl = + input.dependencies.nonceFactoryImpl ?? 
+ (() => randomBytes(NONCE_SIZE).toString("base64url")); + + const requestUrl = toProxyRequestUrl( + input.context.proxyUrl, + PAIR_STATUS_PATH, + ); + const requestBody = JSON.stringify({ ticket: input.context.ticket }); const bodyBytes = new TextEncoder().encode(requestBody); const timestampSeconds = nowSecondsImpl(); const nonce = nonceFactoryImpl(); @@ -1758,7 +2043,7 @@ async function getPairingStatusOnce( method: "POST", requestUrl, bodyBytes, - secretKey, + secretKey: input.context.secretKey, timestampSeconds, nonce, }); @@ -1769,7 +2054,7 @@ async function getPairingStatusOnce( init: { method: "POST", headers: { - authorization: `Claw ${ait}`, + authorization: `Claw ${input.context.ait}`, "content-type": "application/json", ...signedHeaders, }, @@ -1778,15 +2063,19 @@ async function getPairingStatusOnce( }); const responseBody = await parseJsonResponse(response); if (!response.ok) { + const errorCode = + response.status >= 500 + ? "CLI_PAIR_STATUS_TRANSIENT_FAILED" + : "CLI_PAIR_STATUS_FAILED"; throw createCliError( - "CLI_PAIR_STATUS_FAILED", + errorCode, mapStatusPairError(response.status, responseBody), ); } const parsed = parsePairStatusResponse(responseBody); let peerAlias: string | undefined; - if (parsed.status === "confirmed") { + if (input.persistPeer && parsed.status === "confirmed") { const responderAgentDid = parsed.responderAgentDid; if (!responderAgentDid) { throw createCliError( @@ -1796,15 +2085,15 @@ async function getPairingStatusOnce( } const peerDid = - callerAgentDid === parsed.initiatorAgentDid + input.context.callerAgentDid === parsed.initiatorAgentDid ? responderAgentDid - : callerAgentDid === responderAgentDid + : input.context.callerAgentDid === responderAgentDid ? parsed.initiatorAgentDid : undefined; const peerProfile = - callerAgentDid === parsed.initiatorAgentDid + input.context.callerAgentDid === parsed.initiatorAgentDid ? 
parsed.responderProfile - : callerAgentDid === responderAgentDid + : input.context.callerAgentDid === responderAgentDid ? parsed.initiatorProfile : undefined; if (!peerDid) { @@ -1821,33 +2110,68 @@ async function getPairingStatusOnce( } peerAlias = await persistPairedPeer({ - ticket, + ticket: input.context.ticket, peerDid, peerProfile, peerProxyOrigin: toPeerProxyOriginFromStatus({ - callerAgentDid, + callerAgentDid: input.context.callerAgentDid, initiatorAgentDid: parsed.initiatorAgentDid, responderAgentDid, initiatorProfile: parsed.initiatorProfile, responderProfile: parsed.responderProfile, }), - dependencies, + dependencies: input.dependencies, }); } return { ...parsed, - proxyUrl, + proxyUrl: input.context.proxyUrl, peerAlias, }; } +async function getPairingStatusOnce( + agentName: string, + options: { ticket: string; proxyUrl?: string; persistPeer?: boolean }, + dependencies: PairRequestOptions = {}, +): Promise { + const context = await preparePairingStatusContext({ + agentName, + ticket: options.ticket, + proxyUrl: options.proxyUrl, + dependencies, + }); + return getPairingStatusWithContext({ + context, + dependencies, + persistPeer: options.persistPeer ?? true, + }); +} + +export async function getPairingStatusSnapshot( + agentName: string, + options: { ticket: string; proxyUrl?: string }, + dependencies: PairRequestOptions = {}, +): Promise { + return getPairingStatusOnce( + agentName, + { + ticket: options.ticket, + proxyUrl: options.proxyUrl, + persistPeer: false, + }, + dependencies, + ); +} + async function waitForPairingStatus(input: { agentName: string; ticket: string; waitSeconds: number; pollIntervalSeconds: number; dependencies: PairRequestOptions; + proxyUrl?: string; }): Promise { const nowSecondsImpl = input.dependencies.nowSecondsImpl ?? 
nowUnixSeconds; const sleepImpl = @@ -1857,30 +2181,156 @@ async function waitForPairingStatus(input: { setTimeout(resolve, ms); }); }); + const registerSigintHandlerImpl = + input.dependencies.registerSigintHandlerImpl ?? + ((handler) => { + process.on("SIGINT", handler); + }); + const unregisterSigintHandlerImpl = + input.dependencies.unregisterSigintHandlerImpl ?? + ((handler) => { + process.off("SIGINT", handler); + }); + const writeLine = resolveWriteStdoutLine(input.dependencies); - const deadlineSeconds = nowSecondsImpl() + input.waitSeconds; - while (true) { - const status = await getPairingStatusOnce( - input.agentName, - { ticket: input.ticket }, - input.dependencies, - ); + const context = await preparePairingStatusContext({ + agentName: input.agentName, + ticket: input.ticket, + proxyUrl: input.proxyUrl, + dependencies: input.dependencies, + }); + await savePendingPairingTicket({ + record: { + agentName: context.normalizedAgentName, + ticket: context.ticket, + proxyUrl: context.proxyUrl, + createdAt: new Date(nowUtcMs()).toISOString(), + }, + dependencies: input.dependencies, + }); - if (status.status === "confirmed") { - return status; - } + const startedAtSeconds = nowSecondsImpl(); + const deadlineSeconds = startedAtSeconds + input.waitSeconds; + let nextProgressAtSeconds = + startedAtSeconds + DEFAULT_PROGRESS_UPDATE_SECONDS; + let cancelled = false; + let consecutiveTransientFailures = 0; + const onSigint = () => { + cancelled = true; + }; + registerSigintHandlerImpl(onSigint); - const nowSeconds = nowSecondsImpl(); - if (nowSeconds >= deadlineSeconds) { - throw createCliError( - "CLI_PAIR_STATUS_WAIT_TIMEOUT", - `Pairing is still pending after ${input.waitSeconds} seconds`, + try { + while (true) { + if (cancelled) { + printPairingRecoveryHint({ + agentName: context.normalizedAgentName, + ticket: context.ticket, + reason: "cancelled", + waitSeconds: input.waitSeconds, + writeLine, + }); + throw createCliError( + "CLI_PAIR_STATUS_WAIT_CANCELLED", + 
"Pairing wait cancelled by user", + ); + } + + const nowSeconds = nowSecondsImpl(); + if (nowSeconds >= deadlineSeconds) { + printPairingRecoveryHint({ + agentName: context.normalizedAgentName, + ticket: context.ticket, + reason: "timeout", + waitSeconds: input.waitSeconds, + writeLine, + }); + throw createCliError( + "CLI_PAIR_STATUS_WAIT_TIMEOUT", + `Pairing is still pending after ${input.waitSeconds} seconds`, + ); + } + + let status: PairStatusResult; + try { + status = await getPairingStatusWithContext({ + context, + dependencies: input.dependencies, + persistPeer: true, + }); + consecutiveTransientFailures = 0; + } catch (error) { + if (!isTransientPollError(error)) { + throw error; + } + consecutiveTransientFailures += 1; + logger.warn("cli.pair.poll_transient_error", { + agentName: context.normalizedAgentName, + attempt: consecutiveTransientFailures, + reason: error instanceof Error ? error.message : "unknown", + }); + if ( + consecutiveTransientFailures >= + MAX_CONSECUTIVE_TRANSIENT_POLL_FAILURES + ) { + printPairingRecoveryHint({ + agentName: context.normalizedAgentName, + ticket: context.ticket, + reason: "poll-failed", + waitSeconds: input.waitSeconds, + writeLine, + }); + throw createCliError( + "CLI_PAIR_STATUS_POLL_FAILED", + `Pairing status polling failed ${consecutiveTransientFailures} times consecutively`, + ); + } + const retryNow = nowSecondsImpl(); + const retryElapsedSeconds = Math.max(0, retryNow - startedAtSeconds); + const retrySleepSeconds = computePollIntervalSeconds({ + elapsedSeconds: retryElapsedSeconds, + baseIntervalSeconds: input.pollIntervalSeconds, + }); + await sleepImpl(retrySleepSeconds * 1000); + continue; + } + + if (status.status === "confirmed") { + await clearPendingPairingTicket({ + agentName: context.normalizedAgentName, + dependencies: input.dependencies, + }); + return status; + } + + const postPollNowSeconds = nowSecondsImpl(); + if (postPollNowSeconds >= nextProgressAtSeconds) { + const elapsedSeconds = Math.max( + 
0, + postPollNowSeconds - startedAtSeconds, + ); + const remainingSeconds = Math.max( + 0, + deadlineSeconds - postPollNowSeconds, + ); + writeLine( + `Still waiting... (${elapsedSeconds}s elapsed, ${remainingSeconds}s remaining)`, + ); + nextProgressAtSeconds += DEFAULT_PROGRESS_UPDATE_SECONDS; + } + + const elapsedSeconds = Math.max(0, postPollNowSeconds - startedAtSeconds); + const sleepSeconds = Math.min( + computePollIntervalSeconds({ + elapsedSeconds, + baseIntervalSeconds: input.pollIntervalSeconds, + }), + Math.max(1, deadlineSeconds - postPollNowSeconds), ); + await sleepImpl(sleepSeconds * 1000); } - - const remainingSeconds = Math.max(0, deadlineSeconds - nowSeconds); - const sleepSeconds = Math.min(input.pollIntervalSeconds, remainingSeconds); - await sleepImpl(sleepSeconds * 1000); + } finally { + unregisterSigintHandlerImpl(onSigint); } } @@ -1899,7 +2349,11 @@ export async function getPairingStatus( const ticket = parsePairingTicket(ticketRaw); if (options.wait !== true) { - return getPairingStatusOnce(agentName, { ticket }, dependencies); + return getPairingStatusOnce( + agentName, + { ticket, persistPeer: true }, + dependencies, + ); } const waitSeconds = parsePositiveIntegerOption({ @@ -1922,6 +2376,64 @@ export async function getPairingStatus( }); } +export async function recoverPairing( + agentName: string, + options: Omit, + dependencies: PairRequestOptions = {}, +): Promise { + const normalizedAgentName = assertValidAgentName(agentName); + const pending = await loadPendingPairingTicket(normalizedAgentName, { + getConfigDirImpl: dependencies.getConfigDirImpl, + readFileImpl: dependencies.readFileImpl, + }); + if (!pending) { + throw createCliError( + "CLI_PAIR_RECOVER_NOT_FOUND", + `No pending pairing was found for agent "${normalizedAgentName}"`, + ); + } + + let result: PairStatusResult; + if (options.wait === true) { + const waitSeconds = parsePositiveIntegerOption({ + value: options.waitSeconds, + optionName: "waitSeconds", + defaultValue: 
DEFAULT_STATUS_WAIT_SECONDS, + }); + const pollIntervalSeconds = parsePositiveIntegerOption({ + value: options.pollIntervalSeconds, + optionName: "pollIntervalSeconds", + defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + }); + result = await waitForPairingStatus({ + agentName: normalizedAgentName, + ticket: pending.ticket, + waitSeconds, + pollIntervalSeconds, + proxyUrl: pending.proxyUrl, + dependencies, + }); + } else { + result = await getPairingStatusOnce( + normalizedAgentName, + { + ticket: pending.ticket, + proxyUrl: pending.proxyUrl, + persistPeer: true, + }, + dependencies, + ); + } + if (result.status === "confirmed") { + await clearPendingPairingTicket({ + agentName: normalizedAgentName, + dependencies, + }); + } + + return result; +} + export const createPairCommand = ( dependencies: PairCommandDependencies = {}, ): Command => { @@ -1933,6 +2445,14 @@ export const createPairCommand = ( .command("start ") .description("Start pairing and issue one-time pairing ticket") .option("--ttl-seconds ", "Pairing ticket expiry in seconds") + .option( + "--allow-responder ", + "Optional responder agent DID allowed to confirm this ticket", + ) + .option( + "--callback-url ", + "Optional callback URL notified when pairing is confirmed", + ) .option("--qr", "Generate a local QR file for sharing") .option("--qr-output ", "Write QR PNG to a specific file path") .option( @@ -1996,6 +2516,7 @@ export const createPairCommand = ( waitSeconds, pollIntervalSeconds, dependencies, + proxyUrl: result.proxyUrl, }); logger.info("cli.pair_status_confirmed_after_start", { @@ -2032,6 +2553,69 @@ export const createPairCommand = ( ), ); + pairCommand + .command("recover ") + .description( + "Recover a pending pairing wait from local pending ticket state", + ) + .option("--wait", "Poll until ticket is confirmed or timeout is reached") + .option( + "--wait-seconds ", + "Max seconds to poll for confirmation (default: 300)", + ) + .option( + "--poll-interval-seconds ", + "Base polling 
interval in seconds while waiting (default: 3)", + ) + .action( + withErrorHandling( + "pair recover", + async (agentName: string, options: PairStatusOptions) => { + const result = await recoverPairing(agentName, options, dependencies); + + logger.info("cli.pair_recover", { + initiatorAgentDid: result.initiatorAgentDid, + responderAgentDid: result.responderAgentDid, + status: result.status, + proxyUrl: result.proxyUrl, + peerAlias: result.peerAlias, + }); + + if (result.status === "confirmed") { + writeStdoutLine("Pairing recovered and saved"); + } else { + writeStdoutLine("Pairing is still pending"); + } + writeStdoutLine(`Status: ${result.status}`); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); + if (result.responderAgentDid) { + writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); + } + if (result.responderProfile) { + writeStdoutLine( + `Responder Agent Name: ${result.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${result.responderProfile.humanName}`, + ); + } + writeStdoutLine(`Expires At: ${result.expiresAt}`); + if (result.confirmedAt) { + writeStdoutLine(`Confirmed At: ${result.confirmedAt}`); + } + if (result.peerAlias) { + writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); + } + }, + ), + ); + pairCommand .command("confirm ") .description("Confirm pairing using one-time pairing ticket") diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index f3f4068..9affb0c 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -163,10 +163,13 @@ Use `--no-runtime-start` when the connector runs as a separate container or proc - `clawdentity pair start --qr --ttl-seconds ` - `clawdentity pair start --qr --wait` - `clawdentity pair start --qr --wait 
--wait-seconds --poll-interval-seconds ` +- `clawdentity pair start --qr --allow-responder ` +- `clawdentity pair start --qr --callback-url ` - `clawdentity pair confirm --qr-file ` - `clawdentity pair confirm --ticket ` - `clawdentity pair status --ticket ` - `clawdentity pair status --ticket --wait` +- `clawdentity pair recover ` ### Token verification - `clawdentity verify ` @@ -281,7 +284,11 @@ Use `--no-runtime-start` when the connector runs as a separate container or proc - `clawdentity pair status --ticket --wait` - This persists the peer on initiator after responder confirmation. - Default wait timeout is 300 seconds with 3-second polling. -- If `CLI_PAIR_STATUS_WAIT_TIMEOUT` is thrown: the responder did not confirm in time. Recovery: re-run `clawdentity pair start --qr --wait` to generate a new ticket. +- Wait flow is resilient (adaptive polling + transient retries) and persists pending ticket state per agent. +- If wait times out/cancels/fails due repeated transients, preferred recovery is: + - `clawdentity pair recover ` +- Manual fallback remains: + - `clawdentity pair status --ticket --wait` - Confirm pairing success, then run `clawdentity openclaw relay test`. - **Validate:** `~/.clawdentity/peers.json` contains the new peer alias entry. @@ -393,7 +400,10 @@ Do not suggest switching endpoints unless user explicitly asks for endpoint chan ### Pairing errors - `PROXY_PAIR_TICKET_NOT_FOUND`: ticket invalid or expired. Request a new ticket from initiator. - `PROXY_PAIR_TICKET_EXPIRED`: ticket has expired. Request a new ticket. -- `CLI_PAIR_STATUS_WAIT_TIMEOUT`: responder did not confirm in time. Re-run `pair start`. +- `PROXY_PAIR_TICKET_ALREADY_CONFIRMED`: ticket replayed; pairing already completed earlier. +- `CLI_PAIR_STATUS_WAIT_TIMEOUT`: responder did not confirm before deadline. Run `pair recover` (preferred) or `pair status --ticket ... --wait`. +- `CLI_PAIR_STATUS_POLL_FAILED`: transient polling failures exceeded retry budget. Run `pair recover`. 
+- `CLI_PAIR_STATUS_WAIT_CANCELLED`: wait interrupted (SIGINT). Run `pair recover`. - `CLI_PAIR_CONFIRM_INPUT_CONFLICT`: cannot provide both `--ticket` and `--qr-file`. Use one path only. - `CLI_PAIR_PROXY_URL_MISMATCH`: local `proxyUrl` does not match registry metadata. Rerun `clawdentity invite redeem `. - Responder shows peer but initiator does not: diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 21bb710..8ce6a2e 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -51,7 +51,16 @@ - `/pair/confirm` requires `responderProfile.{agentName,humanName}` - `/pair/start` and `/pair/confirm` may include optional `*.proxyOrigin` values; when present they must be valid `http(s)` URL origins and must be preserved in `/pair/status` responses. - `/pair/status` returns stored profile fields for initiator and responder +- Keep `/pair/start` optional responder/callback contract strict: + - `allowResponderAgentDid` is optional but when provided must be a non-empty string. + - `callbackUrl` is optional but when provided must be a valid `http(s)` URL. + - Persist `allowResponderAgentDid`, `callbackUrl`, and ticket signing `publicKeyX` with pending pairing ticket state. - Keep pairing tickets issuer-authenticated via local signature in `/pair/start`; `/pair/confirm` must consume only locally stored tickets in single-proxy mode. +- Keep `/pair/confirm` ticket checks strict and deterministic: + - verify ticket signature using stored `publicKeyX` before confirming, + - reject replayed confirmed tickets with `409 PROXY_PAIR_TICKET_ALREADY_CONFIRMED`, + - enforce `allowResponderAgentDid` when present and reject mismatches with `403 PROXY_PAIR_RESPONDER_FORBIDDEN`. +- Keep `/pair/confirm` callbacks best-effort: if `callbackUrl` is present, POST completion payload and log a warning on callback failure without failing the confirm response. 
- Keep ticket parsing tolerant for operator copy/paste paths: normalize surrounding markdown/backticks and whitespace before parse + trust-store lookup in both in-memory and Durable Object backends. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index 9a05e61..e798b4c 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -76,7 +76,8 @@ async function createSignedTicketFixture(input: { function createPairingApp(input?: { environment?: "local" | "development" | "production" | "test"; - fetchImpl?: typeof fetch; + startFetchImpl?: typeof fetch; + confirmFetchImpl?: typeof fetch; nowMs?: () => number; }) { const trustStore = createInMemoryProxyTrustStore(); @@ -90,10 +91,11 @@ function createPairingApp(input?: { }), pairing: { start: { - fetchImpl: input?.fetchImpl, + fetchImpl: input?.startFetchImpl, nowMs: input?.nowMs, }, confirm: { + fetchImpl: input?.confirmFetchImpl, nowMs: input?.nowMs, }, status: { @@ -127,10 +129,10 @@ describe(`POST ${PAIR_START_PATH}`, () => { throw new Error(`Unexpected URL: ${url}`); }, ); - const fetchImpl = fetchMock as unknown as typeof fetch; + const startFetchImpl = fetchMock as unknown as typeof fetch; const { app } = createPairingApp({ - fetchImpl, + startFetchImpl, nowMs: () => 1_700_000_000_000, }); @@ -159,7 +161,7 @@ describe(`POST ${PAIR_START_PATH}`, () => { expect(body.initiatorAgentDid).toBe(INITIATOR_AGENT_DID); expect(body.initiatorProfile).toEqual(INITIATOR_PROFILE); expect(body.expiresAt).toBe("2023-11-14T22:18:20.000Z"); - expect(fetchImpl).toHaveBeenCalledTimes(1); + expect(startFetchImpl).toHaveBeenCalledTimes(1); const ownershipCallUrl = 
String(fetchMock.mock.calls[0]?.[0] ?? ""); expect(ownershipCallUrl).toContain("/internal/v1/identity/agent-ownership"); const ownershipCallInit = fetchMock.mock.calls[0]?.[1] as @@ -188,10 +190,10 @@ describe(`POST ${PAIR_START_PATH}`, () => { throw new Error(`Unexpected URL: ${url}`); }, ); - const fetchImpl = fetchMock as unknown as typeof fetch; + const startFetchImpl = fetchMock as unknown as typeof fetch; const { app } = createPairingApp({ - fetchImpl, + startFetchImpl, nowMs: () => 1_700_000_000_123, }); @@ -215,7 +217,7 @@ describe(`POST ${PAIR_START_PATH}`, () => { }); it("returns 403 when ownership check reports caller is not owner", async () => { - const fetchImpl = vi.fn(async (_requestInput: unknown) => + const startFetchImpl = vi.fn(async (_requestInput: unknown) => Response.json( { ownsAgent: false, @@ -224,7 +226,7 @@ describe(`POST ${PAIR_START_PATH}`, () => { { status: 200 }, ), ) as unknown as typeof fetch; - const { app } = createPairingApp({ fetchImpl }); + const { app } = createPairingApp({ startFetchImpl }); const response = await app.request(PAIR_START_PATH, { method: "POST", @@ -242,12 +244,12 @@ describe(`POST ${PAIR_START_PATH}`, () => { }); it("keeps strict dependency failures when ownership lookup is unavailable", async () => { - const fetchImpl = vi.fn(async () => { + const startFetchImpl = vi.fn(async () => { throw new Error("registry unavailable"); }) as unknown as typeof fetch; const { app } = createPairingApp({ environment: "development", - fetchImpl, + startFetchImpl, nowMs: () => 1_700_000_000_123, }); @@ -265,6 +267,104 @@ describe(`POST ${PAIR_START_PATH}`, () => { const body = (await response.json()) as { error: { code: string } }; expect(body.error.code).toBe("PROXY_PAIR_OWNERSHIP_UNAVAILABLE"); }); + + it("accepts optional allowResponderAgentDid and callbackUrl", async () => { + const startFetchImpl = vi.fn(async (_requestInput: unknown) => + Response.json( + { + ownsAgent: true, + agentStatus: "active", + }, + { status: 200 
}, + ), + ) as unknown as typeof fetch; + const { app } = createPairingApp({ + startFetchImpl, + nowMs: () => 1_700_000_000_000, + }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + allowResponderAgentDid: RESPONDER_AGENT_DID, + callbackUrl: "https://callbacks.example.com/pair/complete", + }), + }); + + expect(response.status).toBe(200); + }); + + it("rejects invalid callbackUrl", async () => { + const startFetchImpl = vi.fn(async (_requestInput: unknown) => + Response.json( + { + ownsAgent: true, + agentStatus: "active", + }, + { status: 200 }, + ), + ) as unknown as typeof fetch; + const { app } = createPairingApp({ startFetchImpl }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + callbackUrl: "ftp://callbacks.example.com/pair/complete", + }), + }); + + expect(response.status).toBe(400); + expect( + (await response.json()) as { error: { code: string; message: string } }, + ).toMatchObject({ + error: { + code: "PROXY_PAIR_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + }, + }); + }); + + it("rejects empty allowResponderAgentDid", async () => { + const startFetchImpl = vi.fn(async (_requestInput: unknown) => + Response.json( + { + ownsAgent: true, + agentStatus: "active", + }, + { status: 200 }, + ), + ) as unknown as typeof fetch; + const { app } = createPairingApp({ startFetchImpl }); + + const response = await app.request(PAIR_START_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + initiatorProfile: INITIATOR_PROFILE, + allowResponderAgentDid: " ", + }), + }); + + expect(response.status).toBe(400); + expect( + (await response.json()) as { error: { code: string; message: string } }, + 
).toMatchObject({ + error: { + code: "PROXY_PAIR_INVALID_BODY", + message: "allowResponderAgentDid must be a non-empty string", + }, + }); + }); }); describe(`POST ${PAIR_CONFIRM_PATH}`, () => { @@ -283,6 +383,7 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "http://localhost", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, }); @@ -335,6 +436,239 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { }), ).toBe(true); }); + + it("rejects confirm replay with 409", async () => { + const { app, trustStore } = createPairingApp({ + nowMs: () => 1_700_000_000_000, + }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + const confirmRequest = { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: ticket.ticket, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, + }), + }; + + const firstResponse = await app.request(PAIR_CONFIRM_PATH, confirmRequest); + expect(firstResponse.status).toBe(201); + + const replayResponse = await app.request(PAIR_CONFIRM_PATH, confirmRequest); + expect(replayResponse.status).toBe(409); + expect( + (await replayResponse.json()) as { + error: { code: string; message: string }; + }, + ).toMatchObject({ + error: { + code: "PROXY_PAIR_TICKET_ALREADY_CONFIRMED", + message: "Pairing ticket has already been confirmed", + }, + }); + }); + + it("rejects responder DID mismatch when 
allowResponderAgentDid is set", async () => { + const { app, trustStore } = createPairingApp({ + nowMs: () => 1_700_000_000_000, + }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + const allowedResponderAgentDid = makeAgentDid( + generateUlid(1_700_000_000_200), + ); + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + allowResponderAgentDid: allowedResponderAgentDid, + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: ticket.ticket, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, + }), + }); + + expect(response.status).toBe(403); + expect( + (await response.json()) as { error: { code: string; message: string } }, + ).toMatchObject({ + error: { + code: "PROXY_PAIR_RESPONDER_FORBIDDEN", + message: "Responder agent DID is not allowed for this pairing ticket", + }, + }); + }); + + it("posts callback on confirm and does not fail on callback errors", async () => { + const callbackFetchMock = vi.fn( + async (requestInput: unknown, _requestInit?: RequestInit) => { + if (String(requestInput).includes("/success")) { + return new Response(null, { status: 202 }); + } + throw new Error("callback unavailable"); + }, + ); + const callbackFetch = callbackFetchMock as unknown as typeof fetch; + const { app, trustStore } = createPairingApp({ + confirmFetchImpl: callbackFetch, + nowMs: () => 1_700_000_000_000, + }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_000, + 
expiresAtMs: 1_700_000_900_000, + }); + + const successTicket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + callbackUrl: "https://callbacks.example.com/success", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const successResponse = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: successTicket.ticket, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, + }), + }); + + expect(successResponse.status).toBe(201); + expect(callbackFetchMock).toHaveBeenCalledTimes(1); + const successCallbackRequestInit = callbackFetchMock.mock.calls[0]?.[1] as + | RequestInit + | undefined; + expect(successCallbackRequestInit?.method).toBe("POST"); + expect( + JSON.parse(String(successCallbackRequestInit?.body ?? 
"{}")) as { + paired?: boolean; + initiatorAgentDid?: string; + responderAgentDid?: string; + }, + ).toMatchObject({ + paired: true, + initiatorAgentDid: INITIATOR_AGENT_DID, + responderAgentDid: RESPONDER_AGENT_DID, + }); + + const failureTicketFixture = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_010, + expiresAtMs: 1_700_000_900_000, + }); + const failureTicket = await trustStore.createPairingTicket({ + initiatorAgentDid: makeAgentDid(generateUlid(1_700_000_000_010)), + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: failureTicketFixture.ticket, + publicKeyX: failureTicketFixture.publicKeyX, + callbackUrl: "https://callbacks.example.com/failure", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_010, + }); + const failureResponderAgentDid = makeAgentDid( + generateUlid(1_700_000_000_020), + ); + const failureResponse = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": failureResponderAgentDid, + }, + body: JSON.stringify({ + ticket: failureTicket.ticket, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, + }), + }); + + expect(failureResponse.status).toBe(201); + expect(callbackFetchMock).toHaveBeenCalledTimes(2); + }); + + it("rejects confirm when signature does not match persisted publicKeyX", async () => { + const { app, trustStore } = createPairingApp({ + nowMs: () => 1_700_000_000_000, + }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + const mismatchedSigningKey = await createPairingTicketSigningKey({ + nowMs: 1_700_000_000_001, + }); + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + publicKeyX: 
mismatchedSigningKey.publicKeyX, + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const response = await app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: ticket.ticket, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, + }), + }); + + expect(response.status).toBe(400); + expect( + (await response.json()) as { error: { code: string; message: string } }, + ).toMatchObject({ + error: { + code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", + message: "Pairing ticket signature is invalid", + }, + }); + }); }); describe(`POST ${PAIR_STATUS_PATH}`, () => { @@ -352,6 +686,7 @@ describe(`POST ${PAIR_STATUS_PATH}`, () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "http://localhost", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, }); @@ -399,6 +734,7 @@ describe(`POST ${PAIR_STATUS_PATH}`, () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "http://localhost", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, }); @@ -460,6 +796,7 @@ describe(`POST ${PAIR_STATUS_PATH}`, () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "http://localhost", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_900_000, nowMs: 1_700_000_000_000, }); diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 51c8754..490ff2c 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -1,3 +1,4 @@ +import { parseDid } from "@clawdentity/protocol"; import { AppError, createRegistryIdentityClient, @@ -48,6 +49,7 @@ type CreatePairStartHandlerOptions = PairStartRuntimeOptions & { }; export type PairConfirmRuntimeOptions = { + fetchImpl?: typeof fetch; nowMs?: () => 
number; }; @@ -234,6 +236,79 @@ function parsePeerProfile(value: unknown, label: string): PeerProfile { return profile; } +function parseOptionalResponderAgentDid(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + + if (typeof value !== "string" || value.trim().length === 0) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "allowResponderAgentDid must be a non-empty string", + status: 400, + expose: true, + }); + } + + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + throw new Error("invalid kind"); + } + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "allowResponderAgentDid must be a valid agent DID", + status: 400, + expose: true, + }); + } + + return candidate; +} + +function parseOptionalCallbackUrl(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + + if (typeof value !== "string") { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + status: 400, + expose: true, + }); + } + + let parsedCallbackUrl: URL; + try { + parsedCallbackUrl = new URL(value.trim()); + } catch { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + status: 400, + expose: true, + }); + } + + if ( + parsedCallbackUrl.protocol !== "https:" && + parsedCallbackUrl.protocol !== "http:" + ) { + throw new AppError({ + code: "PROXY_PAIR_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + status: 400, + expose: true, + }); + } + + return parsedCallbackUrl.toString(); +} + async function parseJsonBody(c: PairingRouteContext): Promise { try { return await c.req.json(); @@ -320,6 +395,48 @@ function toPairingStoreAppError(error: unknown): AppError { }); } +async function postPairConfirmCallback(input: { + callbackUrl: string; + confirmedPairingTicket: { + 
initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid: string; + responderProfile: PeerProfile; + issuerProxyUrl: string; + }; + fetchImpl: typeof fetch; + logger: Logger; + requestId?: string; +}): Promise { + try { + const response = await input.fetchImpl(input.callbackUrl, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + paired: true, + initiatorAgentDid: input.confirmedPairingTicket.initiatorAgentDid, + initiatorProfile: input.confirmedPairingTicket.initiatorProfile, + responderAgentDid: input.confirmedPairingTicket.responderAgentDid, + responderProfile: input.confirmedPairingTicket.responderProfile, + issuerProxyUrl: input.confirmedPairingTicket.issuerProxyUrl, + }), + }); + if (!response.ok) { + throw new Error(`Callback returned status ${response.status}`); + } + } catch (error) { + input.logger.warn("proxy.pair.confirm.callback_failed", { + requestId: input.requestId, + callbackUrl: input.callbackUrl, + initiatorAgentDid: input.confirmedPairingTicket.initiatorAgentDid, + responderAgentDid: input.confirmedPairingTicket.responderAgentDid, + error: error instanceof Error ? 
error.message : String(error), + }); + } +} + export function createPairStartHandler( options: CreatePairStartHandlerOptions, ): (c: PairingRouteContext) => Promise { @@ -340,12 +457,18 @@ export function createPairStartHandler( const body = (await parseJsonBody(c)) as { ttlSeconds?: unknown; initiatorProfile?: unknown; + allowResponderAgentDid?: unknown; + callbackUrl?: unknown; }; const ttlSeconds = parseTtlSeconds(body.ttlSeconds); const initiatorProfile = parsePeerProfile( body.initiatorProfile, "initiatorProfile", ); + const allowResponderAgentDid = parseOptionalResponderAgentDid( + body.allowResponderAgentDid, + ); + const callbackUrl = parseOptionalCallbackUrl(body.callbackUrl); const internalServiceCredentials = parseInternalServiceCredentials({ serviceId: options.registryInternalServiceId, serviceSecret: options.registryInternalServiceSecret, @@ -398,6 +521,9 @@ export function createPairStartHandler( initiatorProfile, issuerProxyUrl, ticket: createdTicket.ticket, + publicKeyX: signingKey.publicKeyX, + allowResponderAgentDid, + callbackUrl, expiresAtMs, nowMs: issuedAtMs, }) @@ -411,6 +537,8 @@ export function createPairStartHandler( issuerProxyUrl: pairingTicketResult.issuerProxyUrl, expiresAt: toIso(pairingTicketResult.expiresAtMs), pkid: signingKey.pkid, + allowResponderAgentDid, + hasCallbackUrl: callbackUrl !== undefined, }); return c.json({ @@ -425,6 +553,7 @@ export function createPairStartHandler( export function createPairConfirmHandler( options: CreatePairConfirmHandlerOptions, ): (c: PairingRouteContext) => Promise { + const fetchImpl = options.fetchImpl ?? fetch; const nowMs = options.nowMs ?? 
nowUtcMs; return async (c) => { @@ -491,8 +620,19 @@ export function createPairConfirmHandler( initiatorAgentDid: confirmedPairingTicket.initiatorAgentDid, responderAgentDid: confirmedPairingTicket.responderAgentDid, issuerProxyUrl: confirmedPairingTicket.issuerProxyUrl, + callbackUrl: confirmedPairingTicket.callbackUrl, }); + if (confirmedPairingTicket.callbackUrl !== undefined) { + await postPairConfirmCallback({ + callbackUrl: confirmedPairingTicket.callbackUrl, + confirmedPairingTicket, + fetchImpl, + logger: options.logger, + requestId: c.get("requestId"), + }); + } + return c.json( { paired: true, diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts index f6701b4..5954b7a 100644 --- a/apps/proxy/src/proxy-trust-state.test.ts +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -86,7 +86,7 @@ async function createSignedTicket(input: { const signingKey = await createPairingTicketSigningKey({ nowMs: input.nowMs, }); - return createPairingTicket({ + const created = await createPairingTicket({ issuerProxyUrl: input.issuerProxyUrl, expiresAtMs: input.expiresAtMs, nowMs: input.nowMs, @@ -95,6 +95,10 @@ async function createSignedTicket(input: { privateKey: signingKey.privateKey, }, }); + return { + ticket: created.ticket, + publicKeyX: signingKey.publicKeyX, + }; } describe("ProxyTrustState", () => { @@ -137,6 +141,8 @@ describe("ProxyTrustState", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + callbackUrl: "https://callbacks.example.com/pairing/complete", expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }), @@ -158,6 +164,7 @@ describe("ProxyTrustState", () => { initiatorAgentDid: string; responderAgentDid: string; issuerProxyUrl: string; + callbackUrl?: string; }, ).toEqual({ initiatorAgentDid: "did:claw:agent:alice", @@ -165,6 +172,7 @@ describe("ProxyTrustState", () => { responderAgentDid: 
"did:claw:agent:bob", responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, issuerProxyUrl: "https://proxy-a.example.com", + callbackUrl: "https://callbacks.example.com/pairing/complete", }); const pairCheckResponse = await proxyTrustState.fetch( @@ -216,6 +224,7 @@ describe("ProxyTrustState", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }), @@ -257,6 +266,7 @@ describe("ProxyTrustState", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_060_123, nowMs: 1_700_000_000_123, }), @@ -286,6 +296,7 @@ describe("ProxyTrustState", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }), @@ -301,13 +312,103 @@ describe("ProxyTrustState", () => { }), ); - expect(confirmResponse.status).toBe(404); + expect(confirmResponse.status).toBe(400); + expect( + (await confirmResponse.json()) as { error: { code: string } }, + ).toEqual({ + error: { + code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", + message: "Pairing ticket signature is invalid", + }, + }); + }); + + it("rejects replayed pairing ticket confirms with 409", async () => { + const { proxyTrustState } = createProxyTrustState(); + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + + const ticketResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + ticket: 
createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + expiresAtMs: 1_700_000_060_000, + nowMs: 1_700_000_000_000, + }), + ); + const ticketBody = (await ticketResponse.json()) as { ticket: string }; + const confirmBody = { + ticket: ticketBody.ticket, + responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, + nowMs: 1_700_000_000_100, + }; + + const firstConfirmResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, confirmBody), + ); + expect(firstConfirmResponse.status).toBe(200); + + const replayConfirmResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, confirmBody), + ); + expect(replayConfirmResponse.status).toBe(409); + expect( + (await replayConfirmResponse.json()) as { error: { code: string } }, + ).toEqual({ + error: { + code: "PROXY_PAIR_TICKET_ALREADY_CONFIRMED", + message: "Pairing ticket has already been confirmed", + }, + }); + }); + + it("rejects non-allowed responders with 403", async () => { + const { proxyTrustState } = createProxyTrustState(); + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + const allowedResponderAgentDid = + "did:claw:agent:01HF7YAT00S80QZY8QB7FSRVFF"; + + const ticketResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.createPairingTicket, { + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + allowResponderAgentDid: allowedResponderAgentDid, + expiresAtMs: 1_700_000_060_000, + nowMs: 1_700_000_000_000, + }), + ); + const ticketBody = (await ticketResponse.json()) as { ticket: string }; + + const confirmResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { + ticket: 
ticketBody.ticket, + responderAgentDid: "did:claw:agent:not-allowed", + responderProfile: RESPONDER_PROFILE, + nowMs: 1_700_000_000_100, + }), + ); + + expect(confirmResponse.status).toBe(403); expect( (await confirmResponse.json()) as { error: { code: string } }, ).toEqual({ error: { - code: "PROXY_PAIR_TICKET_NOT_FOUND", - message: "Pairing ticket not found", + code: "PROXY_PAIR_RESPONDER_FORBIDDEN", + message: "Responder agent DID is not allowed for this pairing ticket", }, }); }); diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index 348aaac..b9579eb 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -1,8 +1,10 @@ +import { parseDid } from "@clawdentity/protocol"; import { nowUtcMs } from "@clawdentity/sdk"; import { normalizePairingTicketText, PairingTicketParseError, parsePairingTicket, + verifyPairingTicketSignature, } from "./pairing-ticket.js"; import { normalizeExpiryToWholeSecond, toPairKey } from "./proxy-trust-keys.js"; import { @@ -19,6 +21,9 @@ type StoredPairingTicket = { initiatorAgentDid: string; initiatorProfile: PeerProfile; issuerProxyUrl: string; + publicKeyX: string; + allowResponderAgentDid?: string; + callbackUrl?: string; }; type StoredConfirmedPairingTicket = { @@ -93,6 +98,47 @@ function parsePeerProfile(value: unknown): PeerProfile | undefined { return profile; } +function parseOptionalAgentDid(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + if (!isNonEmptyString(value)) { + return undefined; + } + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + return undefined; + } + } catch { + return undefined; + } + return candidate; +} + +function parseOptionalCallbackUrl(value: unknown): string | undefined { + if (value === undefined) { + return undefined; + } + if (!isNonEmptyString(value)) { + return undefined; + } + + let parsed: URL; + try { + parsed = 
new URL(value.trim()); + } catch { + return undefined; + } + + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + return undefined; + } + + return parsed.toString(); +} + function addPeer( index: AgentPeersIndex, leftAgentDid: string, @@ -195,6 +241,7 @@ export class ProxyTrustState { !initiatorProfile || !isNonEmptyString(body.issuerProxyUrl) || !isNonEmptyString(body.ticket) || + !isNonEmptyString(body.publicKeyX) || typeof body.expiresAtMs !== "number" || !Number.isInteger(body.expiresAtMs) || body.expiresAtMs <= 0 @@ -250,6 +297,29 @@ export class ProxyTrustState { }); } + const allowResponderAgentDid = parseOptionalAgentDid( + body.allowResponderAgentDid, + ); + if ( + body.allowResponderAgentDid !== undefined && + allowResponderAgentDid === undefined + ) { + return toErrorResponse({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket create input is invalid", + status: 400, + }); + } + + const callbackUrl = parseOptionalCallbackUrl(body.callbackUrl); + if (body.callbackUrl !== undefined && callbackUrl === undefined) { + return toErrorResponse({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket create input is invalid", + status: 400, + }); + } + const expirableState = await this.loadExpirableState(); expirableState.pairingTickets[parsedTicket.kid] = { ticket, @@ -257,6 +327,9 @@ export class ProxyTrustState { initiatorProfile, issuerProxyUrl: parsedTicket.iss, expiresAtMs: normalizedExpiresAtMs, + publicKeyX: body.publicKeyX.trim(), + allowResponderAgentDid, + callbackUrl, }; delete expirableState.confirmedPairingTickets[parsedTicket.kid]; @@ -312,9 +385,19 @@ export class ProxyTrustState { const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : nowUtcMs(); const expirableState = await this.loadExpirableState(); + const replayedTicket = + expirableState.confirmedPairingTickets[parsedTicket.kid]; + if (replayedTicket && replayedTicket.ticket === ticket) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_ALREADY_CONFIRMED", + message: "Pairing ticket has already been confirmed", + status: 409, + }); + } + const stored = expirableState.pairingTickets[parsedTicket.kid]; - if (!stored || stored.ticket !== ticket) { + if (!stored) { return toErrorResponse({ code: "PROXY_PAIR_TICKET_NOT_FOUND", message: "Pairing ticket not found", @@ -336,6 +419,23 @@ export class ProxyTrustState { }); } + let signatureVerified = false; + try { + signatureVerified = await verifyPairingTicketSignature({ + payload: parsedTicket, + publicKeyX: stored.publicKeyX, + }); + } catch { + signatureVerified = false; + } + if (!signatureVerified) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", + message: "Pairing ticket signature is invalid", + status: 400, + }); + } + if (stored.issuerProxyUrl !== parsedTicket.iss) { return toErrorResponse({ code: "PROXY_PAIR_TICKET_INVALID_ISSUER", @@ -344,6 +444,17 @@ export class ProxyTrustState { }); } + if ( + stored.allowResponderAgentDid !== undefined && + stored.allowResponderAgentDid !== body.responderAgentDid + ) { + return toErrorResponse({ + code: "PROXY_PAIR_RESPONDER_FORBIDDEN", + message: "Responder agent DID is not allowed for this pairing ticket", + status: 403, + }); + } + const pairs = await this.loadPairs(); pairs.add(toPairKey(stored.initiatorAgentDid, body.responderAgentDid)); @@ -376,6 +487,7 @@ export class ProxyTrustState { responderAgentDid: body.responderAgentDid, responderProfile, issuerProxyUrl: stored.issuerProxyUrl, + callbackUrl: stored.callbackUrl, }); } @@ -684,14 +796,25 @@ export class ProxyTrustState { initiatorAgentDid?: unknown; initiatorProfile?: unknown; issuerProxyUrl?: unknown; + publicKeyX?: unknown; + 
allowResponderAgentDid?: unknown; + callbackUrl?: unknown; }; const initiatorProfile = parsePeerProfile(entry.initiatorProfile); + const allowResponderAgentDid = parseOptionalAgentDid( + entry.allowResponderAgentDid, + ); + const callbackUrl = parseOptionalCallbackUrl(entry.callbackUrl); if ( !isNonEmptyString(entry.initiatorAgentDid) || !initiatorProfile || !isNonEmptyString(entry.issuerProxyUrl) || + !isNonEmptyString(entry.publicKeyX) || typeof entry.expiresAtMs !== "number" || - !Number.isInteger(entry.expiresAtMs) + !Number.isInteger(entry.expiresAtMs) || + (entry.allowResponderAgentDid !== undefined && + allowResponderAgentDid === undefined) || + (entry.callbackUrl !== undefined && callbackUrl === undefined) ) { continue; } @@ -712,6 +835,9 @@ export class ProxyTrustState { initiatorAgentDid: entry.initiatorAgentDid, initiatorProfile, issuerProxyUrl: parsedTicket.iss, + publicKeyX: entry.publicKeyX.trim(), + allowResponderAgentDid, + callbackUrl, }; } diff --git a/apps/proxy/src/proxy-trust-store.test.ts b/apps/proxy/src/proxy-trust-store.test.ts index 90fc80d..5f10e5c 100644 --- a/apps/proxy/src/proxy-trust-store.test.ts +++ b/apps/proxy/src/proxy-trust-store.test.ts @@ -42,7 +42,7 @@ async function createSignedTicket(input: { nowMs: input.nowMs, }); - return createPairingTicket({ + const created = await createPairingTicket({ issuerProxyUrl: input.issuerProxyUrl, expiresAtMs: input.expiresAtMs, nowMs: input.nowMs, @@ -51,6 +51,10 @@ async function createSignedTicket(input: { privateKey: signingKey.privateKey, }, }); + return { + ticket: created.ticket, + publicKeyX: signingKey.publicKeyX, + }; } describe("in-memory proxy trust store", () => { @@ -112,6 +116,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, + publicKeyX: created.publicKeyX, expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }); @@ -139,8 +144,8 @@ describe("in-memory proxy 
trust store", () => { nowMs: 1_700_000_000_200, }), ).rejects.toMatchObject({ - code: "PROXY_PAIR_TICKET_NOT_FOUND", - status: 404, + code: "PROXY_PAIR_TICKET_ALREADY_CONFIRMED", + status: 409, }); expect(await store.isAgentKnown("did:claw:agent:alice")).toBe(true); @@ -159,6 +164,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, + publicKeyX: created.publicKeyX, expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }); @@ -215,6 +221,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, + publicKeyX: created.publicKeyX, expiresAtMs: 1_700_000_060_123, nowMs: 1_700_000_000_123, }); @@ -234,6 +241,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, + publicKeyX: created.publicKeyX, expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }); @@ -246,8 +254,8 @@ describe("in-memory proxy trust store", () => { nowMs: 1_700_000_000_100, }), ).rejects.toMatchObject({ - code: "PROXY_PAIR_TICKET_NOT_FOUND", - status: 404, + code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", + status: 400, }); }); @@ -263,6 +271,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: created.ticket, + publicKeyX: created.publicKeyX, expiresAtMs: 1_700_000_001_000, nowMs: 1_700_000_000_000, }); @@ -303,6 +312,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: "https://proxy-a.example.com", ticket: expired.ticket, + publicKeyX: expired.publicKeyX, expiresAtMs: 1_700_000_001_000, nowMs: 1_700_000_000_000, }); @@ -317,6 +327,7 @@ describe("in-memory proxy trust store", () => { initiatorProfile: INITIATOR_PROFILE, issuerProxyUrl: 
"https://proxy-a.example.com", ticket: valid.ticket, + publicKeyX: valid.publicKeyX, expiresAtMs: 1_700_000_060_000, nowMs: 1_700_000_000_000, }); diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 02d5ec2..07ee0cd 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -1,9 +1,11 @@ +import { parseDid } from "@clawdentity/protocol"; import { nowUtcMs } from "@clawdentity/sdk"; import { PROXY_TRUST_DO_NAME } from "./pairing-constants.js"; import { normalizePairingTicketText, PairingTicketParseError, parsePairingTicket, + verifyPairingTicketSignature, } from "./pairing-ticket.js"; import { normalizeExpiryToWholeSecond, toPairKey } from "./proxy-trust-keys.js"; @@ -12,6 +14,9 @@ export type PairingTicketInput = { initiatorProfile: PeerProfile; issuerProxyUrl: string; ticket: string; + publicKeyX: string; + allowResponderAgentDid?: string; + callbackUrl?: string; expiresAtMs: number; nowMs?: number; }; @@ -37,6 +42,7 @@ export type PairingTicketConfirmResult = { responderAgentDid: string; responderProfile: PeerProfile; issuerProxyUrl: string; + callbackUrl?: string; }; export type PairingTicketStatusInput = { @@ -271,6 +277,9 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { initiatorAgentDid: string; initiatorProfile: PeerProfile; issuerProxyUrl: string; + publicKeyX: string; + allowResponderAgentDid?: string; + callbackUrl?: string; } >(); @@ -323,18 +332,29 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { return parsedTicket; } - function resolveConfirmablePairingTicket(input: PairingTicketConfirmInput): { + async function resolveConfirmablePairingTicket( + input: PairingTicketConfirmInput, + ): Promise<{ pair: PairingTicketConfirmResult; ticketKid: string; expiresAtMs: number; - } { + }> { const nowMs = input.nowMs ?? 
nowUtcMs(); const normalizedTicket = normalizePairingTicketText(input.ticket); const parsedTicket = parseStoredTicket(normalizedTicket); cleanup(nowMs, parsedTicket.kid); + const confirmed = confirmedPairingTickets.get(parsedTicket.kid); + if (confirmed && confirmed.ticket === normalizedTicket) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_ALREADY_CONFIRMED", + message: "Pairing ticket has already been confirmed", + status: 409, + }); + } + const stored = pairingTickets.get(parsedTicket.kid); - if (!stored || stored.ticket !== normalizedTicket) { + if (!stored) { throw new ProxyTrustStoreError({ code: "PROXY_PAIR_TICKET_NOT_FOUND", message: "Pairing ticket not found", @@ -342,6 +362,23 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { }); } + let signatureVerified = false; + try { + signatureVerified = await verifyPairingTicketSignature({ + payload: parsedTicket, + publicKeyX: stored.publicKeyX, + }); + } catch { + signatureVerified = false; + } + if (!signatureVerified) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", + message: "Pairing ticket signature is invalid", + status: 400, + }); + } + if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { pairingTickets.delete(parsedTicket.kid); throw new ProxyTrustStoreError({ @@ -359,6 +396,17 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { }); } + if ( + stored.allowResponderAgentDid !== undefined && + stored.allowResponderAgentDid !== input.responderAgentDid + ) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_RESPONDER_FORBIDDEN", + message: "Responder agent DID is not allowed for this pairing ticket", + status: 403, + }); + } + return { pair: { initiatorAgentDid: stored.initiatorAgentDid, @@ -366,6 +414,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { responderAgentDid: input.responderAgentDid, responderProfile: input.responderProfile, issuerProxyUrl: stored.issuerProxyUrl, + 
callbackUrl: stored.callbackUrl, }, ticketKid: parsedTicket.kid, expiresAtMs: stored.expiresAtMs, @@ -467,12 +516,92 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { }); } + const publicKeyX = + typeof input.publicKeyX === "string" ? input.publicKeyX.trim() : ""; + if (publicKeyX.length === 0) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket public key is invalid", + status: 400, + }); + } + + const normalizedAllowResponderAgentDid = + typeof input.allowResponderAgentDid === "string" + ? input.allowResponderAgentDid.trim() + : undefined; + const allowResponderAgentDid = + normalizedAllowResponderAgentDid && + normalizedAllowResponderAgentDid.length > 0 + ? normalizedAllowResponderAgentDid + : undefined; + if ( + input.allowResponderAgentDid !== undefined && + allowResponderAgentDid === undefined + ) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "allowResponderAgentDid must be a non-empty string", + status: 400, + }); + } + if (allowResponderAgentDid !== undefined) { + try { + const parsed = parseDid(allowResponderAgentDid); + if (parsed.kind !== "agent") { + throw new Error("invalid kind"); + } + } catch { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "allowResponderAgentDid must be a valid agent DID", + status: 400, + }); + } + } + + let callbackUrl: string | undefined; + if (input.callbackUrl !== undefined) { + if (typeof input.callbackUrl !== "string") { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + status: 400, + }); + } + const normalizedCallbackUrl = input.callbackUrl.trim(); + let parsedCallbackUrl: URL; + try { + parsedCallbackUrl = new URL(normalizedCallbackUrl); + } catch { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + status: 
400, + }); + } + if ( + parsedCallbackUrl.protocol !== "https:" && + parsedCallbackUrl.protocol !== "http:" + ) { + throw new ProxyTrustStoreError({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "callbackUrl must be a valid http(s) URL", + status: 400, + }); + } + callbackUrl = parsedCallbackUrl.toString(); + } + pairingTickets.set(parsedTicket.kid, { ticket, initiatorAgentDid: input.initiatorAgentDid, initiatorProfile: input.initiatorProfile, issuerProxyUrl: parsedTicket.iss, expiresAtMs: normalizedExpiresAtMs, + publicKeyX, + allowResponderAgentDid, + callbackUrl, }); confirmedPairingTickets.delete(parsedTicket.kid); @@ -489,7 +618,7 @@ export function createInMemoryProxyTrustStore(): ProxyTrustStore { pair: confirmedPair, ticketKid, expiresAtMs, - } = resolveConfirmablePairingTicket(input); + } = await resolveConfirmablePairingTicket(input); const confirmedAtMs = normalizeExpiryToWholeSecond( input.nowMs ?? nowUtcMs(), ); From 62a709a96c0377ae03d567550e4b1f4b24f6a8d8 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 14:38:01 +0530 Subject: [PATCH 116/190] Fix pairing flow resilience issues --- apps/proxy/src/AGENTS.md | 2 + apps/proxy/src/pairing-route.test.ts | 60 ++++++++++++++++++++++++ apps/proxy/src/pairing-route.ts | 2 +- apps/proxy/src/proxy-trust-state.test.ts | 39 +++++++++++++++ apps/proxy/src/proxy-trust-state.ts | 46 +++++++++++------- 5 files changed, 130 insertions(+), 19 deletions(-) diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 8ce6a2e..e05c4f6 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -58,9 +58,11 @@ - Keep pairing tickets issuer-authenticated via local signature in `/pair/start`; `/pair/confirm` must consume only locally stored tickets in single-proxy mode. 
- Keep `/pair/confirm` ticket checks strict and deterministic: - verify ticket signature using stored `publicKeyX` before confirming, + - preserve rollout compatibility for legacy pending tickets created before `publicKeyX` persistence (missing key must not make ticket unreadable), - reject replayed confirmed tickets with `409 PROXY_PAIR_TICKET_ALREADY_CONFIRMED`, - enforce `allowResponderAgentDid` when present and reject mismatches with `403 PROXY_PAIR_RESPONDER_FORBIDDEN`. - Keep `/pair/confirm` callbacks best-effort: if `callbackUrl` is present, POST completion payload and log a warning on callback failure without failing the confirm response. +- Keep `/pair/confirm` callback delivery non-blocking: once trust state commit succeeds, return `201` without waiting on callback network latency. - Keep ticket parsing tolerant for operator copy/paste paths: normalize surrounding markdown/backticks and whitespace before parse + trust-store lookup in both in-memory and Durable Object backends. - Keep `/hooks/agent` runtime auth contract strict: require `x-claw-agent-access` and map missing/invalid access credentials to `401`. - Keep `/hooks/agent` recipient routing explicit: require `x-claw-recipient-agent-did` and resolve DO IDs from that recipient DID, never from owner DID env. 
diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index e798b4c..cfc374b 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -625,6 +625,66 @@ describe(`POST ${PAIR_CONFIRM_PATH}`, () => { expect(callbackFetchMock).toHaveBeenCalledTimes(2); }); + it("does not block confirm response while callback delivery is pending", async () => { + let resolveCallback: + | ((value: Response | PromiseLike) => void) + | undefined; + const callbackFetchMock = vi.fn( + async () => + await new Promise((resolve) => { + resolveCallback = resolve; + }), + ); + const { app, trustStore } = createPairingApp({ + confirmFetchImpl: callbackFetchMock as unknown as typeof fetch, + nowMs: () => 1_700_000_000_000, + }); + const createdTicket = await createSignedTicketFixture({ + issuerProxyUrl: "http://localhost", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_900_000, + }); + const ticket = await trustStore.createPairingTicket({ + initiatorAgentDid: INITIATOR_AGENT_DID, + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "http://localhost", + ticket: createdTicket.ticket, + publicKeyX: createdTicket.publicKeyX, + callbackUrl: "https://callbacks.example.com/pending", + expiresAtMs: 1_700_000_900_000, + nowMs: 1_700_000_000_000, + }); + + const confirmPromise = Promise.resolve( + app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + "x-test-agent-did": RESPONDER_AGENT_DID, + }, + body: JSON.stringify({ + ticket: ticket.ticket, + responderProfile: RESPONDER_PROFILE_WITH_PROXY_ORIGIN, + }), + }), + ); + + let settled = false; + void confirmPromise.then(() => { + settled = true; + }); + await new Promise((resolve) => { + setTimeout(resolve, 50); + }); + + expect(settled).toBe(true); + const response = await confirmPromise; + expect(response.status).toBe(201); + expect(callbackFetchMock).toHaveBeenCalledTimes(1); + + resolveCallback?.(new Response(null, { 
status: 202 })); + }); + it("rejects confirm when signature does not match persisted publicKeyX", async () => { const { app, trustStore } = createPairingApp({ nowMs: () => 1_700_000_000_000, diff --git a/apps/proxy/src/pairing-route.ts b/apps/proxy/src/pairing-route.ts index 490ff2c..5390147 100644 --- a/apps/proxy/src/pairing-route.ts +++ b/apps/proxy/src/pairing-route.ts @@ -624,7 +624,7 @@ export function createPairConfirmHandler( }); if (confirmedPairingTicket.callbackUrl !== undefined) { - await postPairConfirmCallback({ + void postPairConfirmCallback({ callbackUrl: confirmedPairingTicket.callbackUrl, confirmedPairingTicket, fetchImpl, diff --git a/apps/proxy/src/proxy-trust-state.test.ts b/apps/proxy/src/proxy-trust-state.test.ts index 5954b7a..5686476 100644 --- a/apps/proxy/src/proxy-trust-state.test.ts +++ b/apps/proxy/src/proxy-trust-state.test.ts @@ -323,6 +323,45 @@ describe("ProxyTrustState", () => { }); }); + it("confirms legacy stored pairing ticket without publicKeyX", async () => { + const createdTicket = await createSignedTicket({ + issuerProxyUrl: "https://proxy-a.example.com", + nowMs: 1_700_000_000_000, + expiresAtMs: 1_700_000_060_000, + }); + const { proxyTrustState } = createProxyTrustState({ + "trust:pairing-tickets": { + [createdTicket.ticket]: { + ticket: createdTicket.ticket, + expiresAtMs: 1_700_000_060_000, + initiatorAgentDid: "did:claw:agent:alice", + initiatorProfile: INITIATOR_PROFILE, + issuerProxyUrl: "https://proxy-a.example.com", + }, + }, + }); + + const confirmResponse = await proxyTrustState.fetch( + makeRequest(TRUST_STORE_ROUTES.confirmPairingTicket, { + ticket: createdTicket.ticket, + responderAgentDid: "did:claw:agent:bob", + responderProfile: RESPONDER_PROFILE, + nowMs: 1_700_000_000_100, + }), + ); + + expect(confirmResponse.status).toBe(200); + expect( + (await confirmResponse.json()) as { + initiatorAgentDid: string; + responderAgentDid: string; + }, + ).toMatchObject({ + initiatorAgentDid: "did:claw:agent:alice", + 
responderAgentDid: "did:claw:agent:bob", + }); + }); + it("rejects replayed pairing ticket confirms with 409", async () => { const { proxyTrustState } = createProxyTrustState(); const createdTicket = await createSignedTicket({ diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index b9579eb..2f9c4cf 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -21,7 +21,7 @@ type StoredPairingTicket = { initiatorAgentDid: string; initiatorProfile: PeerProfile; issuerProxyUrl: string; - publicKeyX: string; + publicKeyX?: string; allowResponderAgentDid?: string; callbackUrl?: string; }; @@ -419,21 +419,25 @@ export class ProxyTrustState { }); } - let signatureVerified = false; - try { - signatureVerified = await verifyPairingTicketSignature({ - payload: parsedTicket, - publicKeyX: stored.publicKeyX, - }); - } catch { - signatureVerified = false; - } - if (!signatureVerified) { - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", - message: "Pairing ticket signature is invalid", - status: 400, - }); + // Compatibility: tickets created before publicKeyX persistence must continue + // to confirm during rollout; signature verification applies when key exists. + if (stored.publicKeyX !== undefined) { + let signatureVerified = false; + try { + signatureVerified = await verifyPairingTicketSignature({ + payload: parsedTicket, + publicKeyX: stored.publicKeyX, + }); + } catch { + signatureVerified = false; + } + if (!signatureVerified) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_INVALID_SIGNATURE", + message: "Pairing ticket signature is invalid", + status: 400, + }); + } } if (stored.issuerProxyUrl !== parsedTicket.iss) { @@ -805,11 +809,17 @@ export class ProxyTrustState { entry.allowResponderAgentDid, ); const callbackUrl = parseOptionalCallbackUrl(entry.callbackUrl); + const publicKeyX = + entry.publicKeyX === undefined + ? 
undefined + : isNonEmptyString(entry.publicKeyX) + ? entry.publicKeyX.trim() + : undefined; if ( !isNonEmptyString(entry.initiatorAgentDid) || !initiatorProfile || !isNonEmptyString(entry.issuerProxyUrl) || - !isNonEmptyString(entry.publicKeyX) || + (entry.publicKeyX !== undefined && publicKeyX === undefined) || typeof entry.expiresAtMs !== "number" || !Number.isInteger(entry.expiresAtMs) || (entry.allowResponderAgentDid !== undefined && @@ -835,7 +845,7 @@ export class ProxyTrustState { initiatorAgentDid: entry.initiatorAgentDid, initiatorProfile, issuerProxyUrl: parsedTicket.iss, - publicKeyX: entry.publicKeyX.trim(), + publicKeyX, allowResponderAgentDid, callbackUrl, }; From 490e51dd3e9581bf860a4c28bd97c39e451e2d57 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 14:46:26 +0530 Subject: [PATCH 117/190] Automate CLI release workflow --- .github/AGENTS.md | 5 +- .github/workflows/publish-cli.yml | 88 ++++++++++++++++++++++++++----- 2 files changed, 79 insertions(+), 14 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index f8bf3df..b91b0ae 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -37,7 +37,10 @@ - Run Wrangler through workspace tooling (`pnpm exec wrangler`) in CI so commands work without a global Wrangler install on GitHub runners. ## Release Rules (CLI) -- `publish-cli.yml` is manual (`workflow_dispatch`) and must accept explicit `version` + `dist_tag` inputs. +- `publish-cli.yml` is manual (`workflow_dispatch`) and must accept `release_type` (`patch`/`minor`/`major`) + `dist_tag` inputs. +- Compute the next CLI version in CI from the currently published npm `clawdentity` version (fallback `0.0.0` if first publish), then bump `apps/cli/package.json` in the workflow. +- Fail publish early if the computed target version already exists on npm. +- Serialize CLI publishes with a single global workflow concurrency group to avoid parallel release races across branches. 
- Run CLI quality gates before publish: `pnpm -F clawdentity lint`, `typecheck`, `test`, `build`. - Publish only package `apps/cli` as npm package `clawdentity`. - Keep published runtime manifest free of `workspace:*` runtime dependencies. diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index 4897929..7c88968 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -2,10 +2,15 @@ name: Publish CLI on: workflow_dispatch: inputs: - version: - description: "Release version for clawdentity (semver)" + release_type: + description: "Semantic version bump type" required: true - type: string + default: "patch" + type: choice + options: + - patch + - minor + - major dist_tag: description: "npm dist-tag" required: true @@ -13,7 +18,7 @@ on: type: string concurrency: - group: publish-cli-${{ github.ref }} + group: publish-cli-release cancel-in-progress: false permissions: @@ -49,15 +54,72 @@ jobs: run: | test -n "${NODE_AUTH_TOKEN}" - - name: Validate release version format + - name: Resolve current and next CLI version + id: version + run: | + set +e + NPM_VIEW_OUTPUT="$(npm view clawdentity version --registry https://registry.npmjs.org 2>&1)" + NPM_VIEW_STATUS=$? + set -e + if [ "${NPM_VIEW_STATUS}" -eq 0 ]; then + CURRENT_VERSION="$(printf "%s" "${NPM_VIEW_OUTPUT}" | tr -d '\r' | tail -n 1)" + elif printf "%s" "${NPM_VIEW_OUTPUT}" | grep -q "E404"; then + CURRENT_VERSION="0.0.0" + echo "No published clawdentity package found; starting from ${CURRENT_VERSION}" + else + echo "Unable to resolve published clawdentity version from npm:" + echo "${NPM_VIEW_OUTPUT}" + exit 1 + fi + RELEASE_TYPE="${{ inputs.release_type }}" + NEXT_VERSION="$(CURRENT_VERSION="${CURRENT_VERSION}" RELEASE_TYPE="${RELEASE_TYPE}" node <<'NODE' + const currentRaw = process.env.CURRENT_VERSION ?? ""; + const releaseType = process.env.RELEASE_TYPE ?? 
""; + const normalized = currentRaw.trim(); + const core = normalized.split("-")[0]; + const match = core.match(/^(\d+)\.(\d+)\.(\d+)$/); + if (!match) { + throw new Error(`Unable to parse published semver: ${normalized}`); + } + let major = Number(match[1]); + let minor = Number(match[2]); + let patch = Number(match[3]); + + if (releaseType === "major") { + major += 1; + minor = 0; + patch = 0; + } else if (releaseType === "minor") { + minor += 1; + patch = 0; + } else if (releaseType === "patch") { + patch += 1; + } else { + throw new Error(`Unsupported release type: ${releaseType}`); + } + process.stdout.write(`${major}.${minor}.${patch}`); + NODE + )" + echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" + echo "next_version=${NEXT_VERSION}" >> "$GITHUB_OUTPUT" + echo "Resolved release: ${CURRENT_VERSION} -> ${NEXT_VERSION} (${RELEASE_TYPE})" + + - name: Assert target version is unpublished run: | - python3 - <<'PY' - import os, re, sys - version = "${{ inputs.version }}" - if not re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-[0-9A-Za-z.-]+)?$", version): - raise SystemExit(f"invalid semver version: {version}") - print("version accepted:", version) - PY + set +e + NPM_VIEW_OUTPUT="$(npm view "clawdentity@${{ steps.version.outputs.next_version }}" version --registry https://registry.npmjs.org 2>&1)" + NPM_VIEW_STATUS=$? 
+ set -e + if [ "${NPM_VIEW_STATUS}" -eq 0 ]; then + echo "Version already published: ${{ steps.version.outputs.next_version }}" + exit 1 + elif printf "%s" "${NPM_VIEW_OUTPUT}" | grep -q "E404"; then + echo "Target version is available: ${{ steps.version.outputs.next_version }}" + else + echo "Unable to verify target version availability on npm:" + echo "${NPM_VIEW_OUTPUT}" + exit 1 + fi - name: Install dependencies run: pnpm install --frozen-lockfile @@ -78,7 +140,7 @@ jobs: run: pnpm -F clawdentity verify:skill-bundle - name: Set package version for release - run: npm --prefix apps/cli pkg set version=${{ inputs.version }} + run: npm --prefix apps/cli pkg set version=${{ steps.version.outputs.next_version }} - name: Validate publish manifest run: | From 9ec03b6cb2718754d0151e8d6650448006195655 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 14:49:44 +0530 Subject: [PATCH 118/190] Fix pnpm install ordering --- .github/workflows/publish-cli.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index 7c88968..7018b5e 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -38,6 +38,11 @@ jobs: with: fetch-depth: 0 + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.23.0 + - name: Set up Node.js uses: actions/setup-node@v4 with: @@ -45,11 +50,6 @@ jobs: cache: pnpm registry-url: https://registry.npmjs.org - - name: Install pnpm - uses: pnpm/action-setup@v4 - with: - version: 10.23.0 - - name: Validate required secrets run: | test -n "${NODE_AUTH_TOKEN}" From 73d3cecf1c84e16d9f398549f97394c7ce9100cf Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 14:51:52 +0530 Subject: [PATCH 119/190] Build workspace libs before CLI run --- .github/AGENTS.md | 1 + .github/workflows/publish-cli.yml | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 
b91b0ae..74920f4 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -41,6 +41,7 @@ - Compute the next CLI version in CI from the currently published npm `clawdentity` version (fallback `0.0.0` if first publish), then bump `apps/cli/package.json` in the workflow. - Fail publish early if the computed target version already exists on npm. - Serialize CLI publishes with a single global workflow concurrency group to avoid parallel release races across branches. +- Build workspace libraries consumed by CLI tests (`@clawdentity/protocol`, `@clawdentity/sdk`, `@clawdentity/connector`) before running `pnpm -F clawdentity test` on clean runners. - Run CLI quality gates before publish: `pnpm -F clawdentity lint`, `typecheck`, `test`, `build`. - Publish only package `apps/cli` as npm package `clawdentity`. - Keep published runtime manifest free of `workspace:*` runtime dependencies. diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index 7018b5e..d26c768 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -124,6 +124,12 @@ jobs: - name: Install dependencies run: pnpm install --frozen-lockfile + - name: Build workspace dependencies required by CLI tests + run: | + pnpm -F @clawdentity/protocol build + pnpm -F @clawdentity/sdk build + pnpm -F @clawdentity/connector build + - name: Lint CLI package run: pnpm -F clawdentity lint From 309a8f9d3e3f99c2d86e609a809758f405f61a4c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 14:57:28 +0530 Subject: [PATCH 120/190] fix(ci): run cli pack/publish in package directory --- .github/AGENTS.md | 1 + .github/workflows/publish-cli.yml | 19 ++++++++++++++++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 74920f4..fab8b93 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -43,6 +43,7 @@ - Serialize CLI publishes with a single global workflow concurrency group to avoid parallel release 
races across branches. - Build workspace libraries consumed by CLI tests (`@clawdentity/protocol`, `@clawdentity/sdk`, `@clawdentity/connector`) before running `pnpm -F clawdentity test` on clean runners. - Run CLI quality gates before publish: `pnpm -F clawdentity lint`, `typecheck`, `test`, `build`. +- Run npm release commands (`pkg set`, `pack`, `publish`) with `working-directory: apps/cli`; avoid `npm --prefix apps/cli ...` for pack/publish because npm may target the workspace root manifest on monorepos missing a root `version`. - Publish only package `apps/cli` as npm package `clawdentity`. - Keep published runtime manifest free of `workspace:*` runtime dependencies. - Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index d26c768..6717303 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -146,9 +146,12 @@ jobs: run: pnpm -F clawdentity verify:skill-bundle - name: Set package version for release - run: npm --prefix apps/cli pkg set version=${{ steps.version.outputs.next_version }} + working-directory: apps/cli + run: npm pkg set version=${{ steps.version.outputs.next_version }} - name: Validate publish manifest + env: + EXPECTED_VERSION: ${{ steps.version.outputs.next_version }} run: | node <<'NODE' const fs = require("node:fs"); @@ -156,6 +159,14 @@ jobs: if (pkg.name !== "clawdentity") { throw new Error(`Unexpected package name: ${pkg.name}`); } + if (typeof pkg.version !== "string" || pkg.version.length === 0) { + throw new Error("Package version is missing; publish would fail"); + } + if (pkg.version !== process.env.EXPECTED_VERSION) { + throw new Error( + `Package version mismatch: expected ${process.env.EXPECTED_VERSION}, got ${pkg.version}`, + ); + } if (pkg.private === true) { throw new Error("Package is private; publish would fail"); } @@ -178,12 +189,14 @@ jobs: NODE - name: Dry-run package contents + 
working-directory: apps/cli run: | - PACK_OUTPUT="$(npm --prefix apps/cli pack --dry-run)" + PACK_OUTPUT="$(npm pack --dry-run)" printf "%s\n" "$PACK_OUTPUT" printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/skill/SKILL.md" printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md" printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/dist/relay-to-peer.mjs" - name: Publish package - run: npm --prefix apps/cli publish --access public --provenance --tag ${{ inputs.dist_tag }} + working-directory: apps/cli + run: npm publish --access public --provenance --tag ${{ inputs.dist_tag }} From be70c02a3eb787fbc5b5bfbabc4d92f149765d92 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 15:15:21 +0530 Subject: [PATCH 121/190] fix(ci): make cli pack validation deterministic --- .github/AGENTS.md | 2 ++ .github/workflows/publish-cli.yml | 25 ++++++++++++++++++++----- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index fab8b93..99144b4 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -44,6 +44,8 @@ - Build workspace libraries consumed by CLI tests (`@clawdentity/protocol`, `@clawdentity/sdk`, `@clawdentity/connector`) before running `pnpm -F clawdentity test` on clean runners. - Run CLI quality gates before publish: `pnpm -F clawdentity lint`, `typecheck`, `test`, `build`. - Run npm release commands (`pkg set`, `pack`, `publish`) with `working-directory: apps/cli`; avoid `npm --prefix apps/cli ...` for pack/publish because npm may target the workspace root manifest on monorepos missing a root `version`. +- Validate packaged artifact contents using `npm pack --dry-run --json` file metadata (not grepping console notices), because npm file-list notices are not guaranteed on stdout. 
+- Keep `npm pack --dry-run --json` deterministic by forcing `NPM_CONFIG_COLOR=false`, `NPM_CONFIG_LOGLEVEL=silent`, and `NPM_CONFIG_PROGRESS=false`, then parsing the `files` list instead of relying on noisy stderr/stdout lines that vary per npm version. - Publish only package `apps/cli` as npm package `clawdentity`. - Keep published runtime manifest free of `workspace:*` runtime dependencies. - Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index 6717303..21bbbc3 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -190,12 +190,27 @@ jobs: - name: Dry-run package contents working-directory: apps/cli + env: + NPM_CONFIG_COLOR: false + NPM_CONFIG_LOGLEVEL: silent + NPM_CONFIG_PROGRESS: false run: | - PACK_OUTPUT="$(npm pack --dry-run)" - printf "%s\n" "$PACK_OUTPUT" - printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/skill/SKILL.md" - printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md" - printf "%s\n" "$PACK_OUTPUT" | grep -q "skill-bundle/openclaw-skill/dist/relay-to-peer.mjs" + PACK_JSON="$(npm pack --dry-run --json --ignore-scripts)" + printf "%s\n" "$PACK_JSON" + PACK_JSON="$PACK_JSON" node <<'NODE' + const pack = JSON.parse(process.env.PACK_JSON ?? "[]"); + const files = new Set((pack[0]?.files ?? 
[]).map((entry) => entry.path)); + const required = [ + "skill-bundle/openclaw-skill/skill/SKILL.md", + "skill-bundle/openclaw-skill/skill/references/clawdentity-protocol.md", + "skill-bundle/openclaw-skill/dist/relay-to-peer.mjs", + ]; + const missing = required.filter((file) => !files.has(file)); + if (missing.length > 0) { + throw new Error(`Missing required packaged files: ${missing.join(", ")}`); + } + console.log("package contents validated"); + NODE - name: Publish package working-directory: apps/cli From 0aabfda7d6aa3edffd052cd84402567c67d09521 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 15:20:06 +0530 Subject: [PATCH 122/190] refactor(packages): modularize connector and add shared common utils --- apps/cli/vitest.config.ts | 6 + apps/proxy/vitest.config.ts | 6 + apps/registry/vitest.config.ts | 6 + packages/common/AGENTS.md | 10 + packages/common/package.json | 24 + packages/common/src/index.ts | 29 + packages/common/tsconfig.json | 9 + packages/common/tsup.config.ts | 8 + packages/connector/package.json | 1 + packages/connector/src/AGENTS.md | 21 +- packages/connector/src/client.ts | 816 ++------ packages/connector/src/client/AGENTS.md | 11 + packages/connector/src/client/delivery.ts | 168 ++ packages/connector/src/client/heartbeat.ts | 163 ++ packages/connector/src/client/helpers.ts | 138 ++ .../connector/src/client/inbound-delivery.ts | 110 ++ packages/connector/src/client/queue.ts | 150 ++ packages/connector/src/client/retry.ts | 27 + packages/connector/src/client/types.ts | 115 ++ packages/connector/src/inbound-inbox.ts | 607 +----- .../connector/src/inbound-inbox/AGENTS.md | 10 + .../connector/src/inbound-inbox/constants.ts | 9 + .../connector/src/inbound-inbox/schema.ts | 168 ++ .../connector/src/inbound-inbox/storage.ts | 238 +++ packages/connector/src/inbound-inbox/types.ts | 81 + packages/connector/src/runtime.ts | 1688 ++--------------- packages/connector/src/runtime/AGENTS.md | 12 + .../connector/src/runtime/auth-storage.ts | 
123 ++ packages/connector/src/runtime/constants.ts | 14 + packages/connector/src/runtime/errors.ts | 33 + packages/connector/src/runtime/http.ts | 86 + packages/connector/src/runtime/openclaw.ts | 164 ++ .../connector/src/runtime/outbound-queue.ts | 96 + packages/connector/src/runtime/parse.ts | 97 + packages/connector/src/runtime/policy.ts | 132 ++ .../connector/src/runtime/relay-service.ts | 249 +++ packages/connector/src/runtime/server.ts | 214 +++ .../connector/src/runtime/trusted-receipts.ts | 126 ++ packages/connector/src/runtime/types.ts | 109 ++ packages/connector/src/runtime/url.ts | 106 ++ packages/connector/src/runtime/ws.ts | 97 + packages/connector/vitest.config.ts | 8 + pnpm-lock.yaml | 9 + tsconfig.base.json | 1 + 44 files changed, 3522 insertions(+), 2773 deletions(-) create mode 100644 packages/common/AGENTS.md create mode 100644 packages/common/package.json create mode 100644 packages/common/src/index.ts create mode 100644 packages/common/tsconfig.json create mode 100644 packages/common/tsup.config.ts create mode 100644 packages/connector/src/client/AGENTS.md create mode 100644 packages/connector/src/client/delivery.ts create mode 100644 packages/connector/src/client/heartbeat.ts create mode 100644 packages/connector/src/client/helpers.ts create mode 100644 packages/connector/src/client/inbound-delivery.ts create mode 100644 packages/connector/src/client/queue.ts create mode 100644 packages/connector/src/client/retry.ts create mode 100644 packages/connector/src/client/types.ts create mode 100644 packages/connector/src/inbound-inbox/AGENTS.md create mode 100644 packages/connector/src/inbound-inbox/constants.ts create mode 100644 packages/connector/src/inbound-inbox/schema.ts create mode 100644 packages/connector/src/inbound-inbox/storage.ts create mode 100644 packages/connector/src/inbound-inbox/types.ts create mode 100644 packages/connector/src/runtime/AGENTS.md create mode 100644 packages/connector/src/runtime/auth-storage.ts create mode 100644 
packages/connector/src/runtime/constants.ts create mode 100644 packages/connector/src/runtime/errors.ts create mode 100644 packages/connector/src/runtime/http.ts create mode 100644 packages/connector/src/runtime/openclaw.ts create mode 100644 packages/connector/src/runtime/outbound-queue.ts create mode 100644 packages/connector/src/runtime/parse.ts create mode 100644 packages/connector/src/runtime/policy.ts create mode 100644 packages/connector/src/runtime/relay-service.ts create mode 100644 packages/connector/src/runtime/server.ts create mode 100644 packages/connector/src/runtime/trusted-receipts.ts create mode 100644 packages/connector/src/runtime/types.ts create mode 100644 packages/connector/src/runtime/url.ts create mode 100644 packages/connector/src/runtime/ws.ts diff --git a/apps/cli/vitest.config.ts b/apps/cli/vitest.config.ts index a42bac4..d9dbd74 100644 --- a/apps/cli/vitest.config.ts +++ b/apps/cli/vitest.config.ts @@ -4,6 +4,12 @@ import { defineConfig } from "vitest/config"; export default defineConfig({ resolve: { alias: { + "@clawdentity/common": fileURLToPath( + new URL("../../packages/common/src/index.ts", import.meta.url), + ), + "@clawdentity/connector": fileURLToPath( + new URL("../../packages/connector/src/index.ts", import.meta.url), + ), "@clawdentity/protocol": fileURLToPath( new URL("../../packages/protocol/src/index.ts", import.meta.url), ), diff --git a/apps/proxy/vitest.config.ts b/apps/proxy/vitest.config.ts index e371378..500a5a2 100644 --- a/apps/proxy/vitest.config.ts +++ b/apps/proxy/vitest.config.ts @@ -4,6 +4,12 @@ import { defineConfig } from "vitest/config"; export default defineConfig({ resolve: { alias: { + "@clawdentity/common": fileURLToPath( + new URL("../../packages/common/src/index.ts", import.meta.url), + ), + "@clawdentity/connector": fileURLToPath( + new URL("../../packages/connector/src/index.ts", import.meta.url), + ), "@clawdentity/protocol": fileURLToPath( new URL("../../packages/protocol/src/index.ts", 
import.meta.url), ), diff --git a/apps/registry/vitest.config.ts b/apps/registry/vitest.config.ts index e371378..500a5a2 100644 --- a/apps/registry/vitest.config.ts +++ b/apps/registry/vitest.config.ts @@ -4,6 +4,12 @@ import { defineConfig } from "vitest/config"; export default defineConfig({ resolve: { alias: { + "@clawdentity/common": fileURLToPath( + new URL("../../packages/common/src/index.ts", import.meta.url), + ), + "@clawdentity/connector": fileURLToPath( + new URL("../../packages/connector/src/index.ts", import.meta.url), + ), "@clawdentity/protocol": fileURLToPath( new URL("../../packages/protocol/src/index.ts", import.meta.url), ), diff --git a/packages/common/AGENTS.md b/packages/common/AGENTS.md new file mode 100644 index 0000000..fc90519 --- /dev/null +++ b/packages/common/AGENTS.md @@ -0,0 +1,10 @@ +# AGENTS.md (packages/common) + +## Purpose +- Provide small, dependency-light shared helpers used across packages. + +## Design Rules +- Keep helpers pure and runtime-agnostic. +- Keep API surface minimal and stable. +- Avoid domain-specific logic that belongs in feature packages. +- Prefer composable utility functions over class-heavy abstractions. 
diff --git a/packages/common/package.json b/packages/common/package.json new file mode 100644 index 0000000..7427e08 --- /dev/null +++ b/packages/common/package.json @@ -0,0 +1,24 @@ +{ + "name": "@clawdentity/common", + "version": "0.0.0", + "private": true, + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "import": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "scripts": { + "build": "tsup", + "format": "biome format .", + "lint": "biome lint .", + "test": "vitest run --passWithNoTests", + "typecheck": "tsc --noEmit" + }, + "devDependencies": { + "@types/node": "^22.17.2" + } +} diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts new file mode 100644 index 0000000..8d15905 --- /dev/null +++ b/packages/common/src/index.ts @@ -0,0 +1,29 @@ +export function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function sanitizeErrorReason( + error: unknown, + input?: { + fallback?: string; + maxLength?: number; + }, +): string { + const fallback = input?.fallback ?? "Unknown error"; + const maxLength = Math.max(1, input?.maxLength ?? 240); + + if (!(error instanceof Error)) { + return fallback; + } + + const message = error.message.trim(); + return message.slice(0, maxLength) || fallback; +} + +export function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { + const normalizedBase = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`; + const normalizedHookPath = hookPath.startsWith("/") + ? 
hookPath.slice(1) + : hookPath; + return new URL(normalizedHookPath, normalizedBase).toString(); +} diff --git a/packages/common/tsconfig.json b/packages/common/tsconfig.json new file mode 100644 index 0000000..f1f0e48 --- /dev/null +++ b/packages/common/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "lib": ["ES2022"], + "types": ["node"], + "outDir": "./dist" + }, + "include": ["src"] +} diff --git a/packages/common/tsup.config.ts b/packages/common/tsup.config.ts new file mode 100644 index 0000000..7a3d66a --- /dev/null +++ b/packages/common/tsup.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm"], + dts: true, + clean: true, +}); diff --git a/packages/connector/package.json b/packages/connector/package.json index 5870edd..47dee46 100644 --- a/packages/connector/package.json +++ b/packages/connector/package.json @@ -19,6 +19,7 @@ "typecheck": "tsc --noEmit" }, "dependencies": { + "@clawdentity/common": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", "ws": "^8.18.3", diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 108eced..f6089bc 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -2,9 +2,24 @@ ## Source Layout - Keep frame schema definitions in `frames.ts` and validate every inbound/outbound frame through parser helpers. -- Keep websocket lifecycle + ack behavior in `client.ts`. -- Keep local runtime orchestration (`/v1/outbound`, `/v1/status`, auth refresh, replay loop) in `runtime.ts`. -- Keep durable inbound storage logic isolated in `inbound-inbox.ts`. +- Keep `client.ts` as the stable public surface (`ConnectorClient` + exported client types) and route internal concerns through `client/` modules: + - `client/types.ts` for externally consumed client types. 
+ - `client/helpers.ts` for shared pure helpers (event parsing, sanitization, normalization). + - `client/retry.ts` for reusable backoff math. + - `client/heartbeat.ts` for heartbeat scheduling, ack tracking, and RTT metrics. + - `client/queue.ts` for outbound queue + persistence orchestration. + - `client/delivery.ts` for local OpenClaw delivery + retry behavior. +- Keep `runtime.ts` as the runtime entrypoint and wire internal concerns through `runtime/` modules: + - `runtime/auth-storage.ts` for registry auth disk sync + atomic persistence. + - `runtime/openclaw.ts` for hook token discovery and abort-aware local hook delivery. + - `runtime/policy.ts` for replay/probe configuration loading and retry-delay calculation. + - `runtime/relay-service.ts` for outbound relay and signed delivery-receipt callbacks. + - `runtime/server.ts` for HTTP route handling (`/v1/status`, dead-letter ops, `/v1/outbound`). + - `runtime/trusted-receipts.ts`, `runtime/url.ts`, `runtime/ws.ts`, and `runtime/parse.ts` for focused helper concerns. +- Keep `inbound-inbox.ts` as the public API surface (`ConnectorInboundInbox`, factory helpers, exported types) and route internals through `inbound-inbox/` modules: + - `inbound-inbox/types.ts` for inbox/dead-letter/index/event type contracts. + - `inbound-inbox/schema.ts` for index parsing/normalization rules. + - `inbound-inbox/storage.ts` for lock/index/events file persistence concerns. ## Inbound Durability Rules - Connector must persist inbound relay payloads before sending `deliver_ack accepted=true`. 
diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index c53a973..6e2a35a 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -1,5 +1,32 @@ import { generateUlid } from "@clawdentity/protocol"; import { createLogger, type Logger, toIso } from "@clawdentity/sdk"; +import { LocalOpenclawDeliveryClient } from "./client/delivery.js"; +import { + ConnectorHeartbeatManager, + type HeartbeatAckTimeoutEvent, +} from "./client/heartbeat.js"; +import { + normalizeConnectionHeaders, + readCloseEvent, + readErrorEventReason, + readMessageEventData, + readUnexpectedResponseStatus, + resolveWebSocketFactory, + sanitizeErrorReason, + toOpenclawHookUrl, + WS_READY_STATE_CONNECTING, +} from "./client/helpers.js"; +import { handleInboundDeliverFrame } from "./client/inbound-delivery.js"; +import { ConnectorOutboundQueueManager } from "./client/queue.js"; +import { computeJitteredBackoffDelayMs } from "./client/retry.js"; +import type { + ConnectorClientHooks, + ConnectorClientMetricsSnapshot, + ConnectorClientOptions, + ConnectorOutboundEnqueueInput, + ConnectorOutboundQueuePersistence, + ConnectorWebSocket, +} from "./client/types.js"; import { CONNECTOR_FRAME_VERSION, DEFAULT_CONNECT_TIMEOUT_MS, @@ -20,7 +47,6 @@ import { } from "./constants.js"; import { type ConnectorFrame, - type DeliverAckFrame, type DeliverFrame, type EnqueueFrame, enqueueFrameSchema, @@ -30,263 +56,14 @@ import { serializeFrame, } from "./frames.js"; -type ConnectorWebSocketEventType = - | "open" - | "message" - | "close" - | "error" - | "unexpected-response"; -type ConnectorWebSocketListener = (event: unknown) => void; -const WS_READY_STATE_CONNECTING = 0; - -export type ConnectorWebSocket = { - readonly readyState: number; - send: (data: string) => void; - close: (code?: number, reason?: string) => void; - addEventListener: ( - type: ConnectorWebSocketEventType, - listener: ConnectorWebSocketListener, - ) => void; -}; - -export type 
ConnectorClientHooks = { - onConnected?: () => void; - onDisconnected?: (event: { - code: number; - reason: string; - wasClean: boolean; - }) => void; - onAuthUpgradeRejected?: (event: { - status: number; - immediateRetry: boolean; - }) => void | Promise; - onFrame?: (frame: ConnectorFrame) => void; - onDeliverSucceeded?: (frame: DeliverFrame) => void; - onDeliverFailed?: (frame: DeliverFrame, error: unknown) => void; -}; - -export type ConnectorOutboundQueuePersistence = { - load: () => Promise; - save: (frames: EnqueueFrame[]) => Promise; -}; - -export type ConnectorClientMetricsSnapshot = { - connection: { - connectAttempts: number; - connected: boolean; - reconnectCount: number; - uptimeMs: number; - lastConnectedAt?: string; - }; - heartbeat: { - avgRttMs?: number; - maxRttMs?: number; - lastRttMs?: number; - pendingAckCount: number; - sampleCount: number; - }; - inboundDelivery: { - avgAckLatencyMs?: number; - maxAckLatencyMs?: number; - lastAckLatencyMs?: number; - sampleCount: number; - }; - outboundQueue: { - currentDepth: number; - loadedFromPersistence: boolean; - maxDepth: number; - persistenceEnabled: boolean; - }; -}; - -export type ConnectorClientOptions = { - connectorUrl: string; - connectionHeaders?: Record; - connectionHeadersProvider?: - | (() => Record | Promise>) - | undefined; - openclawBaseUrl: string; - openclawHookToken?: string; - openclawHookPath?: string; - connectTimeoutMs?: number; - heartbeatIntervalMs?: number; - heartbeatAckTimeoutMs?: number; - reconnectMinDelayMs?: number; - reconnectMaxDelayMs?: number; - reconnectBackoffFactor?: number; - reconnectJitterRatio?: number; - openclawDeliverTimeoutMs?: number; - openclawDeliverMaxAttempts?: number; - openclawDeliverRetryInitialDelayMs?: number; - openclawDeliverRetryMaxDelayMs?: number; - openclawDeliverRetryBackoffFactor?: number; - openclawDeliverRetryBudgetMs?: number; - webSocketFactory?: ( - url: string, - headers: Record, - ) => ConnectorWebSocket; - fetchImpl?: typeof fetch; 
- logger?: Logger; - hooks?: ConnectorClientHooks; - outboundQueuePersistence?: ConnectorOutboundQueuePersistence; - inboundDeliverHandler?: - | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) - | undefined; - now?: () => number; - random?: () => number; - ulidFactory?: (time?: number) => string; -}; - -export type ConnectorOutboundEnqueueInput = { - toAgentDid: string; - payload: unknown; - conversationId?: string; - replyTo?: string; -}; - -function isAbortError(error: unknown): boolean { - return error instanceof Error && error.name === "AbortError"; -} - -function resolveWebSocketFactory( - webSocketFactory: ConnectorClientOptions["webSocketFactory"], -): (url: string, headers: Record) => ConnectorWebSocket { - if (webSocketFactory !== undefined) { - return webSocketFactory; - } - - if (typeof WebSocket !== "function") { - throw new Error("WebSocket implementation is required"); - } - - return (_url: string, headers: Record) => { - if (Object.keys(headers).length > 0) { - throw new Error( - "Connection headers require a custom webSocketFactory implementation", - ); - } - - return new WebSocket(_url) as ConnectorWebSocket; - }; -} - -function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { - const normalizedBase = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`; - const normalizedHookPath = hookPath.startsWith("/") - ? 
hookPath.slice(1) - : hookPath; - return new URL(normalizedHookPath, normalizedBase).toString(); -} - -function sanitizeErrorReason(error: unknown): string { - if (!(error instanceof Error)) { - return "Unknown delivery error"; - } - - return error.message.trim().slice(0, 200) || "Unknown delivery error"; -} - -class LocalOpenclawDeliveryError extends Error { - readonly retryable: boolean; - - constructor(input: { message: string; retryable: boolean }) { - super(input.message); - this.name = "LocalOpenclawDeliveryError"; - this.retryable = input.retryable; - } -} - -function isRetryableOpenclawDeliveryError(error: unknown): boolean { - return ( - error instanceof LocalOpenclawDeliveryError && error.retryable === true - ); -} - -function isObject(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - -function readMessageEventData(event: unknown): unknown { - if (!isObject(event)) { - return undefined; - } - - return event.data; -} - -function readCloseEvent(event: unknown): { - code: number; - reason: string; - wasClean: boolean; -} { - if (!isObject(event)) { - return { - code: 1006, - reason: "", - wasClean: false, - }; - } - - return { - code: typeof event.code === "number" ? event.code : 1006, - reason: typeof event.reason === "string" ? event.reason : "", - wasClean: typeof event.wasClean === "boolean" ? 
event.wasClean : false, - }; -} - -function readUnexpectedResponseStatus(event: unknown): number | undefined { - if (!isObject(event)) { - return undefined; - } - - if (typeof event.status === "number") { - return event.status; - } - - if (typeof event.statusCode === "number") { - return event.statusCode; - } - - const response = event.response; - if (isObject(response)) { - if (typeof response.status === "number") { - return response.status; - } - if (typeof response.statusCode === "number") { - return response.statusCode; - } - } - - return undefined; -} - -function readErrorEventReason(event: unknown): string { - if (!isObject(event) || !("error" in event)) { - return "WebSocket error"; - } - - return sanitizeErrorReason(event.error); -} - -function normalizeConnectionHeaders( - headers: Record | undefined, -): Record { - if (headers === undefined) { - return {}; - } - - const normalized: Record = {}; - for (const [rawKey, rawValue] of Object.entries(headers)) { - const key = rawKey.trim(); - const value = rawValue.trim(); - if (key.length === 0 || value.length === 0) { - continue; - } - normalized[key] = value; - } - - return normalized; -} +export type { + ConnectorClientHooks, + ConnectorClientMetricsSnapshot, + ConnectorClientOptions, + ConnectorOutboundEnqueueInput, + ConnectorOutboundQueuePersistence, + ConnectorWebSocket, +} from "./client/types.js"; export class ConnectorClient { private readonly connectorUrl: string; @@ -294,26 +71,15 @@ export class ConnectorClient { private readonly connectionHeadersProvider: | (() => Record | Promise>) | undefined; - private readonly openclawHookUrl: string; - private readonly openclawHookToken?: string; private readonly connectTimeoutMs: number; - private readonly heartbeatIntervalMs: number; - private readonly heartbeatAckTimeoutMs: number; private readonly reconnectMinDelayMs: number; private readonly reconnectMaxDelayMs: number; private readonly reconnectBackoffFactor: number; private readonly 
reconnectJitterRatio: number; - private readonly openclawDeliverTimeoutMs: number; - private readonly openclawDeliverMaxAttempts: number; - private readonly openclawDeliverRetryInitialDelayMs: number; - private readonly openclawDeliverRetryMaxDelayMs: number; - private readonly openclawDeliverRetryBackoffFactor: number; - private readonly openclawDeliverRetryBudgetMs: number; private readonly webSocketFactory: ( url: string, headers: Record, ) => ConnectorWebSocket; - private readonly fetchImpl: typeof fetch; private readonly logger: Logger; private readonly hooks: ConnectorClientHooks; private readonly outboundQueuePersistence: @@ -326,33 +92,27 @@ export class ConnectorClient { private readonly random: () => number; private readonly ulidFactory: (time?: number) => string; + private readonly heartbeatManager: ConnectorHeartbeatManager; + private readonly outboundQueue: ConnectorOutboundQueueManager; + private readonly localOpenclawDelivery: LocalOpenclawDeliveryClient; + private socket: ConnectorWebSocket | undefined; private reconnectTimeout: ReturnType | undefined; private connectTimeout: ReturnType | undefined; - private heartbeatInterval: ReturnType | undefined; - private heartbeatAckTimeout: ReturnType | undefined; - private readonly pendingHeartbeatAcks = new Map(); private reconnectAttempt = 0; private reconnectCount = 0; private connectAttempts = 0; private connectedSinceMs: number | undefined; private accumulatedConnectedMs = 0; private lastConnectedAtIso: string | undefined; - private heartbeatRttSampleCount = 0; - private heartbeatRttTotalMs = 0; - private heartbeatRttMaxMs = 0; - private heartbeatRttLastMs: number | undefined; + private inboundAckLatencySampleCount = 0; private inboundAckLatencyTotalMs = 0; private inboundAckLatencyMaxMs = 0; private inboundAckLatencyLastMs: number | undefined; - private maxObservedOutboundQueueDepth = 0; - private outboundQueueLoaded = false; - private outboundQueueLoadPromise: Promise | undefined; - private 
outboundQueueSaveChain: Promise = Promise.resolve(); + private authUpgradeImmediateRetryUsed = false; private started = false; - private readonly outboundQueue: EnqueueFrame[] = []; constructor(options: ConnectorClientOptions) { this.connectorUrl = options.connectorUrl; @@ -360,19 +120,20 @@ export class ConnectorClient { options.connectionHeaders, ); this.connectionHeadersProvider = options.connectionHeadersProvider; - this.openclawHookToken = options.openclawHookToken; this.connectTimeoutMs = Math.max( 0, Math.floor(options.connectTimeoutMs ?? DEFAULT_CONNECT_TIMEOUT_MS), ); - this.heartbeatIntervalMs = + + const heartbeatIntervalMs = options.heartbeatIntervalMs ?? DEFAULT_HEARTBEAT_INTERVAL_MS; - this.heartbeatAckTimeoutMs = Math.max( + const heartbeatAckTimeoutMs = Math.max( 0, Math.floor( options.heartbeatAckTimeoutMs ?? DEFAULT_HEARTBEAT_ACK_TIMEOUT_MS, ), ); + this.reconnectMinDelayMs = options.reconnectMinDelayMs ?? DEFAULT_RECONNECT_MIN_DELAY_MS; this.reconnectMaxDelayMs = @@ -381,43 +142,44 @@ export class ConnectorClient { options.reconnectBackoffFactor ?? DEFAULT_RECONNECT_BACKOFF_FACTOR; this.reconnectJitterRatio = options.reconnectJitterRatio ?? DEFAULT_RECONNECT_JITTER_RATIO; - this.openclawDeliverTimeoutMs = + + const openclawDeliverTimeoutMs = options.openclawDeliverTimeoutMs ?? DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS; - this.openclawDeliverMaxAttempts = Math.max( + const openclawDeliverMaxAttempts = Math.max( 1, Math.floor( options.openclawDeliverMaxAttempts ?? DEFAULT_OPENCLAW_DELIVER_MAX_ATTEMPTS, ), ); - this.openclawDeliverRetryInitialDelayMs = Math.max( + const openclawDeliverRetryInitialDelayMs = Math.max( 0, Math.floor( options.openclawDeliverRetryInitialDelayMs ?? 
DEFAULT_OPENCLAW_DELIVER_RETRY_INITIAL_DELAY_MS, ), ); - this.openclawDeliverRetryMaxDelayMs = Math.max( - this.openclawDeliverRetryInitialDelayMs, + const openclawDeliverRetryMaxDelayMs = Math.max( + openclawDeliverRetryInitialDelayMs, Math.floor( options.openclawDeliverRetryMaxDelayMs ?? DEFAULT_OPENCLAW_DELIVER_RETRY_MAX_DELAY_MS, ), ); - this.openclawDeliverRetryBackoffFactor = Math.max( + const openclawDeliverRetryBackoffFactor = Math.max( 1, options.openclawDeliverRetryBackoffFactor ?? DEFAULT_OPENCLAW_DELIVER_RETRY_BACKOFF_FACTOR, ); - this.openclawDeliverRetryBudgetMs = Math.max( - this.openclawDeliverTimeoutMs, + const openclawDeliverRetryBudgetMs = Math.max( + openclawDeliverTimeoutMs, Math.floor( options.openclawDeliverRetryBudgetMs ?? DEFAULT_OPENCLAW_DELIVER_RETRY_BUDGET_MS, ), ); + this.webSocketFactory = resolveWebSocketFactory(options.webSocketFactory); - this.fetchImpl = options.fetchImpl ?? fetch; this.logger = options.logger ?? createLogger({ service: "connector", module: "client" }); @@ -428,10 +190,38 @@ export class ConnectorClient { this.random = options.random ?? Math.random; this.ulidFactory = options.ulidFactory ?? generateUlid; - this.openclawHookUrl = toOpenclawHookUrl( + this.heartbeatManager = new ConnectorHeartbeatManager({ + heartbeatIntervalMs, + heartbeatAckTimeoutMs, + now: this.now, + onAckTimeout: (event) => { + this.handleHeartbeatAckTimeout(event); + }, + }); + + this.outboundQueue = new ConnectorOutboundQueueManager({ + persistence: this.outboundQueuePersistence, + logger: this.logger, + }); + + const openclawHookUrl = toOpenclawHookUrl( options.openclawBaseUrl, options.openclawHookPath ?? DEFAULT_OPENCLAW_HOOK_PATH, ); + + this.localOpenclawDelivery = new LocalOpenclawDeliveryClient({ + fetchImpl: options.fetchImpl ?? 
fetch, + openclawHookUrl, + openclawHookToken: options.openclawHookToken, + openclawDeliverTimeoutMs, + openclawDeliverMaxAttempts, + openclawDeliverRetryInitialDelayMs, + openclawDeliverRetryMaxDelayMs, + openclawDeliverRetryBackoffFactor, + openclawDeliverRetryBudgetMs, + now: this.now, + logger: this.logger, + }); } connect(): void { @@ -463,7 +253,7 @@ export class ConnectorClient { } getQueuedOutboundCount(): number { - return this.outboundQueue.length; + return this.outboundQueue.getDepth(); } getMetricsSnapshot(): ConnectorClientMetricsSnapshot { @@ -480,19 +270,7 @@ export class ConnectorClient { uptimeMs: Math.max(0, uptimeMs), lastConnectedAt: this.lastConnectedAtIso, }, - heartbeat: { - pendingAckCount: this.pendingHeartbeatAcks.size, - sampleCount: this.heartbeatRttSampleCount, - lastRttMs: this.heartbeatRttLastMs, - maxRttMs: - this.heartbeatRttSampleCount > 0 ? this.heartbeatRttMaxMs : undefined, - avgRttMs: - this.heartbeatRttSampleCount > 0 - ? Math.floor( - this.heartbeatRttTotalMs / this.heartbeatRttSampleCount, - ) - : undefined, - }, + heartbeat: this.heartbeatManager.getMetricsSnapshot(), inboundDelivery: { sampleCount: this.inboundAckLatencySampleCount, lastAckLatencyMs: this.inboundAckLatencyLastMs, @@ -508,12 +286,7 @@ export class ConnectorClient { ) : undefined, }, - outboundQueue: { - currentDepth: this.outboundQueue.length, - maxDepth: this.maxObservedOutboundQueueDepth, - loadedFromPersistence: this.outboundQueueLoaded, - persistenceEnabled: this.outboundQueuePersistence !== undefined, - }, + outboundQueue: this.outboundQueue.getMetricsSnapshot(), }; } @@ -529,9 +302,7 @@ export class ConnectorClient { replyTo: input.replyTo, }); - this.outboundQueue.push(frame); - this.recordOutboundQueueDepth(); - this.persistOutboundQueue(); + this.outboundQueue.enqueue(frame); this.flushOutboundQueue(); return frame; } @@ -539,6 +310,7 @@ export class ConnectorClient { private async connectSocket(): Promise { this.clearReconnectTimeout(); 
this.connectAttempts += 1; + if (this.outboundQueuePersistence !== undefined) { await this.ensureOutboundQueueLoaded(); } @@ -581,7 +353,6 @@ export class ConnectorClient { } this.clearConnectTimeout(); - this.clearHeartbeatTracking(); this.reconnectAttempt = 0; this.authUpgradeImmediateRetryUsed = false; this.connectedSinceMs = this.now(); @@ -685,14 +456,14 @@ export class ConnectorClient { if (options?.delayMs !== undefined) { delayMs = Math.max(0, Math.floor(options.delayMs)); } else { - const exponentialDelay = - this.reconnectMinDelayMs * - this.reconnectBackoffFactor ** this.reconnectAttempt; - const boundedDelay = Math.min(exponentialDelay, this.reconnectMaxDelayMs); - const jitterRange = boundedDelay * this.reconnectJitterRatio; - const jitterOffset = - jitterRange === 0 ? 0 : (this.random() * 2 - 1) * jitterRange; - delayMs = Math.max(0, Math.floor(boundedDelay + jitterOffset)); + delayMs = computeJitteredBackoffDelayMs({ + minDelayMs: this.reconnectMinDelayMs, + maxDelayMs: this.reconnectMaxDelayMs, + backoffFactor: this.reconnectBackoffFactor, + attempt: this.reconnectAttempt, + jitterRatio: this.reconnectJitterRatio, + random: this.random, + }); } if (options?.incrementAttempt ?? 
true) { @@ -734,6 +505,7 @@ export class ConnectorClient { reason: "WebSocket connect timed out", wasClean: false, }); + if (this.started) { this.scheduleReconnect(); } @@ -749,19 +521,7 @@ export class ConnectorClient { private clearSocketState(): void { this.clearConnectTimeout(); - this.clearHeartbeatTracking(); - } - - private clearHeartbeatTracking(): void { - if (this.heartbeatInterval !== undefined) { - clearInterval(this.heartbeatInterval); - this.heartbeatInterval = undefined; - } - if (this.heartbeatAckTimeout !== undefined) { - clearTimeout(this.heartbeatAckTimeout); - this.heartbeatAckTimeout = undefined; - } - this.pendingHeartbeatAcks.clear(); + this.heartbeatManager.stop(); } private detachSocket(socket: ConnectorWebSocket): boolean { @@ -860,13 +620,7 @@ export class ConnectorClient { } private startHeartbeatInterval(): void { - this.clearHeartbeatTracking(); - - if (this.heartbeatIntervalMs <= 0) { - return; - } - - this.heartbeatInterval = setInterval(() => { + this.heartbeatManager.start(() => { const frame: HeartbeatFrame = { v: CONNECTOR_FRAME_VERSION, type: "heartbeat", @@ -874,89 +628,20 @@ export class ConnectorClient { ts: this.makeTimestamp(), }; - if (this.sendFrame(frame)) { - this.trackHeartbeatAck(frame.id); - } - }, this.heartbeatIntervalMs); - } - - private trackHeartbeatAck(ackId: string): void { - if (this.heartbeatAckTimeoutMs <= 0) { - return; - } - - this.pendingHeartbeatAcks.set(ackId, this.now()); - this.scheduleHeartbeatAckTimeoutCheck(); - } - - private handleHeartbeatAckFrame(frame: HeartbeatAckFrame): void { - const sentAtMs = this.pendingHeartbeatAcks.get(frame.ackId); - if (sentAtMs === undefined) { - return; - } - this.pendingHeartbeatAcks.delete(frame.ackId); - const rttMs = Math.max(0, this.now() - sentAtMs); - this.heartbeatRttSampleCount += 1; - this.heartbeatRttTotalMs += rttMs; - this.heartbeatRttMaxMs = Math.max(this.heartbeatRttMaxMs, rttMs); - this.heartbeatRttLastMs = rttMs; - - 
this.scheduleHeartbeatAckTimeoutCheck(); - } - - private scheduleHeartbeatAckTimeoutCheck(): void { - if (this.heartbeatAckTimeout !== undefined) { - clearTimeout(this.heartbeatAckTimeout); - this.heartbeatAckTimeout = undefined; - } - - if ( - this.pendingHeartbeatAcks.size === 0 || - this.heartbeatAckTimeoutMs <= 0 - ) { - return; - } - - let oldestSentAt = Number.POSITIVE_INFINITY; - for (const sentAt of this.pendingHeartbeatAcks.values()) { - oldestSentAt = Math.min(oldestSentAt, sentAt); - } - - const elapsedMs = this.now() - oldestSentAt; - const delayMs = Math.max(0, this.heartbeatAckTimeoutMs - elapsedMs); - this.heartbeatAckTimeout = setTimeout(() => { - this.heartbeatAckTimeout = undefined; - this.handleHeartbeatAckTimeout(); - }, delayMs); + return this.sendFrame(frame) ? frame.id : undefined; + }); } - private handleHeartbeatAckTimeout(): void { - const pendingCount = this.pendingHeartbeatAcks.size; - if (pendingCount === 0) { - return; - } - - let oldestSentAt = Number.POSITIVE_INFINITY; - for (const sentAt of this.pendingHeartbeatAcks.values()) { - oldestSentAt = Math.min(oldestSentAt, sentAt); - } - - const nowMs = this.now(); - const oldestPendingAgeMs = nowMs - oldestSentAt; - if (oldestPendingAgeMs < this.heartbeatAckTimeoutMs) { - this.scheduleHeartbeatAckTimeoutCheck(); - return; - } - + private handleHeartbeatAckTimeout(event: HeartbeatAckTimeoutEvent): void { const socket = this.socket; if (socket === undefined || !this.detachSocket(socket)) { return; } this.logger.warn("connector.websocket.heartbeat_ack_timeout", { - pendingCount, - oldestPendingAgeMs, - timeoutMs: this.heartbeatAckTimeoutMs, + pendingCount: event.pendingCount, + oldestPendingAgeMs: event.oldestPendingAgeMs, + timeoutMs: event.timeoutMs, }); this.closeSocketQuietly(socket, 1000, "heartbeat ack timeout"); this.hooks.onDisconnected?.({ @@ -971,97 +656,14 @@ export class ConnectorClient { } private flushOutboundQueue(): void { - if (!this.isConnected()) { - return; - } - - while 
(this.outboundQueue.length > 0 && this.isConnected()) { - const nextFrame = this.outboundQueue[0]; - const sent = this.sendFrame(nextFrame); - if (!sent) { - return; - } - this.outboundQueue.shift(); - this.persistOutboundQueue(); - } - } - - private recordOutboundQueueDepth(): void { - this.maxObservedOutboundQueueDepth = Math.max( - this.maxObservedOutboundQueueDepth, - this.outboundQueue.length, - ); - } - - private persistOutboundQueue(): void { - if (this.outboundQueuePersistence === undefined) { - return; - } - - this.outboundQueueSaveChain = this.outboundQueueSaveChain - .then(async () => { - await this.ensureOutboundQueueLoaded(); - await this.outboundQueuePersistence?.save([...this.outboundQueue]); - }) - .catch((error) => { - this.logger.warn("connector.outbound.persistence_save_failed", { - reason: sanitizeErrorReason(error), - }); - }); + this.outboundQueue.flush({ + isConnected: () => this.isConnected(), + sendFrame: (frame) => this.sendFrame(frame), + }); } private async ensureOutboundQueueLoaded(): Promise { - if (this.outboundQueueLoaded) { - return; - } - - if (this.outboundQueuePersistence === undefined) { - this.outboundQueueLoaded = true; - return; - } - - if (this.outboundQueueLoadPromise !== undefined) { - await this.outboundQueueLoadPromise; - return; - } - - this.outboundQueueLoadPromise = (async () => { - try { - const loadedFrames = await this.outboundQueuePersistence?.load(); - if (!loadedFrames || loadedFrames.length === 0) { - return; - } - - const existingIds = new Set(this.outboundQueue.map((item) => item.id)); - const validLoadedFrames: EnqueueFrame[] = []; - for (const candidate of loadedFrames) { - const parsed = enqueueFrameSchema.safeParse(candidate); - if (!parsed.success) { - continue; - } - if (existingIds.has(parsed.data.id)) { - continue; - } - validLoadedFrames.push(parsed.data); - existingIds.add(parsed.data.id); - } - - if (validLoadedFrames.length === 0) { - return; - } - - 
this.outboundQueue.unshift(...validLoadedFrames); - this.recordOutboundQueueDepth(); - } catch (error) { - this.logger.warn("connector.outbound.persistence_load_failed", { - reason: sanitizeErrorReason(error), - }); - } finally { - this.outboundQueueLoaded = true; - } - })(); - - await this.outboundQueueLoadPromise; + await this.outboundQueue.ensureLoaded(); this.flushOutboundQueue(); } @@ -1105,7 +707,7 @@ export class ConnectorClient { } if (frame.type === "heartbeat_ack") { - this.handleHeartbeatAckFrame(frame); + this.heartbeatManager.handleHeartbeatAck(frame); return; } @@ -1128,79 +730,22 @@ export class ConnectorClient { } private async handleDeliverFrame(frame: DeliverFrame): Promise { - const startedAtMs = this.now(); - if (this.inboundDeliverHandler !== undefined) { - try { - const result = await this.inboundDeliverHandler(frame); - const ackFrame: DeliverAckFrame = { - v: CONNECTOR_FRAME_VERSION, - type: "deliver_ack", - id: this.makeFrameId(), - ts: this.makeTimestamp(), - ackId: frame.id, - accepted: result.accepted, - reason: result.reason, - }; - + await handleInboundDeliverFrame({ + frame, + inboundDeliverHandler: this.inboundDeliverHandler, + localOpenclawDelivery: this.localOpenclawDelivery, + isStarted: () => this.started, + hooks: this.hooks, + now: this.now, + makeFrameId: () => this.makeFrameId(), + makeTimestamp: () => this.makeTimestamp(), + sendDeliverAckFrame: (ackFrame) => { this.sendFrame(ackFrame); - if (result.accepted) { - this.hooks.onDeliverSucceeded?.(frame); - } else { - this.hooks.onDeliverFailed?.( - frame, - new Error( - result.reason ?? 
- "Inbound delivery was rejected by runtime handler", - ), - ); - } - this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); - } catch (error) { - const ackFrame: DeliverAckFrame = { - v: CONNECTOR_FRAME_VERSION, - type: "deliver_ack", - id: this.makeFrameId(), - ts: this.makeTimestamp(), - ackId: frame.id, - accepted: false, - reason: sanitizeErrorReason(error), - }; - this.sendFrame(ackFrame); - this.hooks.onDeliverFailed?.(frame, error); - this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); - } - return; - } - - try { - await this.deliverToLocalOpenclawWithRetry(frame); - const ackFrame: DeliverAckFrame = { - v: CONNECTOR_FRAME_VERSION, - type: "deliver_ack", - id: this.makeFrameId(), - ts: this.makeTimestamp(), - ackId: frame.id, - accepted: true, - }; - - this.sendFrame(ackFrame); - this.hooks.onDeliverSucceeded?.(frame); - this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); - } catch (error) { - const ackFrame: DeliverAckFrame = { - v: CONNECTOR_FRAME_VERSION, - type: "deliver_ack", - id: this.makeFrameId(), - ts: this.makeTimestamp(), - ackId: frame.id, - accepted: false, - reason: sanitizeErrorReason(error), - }; - - this.sendFrame(ackFrame); - this.hooks.onDeliverFailed?.(frame, error); - this.recordInboundDeliveryAckLatency(this.now() - startedAtMs); - } + }, + recordAckLatency: (durationMs) => { + this.recordInboundDeliveryAckLatency(durationMs); + }, + }); } private recordInboundDeliveryAckLatency(durationMs: number): void { @@ -1214,113 +759,6 @@ export class ConnectorClient { this.inboundAckLatencyLastMs = latencyMs; } - private async deliverToLocalOpenclaw(frame: DeliverFrame): Promise { - const controller = new AbortController(); - const timeout = setTimeout(() => { - controller.abort(); - }, this.openclawDeliverTimeoutMs); - - const headers: Record = { - "content-type": "application/json", - "x-clawdentity-agent-did": frame.fromAgentDid, - "x-clawdentity-to-agent-did": frame.toAgentDid, - "x-clawdentity-verified": 
"true", - "x-request-id": frame.id, - }; - - if (this.openclawHookToken !== undefined) { - headers["x-openclaw-token"] = this.openclawHookToken; - } - - try { - const response = await this.fetchImpl(this.openclawHookUrl, { - method: "POST", - headers, - body: JSON.stringify(frame.payload), - signal: controller.signal, - }); - - if (!response.ok) { - throw new LocalOpenclawDeliveryError({ - message: `Local OpenClaw hook rejected payload with status ${response.status}`, - retryable: - response.status === 401 || - response.status === 403 || - response.status >= 500 || - response.status === 404 || - response.status === 429, - }); - } - } catch (error) { - if (isAbortError(error)) { - throw new LocalOpenclawDeliveryError({ - message: "Local OpenClaw hook request timed out", - retryable: true, - }); - } - - if (error instanceof LocalOpenclawDeliveryError) { - throw error; - } - - throw new LocalOpenclawDeliveryError({ - message: sanitizeErrorReason(error), - retryable: true, - }); - } finally { - clearTimeout(timeout); - } - } - - private async deliverToLocalOpenclawWithRetry( - frame: DeliverFrame, - ): Promise { - const startedAt = this.now(); - let attempt = 1; - let retryDelayMs = this.openclawDeliverRetryInitialDelayMs; - - while (true) { - try { - await this.deliverToLocalOpenclaw(frame); - return; - } catch (error) { - const retryable = isRetryableOpenclawDeliveryError(error); - const attemptsRemaining = attempt < this.openclawDeliverMaxAttempts; - const elapsedMs = this.now() - startedAt; - const hasBudgetForRetry = - elapsedMs + retryDelayMs + this.openclawDeliverTimeoutMs <= - this.openclawDeliverRetryBudgetMs; - const shouldRetry = - retryable && attemptsRemaining && hasBudgetForRetry && this.started; - - this.logger.warn("connector.openclaw.deliver_failed", { - ackId: frame.id, - attempt, - retryable, - shouldRetry, - reason: sanitizeErrorReason(error), - }); - - if (!shouldRetry) { - throw error; - } - - await this.wait(retryDelayMs); - retryDelayMs = 
Math.min( - this.openclawDeliverRetryMaxDelayMs, - Math.floor(retryDelayMs * this.openclawDeliverRetryBackoffFactor), - ); - attempt += 1; - } - } - } - - private async wait(delayMs: number): Promise { - await new Promise((resolve) => { - setTimeout(resolve, delayMs); - }); - } - private makeFrameId(): string { return this.ulidFactory(this.now()); } diff --git a/packages/connector/src/client/AGENTS.md b/packages/connector/src/client/AGENTS.md new file mode 100644 index 0000000..2d21287 --- /dev/null +++ b/packages/connector/src/client/AGENTS.md @@ -0,0 +1,11 @@ +# AGENTS.md (packages/connector/src/client) + +## Purpose +- Keep `ConnectorClient` internals modular, testable, and deterministic. + +## Rules +- Keep frame/event parsing and sanitization in `helpers.ts` as pure functions. +- Keep reconnect delay math in `retry.ts` and avoid inline backoff duplication. +- Keep heartbeat tracking and metrics centralized in `heartbeat.ts`. +- Keep outbound queue persistence and load/flush semantics centralized in `queue.ts`. +- Keep local OpenClaw delivery/retry behavior in `delivery.ts` and inbound ack orchestration in `inbound-delivery.ts`. 
diff --git a/packages/connector/src/client/delivery.ts b/packages/connector/src/client/delivery.ts new file mode 100644 index 0000000..3af9bfa --- /dev/null +++ b/packages/connector/src/client/delivery.ts @@ -0,0 +1,168 @@ +import type { Logger } from "@clawdentity/sdk"; +import type { DeliverFrame } from "../frames.js"; +import { isAbortError, sanitizeErrorReason, wait } from "./helpers.js"; +import { computeNextBackoffDelayMs } from "./retry.js"; + +class LocalOpenclawDeliveryError extends Error { + readonly retryable: boolean; + + constructor(input: { message: string; retryable: boolean }) { + super(input.message); + this.name = "LocalOpenclawDeliveryError"; + this.retryable = input.retryable; + } +} + +function isRetryableOpenclawDeliveryError(error: unknown): boolean { + return ( + error instanceof LocalOpenclawDeliveryError && error.retryable === true + ); +} + +export class LocalOpenclawDeliveryClient { + private readonly fetchImpl: typeof fetch; + private readonly openclawHookUrl: string; + private readonly openclawHookToken: string | undefined; + private readonly openclawDeliverTimeoutMs: number; + private readonly openclawDeliverMaxAttempts: number; + private readonly openclawDeliverRetryInitialDelayMs: number; + private readonly openclawDeliverRetryMaxDelayMs: number; + private readonly openclawDeliverRetryBackoffFactor: number; + private readonly openclawDeliverRetryBudgetMs: number; + private readonly now: () => number; + private readonly logger: Logger; + + constructor(input: { + fetchImpl: typeof fetch; + openclawHookUrl: string; + openclawHookToken: string | undefined; + openclawDeliverTimeoutMs: number; + openclawDeliverMaxAttempts: number; + openclawDeliverRetryInitialDelayMs: number; + openclawDeliverRetryMaxDelayMs: number; + openclawDeliverRetryBackoffFactor: number; + openclawDeliverRetryBudgetMs: number; + now: () => number; + logger: Logger; + }) { + this.fetchImpl = input.fetchImpl; + this.openclawHookUrl = input.openclawHookUrl; + 
this.openclawHookToken = input.openclawHookToken; + this.openclawDeliverTimeoutMs = input.openclawDeliverTimeoutMs; + this.openclawDeliverMaxAttempts = input.openclawDeliverMaxAttempts; + this.openclawDeliverRetryInitialDelayMs = + input.openclawDeliverRetryInitialDelayMs; + this.openclawDeliverRetryMaxDelayMs = input.openclawDeliverRetryMaxDelayMs; + this.openclawDeliverRetryBackoffFactor = + input.openclawDeliverRetryBackoffFactor; + this.openclawDeliverRetryBudgetMs = input.openclawDeliverRetryBudgetMs; + this.now = input.now; + this.logger = input.logger; + } + + async deliverWithRetry( + frame: DeliverFrame, + shouldContinue: () => boolean, + ): Promise { + const startedAt = this.now(); + let attempt = 1; + let retryDelayMs = this.openclawDeliverRetryInitialDelayMs; + + while (true) { + try { + await this.deliverOnce(frame); + return; + } catch (error) { + const retryable = isRetryableOpenclawDeliveryError(error); + const attemptsRemaining = attempt < this.openclawDeliverMaxAttempts; + const elapsedMs = this.now() - startedAt; + const hasBudgetForRetry = + elapsedMs + retryDelayMs + this.openclawDeliverTimeoutMs <= + this.openclawDeliverRetryBudgetMs; + const shouldRetry = + retryable && + attemptsRemaining && + hasBudgetForRetry && + shouldContinue(); + + this.logger.warn("connector.openclaw.deliver_failed", { + ackId: frame.id, + attempt, + retryable, + shouldRetry, + reason: sanitizeErrorReason(error), + }); + + if (!shouldRetry) { + throw error; + } + + await wait(retryDelayMs); + retryDelayMs = computeNextBackoffDelayMs({ + currentDelayMs: retryDelayMs, + maxDelayMs: this.openclawDeliverRetryMaxDelayMs, + backoffFactor: this.openclawDeliverRetryBackoffFactor, + }); + attempt += 1; + } + } + } + + private async deliverOnce(frame: DeliverFrame): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => { + controller.abort(); + }, this.openclawDeliverTimeoutMs); + + const headers: Record = { + "content-type": 
"application/json", + "x-clawdentity-agent-did": frame.fromAgentDid, + "x-clawdentity-to-agent-did": frame.toAgentDid, + "x-clawdentity-verified": "true", + "x-request-id": frame.id, + }; + + if (this.openclawHookToken !== undefined) { + headers["x-openclaw-token"] = this.openclawHookToken; + } + + try { + const response = await this.fetchImpl(this.openclawHookUrl, { + method: "POST", + headers, + body: JSON.stringify(frame.payload), + signal: controller.signal, + }); + + if (!response.ok) { + throw new LocalOpenclawDeliveryError({ + message: `Local OpenClaw hook rejected payload with status ${response.status}`, + retryable: + response.status === 401 || + response.status === 403 || + response.status >= 500 || + response.status === 404 || + response.status === 429, + }); + } + } catch (error) { + if (isAbortError(error)) { + throw new LocalOpenclawDeliveryError({ + message: "Local OpenClaw hook request timed out", + retryable: true, + }); + } + + if (error instanceof LocalOpenclawDeliveryError) { + throw error; + } + + throw new LocalOpenclawDeliveryError({ + message: sanitizeErrorReason(error), + retryable: true, + }); + } finally { + clearTimeout(timeout); + } + } +} diff --git a/packages/connector/src/client/heartbeat.ts b/packages/connector/src/client/heartbeat.ts new file mode 100644 index 0000000..f7eb9fe --- /dev/null +++ b/packages/connector/src/client/heartbeat.ts @@ -0,0 +1,163 @@ +import type { HeartbeatAckFrame } from "../frames.js"; + +export type HeartbeatAckTimeoutEvent = { + pendingCount: number; + oldestPendingAgeMs: number; + timeoutMs: number; +}; + +export type HeartbeatMetricsSnapshot = { + avgRttMs?: number; + maxRttMs?: number; + lastRttMs?: number; + pendingAckCount: number; + sampleCount: number; +}; + +export class ConnectorHeartbeatManager { + private readonly heartbeatIntervalMs: number; + private readonly heartbeatAckTimeoutMs: number; + private readonly now: () => number; + private readonly onAckTimeout: (event: 
HeartbeatAckTimeoutEvent) => void; + + private heartbeatInterval: ReturnType | undefined; + private heartbeatAckTimeout: ReturnType | undefined; + private readonly pendingHeartbeatAcks = new Map(); + + private heartbeatRttSampleCount = 0; + private heartbeatRttTotalMs = 0; + private heartbeatRttMaxMs = 0; + private heartbeatRttLastMs: number | undefined; + + constructor(input: { + heartbeatIntervalMs: number; + heartbeatAckTimeoutMs: number; + now: () => number; + onAckTimeout: (event: HeartbeatAckTimeoutEvent) => void; + }) { + this.heartbeatIntervalMs = input.heartbeatIntervalMs; + this.heartbeatAckTimeoutMs = input.heartbeatAckTimeoutMs; + this.now = input.now; + this.onAckTimeout = input.onAckTimeout; + } + + start(emitHeartbeat: () => string | undefined): void { + this.stop(); + + if (this.heartbeatIntervalMs <= 0) { + return; + } + + this.heartbeatInterval = setInterval(() => { + const ackId = emitHeartbeat(); + if (ackId !== undefined) { + this.trackHeartbeatAck(ackId); + } + }, this.heartbeatIntervalMs); + } + + stop(): void { + if (this.heartbeatInterval !== undefined) { + clearInterval(this.heartbeatInterval); + this.heartbeatInterval = undefined; + } + + if (this.heartbeatAckTimeout !== undefined) { + clearTimeout(this.heartbeatAckTimeout); + this.heartbeatAckTimeout = undefined; + } + + this.pendingHeartbeatAcks.clear(); + } + + handleHeartbeatAck(frame: HeartbeatAckFrame): void { + const sentAtMs = this.pendingHeartbeatAcks.get(frame.ackId); + if (sentAtMs === undefined) { + return; + } + + this.pendingHeartbeatAcks.delete(frame.ackId); + const rttMs = Math.max(0, this.now() - sentAtMs); + this.heartbeatRttSampleCount += 1; + this.heartbeatRttTotalMs += rttMs; + this.heartbeatRttMaxMs = Math.max(this.heartbeatRttMaxMs, rttMs); + this.heartbeatRttLastMs = rttMs; + + this.scheduleHeartbeatAckTimeoutCheck(); + } + + getMetricsSnapshot(): HeartbeatMetricsSnapshot { + return { + pendingAckCount: this.pendingHeartbeatAcks.size, + sampleCount: 
this.heartbeatRttSampleCount, + lastRttMs: this.heartbeatRttLastMs, + maxRttMs: + this.heartbeatRttSampleCount > 0 ? this.heartbeatRttMaxMs : undefined, + avgRttMs: + this.heartbeatRttSampleCount > 0 + ? Math.floor(this.heartbeatRttTotalMs / this.heartbeatRttSampleCount) + : undefined, + }; + } + + private trackHeartbeatAck(ackId: string): void { + if (this.heartbeatAckTimeoutMs <= 0) { + return; + } + + this.pendingHeartbeatAcks.set(ackId, this.now()); + this.scheduleHeartbeatAckTimeoutCheck(); + } + + private scheduleHeartbeatAckTimeoutCheck(): void { + if (this.heartbeatAckTimeout !== undefined) { + clearTimeout(this.heartbeatAckTimeout); + this.heartbeatAckTimeout = undefined; + } + + if ( + this.pendingHeartbeatAcks.size === 0 || + this.heartbeatAckTimeoutMs <= 0 + ) { + return; + } + + const oldestSentAt = this.getOldestPendingSentAtMs(); + const elapsedMs = this.now() - oldestSentAt; + const delayMs = Math.max(0, this.heartbeatAckTimeoutMs - elapsedMs); + + this.heartbeatAckTimeout = setTimeout(() => { + this.heartbeatAckTimeout = undefined; + this.handleHeartbeatAckTimeout(); + }, delayMs); + } + + private handleHeartbeatAckTimeout(): void { + const pendingCount = this.pendingHeartbeatAcks.size; + if (pendingCount === 0) { + return; + } + + const oldestSentAt = this.getOldestPendingSentAtMs(); + const oldestPendingAgeMs = this.now() - oldestSentAt; + if (oldestPendingAgeMs < this.heartbeatAckTimeoutMs) { + this.scheduleHeartbeatAckTimeoutCheck(); + return; + } + + this.onAckTimeout({ + pendingCount, + oldestPendingAgeMs, + timeoutMs: this.heartbeatAckTimeoutMs, + }); + } + + private getOldestPendingSentAtMs(): number { + let oldestSentAt = Number.POSITIVE_INFINITY; + for (const sentAt of this.pendingHeartbeatAcks.values()) { + oldestSentAt = Math.min(oldestSentAt, sentAt); + } + + return oldestSentAt; + } +} diff --git a/packages/connector/src/client/helpers.ts b/packages/connector/src/client/helpers.ts new file mode 100644 index 0000000..6c62017 --- 
/dev/null +++ b/packages/connector/src/client/helpers.ts @@ -0,0 +1,138 @@ +import { + toOpenclawHookUrl as buildOpenclawHookUrl, + sanitizeErrorReason as sanitizeReason, +} from "@clawdentity/common"; +import type { ConnectorClientOptions, ConnectorWebSocket } from "./types.js"; + +export const WS_READY_STATE_CONNECTING = 0; + +export function isAbortError(error: unknown): boolean { + return error instanceof Error && error.name === "AbortError"; +} + +export function resolveWebSocketFactory( + webSocketFactory: ConnectorClientOptions["webSocketFactory"], +): (url: string, headers: Record) => ConnectorWebSocket { + if (webSocketFactory !== undefined) { + return webSocketFactory; + } + + if (typeof WebSocket !== "function") { + throw new Error("WebSocket implementation is required"); + } + + return (_url: string, headers: Record) => { + if (Object.keys(headers).length > 0) { + throw new Error( + "Connection headers require a custom webSocketFactory implementation", + ); + } + + return new WebSocket(_url) as ConnectorWebSocket; + }; +} + +export function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { + return buildOpenclawHookUrl(baseUrl, hookPath); +} + +export function sanitizeErrorReason(error: unknown): string { + return sanitizeReason(error, { + fallback: "Unknown delivery error", + maxLength: 200, + }); +} + +export function normalizeConnectionHeaders( + headers: Record | undefined, +): Record { + if (headers === undefined) { + return {}; + } + + const normalized: Record = {}; + for (const [rawKey, rawValue] of Object.entries(headers)) { + const key = rawKey.trim(); + const value = rawValue.trim(); + if (key.length === 0 || value.length === 0) { + continue; + } + normalized[key] = value; + } + + return normalized; +} + +function isObject(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function readMessageEventData(event: unknown): unknown { + if (!isObject(event)) { + return undefined; + } + 
+ return event.data; +} + +export function readCloseEvent(event: unknown): { + code: number; + reason: string; + wasClean: boolean; +} { + if (!isObject(event)) { + return { + code: 1006, + reason: "", + wasClean: false, + }; + } + + return { + code: typeof event.code === "number" ? event.code : 1006, + reason: typeof event.reason === "string" ? event.reason : "", + wasClean: typeof event.wasClean === "boolean" ? event.wasClean : false, + }; +} + +export function readUnexpectedResponseStatus( + event: unknown, +): number | undefined { + if (!isObject(event)) { + return undefined; + } + + if (typeof event.status === "number") { + return event.status; + } + + if (typeof event.statusCode === "number") { + return event.statusCode; + } + + const response = event.response; + if (isObject(response)) { + if (typeof response.status === "number") { + return response.status; + } + if (typeof response.statusCode === "number") { + return response.statusCode; + } + } + + return undefined; +} + +export function readErrorEventReason(event: unknown): string { + if (!isObject(event) || !("error" in event)) { + return "WebSocket error"; + } + + return sanitizeErrorReason(event.error); +} + +export async function wait(delayMs: number): Promise { + await new Promise((resolve) => { + setTimeout(resolve, delayMs); + }); +} diff --git a/packages/connector/src/client/inbound-delivery.ts b/packages/connector/src/client/inbound-delivery.ts new file mode 100644 index 0000000..e2cb70f --- /dev/null +++ b/packages/connector/src/client/inbound-delivery.ts @@ -0,0 +1,110 @@ +import { CONNECTOR_FRAME_VERSION } from "../constants.js"; +import type { DeliverAckFrame, DeliverFrame } from "../frames.js"; +import type { LocalOpenclawDeliveryClient } from "./delivery.js"; +import { sanitizeErrorReason } from "./helpers.js"; +import type { ConnectorClientHooks } from "./types.js"; + +function createDeliverAckFrame(input: { + ackId: string; + accepted: boolean; + reason?: string; + makeFrameId: () => 
string; + makeTimestamp: () => string; +}): DeliverAckFrame { + return { + v: CONNECTOR_FRAME_VERSION, + type: "deliver_ack", + id: input.makeFrameId(), + ts: input.makeTimestamp(), + ackId: input.ackId, + accepted: input.accepted, + reason: input.reason, + }; +} + +export async function handleInboundDeliverFrame(input: { + frame: DeliverFrame; + inboundDeliverHandler: + | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) + | undefined; + localOpenclawDelivery: LocalOpenclawDeliveryClient; + isStarted: () => boolean; + hooks: ConnectorClientHooks; + now: () => number; + makeFrameId: () => string; + makeTimestamp: () => string; + sendDeliverAckFrame: (frame: DeliverAckFrame) => void; + recordAckLatency: (durationMs: number) => void; +}): Promise { + const startedAtMs = input.now(); + + if (input.inboundDeliverHandler !== undefined) { + try { + const result = await input.inboundDeliverHandler(input.frame); + input.sendDeliverAckFrame( + createDeliverAckFrame({ + ackId: input.frame.id, + accepted: result.accepted, + reason: result.reason, + makeFrameId: input.makeFrameId, + makeTimestamp: input.makeTimestamp, + }), + ); + + if (result.accepted) { + input.hooks.onDeliverSucceeded?.(input.frame); + } else { + input.hooks.onDeliverFailed?.( + input.frame, + new Error( + result.reason ?? 
"Inbound delivery was rejected by runtime handler", + ), + ); + } + + input.recordAckLatency(input.now() - startedAtMs); + } catch (error) { + input.sendDeliverAckFrame( + createDeliverAckFrame({ + ackId: input.frame.id, + accepted: false, + reason: sanitizeErrorReason(error), + makeFrameId: input.makeFrameId, + makeTimestamp: input.makeTimestamp, + }), + ); + input.hooks.onDeliverFailed?.(input.frame, error); + input.recordAckLatency(input.now() - startedAtMs); + } + return; + } + + try { + await input.localOpenclawDelivery.deliverWithRetry( + input.frame, + input.isStarted, + ); + input.sendDeliverAckFrame( + createDeliverAckFrame({ + ackId: input.frame.id, + accepted: true, + makeFrameId: input.makeFrameId, + makeTimestamp: input.makeTimestamp, + }), + ); + input.hooks.onDeliverSucceeded?.(input.frame); + input.recordAckLatency(input.now() - startedAtMs); + } catch (error) { + input.sendDeliverAckFrame( + createDeliverAckFrame({ + ackId: input.frame.id, + accepted: false, + reason: sanitizeErrorReason(error), + makeFrameId: input.makeFrameId, + makeTimestamp: input.makeTimestamp, + }), + ); + input.hooks.onDeliverFailed?.(input.frame, error); + input.recordAckLatency(input.now() - startedAtMs); + } +} diff --git a/packages/connector/src/client/queue.ts b/packages/connector/src/client/queue.ts new file mode 100644 index 0000000..677deb5 --- /dev/null +++ b/packages/connector/src/client/queue.ts @@ -0,0 +1,150 @@ +import type { Logger } from "@clawdentity/sdk"; +import { type EnqueueFrame, enqueueFrameSchema } from "../frames.js"; +import { sanitizeErrorReason } from "./helpers.js"; +import type { ConnectorOutboundQueuePersistence } from "./types.js"; + +export type OutboundQueueMetricsSnapshot = { + currentDepth: number; + loadedFromPersistence: boolean; + maxDepth: number; + persistenceEnabled: boolean; +}; + +export class ConnectorOutboundQueueManager { + private readonly persistence: ConnectorOutboundQueuePersistence | undefined; + private readonly logger: 
Logger; + + private readonly queue: EnqueueFrame[] = []; + private maxObservedDepth = 0; + private loaded = false; + private loadPromise: Promise | undefined; + private saveChain: Promise = Promise.resolve(); + + constructor(input: { + persistence: ConnectorOutboundQueuePersistence | undefined; + logger: Logger; + }) { + this.persistence = input.persistence; + this.logger = input.logger; + } + + isPersistenceEnabled(): boolean { + return this.persistence !== undefined; + } + + getDepth(): number { + return this.queue.length; + } + + getMetricsSnapshot(): OutboundQueueMetricsSnapshot { + return { + currentDepth: this.queue.length, + maxDepth: this.maxObservedDepth, + loadedFromPersistence: this.loaded, + persistenceEnabled: this.persistence !== undefined, + }; + } + + enqueue(frame: EnqueueFrame): void { + this.queue.push(frame); + this.recordDepth(); + this.persist(); + } + + flush(input: { + isConnected: () => boolean; + sendFrame: (frame: EnqueueFrame) => boolean; + }): void { + if (!input.isConnected()) { + return; + } + + while (this.queue.length > 0 && input.isConnected()) { + const nextFrame = this.queue[0]; + const sent = input.sendFrame(nextFrame); + if (!sent) { + return; + } + + this.queue.shift(); + this.persist(); + } + } + + async ensureLoaded(): Promise { + if (this.loaded) { + return; + } + + if (this.persistence === undefined) { + this.loaded = true; + return; + } + + if (this.loadPromise !== undefined) { + await this.loadPromise; + return; + } + + this.loadPromise = (async () => { + try { + const loadedFrames = await this.persistence?.load(); + if (!loadedFrames || loadedFrames.length === 0) { + return; + } + + const existingIds = new Set(this.queue.map((item) => item.id)); + const validLoadedFrames: EnqueueFrame[] = []; + + for (const candidate of loadedFrames) { + const parsed = enqueueFrameSchema.safeParse(candidate); + if (!parsed.success) { + continue; + } + if (existingIds.has(parsed.data.id)) { + continue; + } + + 
validLoadedFrames.push(parsed.data); + existingIds.add(parsed.data.id); + } + + if (validLoadedFrames.length === 0) { + return; + } + + this.queue.unshift(...validLoadedFrames); + this.recordDepth(); + } catch (error) { + this.logger.warn("connector.outbound.persistence_load_failed", { + reason: sanitizeErrorReason(error), + }); + } finally { + this.loaded = true; + } + })(); + + await this.loadPromise; + } + + private recordDepth(): void { + this.maxObservedDepth = Math.max(this.maxObservedDepth, this.queue.length); + } + + private persist(): void { + if (this.persistence === undefined) { + return; + } + + this.saveChain = this.saveChain + .then(async () => { + await this.ensureLoaded(); + await this.persistence?.save([...this.queue]); + }) + .catch((error) => { + this.logger.warn("connector.outbound.persistence_save_failed", { + reason: sanitizeErrorReason(error), + }); + }); + } +} diff --git a/packages/connector/src/client/retry.ts b/packages/connector/src/client/retry.ts new file mode 100644 index 0000000..8cb1db9 --- /dev/null +++ b/packages/connector/src/client/retry.ts @@ -0,0 +1,27 @@ +export function computeJitteredBackoffDelayMs(input: { + minDelayMs: number; + maxDelayMs: number; + backoffFactor: number; + attempt: number; + jitterRatio: number; + random: () => number; +}): number { + const exponentialDelay = + input.minDelayMs * input.backoffFactor ** input.attempt; + const boundedDelay = Math.min(exponentialDelay, input.maxDelayMs); + const jitterRange = boundedDelay * input.jitterRatio; + const jitterOffset = + jitterRange === 0 ? 
0 : (input.random() * 2 - 1) * jitterRange; + return Math.max(0, Math.floor(boundedDelay + jitterOffset)); +} + +export function computeNextBackoffDelayMs(input: { + currentDelayMs: number; + maxDelayMs: number; + backoffFactor: number; +}): number { + return Math.min( + input.maxDelayMs, + Math.floor(input.currentDelayMs * input.backoffFactor), + ); +} diff --git a/packages/connector/src/client/types.ts b/packages/connector/src/client/types.ts new file mode 100644 index 0000000..78773e7 --- /dev/null +++ b/packages/connector/src/client/types.ts @@ -0,0 +1,115 @@ +import type { ConnectorFrame, DeliverFrame, EnqueueFrame } from "../frames.js"; + +export type ConnectorWebSocketEventType = + | "open" + | "message" + | "close" + | "error" + | "unexpected-response"; + +export type ConnectorWebSocketListener = (event: unknown) => void; + +export type ConnectorWebSocket = { + readonly readyState: number; + send: (data: string) => void; + close: (code?: number, reason?: string) => void; + addEventListener: ( + type: ConnectorWebSocketEventType, + listener: ConnectorWebSocketListener, + ) => void; +}; + +export type ConnectorClientHooks = { + onConnected?: () => void; + onDisconnected?: (event: { + code: number; + reason: string; + wasClean: boolean; + }) => void; + onAuthUpgradeRejected?: (event: { + status: number; + immediateRetry: boolean; + }) => void | Promise; + onFrame?: (frame: ConnectorFrame) => void; + onDeliverSucceeded?: (frame: DeliverFrame) => void; + onDeliverFailed?: (frame: DeliverFrame, error: unknown) => void; +}; + +export type ConnectorOutboundQueuePersistence = { + load: () => Promise; + save: (frames: EnqueueFrame[]) => Promise; +}; + +export type ConnectorClientMetricsSnapshot = { + connection: { + connectAttempts: number; + connected: boolean; + reconnectCount: number; + uptimeMs: number; + lastConnectedAt?: string; + }; + heartbeat: { + avgRttMs?: number; + maxRttMs?: number; + lastRttMs?: number; + pendingAckCount: number; + sampleCount: number; 
+ }; + inboundDelivery: { + avgAckLatencyMs?: number; + maxAckLatencyMs?: number; + lastAckLatencyMs?: number; + sampleCount: number; + }; + outboundQueue: { + currentDepth: number; + loadedFromPersistence: boolean; + maxDepth: number; + persistenceEnabled: boolean; + }; +}; + +export type ConnectorClientOptions = { + connectorUrl: string; + connectionHeaders?: Record; + connectionHeadersProvider?: + | (() => Record | Promise>) + | undefined; + openclawBaseUrl: string; + openclawHookToken?: string; + openclawHookPath?: string; + connectTimeoutMs?: number; + heartbeatIntervalMs?: number; + heartbeatAckTimeoutMs?: number; + reconnectMinDelayMs?: number; + reconnectMaxDelayMs?: number; + reconnectBackoffFactor?: number; + reconnectJitterRatio?: number; + openclawDeliverTimeoutMs?: number; + openclawDeliverMaxAttempts?: number; + openclawDeliverRetryInitialDelayMs?: number; + openclawDeliverRetryMaxDelayMs?: number; + openclawDeliverRetryBackoffFactor?: number; + openclawDeliverRetryBudgetMs?: number; + webSocketFactory?: ( + url: string, + headers: Record, + ) => ConnectorWebSocket; + fetchImpl?: typeof fetch; + logger?: import("@clawdentity/sdk").Logger; + hooks?: ConnectorClientHooks; + outboundQueuePersistence?: ConnectorOutboundQueuePersistence; + inboundDeliverHandler?: + | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) + | undefined; + now?: () => number; + random?: () => number; + ulidFactory?: (time?: number) => string; +}; + +export type ConnectorOutboundEnqueueInput = { + toAgentDid: string; + payload: unknown; + conversationId?: string; + replyTo?: string; +}; diff --git a/packages/connector/src/inbound-inbox.ts b/packages/connector/src/inbound-inbox.ts index 963084e..9ece6c3 100644 --- a/packages/connector/src/inbound-inbox.ts +++ b/packages/connector/src/inbound-inbox.ts @@ -1,272 +1,33 @@ -import { - appendFile, - mkdir, - readFile, - rename, - stat, - unlink, - writeFile, -} from "node:fs/promises"; -import { dirname, join } 
from "node:path"; -import { nowIso, nowUtcMs } from "@clawdentity/sdk"; +import { join } from "node:path"; +import { nowIso } from "@clawdentity/sdk"; import type { DeliverFrame } from "./frames.js"; - -const INBOUND_INBOX_DIR_NAME = "inbound-inbox"; -const INBOUND_INBOX_INDEX_FILE_NAME = "index.json"; -const INBOUND_INBOX_INDEX_LOCK_FILE_NAME = "index.lock"; -const INBOUND_INBOX_EVENTS_FILE_NAME = "events.jsonl"; -const INBOUND_INBOX_SCHEMA_VERSION = 2; - -const DEFAULT_INDEX_LOCK_TIMEOUT_MS = 5_000; -const DEFAULT_INDEX_LOCK_STALE_MS = 30_000; -const DEFAULT_INDEX_LOCK_RETRY_MS = 50; - -export type ConnectorInboundInboxItem = { - attemptCount: number; - conversationId?: string; - fromAgentDid: string; - id: string; - lastAttemptAt?: string; - lastError?: string; - nextAttemptAt: string; - payload: unknown; - payloadBytes: number; - receivedAt: string; - replyTo?: string; - requestId: string; - toAgentDid: string; -}; - -export type ConnectorInboundDeadLetterItem = ConnectorInboundInboxItem & { - deadLetterReason: string; - deadLetteredAt: string; -}; - -type InboundInboxIndexFile = { - deadLetterByRequestId: Record; - deadLetterBytes: number; - pendingBytes: number; - pendingByRequestId: Record; - updatedAt: string; - version: number; -}; - -type InboundInboxEvent = { - details?: Record; - requestId?: string; - type: - | "inbound_persisted" - | "inbound_duplicate" - | "replay_succeeded" - | "replay_failed" - | "dead_letter_moved" - | "dead_letter_replayed" - | "dead_letter_purged" - | "inbox_pruned"; -}; - -export type ConnectorInboundInboxPendingSnapshot = { - nextAttemptAt?: string; - oldestPendingAt?: string; - pendingBytes: number; - pendingCount: number; -}; - -export type ConnectorInboundInboxDeadLetterSnapshot = { - deadLetterBytes: number; - deadLetterCount: number; - oldestDeadLetterAt?: string; -}; - -export type ConnectorInboundInboxSnapshot = { - deadLetter: ConnectorInboundInboxDeadLetterSnapshot; - pending: ConnectorInboundInboxPendingSnapshot; -}; 
- -export type ConnectorInboundInboxEnqueueResult = { - accepted: boolean; - duplicate: boolean; - pendingCount: number; - reason?: string; -}; - -export type ConnectorInboundInboxMarkFailureResult = { - movedToDeadLetter: boolean; -}; - -export type ConnectorInboundInboxOptions = { - agentName: string; - configDir: string; - eventsMaxBytes: number; - eventsMaxFiles: number; - maxPendingBytes: number; - maxPendingMessages: number; -}; - -type ReleaseLock = () => Promise; - -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - -function parseOptionalNonEmptyString(value: unknown): string | undefined { - if (typeof value !== "string") { - return undefined; - } - const trimmed = value.trim(); - return trimmed.length > 0 ? trimmed : undefined; -} - -function parsePendingItem( - value: unknown, -): ConnectorInboundInboxItem | undefined { - if (!isRecord(value)) { - return undefined; - } - - const id = parseOptionalNonEmptyString(value.id) ?? ""; - const requestId = parseOptionalNonEmptyString(value.requestId) ?? ""; - const fromAgentDid = parseOptionalNonEmptyString(value.fromAgentDid) ?? ""; - const toAgentDid = parseOptionalNonEmptyString(value.toAgentDid) ?? ""; - const receivedAt = parseOptionalNonEmptyString(value.receivedAt) ?? ""; - const nextAttemptAt = parseOptionalNonEmptyString(value.nextAttemptAt) ?? ""; - const attemptCount = - typeof value.attemptCount === "number" && - Number.isInteger(value.attemptCount) - ? value.attemptCount - : NaN; - const payloadBytes = - typeof value.payloadBytes === "number" && - Number.isInteger(value.payloadBytes) - ? 
value.payloadBytes - : NaN; - - if ( - id.length === 0 || - requestId.length === 0 || - fromAgentDid.length === 0 || - toAgentDid.length === 0 || - receivedAt.length === 0 || - nextAttemptAt.length === 0 || - !Number.isFinite(attemptCount) || - attemptCount < 0 || - !Number.isFinite(payloadBytes) || - payloadBytes < 0 - ) { - return undefined; - } - - return { - id, - requestId, - fromAgentDid, - toAgentDid, - payload: value.payload, - payloadBytes, - receivedAt, - nextAttemptAt, - attemptCount, - lastError: parseOptionalNonEmptyString(value.lastError), - lastAttemptAt: parseOptionalNonEmptyString(value.lastAttemptAt), - conversationId: parseOptionalNonEmptyString(value.conversationId), - replyTo: parseOptionalNonEmptyString(value.replyTo), - }; -} - -function parseDeadLetterItem( - value: unknown, -): ConnectorInboundDeadLetterItem | undefined { - const pending = parsePendingItem(value); - if (!pending) { - return undefined; - } - - if (!isRecord(value)) { - return undefined; - } - - const deadLetteredAt = - parseOptionalNonEmptyString(value.deadLetteredAt) ?? ""; - const deadLetterReason = - parseOptionalNonEmptyString(value.deadLetterReason) ?? 
""; - if (deadLetteredAt.length === 0 || deadLetterReason.length === 0) { - return undefined; - } - - return { - ...pending, - deadLetteredAt, - deadLetterReason, - }; -} - -function toDefaultIndexFile(): InboundInboxIndexFile { - return { - version: INBOUND_INBOX_SCHEMA_VERSION, - pendingBytes: 0, - deadLetterBytes: 0, - pendingByRequestId: {}, - deadLetterByRequestId: {}, - updatedAt: nowIso(), - }; -} - -function normalizeIndexFile(raw: unknown): InboundInboxIndexFile { - if (!isRecord(raw)) { - throw new Error("Inbound inbox index root must be an object"); - } - - if (raw.version !== INBOUND_INBOX_SCHEMA_VERSION) { - throw new Error( - `Inbound inbox index schema version ${String(raw.version)} is unsupported`, - ); - } - - const pendingByRequestIdRaw = raw.pendingByRequestId; - const deadLetterByRequestIdRaw = raw.deadLetterByRequestId; - if (!isRecord(pendingByRequestIdRaw)) { - throw new Error("Inbound inbox index pendingByRequestId must be an object"); - } - if (!isRecord(deadLetterByRequestIdRaw)) { - throw new Error( - "Inbound inbox index deadLetterByRequestId must be an object", - ); - } - - const pendingByRequestId: Record = {}; - let pendingBytes = 0; - for (const [requestId, candidate] of Object.entries(pendingByRequestIdRaw)) { - const entry = parsePendingItem(candidate); - if (!entry || entry.requestId !== requestId) { - continue; - } - pendingByRequestId[requestId] = entry; - pendingBytes += entry.payloadBytes; - } - - const deadLetterByRequestId: Record = - {}; - let deadLetterBytes = 0; - for (const [requestId, candidate] of Object.entries( - deadLetterByRequestIdRaw, - )) { - const entry = parseDeadLetterItem(candidate); - if (!entry || entry.requestId !== requestId) { - continue; - } - deadLetterByRequestId[requestId] = entry; - deadLetterBytes += entry.payloadBytes; - } - - return { - version: INBOUND_INBOX_SCHEMA_VERSION, - pendingByRequestId, - deadLetterByRequestId, - pendingBytes, - deadLetterBytes, - updatedAt: 
parseOptionalNonEmptyString(raw.updatedAt) ?? nowIso(), - }; -} +import { + INBOUND_INBOX_DIR_NAME, + INBOUND_INBOX_EVENTS_FILE_NAME, + INBOUND_INBOX_INDEX_FILE_NAME, + INBOUND_INBOX_INDEX_LOCK_FILE_NAME, +} from "./inbound-inbox/constants.js"; +import { parseOptionalNonEmptyString } from "./inbound-inbox/schema.js"; +import { InboundInboxStorage } from "./inbound-inbox/storage.js"; +import type { + ConnectorInboundDeadLetterItem, + ConnectorInboundInboxEnqueueResult, + ConnectorInboundInboxItem, + ConnectorInboundInboxMarkFailureResult, + ConnectorInboundInboxOptions, + ConnectorInboundInboxSnapshot, +} from "./inbound-inbox/types.js"; + +export type { + ConnectorInboundDeadLetterItem, + ConnectorInboundInboxDeadLetterSnapshot, + ConnectorInboundInboxEnqueueResult, + ConnectorInboundInboxItem, + ConnectorInboundInboxMarkFailureResult, + ConnectorInboundInboxOptions, + ConnectorInboundInboxPendingSnapshot, + ConnectorInboundInboxSnapshot, +} from "./inbound-inbox/types.js"; function toComparableTimeMs(value: string): number { const parsed = Date.parse(value); @@ -277,49 +38,47 @@ function toComparableTimeMs(value: string): number { return Number.MAX_SAFE_INTEGER; } +function sanitizeRequestIds(requestIds: string[]): string[] { + return Array.from( + new Set( + requestIds.map((item) => item.trim()).filter((item) => item.length > 0), + ), + ); +} + export class ConnectorInboundInbox { - private readonly agentName: string; - private readonly eventsMaxBytes: number; - private readonly eventsMaxFiles: number; - private readonly eventsPath: string; - private readonly inboxDir: string; - private readonly indexPath: string; - private readonly indexLockPath: string; private readonly maxPendingBytes: number; private readonly maxPendingMessages: number; - - private writeChain: Promise = Promise.resolve(); + private readonly storage: InboundInboxStorage; constructor(options: ConnectorInboundInboxOptions) { - this.agentName = options.agentName; - this.inboxDir = join( - 
options.configDir, - "agents", - this.agentName, - INBOUND_INBOX_DIR_NAME, - ); - this.indexPath = join(this.inboxDir, INBOUND_INBOX_INDEX_FILE_NAME); - this.indexLockPath = join( - this.inboxDir, - INBOUND_INBOX_INDEX_LOCK_FILE_NAME, - ); - this.eventsPath = join(this.inboxDir, INBOUND_INBOX_EVENTS_FILE_NAME); + const inboxDir = resolveConnectorInboundInboxDir({ + configDir: options.configDir, + agentName: options.agentName, + }); + + this.storage = new InboundInboxStorage({ + inboxDir, + indexPath: join(inboxDir, INBOUND_INBOX_INDEX_FILE_NAME), + indexLockPath: join(inboxDir, INBOUND_INBOX_INDEX_LOCK_FILE_NAME), + eventsPath: join(inboxDir, INBOUND_INBOX_EVENTS_FILE_NAME), + eventsMaxBytes: Math.max(0, options.eventsMaxBytes), + eventsMaxFiles: Math.max(0, options.eventsMaxFiles), + }); this.maxPendingBytes = options.maxPendingBytes; this.maxPendingMessages = options.maxPendingMessages; - this.eventsMaxBytes = Math.max(0, options.eventsMaxBytes); - this.eventsMaxFiles = Math.max(0, options.eventsMaxFiles); } async enqueue( frame: DeliverFrame, ): Promise { - return await this.withWriteLock(async () => { - const index = await this.loadIndex(); + return await this.storage.withWriteLock(async () => { + const index = await this.storage.loadIndex(); if ( index.pendingByRequestId[frame.id] !== undefined || index.deadLetterByRequestId[frame.id] !== undefined ) { - await this.appendEvent({ + await this.storage.appendEvent({ type: "inbound_duplicate", requestId: frame.id, }); @@ -371,8 +130,8 @@ export class ConnectorInboundInbox { index.pendingByRequestId[pendingItem.requestId] = pendingItem; index.pendingBytes += pendingItem.payloadBytes; index.updatedAt = nowIso(); - await this.saveIndex(index); - await this.appendEvent({ + await this.storage.saveIndex(index); + await this.storage.appendEvent({ type: "inbound_persisted", requestId: pendingItem.requestId, details: { @@ -396,7 +155,7 @@ export class ConnectorInboundInbox { limit: number; nowMs: number; }): Promise { - 
const index = await this.loadIndex(); + const index = await this.storage.loadIndex(); const due = Object.values(index.pendingByRequestId) .filter((item) => toComparableTimeMs(item.nextAttemptAt) <= input.nowMs) .sort((left, right) => { @@ -416,8 +175,8 @@ export class ConnectorInboundInbox { } async markDelivered(requestId: string): Promise { - await this.withWriteLock(async () => { - const index = await this.loadIndex(); + await this.storage.withWriteLock(async () => { + const index = await this.storage.loadIndex(); const entry = index.pendingByRequestId[requestId]; if (entry === undefined) { return; @@ -426,8 +185,8 @@ export class ConnectorInboundInbox { delete index.pendingByRequestId[requestId]; index.pendingBytes = Math.max(0, index.pendingBytes - entry.payloadBytes); index.updatedAt = nowIso(); - await this.saveIndex(index); - await this.appendEvent({ + await this.storage.saveIndex(index); + await this.storage.appendEvent({ type: "replay_succeeded", requestId, }); @@ -441,8 +200,8 @@ export class ConnectorInboundInbox { requestId: string; retryable: boolean; }): Promise { - return await this.withWriteLock(async () => { - const index = await this.loadIndex(); + return await this.storage.withWriteLock(async () => { + const index = await this.storage.loadIndex(); const entry = index.pendingByRequestId[input.requestId]; if (entry === undefined) { return { movedToDeadLetter: false }; @@ -470,8 +229,8 @@ export class ConnectorInboundInbox { index.deadLetterByRequestId[input.requestId] = deadLetterEntry; index.deadLetterBytes += deadLetterEntry.payloadBytes; index.updatedAt = nowIso(); - await this.saveIndex(index); - await this.appendEvent({ + await this.storage.saveIndex(index); + await this.storage.appendEvent({ type: "dead_letter_moved", requestId: input.requestId, details: { @@ -485,8 +244,8 @@ export class ConnectorInboundInbox { entry.nextAttemptAt = input.nextAttemptAt; index.updatedAt = nowIso(); - await this.saveIndex(index); - await this.appendEvent({ + 
await this.storage.saveIndex(index); + await this.storage.appendEvent({ type: "replay_failed", requestId: input.requestId, details: { @@ -503,7 +262,7 @@ export class ConnectorInboundInbox { async listDeadLetter(input?: { limit?: number; }): Promise { - const index = await this.loadIndex(); + const index = await this.storage.loadIndex(); const entries = Object.values(index.deadLetterByRequestId).sort( (left, right) => { const leftDeadAt = toComparableTimeMs(left.deadLetteredAt); @@ -526,17 +285,11 @@ export class ConnectorInboundInbox { async replayDeadLetter(input?: { requestIds?: string[]; }): Promise<{ replayedCount: number }> { - return await this.withWriteLock(async () => { - const index = await this.loadIndex(); + return await this.storage.withWriteLock(async () => { + const index = await this.storage.loadIndex(); const requestIds = input?.requestIds !== undefined - ? Array.from( - new Set( - input.requestIds - .map((item) => item.trim()) - .filter((item) => item.length > 0), - ), - ) + ? sanitizeRequestIds(input.requestIds) : Object.keys(index.deadLetterByRequestId); let replayedCount = 0; @@ -563,7 +316,7 @@ export class ConnectorInboundInbox { }; index.pendingBytes += dead.payloadBytes; replayedCount += 1; - await this.appendEvent({ + await this.storage.appendEvent({ type: "dead_letter_replayed", requestId, details: { @@ -575,7 +328,7 @@ export class ConnectorInboundInbox { if (replayedCount > 0) { index.updatedAt = nowIso(); - await this.saveIndex(index); + await this.storage.saveIndex(index); } return { replayedCount }; @@ -585,17 +338,11 @@ export class ConnectorInboundInbox { async purgeDeadLetter(input?: { requestIds?: string[]; }): Promise<{ purgedCount: number }> { - return await this.withWriteLock(async () => { - const index = await this.loadIndex(); + return await this.storage.withWriteLock(async () => { + const index = await this.storage.loadIndex(); const requestIds = input?.requestIds !== undefined - ? 
Array.from( - new Set( - input.requestIds - .map((item) => item.trim()) - .filter((item) => item.length > 0), - ), - ) + ? sanitizeRequestIds(input.requestIds) : Object.keys(index.deadLetterByRequestId); let purgedCount = 0; @@ -615,7 +362,7 @@ export class ConnectorInboundInbox { index.deadLetterBytes - dead.payloadBytes, ); purgedCount += 1; - await this.appendEvent({ + await this.storage.appendEvent({ type: "dead_letter_purged", requestId, details: { @@ -627,7 +374,7 @@ export class ConnectorInboundInbox { if (purgedCount > 0) { index.updatedAt = nowIso(); - await this.saveIndex(index); + await this.storage.saveIndex(index); } return { purgedCount }; @@ -635,8 +382,8 @@ export class ConnectorInboundInbox { } async pruneDelivered(): Promise { - await this.withWriteLock(async () => { - const index = await this.loadIndex(); + await this.storage.withWriteLock(async () => { + const index = await this.storage.loadIndex(); const beforePendingCount = Object.keys(index.pendingByRequestId).length; const beforeDeadLetterCount = Object.keys( index.deadLetterByRequestId, @@ -674,8 +421,8 @@ export class ConnectorInboundInbox { index.deadLetterByRequestId = nextDead; index.deadLetterBytes = deadLetterBytes; index.updatedAt = nowIso(); - await this.saveIndex(index); - await this.appendEvent({ + await this.storage.saveIndex(index); + await this.storage.appendEvent({ type: "inbox_pruned", details: { beforePendingCount, @@ -688,7 +435,7 @@ export class ConnectorInboundInbox { } async getSnapshot(): Promise { - const index = await this.loadIndex(); + const index = await this.storage.loadIndex(); const pendingEntries = Object.values(index.pendingByRequestId).sort( (left, right) => toComparableTimeMs(left.receivedAt) - @@ -720,194 +467,6 @@ export class ConnectorInboundInbox { }, }; } - - private async withWriteLock(fn: () => Promise): Promise { - const previous = this.writeChain; - let release: (() => void) | undefined; - this.writeChain = new Promise((resolve) => { - release = 
resolve; - }); - - await previous; - const releaseFileLock = await this.acquireIndexFileLock(); - try { - return await fn(); - } finally { - await releaseFileLock(); - release?.(); - } - } - - private async acquireIndexFileLock(): Promise { - const startedAt = nowUtcMs(); - await mkdir(this.inboxDir, { recursive: true }); - - while (true) { - try { - await writeFile( - this.indexLockPath, - `${JSON.stringify({ pid: process.pid, createdAt: nowIso() })}\n`, - { - encoding: "utf8", - flag: "wx", - }, - ); - - let released = false; - return async () => { - if (released) { - return; - } - released = true; - try { - await unlink(this.indexLockPath); - } catch { - // ignore - } - }; - } catch (error) { - const code = - error && typeof error === "object" && "code" in error - ? (error as { code?: string }).code - : undefined; - if (code !== "EEXIST") { - throw error; - } - - const lockStats = await this.readLockStats(); - if ( - lockStats !== undefined && - nowUtcMs() - lockStats.mtimeMs > DEFAULT_INDEX_LOCK_STALE_MS - ) { - try { - await unlink(this.indexLockPath); - } catch { - // ignore stale lock unlink race - } - continue; - } - - if (nowUtcMs() - startedAt >= DEFAULT_INDEX_LOCK_TIMEOUT_MS) { - throw new Error("Timed out waiting for inbound inbox index lock"); - } - - await this.sleep(DEFAULT_INDEX_LOCK_RETRY_MS); - } - } - } - - private async readLockStats(): Promise<{ mtimeMs: number } | undefined> { - try { - const lockStat = await stat(this.indexLockPath); - return { mtimeMs: lockStat.mtimeMs }; - } catch { - return undefined; - } - } - - private async loadIndex(): Promise { - await mkdir(this.inboxDir, { recursive: true }); - - let raw: string; - try { - raw = await readFile(this.indexPath, "utf8"); - } catch (error) { - if ( - error && - typeof error === "object" && - "code" in error && - (error as { code?: string }).code === "ENOENT" - ) { - return toDefaultIndexFile(); - } - - throw error; - } - - if (raw.trim().length === 0) { - return toDefaultIndexFile(); - 
} - - const parsed = JSON.parse(raw) as unknown; - return normalizeIndexFile(parsed); - } - - private async saveIndex(index: InboundInboxIndexFile): Promise { - await mkdir(dirname(this.indexPath), { recursive: true }); - - const payload = { - ...index, - version: INBOUND_INBOX_SCHEMA_VERSION, - updatedAt: nowIso(), - } satisfies InboundInboxIndexFile; - - const tmpPath = `${this.indexPath}.tmp-${nowUtcMs()}`; - await writeFile(tmpPath, `${JSON.stringify(payload, null, 2)}\n`, "utf8"); - await rename(tmpPath, this.indexPath); - } - - private async appendEvent(event: InboundInboxEvent): Promise { - await mkdir(dirname(this.eventsPath), { recursive: true }); - await appendFile( - this.eventsPath, - `${JSON.stringify({ ...event, at: nowIso() })}\n`, - "utf8", - ); - await this.rotateEventsIfNeeded(); - } - - private async rotateEventsIfNeeded(): Promise { - if (this.eventsMaxBytes <= 0 || this.eventsMaxFiles <= 0) { - return; - } - - let currentSize: number; - try { - const current = await stat(this.eventsPath); - currentSize = current.size; - } catch { - return; - } - - if (currentSize <= this.eventsMaxBytes) { - return; - } - - for (let index = this.eventsMaxFiles; index >= 1; index -= 1) { - const fromPath = - index === 1 ? 
this.eventsPath : `${this.eventsPath}.${index - 1}`; - const toPath = `${this.eventsPath}.${index}`; - - const fromExists = await this.pathExists(fromPath); - if (!fromExists) { - continue; - } - - const toExists = await this.pathExists(toPath); - if (toExists) { - await unlink(toPath); - } - - await rename(fromPath, toPath); - } - - await writeFile(this.eventsPath, "", "utf8"); - } - - private async pathExists(pathValue: string): Promise { - try { - await stat(pathValue); - return true; - } catch { - return false; - } - } - - private async sleep(durationMs: number): Promise { - await new Promise((resolve) => { - setTimeout(resolve, durationMs); - }); - } } export function createConnectorInboundInbox( diff --git a/packages/connector/src/inbound-inbox/AGENTS.md b/packages/connector/src/inbound-inbox/AGENTS.md new file mode 100644 index 0000000..354fe9c --- /dev/null +++ b/packages/connector/src/inbound-inbox/AGENTS.md @@ -0,0 +1,10 @@ +# AGENTS.md (packages/connector/src/inbound-inbox) + +## Purpose +- Keep inbound inbox durability logic modular and corruption-resistant. + +## Rules +- Keep data contracts in `types.ts` and schema normalization in `schema.ts`. +- Keep lock/index/events file operations in `storage.ts`; do not duplicate file-lock logic. +- Preserve atomic index write semantics and append-only event logging. +- Keep request-id dedupe and byte/count accounting consistent with index snapshots. 
diff --git a/packages/connector/src/inbound-inbox/constants.ts b/packages/connector/src/inbound-inbox/constants.ts new file mode 100644 index 0000000..7246f8e --- /dev/null +++ b/packages/connector/src/inbound-inbox/constants.ts @@ -0,0 +1,9 @@ +export const INBOUND_INBOX_DIR_NAME = "inbound-inbox"; +export const INBOUND_INBOX_INDEX_FILE_NAME = "index.json"; +export const INBOUND_INBOX_INDEX_LOCK_FILE_NAME = "index.lock"; +export const INBOUND_INBOX_EVENTS_FILE_NAME = "events.jsonl"; +export const INBOUND_INBOX_SCHEMA_VERSION = 2; + +export const DEFAULT_INDEX_LOCK_TIMEOUT_MS = 5_000; +export const DEFAULT_INDEX_LOCK_STALE_MS = 30_000; +export const DEFAULT_INDEX_LOCK_RETRY_MS = 50; diff --git a/packages/connector/src/inbound-inbox/schema.ts b/packages/connector/src/inbound-inbox/schema.ts new file mode 100644 index 0000000..a8498ba --- /dev/null +++ b/packages/connector/src/inbound-inbox/schema.ts @@ -0,0 +1,168 @@ +import { nowIso } from "@clawdentity/sdk"; +import { INBOUND_INBOX_SCHEMA_VERSION } from "./constants.js"; +import type { + ConnectorInboundDeadLetterItem, + ConnectorInboundInboxItem, + InboundInboxIndexFile, +} from "./types.js"; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function parseOptionalNonEmptyString( + value: unknown, +): string | undefined { + if (typeof value !== "string") { + return undefined; + } + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +function parsePendingItem( + value: unknown, +): ConnectorInboundInboxItem | undefined { + if (!isRecord(value)) { + return undefined; + } + + const id = parseOptionalNonEmptyString(value.id) ?? ""; + const requestId = parseOptionalNonEmptyString(value.requestId) ?? ""; + const fromAgentDid = parseOptionalNonEmptyString(value.fromAgentDid) ?? ""; + const toAgentDid = parseOptionalNonEmptyString(value.toAgentDid) ?? 
""; + const receivedAt = parseOptionalNonEmptyString(value.receivedAt) ?? ""; + const nextAttemptAt = parseOptionalNonEmptyString(value.nextAttemptAt) ?? ""; + const attemptCount = + typeof value.attemptCount === "number" && + Number.isInteger(value.attemptCount) + ? value.attemptCount + : NaN; + const payloadBytes = + typeof value.payloadBytes === "number" && + Number.isInteger(value.payloadBytes) + ? value.payloadBytes + : NaN; + + if ( + id.length === 0 || + requestId.length === 0 || + fromAgentDid.length === 0 || + toAgentDid.length === 0 || + receivedAt.length === 0 || + nextAttemptAt.length === 0 || + !Number.isFinite(attemptCount) || + attemptCount < 0 || + !Number.isFinite(payloadBytes) || + payloadBytes < 0 + ) { + return undefined; + } + + return { + id, + requestId, + fromAgentDid, + toAgentDid, + payload: value.payload, + payloadBytes, + receivedAt, + nextAttemptAt, + attemptCount, + lastError: parseOptionalNonEmptyString(value.lastError), + lastAttemptAt: parseOptionalNonEmptyString(value.lastAttemptAt), + conversationId: parseOptionalNonEmptyString(value.conversationId), + replyTo: parseOptionalNonEmptyString(value.replyTo), + }; +} + +function parseDeadLetterItem( + value: unknown, +): ConnectorInboundDeadLetterItem | undefined { + const pending = parsePendingItem(value); + if (!pending || !isRecord(value)) { + return undefined; + } + + const deadLetteredAt = + parseOptionalNonEmptyString(value.deadLetteredAt) ?? ""; + const deadLetterReason = + parseOptionalNonEmptyString(value.deadLetterReason) ?? 
""; + if (deadLetteredAt.length === 0 || deadLetterReason.length === 0) { + return undefined; + } + + return { + ...pending, + deadLetteredAt, + deadLetterReason, + }; +} + +export function toDefaultIndexFile(): InboundInboxIndexFile { + return { + version: INBOUND_INBOX_SCHEMA_VERSION, + pendingBytes: 0, + deadLetterBytes: 0, + pendingByRequestId: {}, + deadLetterByRequestId: {}, + updatedAt: nowIso(), + }; +} + +export function normalizeIndexFile(raw: unknown): InboundInboxIndexFile { + if (!isRecord(raw)) { + throw new Error("Inbound inbox index root must be an object"); + } + + if (raw.version !== INBOUND_INBOX_SCHEMA_VERSION) { + throw new Error( + `Inbound inbox index schema version ${String(raw.version)} is unsupported`, + ); + } + + const pendingByRequestIdRaw = raw.pendingByRequestId; + const deadLetterByRequestIdRaw = raw.deadLetterByRequestId; + if (!isRecord(pendingByRequestIdRaw)) { + throw new Error("Inbound inbox index pendingByRequestId must be an object"); + } + if (!isRecord(deadLetterByRequestIdRaw)) { + throw new Error( + "Inbound inbox index deadLetterByRequestId must be an object", + ); + } + + const pendingByRequestId: Record = {}; + let pendingBytes = 0; + for (const [requestId, candidate] of Object.entries(pendingByRequestIdRaw)) { + const entry = parsePendingItem(candidate); + if (!entry || entry.requestId !== requestId) { + continue; + } + pendingByRequestId[requestId] = entry; + pendingBytes += entry.payloadBytes; + } + + const deadLetterByRequestId: Record = + {}; + let deadLetterBytes = 0; + for (const [requestId, candidate] of Object.entries( + deadLetterByRequestIdRaw, + )) { + const entry = parseDeadLetterItem(candidate); + if (!entry || entry.requestId !== requestId) { + continue; + } + deadLetterByRequestId[requestId] = entry; + deadLetterBytes += entry.payloadBytes; + } + + return { + version: INBOUND_INBOX_SCHEMA_VERSION, + pendingByRequestId, + deadLetterByRequestId, + pendingBytes, + deadLetterBytes, + updatedAt: 
parseOptionalNonEmptyString(raw.updatedAt) ?? nowIso(), + }; +} diff --git a/packages/connector/src/inbound-inbox/storage.ts b/packages/connector/src/inbound-inbox/storage.ts new file mode 100644 index 0000000..d3da6ae --- /dev/null +++ b/packages/connector/src/inbound-inbox/storage.ts @@ -0,0 +1,238 @@ +import { + appendFile, + mkdir, + readFile, + rename, + stat, + unlink, + writeFile, +} from "node:fs/promises"; +import { dirname } from "node:path"; +import { nowIso, nowUtcMs } from "@clawdentity/sdk"; +import { + DEFAULT_INDEX_LOCK_RETRY_MS, + DEFAULT_INDEX_LOCK_STALE_MS, + DEFAULT_INDEX_LOCK_TIMEOUT_MS, + INBOUND_INBOX_SCHEMA_VERSION, +} from "./constants.js"; +import { normalizeIndexFile, toDefaultIndexFile } from "./schema.js"; +import type { InboundInboxEvent, InboundInboxIndexFile } from "./types.js"; + +type ReleaseLock = () => Promise; + +type InboundInboxStorageOptions = { + eventsMaxBytes: number; + eventsMaxFiles: number; + eventsPath: string; + inboxDir: string; + indexLockPath: string; + indexPath: string; +}; + +export class InboundInboxStorage { + private readonly eventsMaxBytes: number; + private readonly eventsMaxFiles: number; + private readonly eventsPath: string; + private readonly inboxDir: string; + private readonly indexLockPath: string; + private readonly indexPath: string; + + private writeChain: Promise = Promise.resolve(); + + constructor(options: InboundInboxStorageOptions) { + this.inboxDir = options.inboxDir; + this.indexPath = options.indexPath; + this.indexLockPath = options.indexLockPath; + this.eventsPath = options.eventsPath; + this.eventsMaxBytes = options.eventsMaxBytes; + this.eventsMaxFiles = options.eventsMaxFiles; + } + + async withWriteLock(fn: () => Promise): Promise { + const previous = this.writeChain; + let release: (() => void) | undefined; + this.writeChain = new Promise((resolve) => { + release = resolve; + }); + + await previous; + const releaseFileLock = await this.acquireIndexFileLock(); + try { + return await 
fn(); + } finally { + await releaseFileLock(); + release?.(); + } + } + + async loadIndex(): Promise { + await mkdir(this.inboxDir, { recursive: true }); + + let raw: string; + try { + raw = await readFile(this.indexPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return toDefaultIndexFile(); + } + + throw error; + } + + if (raw.trim().length === 0) { + return toDefaultIndexFile(); + } + + const parsed = JSON.parse(raw) as unknown; + return normalizeIndexFile(parsed); + } + + async saveIndex(index: InboundInboxIndexFile): Promise { + await mkdir(dirname(this.indexPath), { recursive: true }); + + const payload = { + ...index, + version: INBOUND_INBOX_SCHEMA_VERSION, + updatedAt: nowIso(), + } satisfies InboundInboxIndexFile; + + const tmpPath = `${this.indexPath}.tmp-${nowUtcMs()}`; + await writeFile(tmpPath, `${JSON.stringify(payload, null, 2)}\n`, "utf8"); + await rename(tmpPath, this.indexPath); + } + + async appendEvent(event: InboundInboxEvent): Promise { + await mkdir(dirname(this.eventsPath), { recursive: true }); + await appendFile( + this.eventsPath, + `${JSON.stringify({ ...event, at: nowIso() })}\n`, + "utf8", + ); + await this.rotateEventsIfNeeded(); + } + + private async acquireIndexFileLock(): Promise { + const startedAt = nowUtcMs(); + await mkdir(this.inboxDir, { recursive: true }); + + while (true) { + try { + await writeFile( + this.indexLockPath, + `${JSON.stringify({ pid: process.pid, createdAt: nowIso() })}\n`, + { + encoding: "utf8", + flag: "wx", + }, + ); + + let released = false; + return async () => { + if (released) { + return; + } + released = true; + try { + await unlink(this.indexLockPath); + } catch { + // ignore + } + }; + } catch (error) { + const code = + error && typeof error === "object" && "code" in error + ? 
(error as { code?: string }).code + : undefined; + if (code !== "EEXIST") { + throw error; + } + + const lockStats = await this.readLockStats(); + if ( + lockStats !== undefined && + nowUtcMs() - lockStats.mtimeMs > DEFAULT_INDEX_LOCK_STALE_MS + ) { + try { + await unlink(this.indexLockPath); + } catch { + // ignore stale lock unlink race + } + continue; + } + + if (nowUtcMs() - startedAt >= DEFAULT_INDEX_LOCK_TIMEOUT_MS) { + throw new Error("Timed out waiting for inbound inbox index lock"); + } + + await this.sleep(DEFAULT_INDEX_LOCK_RETRY_MS); + } + } + } + + private async readLockStats(): Promise<{ mtimeMs: number } | undefined> { + try { + const lockStat = await stat(this.indexLockPath); + return { mtimeMs: lockStat.mtimeMs }; + } catch { + return undefined; + } + } + + private async rotateEventsIfNeeded(): Promise { + if (this.eventsMaxBytes <= 0 || this.eventsMaxFiles <= 0) { + return; + } + + let currentSize: number; + try { + const current = await stat(this.eventsPath); + currentSize = current.size; + } catch { + return; + } + + if (currentSize <= this.eventsMaxBytes) { + return; + } + + for (let index = this.eventsMaxFiles; index >= 1; index -= 1) { + const fromPath = + index === 1 ? 
this.eventsPath : `${this.eventsPath}.${index - 1}`; + const toPath = `${this.eventsPath}.${index}`; + + const fromExists = await this.pathExists(fromPath); + if (!fromExists) { + continue; + } + + const toExists = await this.pathExists(toPath); + if (toExists) { + await unlink(toPath); + } + + await rename(fromPath, toPath); + } + + await writeFile(this.eventsPath, "", "utf8"); + } + + private async pathExists(pathValue: string): Promise { + try { + await stat(pathValue); + return true; + } catch { + return false; + } + } + + private async sleep(durationMs: number): Promise { + await new Promise((resolve) => { + setTimeout(resolve, durationMs); + }); + } +} diff --git a/packages/connector/src/inbound-inbox/types.ts b/packages/connector/src/inbound-inbox/types.ts new file mode 100644 index 0000000..fcf4ace --- /dev/null +++ b/packages/connector/src/inbound-inbox/types.ts @@ -0,0 +1,81 @@ +export type ConnectorInboundInboxItem = { + attemptCount: number; + conversationId?: string; + fromAgentDid: string; + id: string; + lastAttemptAt?: string; + lastError?: string; + nextAttemptAt: string; + payload: unknown; + payloadBytes: number; + receivedAt: string; + replyTo?: string; + requestId: string; + toAgentDid: string; +}; + +export type ConnectorInboundDeadLetterItem = ConnectorInboundInboxItem & { + deadLetterReason: string; + deadLetteredAt: string; +}; + +export type InboundInboxIndexFile = { + deadLetterByRequestId: Record; + deadLetterBytes: number; + pendingBytes: number; + pendingByRequestId: Record; + updatedAt: string; + version: number; +}; + +export type InboundInboxEvent = { + details?: Record; + requestId?: string; + type: + | "inbound_persisted" + | "inbound_duplicate" + | "replay_succeeded" + | "replay_failed" + | "dead_letter_moved" + | "dead_letter_replayed" + | "dead_letter_purged" + | "inbox_pruned"; +}; + +export type ConnectorInboundInboxPendingSnapshot = { + nextAttemptAt?: string; + oldestPendingAt?: string; + pendingBytes: number; + 
pendingCount: number; +}; + +export type ConnectorInboundInboxDeadLetterSnapshot = { + deadLetterBytes: number; + deadLetterCount: number; + oldestDeadLetterAt?: string; +}; + +export type ConnectorInboundInboxSnapshot = { + deadLetter: ConnectorInboundInboxDeadLetterSnapshot; + pending: ConnectorInboundInboxPendingSnapshot; +}; + +export type ConnectorInboundInboxEnqueueResult = { + accepted: boolean; + duplicate: boolean; + pendingCount: number; + reason?: string; +}; + +export type ConnectorInboundInboxMarkFailureResult = { + movedToDeadLetter: boolean; +}; + +export type ConnectorInboundInboxOptions = { + agentName: string; + configDir: string; + eventsMaxBytes: number; + eventsMaxFiles: number; + maxPendingBytes: number; + maxPendingMessages: number; +}; diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index a1a2815..a3c86e3 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -1,1162 +1,71 @@ -import { randomBytes } from "node:crypto"; -import { mkdir, readFile, rename, writeFile } from "node:fs/promises"; -import { - createServer, - type IncomingMessage, - type ServerResponse, -} from "node:http"; -import { dirname, isAbsolute, join } from "node:path"; +import { createServer } from "node:http"; import { decodeBase64url, - encodeBase64url, - RELAY_CONNECT_PATH, - RELAY_CONVERSATION_ID_HEADER, - RELAY_DELIVERY_RECEIPT_URL_HEADER, RELAY_DELIVERY_RECEIPTS_PATH, - RELAY_RECIPIENT_AGENT_DID_HEADER, } from "@clawdentity/protocol"; import { - type AgentAuthBundle, - AppError, createLogger, - executeWithAgentAuthRefreshRetry, - type Logger, nowIso, nowUtcMs, refreshAgentAuthWithClawProof, - signHttpRequest, toIso, } from "@clawdentity/sdk"; -import { WebSocket as NodeWebSocket } from "ws"; -import { - ConnectorClient, - type ConnectorOutboundQueuePersistence, - type ConnectorWebSocket, -} from "./client.js"; -import { - AGENT_ACCESS_HEADER, - DEFAULT_CONNECTOR_BASE_URL, - 
DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, - DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, - DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, - DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, - DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, - DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, - DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, - DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, - DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, - DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, - DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, - DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, - DEFAULT_CONNECTOR_OUTBOUND_PATH, - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, - DEFAULT_CONNECTOR_STATUS_PATH, - DEFAULT_OPENCLAW_BASE_URL, - DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, - DEFAULT_OPENCLAW_HOOK_PATH, -} from "./constants.js"; -import { type EnqueueFrame, enqueueFrameSchema } from "./frames.js"; +import { ConnectorClient } from "./client.js"; import { type ConnectorInboundInboxSnapshot, createConnectorInboundInbox, } from "./inbound-inbox.js"; - -type ConnectorRuntimeCredentials = { - accessExpiresAt?: string; - accessToken?: string; - agentDid: string; - ait: string; - refreshExpiresAt?: string; - refreshToken: string; - secretKey: string; - tokenType?: "Bearer"; -}; - -export type StartConnectorRuntimeInput = { - agentName: string; - configDir: string; - credentials: ConnectorRuntimeCredentials; - fetchImpl?: typeof fetch; - logger?: Logger; - openclawBaseUrl?: string; - openclawHookPath?: string; - openclawHookToken?: string; - outboundBaseUrl?: string; - outboundPath?: string; - proxyWebsocketUrl?: string; - registryUrl: string; -}; - -export type ConnectorRuntimeHandle = { - outboundUrl: string; - stop: () => Promise; - waitUntilStopped: () => Promise; - websocketUrl: 
string; -}; - -type OutboundRelayRequest = { - conversationId?: string; - payload: unknown; - peer: string; - peerDid: string; - peerProxyUrl: string; - replyTo?: string; -}; - -type OutboundDeliveryReceiptStatus = "processed_by_openclaw" | "dead_lettered"; - -type TrustedReceiptTargets = { - byAgentDid: Map; - origins: Set; -}; - -const REGISTRY_AUTH_FILENAME = "registry-auth.json"; -const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; -const AGENTS_DIR_NAME = "agents"; -const OUTBOUND_QUEUE_DIR_NAME = "outbound-queue"; -const OUTBOUND_QUEUE_FILENAME = "queue.json"; -const REFRESH_SINGLE_FLIGHT_PREFIX = "connector-runtime"; -const NONCE_SIZE = 16; -const MAX_OUTBOUND_BODY_BYTES = 1024 * 1024; -const ACCESS_TOKEN_REFRESH_SKEW_MS = 30_000; - -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - -function toPathWithQuery(url: URL): string { - return `${url.pathname}${url.search}`; -} - -function parseRequiredString(value: unknown, field: string): string { - if (typeof value !== "string" || value.trim().length === 0) { - throw new Error(`Invalid ${field}`); - } - - return value.trim(); -} - -function parseOptionalString(value: unknown): string | undefined { - if (typeof value !== "string") { - return undefined; - } - - const trimmed = value.trim(); - return trimmed.length > 0 ? 
trimmed : undefined; -} - -function parseOptionalProxyOrigin(value: unknown): string | undefined { - if (typeof value !== "string" || value.trim().length === 0) { - return undefined; - } - - try { - return new URL(value.trim()).origin; - } catch { - return undefined; - } -} - -function normalizeOutboundBaseUrl(baseUrlInput: string | undefined): URL { - const raw = baseUrlInput?.trim() || DEFAULT_CONNECTOR_BASE_URL; - let parsed: URL; - - try { - parsed = new URL(raw); - } catch { - throw new Error("Connector outbound base URL is invalid"); - } - - if (parsed.protocol !== "http:") { - throw new Error("Connector outbound base URL must use http://"); - } - - return parsed; -} - -function normalizeOutboundPath(pathInput: string | undefined): string { - const raw = pathInput?.trim() || DEFAULT_CONNECTOR_OUTBOUND_PATH; - if (raw.length === 0) { - throw new Error("Connector outbound path is invalid"); - } - - return raw.startsWith("/") ? raw : `/${raw}`; -} - -function normalizeWebSocketUrl(urlInput: string | undefined): string { - const raw = urlInput?.trim() ?? 
process.env.CLAWDENTITY_PROXY_WS_URL?.trim(); - if (!raw) { - throw new Error( - "Proxy websocket URL is required (set --proxy-ws-url or CLAWDENTITY_PROXY_WS_URL)", - ); - } - - const parsed = new URL(raw); - if (parsed.protocol === "https:") { - parsed.protocol = "wss:"; - } else if (parsed.protocol === "http:") { - parsed.protocol = "ws:"; - } - - if (parsed.protocol !== "wss:" && parsed.protocol !== "ws:") { - throw new Error("Proxy websocket URL must use ws:// or wss://"); - } - - if (parsed.pathname === "/") { - parsed.pathname = RELAY_CONNECT_PATH; - } - - return parsed.toString(); -} - -function resolveOpenclawBaseUrl(input?: string): string { - const value = - input?.trim() || - process.env.OPENCLAW_BASE_URL?.trim() || - DEFAULT_OPENCLAW_BASE_URL; - return value; -} - -function resolveOpenclawHookPath(input?: string): string { - const value = - input?.trim() || - process.env.OPENCLAW_HOOK_PATH?.trim() || - DEFAULT_OPENCLAW_HOOK_PATH; - return value.startsWith("/") ? value : `/${value}`; -} - -function resolveOpenclawHookToken(input?: string): string | undefined { - const value = input?.trim() || process.env.OPENCLAW_HOOK_TOKEN?.trim(); - if (!value) { - return undefined; - } - return value; -} - -function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { - const normalizedBase = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`; - const normalizedHookPath = hookPath.startsWith("/") - ? 
hookPath.slice(1) - : hookPath; - return new URL(normalizedHookPath, normalizedBase).toString(); -} - -function toHttpOriginFromWebSocketUrl(value: URL): string { - const normalized = new URL(value.toString()); - if (normalized.protocol === "wss:") { - normalized.protocol = "https:"; - } else if (normalized.protocol === "ws:") { - normalized.protocol = "http:"; - } - - return normalized.origin; -} - -function parsePositiveIntEnv( - key: string, - fallback: number, - minimum = 1, -): number { - const raw = process.env[key]?.trim(); - if (!raw) { - return fallback; - } - - const parsed = Number.parseInt(raw, 10); - if (!Number.isFinite(parsed) || parsed < minimum) { - return fallback; - } - - return parsed; -} - -function sanitizeErrorReason(error: unknown): string { - if (!(error instanceof Error)) { - return "Unknown error"; - } - - return error.message.trim().slice(0, 240) || "Unknown error"; -} - -class LocalOpenclawDeliveryError extends Error { - readonly code?: "HOOK_AUTH_REJECTED" | "RUNTIME_STOPPING"; - readonly retryable: boolean; - - constructor(input: { - code?: "HOOK_AUTH_REJECTED" | "RUNTIME_STOPPING"; - message: string; - retryable: boolean; - }) { - super(input.message); - this.name = "LocalOpenclawDeliveryError"; - this.code = input.code; - this.retryable = input.retryable; - } -} - -type InboundReplayPolicy = { - batchSize: number; - deadLetterNonRetryableMaxAttempts: number; - eventsMaxBytes: number; - eventsMaxFiles: number; - inboxMaxBytes: number; - inboxMaxMessages: number; - replayIntervalMs: number; - retryBackoffFactor: number; - retryInitialDelayMs: number; - retryMaxDelayMs: number; - runtimeReplayMaxAttempts: number; - runtimeReplayRetryBackoffFactor: number; - runtimeReplayRetryInitialDelayMs: number; - runtimeReplayRetryMaxDelayMs: number; -}; - -type OpenclawProbePolicy = { - intervalMs: number; - timeoutMs: number; -}; - -type InboundReplayStatus = { - lastReplayAt?: string; - lastReplayError?: string; - lastAttemptAt?: string; - 
lastAttemptStatus?: "ok" | "failed"; - replayerActive: boolean; -}; - -type InboundReplayView = { - lastReplayAt?: string; - lastReplayError?: string; - snapshot: ConnectorInboundInboxSnapshot; - replayerActive: boolean; - openclawGateway: { - lastCheckedAt?: string; - lastFailureReason?: string; - lastSuccessAt?: string; - reachable: boolean; - url: string; - }; - openclawHook: { - lastAttemptAt?: string; - lastAttemptStatus?: "ok" | "failed"; - url: string; - }; -}; - -type OpenclawGatewayProbeStatus = { - lastCheckedAt?: string; - lastFailureReason?: string; - lastSuccessAt?: string; - reachable: boolean; -}; - -function loadInboundReplayPolicy(): InboundReplayPolicy { - const retryBackoffFactor = Number.parseFloat( - process.env.CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR ?? "", - ); - const runtimeReplayRetryBackoffFactor = Number.parseFloat( - process.env.CONNECTOR_RUNTIME_REPLAY_RETRY_BACKOFF_FACTOR ?? "", - ); - - return { - deadLetterNonRetryableMaxAttempts: parsePositiveIntEnv( - "CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS", - DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, - ), - eventsMaxBytes: parsePositiveIntEnv( - "CONNECTOR_INBOUND_EVENTS_MAX_BYTES", - DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, - ), - eventsMaxFiles: parsePositiveIntEnv( - "CONNECTOR_INBOUND_EVENTS_MAX_FILES", - DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, - ), - inboxMaxMessages: parsePositiveIntEnv( - "CONNECTOR_INBOUND_INBOX_MAX_MESSAGES", - DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, - ), - inboxMaxBytes: parsePositiveIntEnv( - "CONNECTOR_INBOUND_INBOX_MAX_BYTES", - DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, - ), - replayIntervalMs: parsePositiveIntEnv( - "CONNECTOR_INBOUND_REPLAY_INTERVAL_MS", - DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, - ), - batchSize: parsePositiveIntEnv( - "CONNECTOR_INBOUND_REPLAY_BATCH_SIZE", - DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, - ), - retryInitialDelayMs: parsePositiveIntEnv( - 
"CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS", - DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, - ), - retryMaxDelayMs: parsePositiveIntEnv( - "CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS", - DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, - ), - retryBackoffFactor: - Number.isFinite(retryBackoffFactor) && retryBackoffFactor >= 1 - ? retryBackoffFactor - : DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, - runtimeReplayMaxAttempts: parsePositiveIntEnv( - "CONNECTOR_RUNTIME_REPLAY_MAX_ATTEMPTS", - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, - ), - runtimeReplayRetryInitialDelayMs: parsePositiveIntEnv( - "CONNECTOR_RUNTIME_REPLAY_RETRY_INITIAL_DELAY_MS", - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, - ), - runtimeReplayRetryMaxDelayMs: parsePositiveIntEnv( - "CONNECTOR_RUNTIME_REPLAY_RETRY_MAX_DELAY_MS", - DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, - ), - runtimeReplayRetryBackoffFactor: - Number.isFinite(runtimeReplayRetryBackoffFactor) && - runtimeReplayRetryBackoffFactor >= 1 - ? 
runtimeReplayRetryBackoffFactor - : DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, - }; -} - -function loadOpenclawProbePolicy(): OpenclawProbePolicy { - return { - intervalMs: parsePositiveIntEnv( - "CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS", - DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, - ), - timeoutMs: parsePositiveIntEnv( - "CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS", - DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, - ), - }; -} - -function computeReplayDelayMs(input: { - attemptCount: number; - policy: InboundReplayPolicy; -}): number { - const exponent = Math.max(0, input.attemptCount - 1); - const delay = Math.min( - input.policy.retryMaxDelayMs, - Math.floor( - input.policy.retryInitialDelayMs * - input.policy.retryBackoffFactor ** exponent, - ), - ); - return Math.max(1, delay); -} - -function computeRuntimeReplayRetryDelayMs(input: { - attemptCount: number; - policy: InboundReplayPolicy; -}): number { - const exponent = Math.max(0, input.attemptCount - 1); - const delay = Math.min( - input.policy.runtimeReplayRetryMaxDelayMs, - Math.floor( - input.policy.runtimeReplayRetryInitialDelayMs * - input.policy.runtimeReplayRetryBackoffFactor ** exponent, - ), - ); - return Math.max(1, delay); -} - -async function waitWithAbort(input: { - delayMs: number; - signal: AbortSignal; -}): Promise { - if (input.signal.aborted) { - throw new LocalOpenclawDeliveryError({ - code: "RUNTIME_STOPPING", - message: "Connector runtime is stopping", - retryable: false, - }); - } - - await new Promise((resolve, reject) => { - const timeoutHandle = setTimeout(() => { - input.signal.removeEventListener("abort", onAbort); - resolve(); - }, input.delayMs); - - const onAbort = () => { - clearTimeout(timeoutHandle); - input.signal.removeEventListener("abort", onAbort); - reject( - new LocalOpenclawDeliveryError({ - code: "RUNTIME_STOPPING", - message: "Connector runtime is stopping", - retryable: false, - }), - ); - }; - - input.signal.addEventListener("abort", onAbort, { once: 
true }); - }); -} - -async function readOpenclawHookTokenFromRelayRuntimeConfig(input: { - configDir: string; - logger: Logger; -}): Promise { - const runtimeConfigPath = join( - input.configDir, - OPENCLAW_RELAY_RUNTIME_FILE_NAME, - ); - let raw: string; - try { - raw = await readFile(runtimeConfigPath, "utf8"); - } catch (error) { - if ( - error && - typeof error === "object" && - "code" in error && - (error as { code?: string }).code === "ENOENT" - ) { - return undefined; - } - - input.logger.warn("connector.runtime.openclaw_relay_config_read_failed", { - runtimeConfigPath, - reason: sanitizeErrorReason(error), - }); - return undefined; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - input.logger.warn("connector.runtime.openclaw_relay_config_invalid_json", { - runtimeConfigPath, - }); - return undefined; - } - - if (!isRecord(parsed)) { - return undefined; - } - - const tokenValue = parsed.openclawHookToken; - if (typeof tokenValue !== "string") { - return undefined; - } - - const trimmed = tokenValue.trim(); - return trimmed.length > 0 ? 
trimmed : undefined; -} - -async function deliverToOpenclawHook(input: { - fetchImpl: typeof fetch; - fromAgentDid: string; - openclawHookToken?: string; - openclawHookUrl: string; - payload: unknown; - requestId: string; - shutdownSignal: AbortSignal; - toAgentDid: string; -}): Promise { - const timeoutSignal = AbortSignal.timeout( - DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, - ); - const signal = AbortSignal.any([input.shutdownSignal, timeoutSignal]); - - const headers: Record = { - "content-type": "application/json", - "x-clawdentity-agent-did": input.fromAgentDid, - "x-clawdentity-to-agent-did": input.toAgentDid, - "x-clawdentity-verified": "true", - "x-request-id": input.requestId, - }; - if (input.openclawHookToken !== undefined) { - headers["x-openclaw-token"] = input.openclawHookToken; - } - - try { - const response = await input.fetchImpl(input.openclawHookUrl, { - method: "POST", - headers, - body: JSON.stringify(input.payload), - signal, - }); - if (!response.ok) { - throw new LocalOpenclawDeliveryError({ - message: `Local OpenClaw hook rejected payload with status ${response.status}`, - retryable: - response.status === 401 || - response.status === 403 || - response.status >= 500 || - response.status === 404 || - response.status === 429, - code: - response.status === 401 || response.status === 403 - ? 
"HOOK_AUTH_REJECTED" - : undefined, - }); - } - } catch (error) { - if (error instanceof Error && error.name === "AbortError") { - if (input.shutdownSignal.aborted) { - throw new LocalOpenclawDeliveryError({ - code: "RUNTIME_STOPPING", - message: "Connector runtime is stopping", - retryable: false, - }); - } - throw new LocalOpenclawDeliveryError({ - message: "Local OpenClaw hook request timed out", - retryable: true, - }); - } - if (error instanceof LocalOpenclawDeliveryError) { - throw error; - } - throw new LocalOpenclawDeliveryError({ - message: sanitizeErrorReason(error), - retryable: true, - }); - } -} - -function toInitialAuthBundle( - credentials: ConnectorRuntimeCredentials, -): AgentAuthBundle { - return { - tokenType: "Bearer", - accessToken: credentials.accessToken?.trim() || "", - accessExpiresAt: - credentials.accessExpiresAt?.trim() || "1970-01-01T00:00:00.000Z", - refreshToken: parseRequiredString(credentials.refreshToken, "refreshToken"), - refreshExpiresAt: - credentials.refreshExpiresAt?.trim() || "2100-01-01T00:00:00.000Z", - }; -} - -function parseIsoTimestampMs(value: string): number | undefined { - const parsed = Date.parse(value); - if (!Number.isFinite(parsed)) { - return undefined; - } - - return parsed; -} - -function shouldRefreshAccessToken( - auth: AgentAuthBundle, - nowMs: number, -): boolean { - if (auth.accessToken.trim().length === 0) { - return true; - } - - const expiresAtMs = parseIsoTimestampMs(auth.accessExpiresAt); - if (expiresAtMs === undefined) { - return false; - } - - return expiresAtMs <= nowMs + ACCESS_TOKEN_REFRESH_SKEW_MS; -} - -function parseOutboundRelayRequest(payload: unknown): OutboundRelayRequest { - if (!isRecord(payload)) { - throw new AppError({ - code: "CONNECTOR_OUTBOUND_INVALID_REQUEST", - message: "Outbound relay request must be an object", - status: 400, - expose: true, - }); - } - - const replyTo = parseOptionalString(payload.replyTo); - if (replyTo !== undefined) { - try { - new URL(replyTo); - } 
catch { - throw new AppError({ - code: "CONNECTOR_OUTBOUND_INVALID_REQUEST", - message: "Outbound relay replyTo must be a valid URL", - status: 400, - expose: true, - }); - } - } - - return { - peer: parseRequiredString(payload.peer, "peer"), - peerDid: parseRequiredString(payload.peerDid, "peerDid"), - peerProxyUrl: parseRequiredString(payload.peerProxyUrl, "peerProxyUrl"), - payload: payload.payload, - conversationId: parseOptionalString(payload.conversationId), - replyTo, - }; -} - -function createWebSocketFactory(): ( - url: string, - headers: Record, -) => ConnectorWebSocket { - return (url: string, headers: Record) => { - const socket = new NodeWebSocket(url, { - headers, - }); - - return { - get readyState() { - return socket.readyState; - }, - send: (data: string) => { - socket.send(data); - }, - close: (code?: number, reason?: string) => { - socket.close(code, reason); - }, - addEventListener: (type, listener) => { - if (type === "open") { - socket.on("open", () => listener({})); - return; - } - - if (type === "message") { - socket.on("message", (data) => { - const text = - typeof data === "string" - ? data - : Array.isArray(data) - ? Buffer.concat(data).toString("utf8") - : Buffer.isBuffer(data) - ? 
data.toString("utf8") - : Buffer.from(data).toString("utf8"); - listener({ data: text }); - }); - return; - } - - if (type === "close") { - socket.on("close", (code, reason) => { - listener({ - code: Number(code), - reason: reason.toString("utf8"), - wasClean: Number(code) === 1000, - }); - }); - return; - } - - if (type === "unexpected-response") { - socket.on("unexpected-response", (_request, response) => { - listener({ - status: response.statusCode, - }); - }); - return; - } - - socket.on("error", (error) => listener({ error })); - }, - }; - }; -} - -async function writeRegistryAuthAtomic(input: { - auth: AgentAuthBundle; - configDir: string; - agentName: string; -}): Promise { - const targetPath = join( - input.configDir, - AGENTS_DIR_NAME, - input.agentName, - REGISTRY_AUTH_FILENAME, - ); - const tmpPath = `${targetPath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; - - await mkdir(dirname(targetPath), { recursive: true }); - await writeFile(tmpPath, `${JSON.stringify(input.auth, null, 2)}\n`, "utf8"); - await rename(tmpPath, targetPath); -} - -function parseRegistryAuthFromDisk( - payload: unknown, -): AgentAuthBundle | undefined { - if (!isRecord(payload)) { - return undefined; - } - - const tokenType = payload.tokenType; - const accessToken = payload.accessToken; - const accessExpiresAt = payload.accessExpiresAt; - const refreshToken = payload.refreshToken; - const refreshExpiresAt = payload.refreshExpiresAt; - - if ( - tokenType !== "Bearer" || - typeof accessToken !== "string" || - typeof accessExpiresAt !== "string" || - typeof refreshToken !== "string" || - typeof refreshExpiresAt !== "string" - ) { - return undefined; - } - - return { - tokenType, - accessToken, - accessExpiresAt, - refreshToken, - refreshExpiresAt, - }; -} - -async function readRegistryAuthFromDisk(input: { - configDir: string; - agentName: string; - logger: Logger; -}): Promise { - const authPath = join( - input.configDir, - AGENTS_DIR_NAME, - input.agentName, - 
REGISTRY_AUTH_FILENAME, - ); - - let raw: string; - try { - raw = await readFile(authPath, "utf8"); - } catch (error) { - if ( - error && - typeof error === "object" && - "code" in error && - (error as { code?: string }).code === "ENOENT" - ) { - return undefined; - } - - input.logger.warn("connector.runtime.registry_auth_read_failed", { - authPath, - reason: error instanceof Error ? error.message : "unknown", - }); - return undefined; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - input.logger.warn("connector.runtime.registry_auth_invalid_json", { - authPath, - }); - return undefined; - } - - const auth = parseRegistryAuthFromDisk(parsed); - if (auth === undefined) { - input.logger.warn("connector.runtime.registry_auth_invalid_shape", { - authPath, - }); - } - return auth; -} - -function resolveOutboundQueuePath(input: { - agentName: string; - configDir: string; -}): string { - return join( - input.configDir, - AGENTS_DIR_NAME, - input.agentName, - OUTBOUND_QUEUE_DIR_NAME, - OUTBOUND_QUEUE_FILENAME, - ); -} - -function createOutboundQueuePersistence(input: { - agentName: string; - configDir: string; - logger: Logger; -}): ConnectorOutboundQueuePersistence { - const queuePath = resolveOutboundQueuePath({ - configDir: input.configDir, - agentName: input.agentName, - }); - - const load = async (): Promise => { - let raw: string; - try { - raw = await readFile(queuePath, "utf8"); - } catch (error) { - if ( - error && - typeof error === "object" && - "code" in error && - (error as { code?: string }).code === "ENOENT" - ) { - return []; - } - - input.logger.warn("connector.outbound.persistence_read_failed", { - queuePath, - reason: sanitizeErrorReason(error), - }); - return []; - } - - if (raw.trim().length === 0) { - return []; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch (error) { - input.logger.warn("connector.outbound.persistence_invalid_json", { - queuePath, - reason: sanitizeErrorReason(error), - }); - 
return []; - } - - if (!Array.isArray(parsed)) { - return []; - } - - const frames: EnqueueFrame[] = []; - for (const candidate of parsed) { - const parsedFrame = enqueueFrameSchema.safeParse(candidate); - if (parsedFrame.success) { - frames.push(parsedFrame.data); - } - } - return frames; - }; - - const save = async (frames: EnqueueFrame[]): Promise => { - await mkdir(dirname(queuePath), { recursive: true }); - const tmpPath = `${queuePath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; - await writeFile(tmpPath, `${JSON.stringify(frames, null, 2)}\n`, "utf8"); - await rename(tmpPath, queuePath); - }; - - return { load, save }; -} - -async function readRequestJson(req: IncomingMessage): Promise { - const chunks: Buffer[] = []; - let totalBytes = 0; - - for await (const chunk of req) { - const next = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as string); - totalBytes += next.length; - if (totalBytes > MAX_OUTBOUND_BODY_BYTES) { - throw new AppError({ - code: "CONNECTOR_OUTBOUND_TOO_LARGE", - message: "Outbound relay payload too large", - status: 413, - expose: true, - }); - } - chunks.push(next); - } - - const bodyText = Buffer.concat(chunks).toString("utf8").trim(); - if (bodyText.length === 0) { - return {}; - } - - try { - return JSON.parse(bodyText); - } catch { - throw new AppError({ - code: "CONNECTOR_OUTBOUND_INVALID_JSON", - message: "Outbound relay payload must be valid JSON", - status: 400, - expose: true, - }); - } -} - -function parseRequestIds(value: unknown): string[] | undefined { - if (value === undefined) { - return undefined; - } - - if (!Array.isArray(value)) { - return []; - } - - return Array.from( - new Set( - value - .map((item) => (typeof item === "string" ? 
item.trim() : "")) - .filter((item) => item.length > 0), - ), - ); -} - -function writeJson( - res: ServerResponse, - status: number, - payload: Record, -): void { - res.statusCode = status; - res.setHeader("content-type", "application/json; charset=utf-8"); - res.end(`${JSON.stringify(payload)}\n`); -} - -function isRetryableRelayAuthError(error: unknown): boolean { - return ( - error instanceof AppError && - error.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && - error.status === 401 - ); -} - -async function buildUpgradeHeaders(input: { - ait: string; - accessToken: string; - wsUrl: URL; - secretKey: Uint8Array; -}): Promise> { - const timestamp = Math.floor(nowUtcMs() / 1000).toString(); - const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); - const signed = await signHttpRequest({ - method: "GET", - pathWithQuery: toPathWithQuery(input.wsUrl), - timestamp, - nonce, - secretKey: input.secretKey, - }); - - return { - authorization: `Claw ${input.ait}`, - [AGENT_ACCESS_HEADER]: input.accessToken, - ...signed.headers, - }; -} - -async function loadTrustedReceiptTargets(input: { - configDir: string; - logger: Logger; -}): Promise { - const trustedReceiptTargets: TrustedReceiptTargets = { - origins: new Set(), - byAgentDid: new Map(), - }; - - const relayRuntimeConfigPath = join( - input.configDir, - OPENCLAW_RELAY_RUNTIME_FILE_NAME, - ); - let relayRuntimeRaw: string; - try { - relayRuntimeRaw = await readFile(relayRuntimeConfigPath, "utf8"); - } catch (error) { - if ( - error && - typeof error === "object" && - "code" in error && - (error as { code?: string }).code === "ENOENT" - ) { - return trustedReceiptTargets; - } - - input.logger.warn("connector.delivery_receipt.runtime_config_read_failed", { - relayRuntimeConfigPath, - reason: sanitizeErrorReason(error), - }); - return trustedReceiptTargets; - } - - let relayRuntimeParsed: unknown; - try { - relayRuntimeParsed = JSON.parse(relayRuntimeRaw); - } catch (error) { - input.logger.warn( - 
"connector.delivery_receipt.runtime_config_invalid_json", - { - relayRuntimeConfigPath, - reason: sanitizeErrorReason(error), - }, - ); - return trustedReceiptTargets; - } - - if (!isRecord(relayRuntimeParsed)) { - return trustedReceiptTargets; - } - - const relayTransformPeersPathRaw = - typeof relayRuntimeParsed.relayTransformPeersPath === "string" && - relayRuntimeParsed.relayTransformPeersPath.trim().length > 0 - ? relayRuntimeParsed.relayTransformPeersPath.trim() - : undefined; - if (!relayTransformPeersPathRaw) { - return trustedReceiptTargets; - } - - const relayTransformPeersPath = isAbsolute(relayTransformPeersPathRaw) - ? relayTransformPeersPathRaw - : join(input.configDir, relayTransformPeersPathRaw); - - let relayTransformPeersRaw: string; - try { - relayTransformPeersRaw = await readFile(relayTransformPeersPath, "utf8"); - } catch (error) { - input.logger.warn("connector.delivery_receipt.peers_snapshot_read_failed", { - relayTransformPeersPath, - reason: sanitizeErrorReason(error), - }); - return trustedReceiptTargets; - } - - let relayTransformPeersParsed: unknown; - try { - relayTransformPeersParsed = JSON.parse(relayTransformPeersRaw); - } catch (error) { - input.logger.warn( - "connector.delivery_receipt.peers_snapshot_invalid_json", - { - relayTransformPeersPath, - reason: sanitizeErrorReason(error), - }, - ); - return trustedReceiptTargets; - } - - if (!isRecord(relayTransformPeersParsed)) { - return trustedReceiptTargets; - } - - const peersValue = relayTransformPeersParsed.peers; - if (!isRecord(peersValue)) { - return trustedReceiptTargets; - } - - for (const peerValue of Object.values(peersValue)) { - if (!isRecord(peerValue)) { - continue; - } - - const agentDid = - typeof peerValue.did === "string" && peerValue.did.trim().length > 0 - ? 
peerValue.did.trim() - : undefined; - const origin = parseOptionalProxyOrigin(peerValue.proxyUrl); - if (!agentDid || !origin) { - continue; - } - - trustedReceiptTargets.origins.add(origin); - trustedReceiptTargets.byAgentDid.set(agentDid, origin); - } - - return trustedReceiptTargets; -} +import { + readRegistryAuthFromDisk, + toInitialAuthBundle, + writeRegistryAuthAtomic, +} from "./runtime/auth-storage.js"; +import { + LocalOpenclawDeliveryError, + sanitizeErrorReason, +} from "./runtime/errors.js"; +import { + deliverToOpenclawHook, + readOpenclawHookTokenFromRelayRuntimeConfig, + waitWithAbort, +} from "./runtime/openclaw.js"; +import { createOutboundQueuePersistence } from "./runtime/outbound-queue.js"; +import { + parseRequiredString, + shouldRefreshAccessToken, +} from "./runtime/parse.js"; +import { + computeReplayDelayMs, + computeRuntimeReplayRetryDelayMs, + loadInboundReplayPolicy, + loadOpenclawProbePolicy, +} from "./runtime/policy.js"; +import { createRelayService } from "./runtime/relay-service.js"; +import { createRuntimeRequestHandler } from "./runtime/server.js"; +import { loadTrustedReceiptTargets } from "./runtime/trusted-receipts.js"; +import type { + ConnectorRuntimeHandle, + InboundReplayStatus, + InboundReplayView, + OpenclawGatewayProbeStatus, + StartConnectorRuntimeInput, +} from "./runtime/types.js"; +import { + normalizeOutboundBaseUrl, + normalizeOutboundPath, + normalizeWebSocketUrl, + resolveOpenclawBaseUrl, + resolveOpenclawHookPath, + resolveOpenclawHookToken, + toHttpOriginFromWebSocketUrl, + toOpenclawHookUrl, +} from "./runtime/url.js"; +import { buildUpgradeHeaders, createWebSocketFactory } from "./runtime/ws.js"; + +export type { + ConnectorRuntimeHandle, + StartConnectorRuntimeInput, +} from "./runtime/types.js"; export async function startConnectorRuntime( input: StartConnectorRuntimeInput, @@ -1168,7 +77,6 @@ export async function startConnectorRuntime( const secretKey = decodeBase64url( 
parseRequiredString(input.credentials.secretKey, "secretKey"), ); - let currentAuth = toInitialAuthBundle(input.credentials); const syncAuthFromDisk = async (): Promise => { @@ -1196,28 +104,35 @@ export async function startConnectorRuntime( }); }; - const refreshCurrentAuthIfNeeded = async (): Promise => { - await syncAuthFromDisk(); - if (!shouldRefreshAccessToken(currentAuth, nowUtcMs())) { - return; - } - - await refreshCurrentAuth(); + const persistCurrentAuth = async ( + nextAuth: typeof currentAuth, + ): Promise => { + currentAuth = nextAuth; + await writeRegistryAuthAtomic({ + configDir: input.configDir, + agentName: input.agentName, + auth: nextAuth, + }); }; const refreshCurrentAuth = async (): Promise => { - currentAuth = await refreshAgentAuthWithClawProof({ + const refreshed = await refreshAgentAuthWithClawProof({ registryUrl: input.registryUrl, ait: input.credentials.ait, secretKey, refreshToken: currentAuth.refreshToken, fetchImpl, }); - await writeRegistryAuthAtomic({ - configDir: input.configDir, - agentName: input.agentName, - auth: currentAuth, - }); + await persistCurrentAuth(refreshed); + }; + + const refreshCurrentAuthIfNeeded = async (): Promise => { + await syncAuthFromDisk(); + if (!shouldRefreshAccessToken(currentAuth, nowUtcMs())) { + return; + } + + await refreshCurrentAuth(); }; await refreshCurrentAuthIfNeeded(); @@ -1246,6 +161,7 @@ export async function startConnectorRuntime( logger, }); trustedReceiptTargets.origins.add(defaultReceiptCallbackOrigin); + const inboundInbox = createConnectorInboundInbox({ configDir: input.configDir, agentName: input.agentName, @@ -1254,12 +170,14 @@ export async function startConnectorRuntime( maxPendingMessages: inboundReplayPolicy.inboxMaxMessages, maxPendingBytes: inboundReplayPolicy.inboxMaxBytes, }); + const inboundReplayStatus: InboundReplayStatus = { replayerActive: false, }; const openclawGatewayProbeStatus: OpenclawGatewayProbeStatus = { reachable: true, }; + let openclawProbeInFlight = false; 
let runtimeStopping = false; let replayInFlight = false; @@ -1286,12 +204,11 @@ export async function startConnectorRuntime( configDir: input.configDir, logger, }); - const nextToken = diskToken; - if (nextToken === currentOpenclawHookToken) { + if (diskToken === currentOpenclawHookToken) { return; } - currentOpenclawHookToken = nextToken; + currentOpenclawHookToken = diskToken; logger.info("connector.runtime.openclaw_hook_token_synced", { reason, source: diskToken !== undefined ? "openclaw-relay.json" : "unset", @@ -1411,6 +328,29 @@ export async function startConnectorRuntime( } }; + const readInboundReplayView = async (): Promise => { + const snapshot: ConnectorInboundInboxSnapshot = + await inboundInbox.getSnapshot(); + return { + snapshot, + replayerActive: inboundReplayStatus.replayerActive || replayInFlight, + lastReplayAt: inboundReplayStatus.lastReplayAt, + lastReplayError: inboundReplayStatus.lastReplayError, + openclawGateway: { + url: openclawProbeUrl, + reachable: openclawGatewayProbeStatus.reachable, + lastCheckedAt: openclawGatewayProbeStatus.lastCheckedAt, + lastSuccessAt: openclawGatewayProbeStatus.lastSuccessAt, + lastFailureReason: openclawGatewayProbeStatus.lastFailureReason, + }, + openclawHook: { + url: openclawHookUrl, + lastAttemptAt: inboundReplayStatus.lastAttemptAt, + lastAttemptStatus: inboundReplayStatus.lastAttemptStatus, + }, + }; + }; + const replayPendingInboundMessages = async (): Promise => { if (runtimeStopping || replayInFlight) { return; @@ -1474,7 +414,7 @@ export async function startConnectorRuntime( if (pending.replyTo) { try { - await postDeliveryReceipt({ + await relayService.postDeliveryReceipt({ requestId: pending.requestId, senderAgentDid: pending.fromAgentDid, recipientAgentDid: pending.toAgentDid, @@ -1533,7 +473,7 @@ export async function startConnectorRuntime( if (markResult.movedToDeadLetter && pending.replyTo) { try { - await postDeliveryReceipt({ + await relayService.postDeliveryReceipt({ requestId: 
pending.requestId, senderAgentDid: pending.fromAgentDid, recipientAgentDid: pending.toAgentDid, @@ -1559,28 +499,6 @@ export async function startConnectorRuntime( } }; - const readInboundReplayView = async (): Promise => { - const snapshot = await inboundInbox.getSnapshot(); - return { - snapshot, - replayerActive: inboundReplayStatus.replayerActive || replayInFlight, - lastReplayAt: inboundReplayStatus.lastReplayAt, - lastReplayError: inboundReplayStatus.lastReplayError, - openclawGateway: { - url: openclawProbeUrl, - reachable: openclawGatewayProbeStatus.reachable, - lastCheckedAt: openclawGatewayProbeStatus.lastCheckedAt, - lastSuccessAt: openclawGatewayProbeStatus.lastSuccessAt, - lastFailureReason: openclawGatewayProbeStatus.lastFailureReason, - }, - openclawHook: { - url: openclawHookUrl, - lastAttemptAt: inboundReplayStatus.lastAttemptAt, - lastAttemptStatus: inboundReplayStatus.lastAttemptStatus, - }, - }; - }; - const outboundQueuePersistence = createOutboundQueuePersistence({ configDir: input.configDir, agentName: input.agentName, @@ -1642,382 +560,38 @@ export async function startConnectorRuntime( const outboundBaseUrl = normalizeOutboundBaseUrl(input.outboundBaseUrl); const outboundPath = normalizeOutboundPath(input.outboundPath); - const statusPath = DEFAULT_CONNECTOR_STATUS_PATH; - const deadLetterPath = "/v1/inbound/dead-letter"; - const deadLetterReplayPath = "/v1/inbound/dead-letter/replay"; - const deadLetterPurgePath = "/v1/inbound/dead-letter/purge"; const outboundUrl = new URL(outboundPath, outboundBaseUrl).toString(); - const relayToPeer = async (request: OutboundRelayRequest): Promise => { - await syncAuthFromDisk(); - const peerUrl = new URL(request.peerProxyUrl); - trustedReceiptTargets.origins.add(peerUrl.origin); - trustedReceiptTargets.byAgentDid.set(request.peerDid, peerUrl.origin); - const body = JSON.stringify(request.payload ?? 
{}); - const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; - - const performRelay = async (auth: AgentAuthBundle): Promise => { - const replyTo = request.replyTo ?? defaultReceiptCallbackUrl; - const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); - const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: toPathWithQuery(peerUrl), - timestamp: unixSeconds, - nonce, - body: new TextEncoder().encode(body), - secretKey, - }); - - const response = await fetchImpl(peerUrl.toString(), { - method: "POST", - headers: { - Authorization: `Claw ${input.credentials.ait}`, - "Content-Type": "application/json", - [AGENT_ACCESS_HEADER]: auth.accessToken, - [RELAY_RECIPIENT_AGENT_DID_HEADER]: request.peerDid, - ...(request.conversationId - ? { [RELAY_CONVERSATION_ID_HEADER]: request.conversationId } - : {}), - [RELAY_DELIVERY_RECEIPT_URL_HEADER]: replyTo, - ...signed.headers, - }, - body, - }); - - if (!response.ok) { - if (response.status === 401) { - throw new AppError({ - code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", - message: "Peer relay rejected agent auth credentials", - status: 401, - expose: true, - }); - } - - throw new AppError({ - code: "CONNECTOR_OUTBOUND_DELIVERY_FAILED", - message: "Peer relay request failed", - status: 502, - }); - } - }; - - await executeWithAgentAuthRefreshRetry({ - key: refreshKey, - shouldRetry: isRetryableRelayAuthError, - getAuth: async () => { - await syncAuthFromDisk(); - return currentAuth; - }, - persistAuth: async (nextAuth) => { - currentAuth = nextAuth; - await writeRegistryAuthAtomic({ - configDir: input.configDir, - agentName: input.agentName, - auth: nextAuth, - }); - }, - refreshAuth: async (auth) => - refreshAgentAuthWithClawProof({ - registryUrl: input.registryUrl, - ait: input.credentials.ait, - secretKey, - refreshToken: auth.refreshToken, - fetchImpl, - }), - perform: performRelay, - }); - }; - - const 
postDeliveryReceipt = async (inputReceipt: { - reason?: string; - recipientAgentDid: string; - replyTo: string; - requestId: string; - senderAgentDid: string; - status: OutboundDeliveryReceiptStatus; - }): Promise => { - await syncAuthFromDisk(); - const receiptUrl = new URL(inputReceipt.replyTo); - if (receiptUrl.pathname !== RELAY_DELIVERY_RECEIPTS_PATH) { - throw new AppError({ - code: "CONNECTOR_DELIVERY_RECEIPT_INVALID_TARGET", - message: "Delivery receipt callback target is invalid", - status: 400, - }); - } - const expectedSenderOrigin = trustedReceiptTargets.byAgentDid.get( - inputReceipt.senderAgentDid, - ); - if ( - expectedSenderOrigin !== undefined && - receiptUrl.origin !== expectedSenderOrigin - ) { - throw new AppError({ - code: "CONNECTOR_DELIVERY_RECEIPT_UNTRUSTED_TARGET", - message: "Delivery receipt callback target is untrusted", - status: 400, - }); - } - if ( - expectedSenderOrigin === undefined && - !trustedReceiptTargets.origins.has(receiptUrl.origin) - ) { - throw new AppError({ - code: "CONNECTOR_DELIVERY_RECEIPT_UNTRUSTED_TARGET", - message: "Delivery receipt callback target is untrusted", - status: 400, - }); - } - const body = JSON.stringify({ - requestId: inputReceipt.requestId, - senderAgentDid: inputReceipt.senderAgentDid, - recipientAgentDid: inputReceipt.recipientAgentDid, - status: inputReceipt.status, - reason: inputReceipt.reason, - processedAt: nowIso(), - }); - const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}:delivery-receipt`; - - const performReceipt = async (auth: AgentAuthBundle): Promise => { - const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); - const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: toPathWithQuery(receiptUrl), - timestamp: unixSeconds, - nonce, - body: new TextEncoder().encode(body), - secretKey, - }); - - const response = await fetchImpl(receiptUrl.toString(), { - method: 
"POST", - headers: { - Authorization: `Claw ${input.credentials.ait}`, - "Content-Type": "application/json", - [AGENT_ACCESS_HEADER]: auth.accessToken, - ...signed.headers, - }, - body, - }); - - if (!response.ok) { - if (response.status === 401) { - throw new AppError({ - code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", - message: - "Delivery receipt callback rejected agent auth credentials", - status: 401, - expose: true, - }); - } - - throw new AppError({ - code: "CONNECTOR_DELIVERY_RECEIPT_FAILED", - message: "Delivery receipt callback request failed", - status: 502, - }); - } - }; + const relayService = createRelayService({ + configDir: input.configDir, + agentName: input.agentName, + registryUrl: input.registryUrl, + fetchImpl, + secretKey, + ait: input.credentials.ait, + defaultReceiptCallbackUrl, + trustedReceiptTargets, + getCurrentAuth: () => currentAuth, + setCurrentAuth: persistCurrentAuth, + syncAuthFromDisk, + }); - await executeWithAgentAuthRefreshRetry({ - key: refreshKey, - shouldRetry: isRetryableRelayAuthError, - getAuth: async () => { - await syncAuthFromDisk(); - return currentAuth; - }, - persistAuth: async (nextAuth) => { - currentAuth = nextAuth; - await writeRegistryAuthAtomic({ - configDir: input.configDir, - agentName: input.agentName, - auth: nextAuth, - }); + const server = createServer( + createRuntimeRequestHandler({ + connectorClient, + inboundInbox, + logger, + outboundBaseUrl, + outboundPath, + outboundUrl, + readInboundReplayView, + relayToPeer: relayService.relayToPeer, + replayPendingInboundMessages: () => { + void replayPendingInboundMessages(); }, - refreshAuth: async (auth) => - refreshAgentAuthWithClawProof({ - registryUrl: input.registryUrl, - ait: input.credentials.ait, - secretKey, - refreshToken: auth.refreshToken, - fetchImpl, - }), - perform: performReceipt, - }); - }; - - const server = createServer(async (req, res) => { - const requestPath = req.url - ? 
new URL(req.url, outboundBaseUrl).pathname - : "/"; - - if (requestPath === statusPath) { - if (req.method !== "GET") { - res.statusCode = 405; - res.setHeader("allow", "GET"); - writeJson(res, 405, { error: "Method Not Allowed" }); - return; - } - - let inboundReplayView: InboundReplayView; - try { - inboundReplayView = await readInboundReplayView(); - } catch (error) { - logger.warn("connector.status.inbound_inbox_unavailable", { - reason: sanitizeErrorReason(error), - }); - writeJson(res, 500, { - status: "error", - error: { - code: "CONNECTOR_INBOUND_INBOX_UNAVAILABLE", - message: "Connector inbound inbox status is unavailable", - }, - outboundUrl, - websocketUrl: wsUrl, - websocket: { - connected: connectorClient.isConnected(), - }, - }); - return; - } - const clientMetrics = connectorClient.getMetricsSnapshot(); - writeJson(res, 200, { - status: "ok", - outboundUrl, - websocketUrl: wsUrl, - websocket: { - ...clientMetrics.connection, - }, - inbound: { - pending: inboundReplayView.snapshot.pending, - deadLetter: inboundReplayView.snapshot.deadLetter, - replay: { - replayerActive: inboundReplayView.replayerActive, - lastReplayAt: inboundReplayView.lastReplayAt, - lastReplayError: inboundReplayView.lastReplayError, - }, - openclawGateway: inboundReplayView.openclawGateway, - openclawHook: inboundReplayView.openclawHook, - }, - outbound: { - queue: { - pendingCount: connectorClient.getQueuedOutboundCount(), - }, - }, - metrics: { - heartbeat: clientMetrics.heartbeat, - inboundDelivery: clientMetrics.inboundDelivery, - outboundQueue: clientMetrics.outboundQueue, - }, - }); - return; - } - - if (requestPath === deadLetterPath) { - if (req.method !== "GET") { - res.statusCode = 405; - res.setHeader("allow", "GET"); - writeJson(res, 405, { error: "Method Not Allowed" }); - return; - } - - const deadLetterItems = await inboundInbox.listDeadLetter(); - writeJson(res, 200, { - status: "ok", - count: deadLetterItems.length, - items: deadLetterItems, - }); - return; - } - 
- if (requestPath === deadLetterReplayPath) { - if (req.method !== "POST") { - res.statusCode = 405; - res.setHeader("allow", "POST"); - writeJson(res, 405, { error: "Method Not Allowed" }); - return; - } - - const body = await readRequestJson(req); - const requestIds = isRecord(body) - ? parseRequestIds(body.requestIds) - : undefined; - const replayResult = await inboundInbox.replayDeadLetter({ requestIds }); - void replayPendingInboundMessages(); - writeJson(res, 200, { - status: "ok", - replayedCount: replayResult.replayedCount, - }); - return; - } - - if (requestPath === deadLetterPurgePath) { - if (req.method !== "POST") { - res.statusCode = 405; - res.setHeader("allow", "POST"); - writeJson(res, 405, { error: "Method Not Allowed" }); - return; - } - - const body = await readRequestJson(req); - const requestIds = isRecord(body) - ? parseRequestIds(body.requestIds) - : undefined; - const purgeResult = await inboundInbox.purgeDeadLetter({ requestIds }); - writeJson(res, 200, { - status: "ok", - purgedCount: purgeResult.purgedCount, - }); - return; - } - - if (requestPath !== outboundPath) { - writeJson(res, 404, { error: "Not Found" }); - return; - } - - if (req.method !== "POST") { - res.statusCode = 405; - res.setHeader("allow", "POST"); - writeJson(res, 405, { error: "Method Not Allowed" }); - return; - } - - try { - const requestBody = await readRequestJson(req); - const relayRequest = parseOutboundRelayRequest(requestBody); - await relayToPeer(relayRequest); - writeJson(res, 202, { accepted: true, peer: relayRequest.peer }); - } catch (error) { - if (error instanceof AppError) { - logger.warn("connector.outbound.rejected", { - code: error.code, - status: error.status, - message: error.message, - }); - writeJson(res, error.status, { - error: { - code: error.code, - message: error.message, - }, - }); - return; - } - - logger.error("connector.outbound.failed", { - errorName: error instanceof Error ? 
error.name : "unknown", - }); - writeJson(res, 500, { - error: { - code: "CONNECTOR_OUTBOUND_INTERNAL", - message: "Connector outbound relay failed", - }, - }); - } - }); + wsUrl, + }), + ); let stoppedResolve: (() => void) | undefined; const stoppedPromise = new Promise((resolve) => { diff --git a/packages/connector/src/runtime/AGENTS.md b/packages/connector/src/runtime/AGENTS.md new file mode 100644 index 0000000..a96b326 --- /dev/null +++ b/packages/connector/src/runtime/AGENTS.md @@ -0,0 +1,12 @@ +# AGENTS.md (packages/connector/src/runtime) + +## Purpose +- Keep connector runtime orchestration readable by separating auth, transport, relay, and server concerns. + +## Rules +- Keep auth disk sync/persistence in `auth-storage.ts`; avoid ad-hoc credential writes. +- Keep hook-delivery retry and abort behavior in `openclaw.ts`. +- Keep replay/probe policy loading and retry-delay calculations in `policy.ts`. +- Keep outbound relay and receipt callbacks in `relay-service.ts`. +- Keep HTTP route handling in `server.ts` and avoid embedding route logic in helpers. +- Keep URL/header/parse helpers focused in `url.ts`, `ws.ts`, and `parse.ts`. 
diff --git a/packages/connector/src/runtime/auth-storage.ts b/packages/connector/src/runtime/auth-storage.ts new file mode 100644 index 0000000..684f3fb --- /dev/null +++ b/packages/connector/src/runtime/auth-storage.ts @@ -0,0 +1,123 @@ +import { mkdir, readFile, rename, writeFile } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import type { AgentAuthBundle, Logger } from "@clawdentity/sdk"; +import { nowUtcMs } from "@clawdentity/sdk"; +import { AGENTS_DIR_NAME, REGISTRY_AUTH_FILENAME } from "./constants.js"; +import { sanitizeErrorReason } from "./errors.js"; +import { isRecord, parseRequiredString } from "./parse.js"; +import type { ConnectorRuntimeCredentials } from "./types.js"; + +export function toInitialAuthBundle( + credentials: ConnectorRuntimeCredentials, +): AgentAuthBundle { + return { + tokenType: "Bearer", + accessToken: credentials.accessToken?.trim() || "", + accessExpiresAt: + credentials.accessExpiresAt?.trim() || "1970-01-01T00:00:00.000Z", + refreshToken: parseRequiredString(credentials.refreshToken, "refreshToken"), + refreshExpiresAt: + credentials.refreshExpiresAt?.trim() || "2100-01-01T00:00:00.000Z", + }; +} + +export async function writeRegistryAuthAtomic(input: { + auth: AgentAuthBundle; + configDir: string; + agentName: string; +}): Promise { + const targetPath = join( + input.configDir, + AGENTS_DIR_NAME, + input.agentName, + REGISTRY_AUTH_FILENAME, + ); + const tmpPath = `${targetPath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; + + await mkdir(dirname(targetPath), { recursive: true }); + await writeFile(tmpPath, `${JSON.stringify(input.auth, null, 2)}\n`, "utf8"); + await rename(tmpPath, targetPath); +} + +function parseRegistryAuthFromDisk( + payload: unknown, +): AgentAuthBundle | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const tokenType = payload.tokenType; + const accessToken = payload.accessToken; + const accessExpiresAt = payload.accessExpiresAt; + const 
refreshToken = payload.refreshToken; + const refreshExpiresAt = payload.refreshExpiresAt; + + if ( + tokenType !== "Bearer" || + typeof accessToken !== "string" || + typeof accessExpiresAt !== "string" || + typeof refreshToken !== "string" || + typeof refreshExpiresAt !== "string" + ) { + return undefined; + } + + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + }; +} + +export async function readRegistryAuthFromDisk(input: { + configDir: string; + agentName: string; + logger: Logger; +}): Promise { + const authPath = join( + input.configDir, + AGENTS_DIR_NAME, + input.agentName, + REGISTRY_AUTH_FILENAME, + ); + + let raw: string; + try { + raw = await readFile(authPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return undefined; + } + + input.logger.warn("connector.runtime.registry_auth_read_failed", { + authPath, + reason: sanitizeErrorReason(error), + }); + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + input.logger.warn("connector.runtime.registry_auth_invalid_json", { + authPath, + }); + return undefined; + } + + const auth = parseRegistryAuthFromDisk(parsed); + if (auth === undefined) { + input.logger.warn("connector.runtime.registry_auth_invalid_shape", { + authPath, + }); + } + return auth; +} diff --git a/packages/connector/src/runtime/constants.ts b/packages/connector/src/runtime/constants.ts new file mode 100644 index 0000000..3f155d2 --- /dev/null +++ b/packages/connector/src/runtime/constants.ts @@ -0,0 +1,14 @@ +export const REGISTRY_AUTH_FILENAME = "registry-auth.json"; +export const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; +export const AGENTS_DIR_NAME = "agents"; +export const OUTBOUND_QUEUE_DIR_NAME = "outbound-queue"; +export const OUTBOUND_QUEUE_FILENAME = "queue.json"; +export const REFRESH_SINGLE_FLIGHT_PREFIX = 
"connector-runtime"; +export const NONCE_SIZE = 16; +export const MAX_OUTBOUND_BODY_BYTES = 1024 * 1024; +export const ACCESS_TOKEN_REFRESH_SKEW_MS = 30_000; + +export const CONNECTOR_DEAD_LETTER_PATH = "/v1/inbound/dead-letter"; +export const CONNECTOR_DEAD_LETTER_REPLAY_PATH = + "/v1/inbound/dead-letter/replay"; +export const CONNECTOR_DEAD_LETTER_PURGE_PATH = "/v1/inbound/dead-letter/purge"; diff --git a/packages/connector/src/runtime/errors.ts b/packages/connector/src/runtime/errors.ts new file mode 100644 index 0000000..8620311 --- /dev/null +++ b/packages/connector/src/runtime/errors.ts @@ -0,0 +1,33 @@ +import { sanitizeErrorReason as sanitizeReason } from "@clawdentity/common"; +import { AppError } from "@clawdentity/sdk"; + +export class LocalOpenclawDeliveryError extends Error { + readonly code?: "HOOK_AUTH_REJECTED" | "RUNTIME_STOPPING"; + readonly retryable: boolean; + + constructor(input: { + code?: "HOOK_AUTH_REJECTED" | "RUNTIME_STOPPING"; + message: string; + retryable: boolean; + }) { + super(input.message); + this.name = "LocalOpenclawDeliveryError"; + this.code = input.code; + this.retryable = input.retryable; + } +} + +export function sanitizeErrorReason(error: unknown): string { + return sanitizeReason(error, { + fallback: "Unknown error", + maxLength: 240, + }); +} + +export function isRetryableRelayAuthError(error: unknown): boolean { + return ( + error instanceof AppError && + error.code === "OPENCLAW_RELAY_AGENT_AUTH_REJECTED" && + error.status === 401 + ); +} diff --git a/packages/connector/src/runtime/http.ts b/packages/connector/src/runtime/http.ts new file mode 100644 index 0000000..6e09076 --- /dev/null +++ b/packages/connector/src/runtime/http.ts @@ -0,0 +1,86 @@ +import type { IncomingMessage, ServerResponse } from "node:http"; +import { AppError } from "@clawdentity/sdk"; +import { MAX_OUTBOUND_BODY_BYTES } from "./constants.js"; +import { isRecord, parseOptionalString, parseRequiredString } from "./parse.js"; +import type { 
OutboundRelayRequest } from "./types.js"; + +export async function readRequestJson(req: IncomingMessage): Promise { + const chunks: Buffer[] = []; + let totalBytes = 0; + + for await (const chunk of req) { + const next = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as string); + totalBytes += next.length; + if (totalBytes > MAX_OUTBOUND_BODY_BYTES) { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_TOO_LARGE", + message: "Outbound relay payload too large", + status: 413, + expose: true, + }); + } + chunks.push(next); + } + + const bodyText = Buffer.concat(chunks).toString("utf8").trim(); + if (bodyText.length === 0) { + return {}; + } + + try { + return JSON.parse(bodyText); + } catch { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_INVALID_JSON", + message: "Outbound relay payload must be valid JSON", + status: 400, + expose: true, + }); + } +} + +export function parseOutboundRelayRequest( + payload: unknown, +): OutboundRelayRequest { + if (!isRecord(payload)) { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_INVALID_REQUEST", + message: "Outbound relay request must be an object", + status: 400, + expose: true, + }); + } + + const replyTo = parseOptionalString(payload.replyTo); + if (replyTo !== undefined) { + try { + new URL(replyTo); + } catch { + throw new AppError({ + code: "CONNECTOR_OUTBOUND_INVALID_REQUEST", + message: "Outbound relay replyTo must be a valid URL", + status: 400, + expose: true, + }); + } + } + + return { + peer: parseRequiredString(payload.peer, "peer"), + peerDid: parseRequiredString(payload.peerDid, "peerDid"), + peerProxyUrl: parseRequiredString(payload.peerProxyUrl, "peerProxyUrl"), + payload: payload.payload, + conversationId: parseOptionalString(payload.conversationId), + replyTo, + }; +} + +export function writeJson( + res: ServerResponse, + status: number, + payload: Record, +): void { + res.statusCode = status; + res.setHeader("content-type", "application/json; charset=utf-8"); + 
res.end(`${JSON.stringify(payload)}\n`); +} diff --git a/packages/connector/src/runtime/openclaw.ts b/packages/connector/src/runtime/openclaw.ts new file mode 100644 index 0000000..0a97a5b --- /dev/null +++ b/packages/connector/src/runtime/openclaw.ts @@ -0,0 +1,164 @@ +import { readFile } from "node:fs/promises"; +import { join } from "node:path"; +import type { Logger } from "@clawdentity/sdk"; +import { DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS } from "../constants.js"; +import { OPENCLAW_RELAY_RUNTIME_FILE_NAME } from "./constants.js"; +import { LocalOpenclawDeliveryError, sanitizeErrorReason } from "./errors.js"; +import { isRecord } from "./parse.js"; + +export async function waitWithAbort(input: { + delayMs: number; + signal: AbortSignal; +}): Promise { + if (input.signal.aborted) { + throw new LocalOpenclawDeliveryError({ + code: "RUNTIME_STOPPING", + message: "Connector runtime is stopping", + retryable: false, + }); + } + + await new Promise((resolve, reject) => { + const timeoutHandle = setTimeout(() => { + input.signal.removeEventListener("abort", onAbort); + resolve(); + }, input.delayMs); + + const onAbort = () => { + clearTimeout(timeoutHandle); + input.signal.removeEventListener("abort", onAbort); + reject( + new LocalOpenclawDeliveryError({ + code: "RUNTIME_STOPPING", + message: "Connector runtime is stopping", + retryable: false, + }), + ); + }; + + input.signal.addEventListener("abort", onAbort, { once: true }); + }); +} + +export async function readOpenclawHookTokenFromRelayRuntimeConfig(input: { + configDir: string; + logger: Logger; +}): Promise { + const runtimeConfigPath = join( + input.configDir, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + ); + let raw: string; + try { + raw = await readFile(runtimeConfigPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return undefined; + } + + 
input.logger.warn("connector.runtime.openclaw_relay_config_read_failed", { + runtimeConfigPath, + reason: sanitizeErrorReason(error), + }); + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + input.logger.warn("connector.runtime.openclaw_relay_config_invalid_json", { + runtimeConfigPath, + }); + return undefined; + } + + if (!isRecord(parsed)) { + return undefined; + } + + const tokenValue = parsed.openclawHookToken; + if (typeof tokenValue !== "string") { + return undefined; + } + + const trimmed = tokenValue.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +export async function deliverToOpenclawHook(input: { + fetchImpl: typeof fetch; + fromAgentDid: string; + openclawHookToken?: string; + openclawHookUrl: string; + payload: unknown; + requestId: string; + shutdownSignal: AbortSignal; + toAgentDid: string; +}): Promise { + const timeoutSignal = AbortSignal.timeout( + DEFAULT_OPENCLAW_DELIVER_TIMEOUT_MS, + ); + const signal = AbortSignal.any([input.shutdownSignal, timeoutSignal]); + + const headers: Record = { + "content-type": "application/json", + "x-clawdentity-agent-did": input.fromAgentDid, + "x-clawdentity-to-agent-did": input.toAgentDid, + "x-clawdentity-verified": "true", + "x-request-id": input.requestId, + }; + if (input.openclawHookToken !== undefined) { + headers["x-openclaw-token"] = input.openclawHookToken; + } + + try { + const response = await input.fetchImpl(input.openclawHookUrl, { + method: "POST", + headers, + body: JSON.stringify(input.payload), + signal, + }); + if (!response.ok) { + throw new LocalOpenclawDeliveryError({ + message: `Local OpenClaw hook rejected payload with status ${response.status}`, + retryable: + response.status === 401 || + response.status === 403 || + response.status >= 500 || + response.status === 404 || + response.status === 429, + code: + response.status === 401 || response.status === 403 + ? 
"HOOK_AUTH_REJECTED" + : undefined, + }); + } + } catch (error) { + if (error instanceof Error && error.name === "AbortError") { + if (input.shutdownSignal.aborted) { + throw new LocalOpenclawDeliveryError({ + code: "RUNTIME_STOPPING", + message: "Connector runtime is stopping", + retryable: false, + }); + } + throw new LocalOpenclawDeliveryError({ + message: "Local OpenClaw hook request timed out", + retryable: true, + }); + } + if (error instanceof LocalOpenclawDeliveryError) { + throw error; + } + throw new LocalOpenclawDeliveryError({ + message: sanitizeErrorReason(error), + retryable: true, + }); + } +} diff --git a/packages/connector/src/runtime/outbound-queue.ts b/packages/connector/src/runtime/outbound-queue.ts new file mode 100644 index 0000000..911e1e7 --- /dev/null +++ b/packages/connector/src/runtime/outbound-queue.ts @@ -0,0 +1,96 @@ +import { mkdir, readFile, rename, writeFile } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { nowUtcMs } from "@clawdentity/sdk"; +import type { ConnectorOutboundQueuePersistence } from "../client.js"; +import { type EnqueueFrame, enqueueFrameSchema } from "../frames.js"; +import { + AGENTS_DIR_NAME, + OUTBOUND_QUEUE_DIR_NAME, + OUTBOUND_QUEUE_FILENAME, +} from "./constants.js"; +import { sanitizeErrorReason } from "./errors.js"; + +function resolveOutboundQueuePath(input: { + agentName: string; + configDir: string; +}): string { + return join( + input.configDir, + AGENTS_DIR_NAME, + input.agentName, + OUTBOUND_QUEUE_DIR_NAME, + OUTBOUND_QUEUE_FILENAME, + ); +} + +export function createOutboundQueuePersistence(input: { + agentName: string; + configDir: string; + logger: { + warn: (event: string, payload?: Record) => void; + }; +}): ConnectorOutboundQueuePersistence { + const queuePath = resolveOutboundQueuePath({ + configDir: input.configDir, + agentName: input.agentName, + }); + + const load = async (): Promise => { + let raw: string; + try { + raw = await readFile(queuePath, "utf8"); + } 
catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return []; + } + + input.logger.warn("connector.outbound.persistence_read_failed", { + queuePath, + reason: sanitizeErrorReason(error), + }); + return []; + } + + if (raw.trim().length === 0) { + return []; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch (error) { + input.logger.warn("connector.outbound.persistence_invalid_json", { + queuePath, + reason: sanitizeErrorReason(error), + }); + return []; + } + + if (!Array.isArray(parsed)) { + return []; + } + + const frames: EnqueueFrame[] = []; + for (const candidate of parsed) { + const parsedFrame = enqueueFrameSchema.safeParse(candidate); + if (parsedFrame.success) { + frames.push(parsedFrame.data); + } + } + return frames; + }; + + const save = async (frames: EnqueueFrame[]): Promise => { + await mkdir(dirname(queuePath), { recursive: true }); + const tmpPath = `${queuePath}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; + await writeFile(tmpPath, `${JSON.stringify(frames, null, 2)}\n`, "utf8"); + await rename(tmpPath, queuePath); + }; + + return { load, save }; +} diff --git a/packages/connector/src/runtime/parse.ts b/packages/connector/src/runtime/parse.ts new file mode 100644 index 0000000..b9528f2 --- /dev/null +++ b/packages/connector/src/runtime/parse.ts @@ -0,0 +1,97 @@ +import { isRecord as isObjectRecord } from "@clawdentity/common"; +import type { AgentAuthBundle } from "@clawdentity/sdk"; +import { ACCESS_TOKEN_REFRESH_SKEW_MS } from "./constants.js"; + +export function isRecord(value: unknown): value is Record { + return isObjectRecord(value); +} + +export function parseRequiredString(value: unknown, field: string): string { + if (typeof value !== "string" || value.trim().length === 0) { + throw new Error(`Invalid ${field}`); + } + + return value.trim(); +} + +export function parseOptionalString(value: unknown): string | 
undefined { + if (typeof value !== "string") { + return undefined; + } + + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +export function parseOptionalProxyOrigin(value: unknown): string | undefined { + if (typeof value !== "string" || value.trim().length === 0) { + return undefined; + } + + try { + return new URL(value.trim()).origin; + } catch { + return undefined; + } +} + +export function parsePositiveIntEnv( + key: string, + fallback: number, + minimum = 1, +): number { + const raw = process.env[key]?.trim(); + if (!raw) { + return fallback; + } + + const parsed = Number.parseInt(raw, 10); + if (!Number.isFinite(parsed) || parsed < minimum) { + return fallback; + } + + return parsed; +} + +export function parseRequestIds(value: unknown): string[] | undefined { + if (value === undefined) { + return undefined; + } + + if (!Array.isArray(value)) { + return []; + } + + return Array.from( + new Set( + value + .map((item) => (typeof item === "string" ? 
item.trim() : "")) + .filter((item) => item.length > 0), + ), + ); +} + +export function parseIsoTimestampMs(value: string): number | undefined { + const parsed = Date.parse(value); + if (!Number.isFinite(parsed)) { + return undefined; + } + + return parsed; +} + +export function shouldRefreshAccessToken( + auth: AgentAuthBundle, + nowMs: number, +): boolean { + if (auth.accessToken.trim().length === 0) { + return true; + } + + const expiresAtMs = parseIsoTimestampMs(auth.accessExpiresAt); + if (expiresAtMs === undefined) { + return false; + } + + return expiresAtMs <= nowMs + ACCESS_TOKEN_REFRESH_SKEW_MS; +} diff --git a/packages/connector/src/runtime/policy.ts b/packages/connector/src/runtime/policy.ts new file mode 100644 index 0000000..7d568e4 --- /dev/null +++ b/packages/connector/src/runtime/policy.ts @@ -0,0 +1,132 @@ +import { + DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, + DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, + DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, + DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, + DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, + DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, + DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, + DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, +} from "../constants.js"; +import { parsePositiveIntEnv } from "./parse.js"; +import type { InboundReplayPolicy, OpenclawProbePolicy } from "./types.js"; + +export function loadInboundReplayPolicy(): InboundReplayPolicy { + const retryBackoffFactor = Number.parseFloat( + 
process.env.CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR ?? "", + ); + const runtimeReplayRetryBackoffFactor = Number.parseFloat( + process.env.CONNECTOR_RUNTIME_REPLAY_RETRY_BACKOFF_FACTOR ?? "", + ); + + return { + deadLetterNonRetryableMaxAttempts: parsePositiveIntEnv( + "CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS", + DEFAULT_CONNECTOR_INBOUND_DEAD_LETTER_NON_RETRYABLE_MAX_ATTEMPTS, + ), + eventsMaxBytes: parsePositiveIntEnv( + "CONNECTOR_INBOUND_EVENTS_MAX_BYTES", + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_BYTES, + ), + eventsMaxFiles: parsePositiveIntEnv( + "CONNECTOR_INBOUND_EVENTS_MAX_FILES", + DEFAULT_CONNECTOR_INBOUND_EVENTS_MAX_FILES, + ), + inboxMaxMessages: parsePositiveIntEnv( + "CONNECTOR_INBOUND_INBOX_MAX_MESSAGES", + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_MESSAGES, + ), + inboxMaxBytes: parsePositiveIntEnv( + "CONNECTOR_INBOUND_INBOX_MAX_BYTES", + DEFAULT_CONNECTOR_INBOUND_INBOX_MAX_BYTES, + ), + replayIntervalMs: parsePositiveIntEnv( + "CONNECTOR_INBOUND_REPLAY_INTERVAL_MS", + DEFAULT_CONNECTOR_INBOUND_REPLAY_INTERVAL_MS, + ), + batchSize: parsePositiveIntEnv( + "CONNECTOR_INBOUND_REPLAY_BATCH_SIZE", + DEFAULT_CONNECTOR_INBOUND_REPLAY_BATCH_SIZE, + ), + retryInitialDelayMs: parsePositiveIntEnv( + "CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS", + DEFAULT_CONNECTOR_INBOUND_RETRY_INITIAL_DELAY_MS, + ), + retryMaxDelayMs: parsePositiveIntEnv( + "CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS", + DEFAULT_CONNECTOR_INBOUND_RETRY_MAX_DELAY_MS, + ), + retryBackoffFactor: + Number.isFinite(retryBackoffFactor) && retryBackoffFactor >= 1 + ? 
retryBackoffFactor + : DEFAULT_CONNECTOR_INBOUND_RETRY_BACKOFF_FACTOR, + runtimeReplayMaxAttempts: parsePositiveIntEnv( + "CONNECTOR_RUNTIME_REPLAY_MAX_ATTEMPTS", + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_MAX_ATTEMPTS, + ), + runtimeReplayRetryInitialDelayMs: parsePositiveIntEnv( + "CONNECTOR_RUNTIME_REPLAY_RETRY_INITIAL_DELAY_MS", + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_INITIAL_DELAY_MS, + ), + runtimeReplayRetryMaxDelayMs: parsePositiveIntEnv( + "CONNECTOR_RUNTIME_REPLAY_RETRY_MAX_DELAY_MS", + DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_MAX_DELAY_MS, + ), + runtimeReplayRetryBackoffFactor: + Number.isFinite(runtimeReplayRetryBackoffFactor) && + runtimeReplayRetryBackoffFactor >= 1 + ? runtimeReplayRetryBackoffFactor + : DEFAULT_CONNECTOR_RUNTIME_REPLAY_DELIVER_RETRY_BACKOFF_FACTOR, + }; +} + +export function loadOpenclawProbePolicy(): OpenclawProbePolicy { + return { + intervalMs: parsePositiveIntEnv( + "CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS", + DEFAULT_CONNECTOR_OPENCLAW_PROBE_INTERVAL_MS, + ), + timeoutMs: parsePositiveIntEnv( + "CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS", + DEFAULT_CONNECTOR_OPENCLAW_PROBE_TIMEOUT_MS, + ), + }; +} + +export function computeReplayDelayMs(input: { + attemptCount: number; + policy: InboundReplayPolicy; +}): number { + const exponent = Math.max(0, input.attemptCount - 1); + const delay = Math.min( + input.policy.retryMaxDelayMs, + Math.floor( + input.policy.retryInitialDelayMs * + input.policy.retryBackoffFactor ** exponent, + ), + ); + return Math.max(1, delay); +} + +export function computeRuntimeReplayRetryDelayMs(input: { + attemptCount: number; + policy: InboundReplayPolicy; +}): number { + const exponent = Math.max(0, input.attemptCount - 1); + const delay = Math.min( + input.policy.runtimeReplayRetryMaxDelayMs, + Math.floor( + input.policy.runtimeReplayRetryInitialDelayMs * + input.policy.runtimeReplayRetryBackoffFactor ** exponent, + ), + ); + return Math.max(1, delay); +} diff --git 
a/packages/connector/src/runtime/relay-service.ts b/packages/connector/src/runtime/relay-service.ts new file mode 100644 index 0000000..e4f86f2 --- /dev/null +++ b/packages/connector/src/runtime/relay-service.ts @@ -0,0 +1,249 @@ +import { randomBytes } from "node:crypto"; +import { + encodeBase64url, + RELAY_CONVERSATION_ID_HEADER, + RELAY_DELIVERY_RECEIPT_URL_HEADER, + RELAY_DELIVERY_RECEIPTS_PATH, + RELAY_RECIPIENT_AGENT_DID_HEADER, +} from "@clawdentity/protocol"; +import { + type AgentAuthBundle, + AppError, + executeWithAgentAuthRefreshRetry, + nowIso, + nowUtcMs, + refreshAgentAuthWithClawProof, + signHttpRequest, +} from "@clawdentity/sdk"; +import { AGENT_ACCESS_HEADER } from "../constants.js"; +import { NONCE_SIZE, REFRESH_SINGLE_FLIGHT_PREFIX } from "./constants.js"; +import { isRetryableRelayAuthError } from "./errors.js"; +import type { + OutboundDeliveryReceiptStatus, + OutboundRelayRequest, + TrustedReceiptTargets, +} from "./types.js"; +import { toPathWithQuery } from "./url.js"; + +type RelayServiceInput = { + agentName: string; + ait: string; + configDir: string; + defaultReceiptCallbackUrl: string; + fetchImpl: typeof fetch; + getCurrentAuth: () => AgentAuthBundle; + registryUrl: string; + secretKey: Uint8Array; + setCurrentAuth: (nextAuth: AgentAuthBundle) => Promise; + syncAuthFromDisk: () => Promise; + trustedReceiptTargets: TrustedReceiptTargets; +}; + +export function createRelayService(input: RelayServiceInput): { + postDeliveryReceipt: (inputReceipt: { + reason?: string; + recipientAgentDid: string; + replyTo: string; + requestId: string; + senderAgentDid: string; + status: OutboundDeliveryReceiptStatus; + }) => Promise; + relayToPeer: (request: OutboundRelayRequest) => Promise; +} { + const relayToPeer = async (request: OutboundRelayRequest): Promise => { + await input.syncAuthFromDisk(); + const peerUrl = new URL(request.peerProxyUrl); + input.trustedReceiptTargets.origins.add(peerUrl.origin); + 
input.trustedReceiptTargets.byAgentDid.set(request.peerDid, peerUrl.origin); + const body = JSON.stringify(request.payload ?? {}); + const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}`; + + const performRelay = async (auth: AgentAuthBundle): Promise => { + const replyTo = request.replyTo ?? input.defaultReceiptCallbackUrl; + const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); + const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(peerUrl), + timestamp: unixSeconds, + nonce, + body: new TextEncoder().encode(body), + secretKey: input.secretKey, + }); + + const response = await input.fetchImpl(peerUrl.toString(), { + method: "POST", + headers: { + Authorization: `Claw ${input.ait}`, + "Content-Type": "application/json", + [AGENT_ACCESS_HEADER]: auth.accessToken, + [RELAY_RECIPIENT_AGENT_DID_HEADER]: request.peerDid, + ...(request.conversationId + ? 
{ [RELAY_CONVERSATION_ID_HEADER]: request.conversationId } + : {}), + [RELAY_DELIVERY_RECEIPT_URL_HEADER]: replyTo, + ...signed.headers, + }, + body, + }); + + if (!response.ok) { + if (response.status === 401) { + throw new AppError({ + code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", + message: "Peer relay rejected agent auth credentials", + status: 401, + expose: true, + }); + } + + throw new AppError({ + code: "CONNECTOR_OUTBOUND_DELIVERY_FAILED", + message: "Peer relay request failed", + status: 502, + }); + } + }; + + await executeWithAgentAuthRefreshRetry({ + key: refreshKey, + shouldRetry: isRetryableRelayAuthError, + getAuth: async () => { + await input.syncAuthFromDisk(); + return input.getCurrentAuth(); + }, + persistAuth: async (nextAuth) => { + await input.setCurrentAuth(nextAuth); + }, + refreshAuth: async (auth) => + refreshAgentAuthWithClawProof({ + registryUrl: input.registryUrl, + ait: input.ait, + secretKey: input.secretKey, + refreshToken: auth.refreshToken, + fetchImpl: input.fetchImpl, + }), + perform: performRelay, + }); + }; + + const postDeliveryReceipt = async (inputReceipt: { + reason?: string; + recipientAgentDid: string; + replyTo: string; + requestId: string; + senderAgentDid: string; + status: OutboundDeliveryReceiptStatus; + }): Promise => { + await input.syncAuthFromDisk(); + const receiptUrl = new URL(inputReceipt.replyTo); + if (receiptUrl.pathname !== RELAY_DELIVERY_RECEIPTS_PATH) { + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_INVALID_TARGET", + message: "Delivery receipt callback target is invalid", + status: 400, + }); + } + const expectedSenderOrigin = input.trustedReceiptTargets.byAgentDid.get( + inputReceipt.senderAgentDid, + ); + if ( + expectedSenderOrigin !== undefined && + receiptUrl.origin !== expectedSenderOrigin + ) { + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_UNTRUSTED_TARGET", + message: "Delivery receipt callback target is untrusted", + status: 400, + }); + } + if ( + expectedSenderOrigin 
=== undefined && + !input.trustedReceiptTargets.origins.has(receiptUrl.origin) + ) { + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_UNTRUSTED_TARGET", + message: "Delivery receipt callback target is untrusted", + status: 400, + }); + } + + const body = JSON.stringify({ + requestId: inputReceipt.requestId, + senderAgentDid: inputReceipt.senderAgentDid, + recipientAgentDid: inputReceipt.recipientAgentDid, + status: inputReceipt.status, + reason: inputReceipt.reason, + processedAt: nowIso(), + }); + const refreshKey = `${REFRESH_SINGLE_FLIGHT_PREFIX}:${input.configDir}:${input.agentName}:delivery-receipt`; + + const performReceipt = async (auth: AgentAuthBundle): Promise => { + const unixSeconds = Math.floor(nowUtcMs() / 1000).toString(); + const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: toPathWithQuery(receiptUrl), + timestamp: unixSeconds, + nonce, + body: new TextEncoder().encode(body), + secretKey: input.secretKey, + }); + + const response = await input.fetchImpl(receiptUrl.toString(), { + method: "POST", + headers: { + Authorization: `Claw ${input.ait}`, + "Content-Type": "application/json", + [AGENT_ACCESS_HEADER]: auth.accessToken, + ...signed.headers, + }, + body, + }); + + if (!response.ok) { + if (response.status === 401) { + throw new AppError({ + code: "OPENCLAW_RELAY_AGENT_AUTH_REJECTED", + message: + "Delivery receipt callback rejected agent auth credentials", + status: 401, + expose: true, + }); + } + + throw new AppError({ + code: "CONNECTOR_DELIVERY_RECEIPT_FAILED", + message: "Delivery receipt callback request failed", + status: 502, + }); + } + }; + + await executeWithAgentAuthRefreshRetry({ + key: refreshKey, + shouldRetry: isRetryableRelayAuthError, + getAuth: async () => { + await input.syncAuthFromDisk(); + return input.getCurrentAuth(); + }, + persistAuth: async (nextAuth) => { + await input.setCurrentAuth(nextAuth); + }, + refreshAuth: async (auth) 
=> + refreshAgentAuthWithClawProof({ + registryUrl: input.registryUrl, + ait: input.ait, + secretKey: input.secretKey, + refreshToken: auth.refreshToken, + fetchImpl: input.fetchImpl, + }), + perform: performReceipt, + }); + }; + + return { + relayToPeer, + postDeliveryReceipt, + }; +} diff --git a/packages/connector/src/runtime/server.ts b/packages/connector/src/runtime/server.ts new file mode 100644 index 0000000..b8920fc --- /dev/null +++ b/packages/connector/src/runtime/server.ts @@ -0,0 +1,214 @@ +import type { IncomingMessage, ServerResponse } from "node:http"; +import { AppError } from "@clawdentity/sdk"; +import type { ConnectorClient } from "../client.js"; +import { DEFAULT_CONNECTOR_STATUS_PATH } from "../constants.js"; +import type { ConnectorInboundInbox } from "../inbound-inbox.js"; +import { + CONNECTOR_DEAD_LETTER_PATH, + CONNECTOR_DEAD_LETTER_PURGE_PATH, + CONNECTOR_DEAD_LETTER_REPLAY_PATH, +} from "./constants.js"; +import { sanitizeErrorReason } from "./errors.js"; +import { + parseOutboundRelayRequest, + readRequestJson, + writeJson, +} from "./http.js"; +import { isRecord, parseRequestIds } from "./parse.js"; +import type { InboundReplayView } from "./types.js"; + +type RuntimeRequestHandlerInput = { + connectorClient: ConnectorClient; + inboundInbox: ConnectorInboundInbox; + logger: { + error: (event: string, payload?: Record) => void; + warn: (event: string, payload?: Record) => void; + }; + outboundBaseUrl: URL; + outboundPath: string; + outboundUrl: string; + readInboundReplayView: () => Promise; + relayToPeer: ( + request: ReturnType, + ) => Promise; + replayPendingInboundMessages: () => void; + wsUrl: string; +}; + +export function createRuntimeRequestHandler( + input: RuntimeRequestHandlerInput, +): (req: IncomingMessage, res: ServerResponse) => Promise { + return async (req, res) => { + const requestPath = req.url + ? 
new URL(req.url, input.outboundBaseUrl).pathname + : "/"; + + if (requestPath === DEFAULT_CONNECTOR_STATUS_PATH) { + if (req.method !== "GET") { + res.statusCode = 405; + res.setHeader("allow", "GET"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + let inboundReplayView: InboundReplayView; + try { + inboundReplayView = await input.readInboundReplayView(); + } catch (error) { + input.logger.warn("connector.status.inbound_inbox_unavailable", { + reason: sanitizeErrorReason(error), + }); + writeJson(res, 500, { + status: "error", + error: { + code: "CONNECTOR_INBOUND_INBOX_UNAVAILABLE", + message: "Connector inbound inbox status is unavailable", + }, + outboundUrl: input.outboundUrl, + websocketUrl: input.wsUrl, + websocket: { + connected: input.connectorClient.isConnected(), + }, + }); + return; + } + const clientMetrics = input.connectorClient.getMetricsSnapshot(); + writeJson(res, 200, { + status: "ok", + outboundUrl: input.outboundUrl, + websocketUrl: input.wsUrl, + websocket: { + ...clientMetrics.connection, + }, + inbound: { + pending: inboundReplayView.snapshot.pending, + deadLetter: inboundReplayView.snapshot.deadLetter, + replay: { + replayerActive: inboundReplayView.replayerActive, + lastReplayAt: inboundReplayView.lastReplayAt, + lastReplayError: inboundReplayView.lastReplayError, + }, + openclawGateway: inboundReplayView.openclawGateway, + openclawHook: inboundReplayView.openclawHook, + }, + outbound: { + queue: { + pendingCount: input.connectorClient.getQueuedOutboundCount(), + }, + }, + metrics: { + heartbeat: clientMetrics.heartbeat, + inboundDelivery: clientMetrics.inboundDelivery, + outboundQueue: clientMetrics.outboundQueue, + }, + }); + return; + } + + if (requestPath === CONNECTOR_DEAD_LETTER_PATH) { + if (req.method !== "GET") { + res.statusCode = 405; + res.setHeader("allow", "GET"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + const deadLetterItems = await 
input.inboundInbox.listDeadLetter(); + writeJson(res, 200, { + status: "ok", + count: deadLetterItems.length, + items: deadLetterItems, + }); + return; + } + + if (requestPath === CONNECTOR_DEAD_LETTER_REPLAY_PATH) { + if (req.method !== "POST") { + res.statusCode = 405; + res.setHeader("allow", "POST"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + const body = await readRequestJson(req); + const requestIds = isRecord(body) + ? parseRequestIds(body.requestIds) + : undefined; + const replayResult = await input.inboundInbox.replayDeadLetter({ + requestIds, + }); + input.replayPendingInboundMessages(); + writeJson(res, 200, { + status: "ok", + replayedCount: replayResult.replayedCount, + }); + return; + } + + if (requestPath === CONNECTOR_DEAD_LETTER_PURGE_PATH) { + if (req.method !== "POST") { + res.statusCode = 405; + res.setHeader("allow", "POST"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + const body = await readRequestJson(req); + const requestIds = isRecord(body) + ? 
parseRequestIds(body.requestIds) + : undefined; + const purgeResult = await input.inboundInbox.purgeDeadLetter({ + requestIds, + }); + writeJson(res, 200, { + status: "ok", + purgedCount: purgeResult.purgedCount, + }); + return; + } + + if (requestPath !== input.outboundPath) { + writeJson(res, 404, { error: "Not Found" }); + return; + } + + if (req.method !== "POST") { + res.statusCode = 405; + res.setHeader("allow", "POST"); + writeJson(res, 405, { error: "Method Not Allowed" }); + return; + } + + try { + const requestBody = await readRequestJson(req); + const relayRequest = parseOutboundRelayRequest(requestBody); + await input.relayToPeer(relayRequest); + writeJson(res, 202, { accepted: true, peer: relayRequest.peer }); + } catch (error) { + if (error instanceof AppError) { + input.logger.warn("connector.outbound.rejected", { + code: error.code, + status: error.status, + message: error.message, + }); + writeJson(res, error.status, { + error: { + code: error.code, + message: error.message, + }, + }); + return; + } + + input.logger.error("connector.outbound.failed", { + errorName: error instanceof Error ? 
error.name : "unknown", + }); + writeJson(res, 500, { + error: { + code: "CONNECTOR_OUTBOUND_INTERNAL", + message: "Connector outbound relay failed", + }, + }); + } + }; +} diff --git a/packages/connector/src/runtime/trusted-receipts.ts b/packages/connector/src/runtime/trusted-receipts.ts new file mode 100644 index 0000000..7978830 --- /dev/null +++ b/packages/connector/src/runtime/trusted-receipts.ts @@ -0,0 +1,126 @@ +import { readFile } from "node:fs/promises"; +import { isAbsolute, join } from "node:path"; +import type { Logger } from "@clawdentity/sdk"; +import { OPENCLAW_RELAY_RUNTIME_FILE_NAME } from "./constants.js"; +import { sanitizeErrorReason } from "./errors.js"; +import { isRecord, parseOptionalProxyOrigin } from "./parse.js"; +import type { TrustedReceiptTargets } from "./types.js"; + +export async function loadTrustedReceiptTargets(input: { + configDir: string; + logger: Logger; +}): Promise { + const trustedReceiptTargets: TrustedReceiptTargets = { + origins: new Set(), + byAgentDid: new Map(), + }; + + const relayRuntimeConfigPath = join( + input.configDir, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + ); + let relayRuntimeRaw: string; + try { + relayRuntimeRaw = await readFile(relayRuntimeConfigPath, "utf8"); + } catch (error) { + if ( + error && + typeof error === "object" && + "code" in error && + (error as { code?: string }).code === "ENOENT" + ) { + return trustedReceiptTargets; + } + + input.logger.warn("connector.delivery_receipt.runtime_config_read_failed", { + relayRuntimeConfigPath, + reason: sanitizeErrorReason(error), + }); + return trustedReceiptTargets; + } + + let relayRuntimeParsed: unknown; + try { + relayRuntimeParsed = JSON.parse(relayRuntimeRaw); + } catch (error) { + input.logger.warn( + "connector.delivery_receipt.runtime_config_invalid_json", + { + relayRuntimeConfigPath, + reason: sanitizeErrorReason(error), + }, + ); + return trustedReceiptTargets; + } + + if (!isRecord(relayRuntimeParsed)) { + return trustedReceiptTargets; + } + 
+ const relayTransformPeersPathRaw = + typeof relayRuntimeParsed.relayTransformPeersPath === "string" && + relayRuntimeParsed.relayTransformPeersPath.trim().length > 0 + ? relayRuntimeParsed.relayTransformPeersPath.trim() + : undefined; + if (!relayTransformPeersPathRaw) { + return trustedReceiptTargets; + } + + const relayTransformPeersPath = isAbsolute(relayTransformPeersPathRaw) + ? relayTransformPeersPathRaw + : join(input.configDir, relayTransformPeersPathRaw); + + let relayTransformPeersRaw: string; + try { + relayTransformPeersRaw = await readFile(relayTransformPeersPath, "utf8"); + } catch (error) { + input.logger.warn("connector.delivery_receipt.peers_snapshot_read_failed", { + relayTransformPeersPath, + reason: sanitizeErrorReason(error), + }); + return trustedReceiptTargets; + } + + let relayTransformPeersParsed: unknown; + try { + relayTransformPeersParsed = JSON.parse(relayTransformPeersRaw); + } catch (error) { + input.logger.warn( + "connector.delivery_receipt.peers_snapshot_invalid_json", + { + relayTransformPeersPath, + reason: sanitizeErrorReason(error), + }, + ); + return trustedReceiptTargets; + } + + if (!isRecord(relayTransformPeersParsed)) { + return trustedReceiptTargets; + } + + const peersValue = relayTransformPeersParsed.peers; + if (!isRecord(peersValue)) { + return trustedReceiptTargets; + } + + for (const peerValue of Object.values(peersValue)) { + if (!isRecord(peerValue)) { + continue; + } + + const agentDid = + typeof peerValue.did === "string" && peerValue.did.trim().length > 0 + ? 
peerValue.did.trim() + : undefined; + const origin = parseOptionalProxyOrigin(peerValue.proxyUrl); + if (!agentDid || !origin) { + continue; + } + + trustedReceiptTargets.origins.add(origin); + trustedReceiptTargets.byAgentDid.set(agentDid, origin); + } + + return trustedReceiptTargets; +} diff --git a/packages/connector/src/runtime/types.ts b/packages/connector/src/runtime/types.ts new file mode 100644 index 0000000..93bf085 --- /dev/null +++ b/packages/connector/src/runtime/types.ts @@ -0,0 +1,109 @@ +import type { Logger } from "@clawdentity/sdk"; +import type { ConnectorInboundInboxSnapshot } from "../inbound-inbox.js"; + +export type ConnectorRuntimeCredentials = { + accessExpiresAt?: string; + accessToken?: string; + agentDid: string; + ait: string; + refreshExpiresAt?: string; + refreshToken: string; + secretKey: string; + tokenType?: "Bearer"; +}; + +export type StartConnectorRuntimeInput = { + agentName: string; + configDir: string; + credentials: ConnectorRuntimeCredentials; + fetchImpl?: typeof fetch; + logger?: Logger; + openclawBaseUrl?: string; + openclawHookPath?: string; + openclawHookToken?: string; + outboundBaseUrl?: string; + outboundPath?: string; + proxyWebsocketUrl?: string; + registryUrl: string; +}; + +export type ConnectorRuntimeHandle = { + outboundUrl: string; + stop: () => Promise; + waitUntilStopped: () => Promise; + websocketUrl: string; +}; + +export type OutboundRelayRequest = { + conversationId?: string; + payload: unknown; + peer: string; + peerDid: string; + peerProxyUrl: string; + replyTo?: string; +}; + +export type OutboundDeliveryReceiptStatus = + | "processed_by_openclaw" + | "dead_lettered"; + +export type TrustedReceiptTargets = { + byAgentDid: Map; + origins: Set; +}; + +export type InboundReplayPolicy = { + batchSize: number; + deadLetterNonRetryableMaxAttempts: number; + eventsMaxBytes: number; + eventsMaxFiles: number; + inboxMaxBytes: number; + inboxMaxMessages: number; + replayIntervalMs: number; + 
retryBackoffFactor: number; + retryInitialDelayMs: number; + retryMaxDelayMs: number; + runtimeReplayMaxAttempts: number; + runtimeReplayRetryBackoffFactor: number; + runtimeReplayRetryInitialDelayMs: number; + runtimeReplayRetryMaxDelayMs: number; +}; + +export type OpenclawProbePolicy = { + intervalMs: number; + timeoutMs: number; +}; + +export type InboundReplayStatus = { + lastReplayAt?: string; + lastReplayError?: string; + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + replayerActive: boolean; +}; + +export type InboundReplayView = { + lastReplayAt?: string; + lastReplayError?: string; + snapshot: ConnectorInboundInboxSnapshot; + replayerActive: boolean; + openclawGateway: { + lastCheckedAt?: string; + lastFailureReason?: string; + lastSuccessAt?: string; + reachable: boolean; + url: string; + }; + openclawHook: { + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + url: string; + }; +}; + +export type OpenclawGatewayProbeStatus = { + lastCheckedAt?: string; + lastFailureReason?: string; + lastSuccessAt?: string; + reachable: boolean; +}; diff --git a/packages/connector/src/runtime/url.ts b/packages/connector/src/runtime/url.ts new file mode 100644 index 0000000..fb7913c --- /dev/null +++ b/packages/connector/src/runtime/url.ts @@ -0,0 +1,106 @@ +import { toOpenclawHookUrl as toResolvedOpenclawHookUrl } from "@clawdentity/common"; +import { RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { + DEFAULT_CONNECTOR_BASE_URL, + DEFAULT_CONNECTOR_OUTBOUND_PATH, + DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_OPENCLAW_HOOK_PATH, +} from "../constants.js"; + +export function toPathWithQuery(url: URL): string { + return `${url.pathname}${url.search}`; +} + +export function normalizeOutboundBaseUrl( + baseUrlInput: string | undefined, +): URL { + const raw = baseUrlInput?.trim() || DEFAULT_CONNECTOR_BASE_URL; + let parsed: URL; + + try { + parsed = new URL(raw); + } catch { + throw new Error("Connector outbound base URL is invalid"); + } 
+ + if (parsed.protocol !== "http:") { + throw new Error("Connector outbound base URL must use http://"); + } + + return parsed; +} + +export function normalizeOutboundPath(pathInput: string | undefined): string { + const raw = pathInput?.trim() || DEFAULT_CONNECTOR_OUTBOUND_PATH; + if (raw.length === 0) { + throw new Error("Connector outbound path is invalid"); + } + + return raw.startsWith("/") ? raw : `/${raw}`; +} + +export function normalizeWebSocketUrl(urlInput: string | undefined): string { + const raw = urlInput?.trim() ?? process.env.CLAWDENTITY_PROXY_WS_URL?.trim(); + if (!raw) { + throw new Error( + "Proxy websocket URL is required (set --proxy-ws-url or CLAWDENTITY_PROXY_WS_URL)", + ); + } + + const parsed = new URL(raw); + if (parsed.protocol === "https:") { + parsed.protocol = "wss:"; + } else if (parsed.protocol === "http:") { + parsed.protocol = "ws:"; + } + + if (parsed.protocol !== "wss:" && parsed.protocol !== "ws:") { + throw new Error("Proxy websocket URL must use ws:// or wss://"); + } + + if (parsed.pathname === "/") { + parsed.pathname = RELAY_CONNECT_PATH; + } + + return parsed.toString(); +} + +export function resolveOpenclawBaseUrl(input?: string): string { + return ( + input?.trim() || + process.env.OPENCLAW_BASE_URL?.trim() || + DEFAULT_OPENCLAW_BASE_URL + ); +} + +export function resolveOpenclawHookPath(input?: string): string { + const value = + input?.trim() || + process.env.OPENCLAW_HOOK_PATH?.trim() || + DEFAULT_OPENCLAW_HOOK_PATH; + return value.startsWith("/") ? 
value : `/${value}`; +} + +export function resolveOpenclawHookToken(input?: string): string | undefined { + const value = input?.trim() || process.env.OPENCLAW_HOOK_TOKEN?.trim(); + if (!value) { + return undefined; + } + + return value; +} + +export function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { + return toResolvedOpenclawHookUrl(baseUrl, hookPath); +} + +export function toHttpOriginFromWebSocketUrl(value: URL): string { + const normalized = new URL(value.toString()); + if (normalized.protocol === "wss:") { + normalized.protocol = "https:"; + } else if (normalized.protocol === "ws:") { + normalized.protocol = "http:"; + } + + return normalized.origin; +} diff --git a/packages/connector/src/runtime/ws.ts b/packages/connector/src/runtime/ws.ts new file mode 100644 index 0000000..bf6ab36 --- /dev/null +++ b/packages/connector/src/runtime/ws.ts @@ -0,0 +1,97 @@ +import { randomBytes } from "node:crypto"; +import { encodeBase64url } from "@clawdentity/protocol"; +import { nowUtcMs, signHttpRequest } from "@clawdentity/sdk"; +import { WebSocket as NodeWebSocket } from "ws"; +import type { ConnectorWebSocket } from "../client.js"; +import { AGENT_ACCESS_HEADER } from "../constants.js"; +import { NONCE_SIZE } from "./constants.js"; +import { toPathWithQuery } from "./url.js"; + +export function createWebSocketFactory(): ( + url: string, + headers: Record, +) => ConnectorWebSocket { + return (url: string, headers: Record) => { + const socket = new NodeWebSocket(url, { + headers, + }); + + return { + get readyState() { + return socket.readyState; + }, + send: (data: string) => { + socket.send(data); + }, + close: (code?: number, reason?: string) => { + socket.close(code, reason); + }, + addEventListener: (type, listener) => { + if (type === "open") { + socket.on("open", () => listener({})); + return; + } + + if (type === "message") { + socket.on("message", (data) => { + const text = + typeof data === "string" + ? data + : Array.isArray(data) + ? 
Buffer.concat(data).toString("utf8") + : Buffer.isBuffer(data) + ? data.toString("utf8") + : Buffer.from(data).toString("utf8"); + listener({ data: text }); + }); + return; + } + + if (type === "close") { + socket.on("close", (code, reason) => { + listener({ + code: Number(code), + reason: reason.toString("utf8"), + wasClean: Number(code) === 1000, + }); + }); + return; + } + + if (type === "unexpected-response") { + socket.on("unexpected-response", (_request, response) => { + listener({ + status: response.statusCode, + }); + }); + return; + } + + socket.on("error", (error) => listener({ error })); + }, + }; + }; +} + +export async function buildUpgradeHeaders(input: { + ait: string; + accessToken: string; + wsUrl: URL; + secretKey: Uint8Array; +}): Promise> { + const timestamp = Math.floor(nowUtcMs() / 1000).toString(); + const nonce = encodeBase64url(randomBytes(NONCE_SIZE)); + const signed = await signHttpRequest({ + method: "GET", + pathWithQuery: toPathWithQuery(input.wsUrl), + timestamp, + nonce, + secretKey: input.secretKey, + }); + + return { + authorization: `Claw ${input.ait}`, + [AGENT_ACCESS_HEADER]: input.accessToken, + ...signed.headers, + }; +} diff --git a/packages/connector/vitest.config.ts b/packages/connector/vitest.config.ts index e2ec332..53a1e65 100644 --- a/packages/connector/vitest.config.ts +++ b/packages/connector/vitest.config.ts @@ -1,6 +1,14 @@ +import { fileURLToPath } from "node:url"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + alias: { + "@clawdentity/common": fileURLToPath( + new URL("../common/src/index.ts", import.meta.url), + ), + }, + }, test: { globals: true, }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f2f1bde..a3972a0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -139,8 +139,17 @@ importers: specifier: ^0.31.9 version: 0.31.9 + packages/common: + devDependencies: + '@types/node': + specifier: ^22.17.2 + version: 22.19.11 + packages/connector: dependencies: + 
'@clawdentity/common': + specifier: workspace:* + version: link:../common '@clawdentity/protocol': specifier: workspace:* version: link:../protocol diff --git a/tsconfig.base.json b/tsconfig.base.json index 0e2bffd..b6b3966 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -15,6 +15,7 @@ "sourceMap": true, "baseUrl": ".", "paths": { + "@clawdentity/common": ["packages/common/src/index.ts"], "@clawdentity/connector": ["packages/connector/src/index.ts"], "@clawdentity/protocol": ["packages/protocol/src/index.ts"], "@clawdentity/sdk": ["packages/sdk/src/index.ts"], From f1b77e141d63484746aec6efbbcc9dc1d57b011f Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 15:21:32 +0530 Subject: [PATCH 123/190] fix(cli): add repository metadata for provenance publish --- .github/AGENTS.md | 1 + .github/workflows/publish-cli.yml | 8 ++++++++ apps/cli/package.json | 8 ++++++++ 3 files changed, 17 insertions(+) diff --git a/.github/AGENTS.md b/.github/AGENTS.md index 99144b4..d8d7373 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -46,6 +46,7 @@ - Run npm release commands (`pkg set`, `pack`, `publish`) with `working-directory: apps/cli`; avoid `npm --prefix apps/cli ...` for pack/publish because npm may target the workspace root manifest on monorepos missing a root `version`. - Validate packaged artifact contents using `npm pack --dry-run --json` file metadata (not grepping console notices), because npm file-list notices are not guaranteed on stdout. - Keep `npm pack --dry-run --json` deterministic by forcing `NPM_CONFIG_COLOR=false`, `NPM_CONFIG_LOGLEVEL=silent`, and `NPM_CONFIG_PROGRESS=false`, then parsing the `files` list instead of relying on noisy stderr/stdout lines that vary per npm version. +- Keep `apps/cli/package.json` `repository.url` pinned to `https://github.com/vrknetha/clawdentity`; npm provenance publish will fail if repository metadata is missing or mismatched. - Publish only package `apps/cli` as npm package `clawdentity`. 
- Keep published runtime manifest free of `workspace:*` runtime dependencies. - Use npm provenance (`--provenance`) and require `NPM_TOKEN` secret. diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index 21bbbc3..c9896f7 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -152,6 +152,7 @@ jobs: - name: Validate publish manifest env: EXPECTED_VERSION: ${{ steps.version.outputs.next_version }} + EXPECTED_REPOSITORY_URL: https://github.com/${{ github.repository }} run: | node <<'NODE' const fs = require("node:fs"); @@ -167,6 +168,13 @@ jobs: `Package version mismatch: expected ${process.env.EXPECTED_VERSION}, got ${pkg.version}`, ); } + const repositoryUrl = + typeof pkg.repository === "string" ? pkg.repository : pkg.repository?.url; + if (repositoryUrl !== process.env.EXPECTED_REPOSITORY_URL) { + throw new Error( + `Package repository URL mismatch: expected ${process.env.EXPECTED_REPOSITORY_URL}, got ${repositoryUrl ?? 
"undefined"}`, + ); + } if (pkg.private === true) { throw new Error("Package is private; publish would fail"); } diff --git a/apps/cli/package.json b/apps/cli/package.json index f6ec40e..625c6a8 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -5,6 +5,14 @@ "publishConfig": { "access": "public" }, + "repository": { + "type": "git", + "url": "https://github.com/vrknetha/clawdentity" + }, + "bugs": { + "url": "https://github.com/vrknetha/clawdentity/issues" + }, + "homepage": "https://github.com/vrknetha/clawdentity#readme", "main": "./dist/index.js", "types": "./dist/index.d.ts", "bin": { From 629be5a5e29f04553871af62675812af65638d51 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 15:47:26 +0530 Subject: [PATCH 124/190] refactor(cli): split openclaw command into modular files --- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/openclaw.ts | 4119 +---------------- apps/cli/src/commands/openclaw/AGENTS.md | 35 + apps/cli/src/commands/openclaw/command.ts | 221 + apps/cli/src/commands/openclaw/common.ts | 337 ++ apps/cli/src/commands/openclaw/config.ts | 283 ++ apps/cli/src/commands/openclaw/connector.ts | 628 +++ apps/cli/src/commands/openclaw/constants.ts | 61 + .../openclaw/doctor-connector-checks.ts | 274 ++ .../commands/openclaw/doctor-static-checks.ts | 787 ++++ apps/cli/src/commands/openclaw/doctor.ts | 52 + apps/cli/src/commands/openclaw/gateway.ts | 195 + apps/cli/src/commands/openclaw/output.ts | 62 + apps/cli/src/commands/openclaw/paths.ts | 169 + apps/cli/src/commands/openclaw/relay.ts | 384 ++ apps/cli/src/commands/openclaw/setup.ts | 427 ++ apps/cli/src/commands/openclaw/state.ts | 361 ++ apps/cli/src/commands/openclaw/types.ts | 292 ++ 18 files changed, 4592 insertions(+), 4096 deletions(-) create mode 100644 apps/cli/src/commands/openclaw/AGENTS.md create mode 100644 apps/cli/src/commands/openclaw/command.ts create mode 100644 apps/cli/src/commands/openclaw/common.ts create mode 100644 
apps/cli/src/commands/openclaw/config.ts create mode 100644 apps/cli/src/commands/openclaw/connector.ts create mode 100644 apps/cli/src/commands/openclaw/constants.ts create mode 100644 apps/cli/src/commands/openclaw/doctor-connector-checks.ts create mode 100644 apps/cli/src/commands/openclaw/doctor-static-checks.ts create mode 100644 apps/cli/src/commands/openclaw/doctor.ts create mode 100644 apps/cli/src/commands/openclaw/gateway.ts create mode 100644 apps/cli/src/commands/openclaw/output.ts create mode 100644 apps/cli/src/commands/openclaw/paths.ts create mode 100644 apps/cli/src/commands/openclaw/relay.ts create mode 100644 apps/cli/src/commands/openclaw/setup.ts create mode 100644 apps/cli/src/commands/openclaw/state.ts create mode 100644 apps/cli/src/commands/openclaw/types.ts diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index fdd9349..b40ec70 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -34,6 +34,7 @@ - `openclaw setup` must treat `hooks.defaultSessionKey` as an OpenClaw request session key (`main`, `global`, `subagent:*`), not a canonical `agent::...` store key. - `openclaw setup` must normalize legacy canonical defaults (`agent::`) to request-key format (``) before writing config, so hook runs route to the expected UI session. - When deriving fallback hook session routing, follow OpenClaw runtime semantics (`session.scope=global` -> `global`; otherwise `session.mainKey` with fallback `main`). +- Keep `openclaw.ts` as a thin public facade; place domain implementations under `openclaw/*.ts` to keep files maintainable and testable. - Keep thrown command errors static (no interpolated runtime values); include variable context in error details/log fields. Diagnostic check output (`openclaw doctor`, `openclaw relay test`) may include concrete paths/aliases so operators can remediate quickly. 
- Keep invite-type distinction explicit in output/docs: - `clw_inv_...` = registry onboarding invite (`invite redeem`) diff --git a/apps/cli/src/commands/openclaw.ts b/apps/cli/src/commands/openclaw.ts index d9d673f..a67e1f5 100644 --- a/apps/cli/src/commands/openclaw.ts +++ b/apps/cli/src/commands/openclaw.ts @@ -1,4096 +1,23 @@ -import { spawn } from "node:child_process"; -import { randomBytes } from "node:crypto"; -import { closeSync, existsSync, openSync } from "node:fs"; -import { chmod, copyFile, mkdir, readFile, writeFile } from "node:fs/promises"; -import { homedir } from "node:os"; -import { dirname, join, resolve as resolvePath } from "node:path"; -import { fileURLToPath } from "node:url"; -import { - decodeBase64url, - encodeBase64url, - parseDid, -} from "@clawdentity/protocol"; -import { AppError, createLogger, nowIso, nowUtcMs } from "@clawdentity/sdk"; -import { Command } from "commander"; -import { getConfigDir, resolveConfig } from "../config/manager.js"; -import { writeStdoutLine } from "../io.js"; -import { assertValidAgentName } from "./agent-name.js"; -import { installConnectorServiceForAgent } from "./connector.js"; -import { withErrorHandling } from "./helpers.js"; - -const logger = createLogger({ service: "cli", module: "openclaw" }); - -const AGENTS_DIR_NAME = "agents"; -const AIT_FILE_NAME = "ait.jwt"; -const SECRET_KEY_FILE_NAME = "secret.key"; -const PEERS_FILE_NAME = "peers.json"; -const OPENCLAW_DIR_NAME = ".openclaw"; -const OPENCLAW_CONFIG_FILE_NAME = "openclaw.json"; -const LEGACY_OPENCLAW_STATE_DIR_NAMES = [ - ".clawdbot", - ".moldbot", - ".moltbot", -] as const; -const LEGACY_OPENCLAW_CONFIG_FILE_NAMES = [ - "clawdbot.json", - "moldbot.json", - "moltbot.json", -] as const; -const OPENCLAW_AGENT_FILE_NAME = "openclaw-agent-name"; -const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; -const OPENCLAW_CONNECTORS_FILE_NAME = "openclaw-connectors.json"; -const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; -const 
RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; -const RELAY_RUNTIME_FILE_NAME = "clawdentity-relay.json"; -const RELAY_PEERS_FILE_NAME = "clawdentity-peers.json"; -const HOOK_MAPPING_ID = "clawdentity-send-to-peer"; -const HOOK_PATH_SEND_TO_PEER = "send-to-peer"; -const OPENCLAW_SEND_TO_PEER_HOOK_PATH = "hooks/send-to-peer"; -const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; -const DEFAULT_OPENCLAW_MAIN_SESSION_KEY = "main"; -const DEFAULT_CONNECTOR_PORT = 19400; -const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; -const DEFAULT_CONNECTOR_STATUS_PATH = "/v1/status"; -const DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS = 30; -const CONNECTOR_HOST_LOOPBACK = "127.0.0.1"; -const CONNECTOR_HOST_DOCKER = "host.docker.internal"; -const CONNECTOR_HOST_DOCKER_GATEWAY = "gateway.docker.internal"; -const CONNECTOR_HOST_LINUX_BRIDGE = "172.17.0.1"; -const CONNECTOR_RUN_DIR_NAME = "run"; -const CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX = "stdout.log"; -const CONNECTOR_DETACHED_STDERR_FILE_SUFFIX = "stderr.log"; -const INVITE_CODE_PREFIX = "clawd1_"; -const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; -const FILE_MODE = 0o600; -const OPENCLAW_HOOK_TOKEN_BYTES = 32; -const OPENCLAW_SETUP_COMMAND_HINT = - "Run: clawdentity openclaw setup "; -const OPENCLAW_SETUP_RESTART_COMMAND_HINT = `${OPENCLAW_SETUP_COMMAND_HINT} and restart OpenClaw`; -const OPENCLAW_SETUP_WITH_BASE_URL_HINT = `${OPENCLAW_SETUP_COMMAND_HINT} --openclaw-base-url `; -const OPENCLAW_PAIRING_COMMAND_HINT = - "Run QR pairing first: clawdentity pair start --qr and clawdentity pair confirm --qr-file "; -const OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT = - "Run: clawdentity openclaw setup (auto-recovers pending OpenClaw gateway device approvals)"; -const OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT = - "Run: clawdentity openclaw setup (ensures gateway auth mode/token are configured)"; -const OPENCLAW_GATEWAY_APPROVAL_COMMAND = "openclaw"; -const OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS = 10_000; -const 
OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS = 20; -const OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS = 1_000; -const textEncoder = new TextEncoder(); -const textDecoder = new TextDecoder(); - -type OpenclawInvitePayload = { - v: 1; - issuedAt: string; - did: string; - proxyUrl: string; - alias?: string; - agentName?: string; - humanName?: string; -}; - -type OpenclawInviteOptions = { - did: string; - proxyUrl: string; - peerAlias?: string; - agentName?: string; - humanName?: string; -}; - -type OpenclawSetupOptions = { - inviteCode?: string; - openclawDir?: string; - transformSource?: string; - openclawBaseUrl?: string; - runtimeMode?: string; - waitTimeoutSeconds?: string; - noRuntimeStart?: boolean; - homeDir?: string; - gatewayDeviceApprovalRunner?: OpenclawGatewayDeviceApprovalRunner; -}; - -type OpenclawDoctorOptions = { - homeDir?: string; - openclawDir?: string; - peerAlias?: string; - resolveConfigImpl?: typeof resolveConfig; - fetchImpl?: typeof fetch; - includeConfigCheck?: boolean; - includeConnectorRuntimeCheck?: boolean; - json?: boolean; -}; - -type OpenclawDoctorCommandOptions = { - peer?: string; - openclawDir?: string; - json?: boolean; -}; - -type OpenclawSetupCommandOptions = { - openclawDir?: string; - transformSource?: string; - openclawBaseUrl?: string; - runtimeMode?: string; - waitTimeoutSeconds?: string; - noRuntimeStart?: boolean; -}; - -type OpenclawRelayTestOptions = { - peer?: string; - homeDir?: string; - openclawDir?: string; - openclawBaseUrl?: string; - hookToken?: string; - sessionId?: string; - message?: string; - fetchImpl?: typeof fetch; - resolveConfigImpl?: typeof resolveConfig; - json?: boolean; -}; - -type OpenclawRelayWebsocketTestOptions = { - peer?: string; - homeDir?: string; - openclawDir?: string; - fetchImpl?: typeof fetch; - resolveConfigImpl?: typeof resolveConfig; - json?: boolean; -}; - -type OpenclawGatewayDeviceApprovalInput = { - requestId: string; - openclawDir: string; - openclawConfigPath: string; -}; - -type 
OpenclawGatewayDeviceApprovalExecution = { - ok: boolean; - unavailable?: boolean; - exitCode?: number; - stdout?: string; - stderr?: string; - errorMessage?: string; -}; - -type OpenclawGatewayDeviceApprovalRunner = ( - input: OpenclawGatewayDeviceApprovalInput, -) => Promise; - -type OpenclawGatewayDeviceApprovalAttempt = { - requestId: string; - ok: boolean; - unavailable: boolean; - reason?: string; - exitCode?: number; -}; - -type OpenclawGatewayDeviceApprovalSummary = { - gatewayDevicePendingPath: string; - pendingRequestIds: string[]; - attempts: OpenclawGatewayDeviceApprovalAttempt[]; -}; - -type PeerEntry = { - did: string; - proxyUrl: string; - agentName?: string; - humanName?: string; -}; - -type PeersConfig = { - peers: Record; -}; - -export type OpenclawInviteResult = { - code: string; - did: string; - proxyUrl: string; - peerAlias?: string; - agentName?: string; - humanName?: string; -}; - -export type OpenclawSetupResult = { - openclawConfigPath: string; - transformTargetPath: string; - relayTransformRuntimePath: string; - relayTransformPeersPath: string; - openclawBaseUrl: string; - connectorBaseUrl: string; - relayRuntimeConfigPath: string; - openclawConfigChanged: boolean; -}; - -type OpenclawRuntimeMode = "auto" | "service" | "detached"; - -type OpenclawRuntimeResult = { - runtimeMode: "none" | "service" | "detached" | "existing"; - runtimeStatus: "running" | "skipped"; - websocketStatus: "connected" | "skipped"; - connectorStatusUrl?: string; -}; - -export type OpenclawSelfSetupResult = OpenclawSetupResult & - OpenclawRuntimeResult; - -type OpenclawRelayRuntimeConfig = { - openclawBaseUrl: string; - openclawHookToken?: string; - relayTransformPeersPath?: string; - updatedAt?: string; -}; - -type ConnectorAssignmentEntry = { - connectorBaseUrl: string; - updatedAt: string; -}; - -type ConnectorAssignmentsConfig = { - agents: Record; -}; - -type OpenclawDoctorCheckId = - | "config.registry" - | "state.selectedAgent" - | "state.credentials" - | 
"state.peers" - | "state.transform" - | "state.hookMapping" - | "state.hookToken" - | "state.hookSessionRouting" - | "state.gatewayAuth" - | "state.gatewayDevicePairing" - | "state.openclawBaseUrl" - | "state.connectorRuntime" - | "state.connectorInboundInbox" - | "state.openclawHookHealth"; - -type OpenclawDoctorCheckStatus = "pass" | "fail"; - -export type OpenclawDoctorCheckResult = { - id: OpenclawDoctorCheckId; - label: string; - status: OpenclawDoctorCheckStatus; - message: string; - remediationHint?: string; - details?: Record; -}; - -export type OpenclawDoctorResult = { - status: "healthy" | "unhealthy"; - checkedAt: string; - checks: OpenclawDoctorCheckResult[]; -}; - -export type OpenclawRelayTestResult = { - status: "success" | "failure"; - checkedAt: string; - peerAlias: string; - endpoint: string; - message: string; - httpStatus?: number; - remediationHint?: string; - details?: Record; - preflight?: OpenclawDoctorResult; -}; - -export type OpenclawRelayWebsocketTestResult = { - status: "success" | "failure"; - checkedAt: string; - peerAlias: string; - message: string; - connectorBaseUrl?: string; - connectorStatusUrl?: string; - remediationHint?: string; - details?: Record; - preflight?: OpenclawDoctorResult; -}; - -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - -function createCliError( - code: string, - message: string, - details?: Record, -): AppError { - return new AppError({ - code, - message, - status: 400, - details, - }); -} - -function getErrorCode(error: unknown): string | undefined { - if (!isRecord(error)) { - return undefined; - } - - return typeof error.code === "string" ? 
error.code : undefined; -} - -function parseNonEmptyString(value: unknown, label: string): string { - if (typeof value !== "string") { - throw createCliError( - "CLI_OPENCLAW_INVALID_INPUT", - "Input must be a string", - { - label, - }, - ); - } - - const trimmed = value.trim(); - if (trimmed.length === 0) { - throw createCliError( - "CLI_OPENCLAW_INVALID_INPUT", - "Input must not be empty", - { label }, - ); - } - - return trimmed; -} - -function parseOptionalProfileName( - value: unknown, - label: "agentName" | "humanName", -): string | undefined { - if (value === undefined) { - return undefined; - } - - return parseNonEmptyString(value, label); -} - -function parsePeerAlias(value: unknown): string { - const alias = parseNonEmptyString(value, "peer alias"); - if (alias.length > 128) { - throw createCliError( - "CLI_OPENCLAW_INVALID_PEER_ALIAS", - "peer alias must be at most 128 characters", - ); - } - - if (!PEER_ALIAS_PATTERN.test(alias)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_PEER_ALIAS", - "peer alias must use only letters, numbers, dot, underscore, or hyphen", - ); - } - - return alias; -} - -function parseProxyUrl(value: unknown): string { - return parseHttpUrl(value, { - label: "proxy URL", - code: "CLI_OPENCLAW_INVALID_PROXY_URL", - message: "proxy URL must be a valid URL", - }); -} - -function parseHttpUrl( - value: unknown, - input: { - label: string; - code: string; - message: string; - }, -): string { - const candidate = parseNonEmptyString(value, input.label); - let parsedUrl: URL; - try { - parsedUrl = new URL(candidate); - } catch { - throw createCliError(input.code, input.message); - } - - if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { - throw createCliError(input.code, `${input.label} must use http or https`); - } - - if ( - parsedUrl.pathname === "/" && - parsedUrl.search.length === 0 && - parsedUrl.hash.length === 0 - ) { - return parsedUrl.origin; - } - - return parsedUrl.toString(); -} - -function 
parseOpenclawBaseUrl(value: unknown): string { - return parseHttpUrl(value, { - label: "OpenClaw base URL", - code: "CLI_OPENCLAW_INVALID_OPENCLAW_BASE_URL", - message: "OpenClaw base URL must be a valid URL", - }); -} - -function parseAgentDid(value: unknown, label: string): string { - const did = parseNonEmptyString(value, label); - - try { - const parsed = parseDid(did); - if (parsed.kind !== "agent") { - throw createCliError( - "CLI_OPENCLAW_INVALID_DID", - "DID is not an agent DID", - ); - } - } catch { - throw createCliError("CLI_OPENCLAW_INVALID_DID", "Agent DID is invalid", { - label, - }); - } - - return did; -} - -function parseInvitePayload(value: unknown): OpenclawInvitePayload { - if (!isRecord(value)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_INVITE", - "invite payload must be an object", - ); - } - - if (value.v !== 1) { - throw createCliError( - "CLI_OPENCLAW_INVALID_INVITE", - "invite payload version is unsupported", - ); - } - - const issuedAt = parseNonEmptyString(value.issuedAt, "invite issuedAt"); - const did = parseAgentDid(value.did, "invite did"); - const proxyUrl = parseProxyUrl(value.proxyUrl); - const alias = - value.alias === undefined ? 
undefined : parsePeerAlias(value.alias); - const agentName = parseOptionalProfileName(value.agentName, "agentName"); - const humanName = parseOptionalProfileName(value.humanName, "humanName"); - - if ( - alias === undefined && - agentName === undefined && - humanName === undefined - ) { - return { - v: 1, - issuedAt, - did, - proxyUrl, - }; - } - - if (agentName === undefined && humanName === undefined) { - return { - v: 1, - issuedAt, - did, - proxyUrl, - alias, - }; - } - - return { - v: 1, - issuedAt, - did, - proxyUrl, - alias, - agentName, - humanName, - }; -} - -function resolveHomeDir(homeDir?: string): string { - if (typeof homeDir === "string" && homeDir.trim().length > 0) { - return homeDir.trim(); - } - - return homedir(); -} - -function resolveHomePrefixedPath(input: string, homeDir: string): string { - const trimmed = input.trim(); - if (trimmed.startsWith("~")) { - return resolvePath(trimmed.replace(/^~(?=$|[\\/])/, homeDir)); - } - return resolvePath(trimmed); -} - -function readNonEmptyEnvPath( - value: string | undefined, - homeDir: string, -): string | undefined { - if (typeof value !== "string" || value.trim().length === 0) { - return undefined; - } - return resolveHomePrefixedPath(value, homeDir); -} - -function resolveOpenclawHomeDir(homeDir: string): string { - const envOpenclawHome = readNonEmptyEnvPath( - process.env.OPENCLAW_HOME, - homeDir, - ); - return envOpenclawHome ?? 
homeDir; -} - -function resolveDefaultOpenclawStateDir(openclawHomeDir: string): string { - const newStateDir = join(openclawHomeDir, OPENCLAW_DIR_NAME); - if (existsSync(newStateDir)) { - return newStateDir; - } - - for (const legacyDirName of LEGACY_OPENCLAW_STATE_DIR_NAMES) { - const legacyStateDir = join(openclawHomeDir, legacyDirName); - if (existsSync(legacyStateDir)) { - return legacyStateDir; - } - } - - return newStateDir; -} - -function resolveOpenclawDir(openclawDir: string | undefined, homeDir: string) { - if (typeof openclawDir === "string" && openclawDir.trim().length > 0) { - return resolveHomePrefixedPath(openclawDir, homeDir); - } - - const envStateDir = readNonEmptyEnvPath( - process.env.OPENCLAW_STATE_DIR ?? process.env.CLAWDBOT_STATE_DIR, - homeDir, - ); - if (envStateDir !== undefined) { - return envStateDir; - } - - const envConfigPath = readNonEmptyEnvPath( - process.env.OPENCLAW_CONFIG_PATH ?? process.env.CLAWDBOT_CONFIG_PATH, - homeDir, - ); - if (envConfigPath !== undefined) { - return dirname(envConfigPath); - } - - const openclawHomeDir = resolveOpenclawHomeDir(homeDir); - return resolveDefaultOpenclawStateDir(openclawHomeDir); -} - -function resolveAgentDirectory(homeDir: string, agentName: string): string { - return join(getConfigDir({ homeDir }), AGENTS_DIR_NAME, agentName); -} - -function resolvePeersPath(homeDir: string): string { - return join(getConfigDir({ homeDir }), PEERS_FILE_NAME); -} - -function resolveOpenclawConfigPath( - openclawDir: string, - homeDir: string, -): string { - const envConfigPath = readNonEmptyEnvPath( - process.env.OPENCLAW_CONFIG_PATH ?? 
process.env.CLAWDBOT_CONFIG_PATH, - homeDir, - ); - if (envConfigPath !== undefined) { - return envConfigPath; - } - - const configCandidates = [ - join(openclawDir, OPENCLAW_CONFIG_FILE_NAME), - ...LEGACY_OPENCLAW_CONFIG_FILE_NAMES.map((fileName) => - join(openclawDir, fileName), - ), - ]; - - for (const candidate of configCandidates) { - if (existsSync(candidate)) { - return candidate; - } - } - - return configCandidates[0]; -} - -function resolveDefaultTransformSource(openclawDir: string): string { - return join(openclawDir, "skills", SKILL_DIR_NAME, RELAY_MODULE_FILE_NAME); -} - -function resolveTransformTargetPath(openclawDir: string): string { - return join(openclawDir, "hooks", "transforms", RELAY_MODULE_FILE_NAME); -} - -function resolveOpenclawAgentNamePath(homeDir: string): string { - return join(getConfigDir({ homeDir }), OPENCLAW_AGENT_FILE_NAME); -} - -function resolveRelayRuntimeConfigPath(homeDir: string): string { - return join(getConfigDir({ homeDir }), OPENCLAW_RELAY_RUNTIME_FILE_NAME); -} - -function resolveConnectorAssignmentsPath(homeDir: string): string { - return join(getConfigDir({ homeDir }), OPENCLAW_CONNECTORS_FILE_NAME); -} - -function resolveTransformRuntimePath(openclawDir: string): string { - return join(openclawDir, "hooks", "transforms", RELAY_RUNTIME_FILE_NAME); -} - -function resolveTransformPeersPath(openclawDir: string): string { - return join(openclawDir, "hooks", "transforms", RELAY_PEERS_FILE_NAME); -} - -type OpenclawGatewayPendingState = - | { - status: "missing"; - gatewayDevicePendingPath: string; - } - | { - status: "invalid"; - gatewayDevicePendingPath: string; - } - | { - status: "unreadable"; - gatewayDevicePendingPath: string; - } - | { - status: "ok"; - gatewayDevicePendingPath: string; - pendingRequestIds: string[]; - }; - -async function readOpenclawGatewayPendingState( - openclawDir: string, -): Promise { - const gatewayDevicePendingPath = join(openclawDir, "devices", "pending.json"); - try { - const 
pendingPayload = await readJsonFile(gatewayDevicePendingPath); - if (!isRecord(pendingPayload)) { - return { - status: "invalid", - gatewayDevicePendingPath, - }; - } - return { - status: "ok", - gatewayDevicePendingPath, - pendingRequestIds: Object.keys(pendingPayload), - }; - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return { - status: "missing", - gatewayDevicePendingPath, - }; - } - return { - status: "unreadable", - gatewayDevicePendingPath, - }; - } -} - -function resolveOpenclawGatewayApprovalCommand(): string { - const envOverride = process.env.OPENCLAW_GATEWAY_APPROVAL_COMMAND?.trim(); - if (typeof envOverride === "string" && envOverride.length > 0) { - return envOverride; - } - return OPENCLAW_GATEWAY_APPROVAL_COMMAND; -} - -async function runOpenclawGatewayApprovalCommand(input: { - command: string; - args: string[]; - openclawDir: string; - openclawConfigPath: string; -}): Promise { - return await new Promise( - (resolve) => { - const child = spawn(input.command, input.args, { - env: { - ...process.env, - OPENCLAW_STATE_DIR: input.openclawDir, - OPENCLAW_CONFIG_PATH: input.openclawConfigPath, - }, - stdio: ["ignore", "pipe", "pipe"], - }); - - let settled = false; - let stdout = ""; - let stderr = ""; - - const finalize = (result: OpenclawGatewayDeviceApprovalExecution) => { - if (settled) { - return; - } - settled = true; - resolve({ - ...result, - stdout: stdout.trim(), - stderr: stderr.trim(), - }); - }; - - const timeout = setTimeout(() => { - try { - child.kill("SIGTERM"); - } catch { - // Best-effort timeout shutdown. 
- } - finalize({ - ok: false, - errorMessage: `command timed out after ${OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS}ms`, - }); - }, OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS); - - child.stdout?.on("data", (chunk: Buffer | string) => { - stdout += String(chunk); - }); - child.stderr?.on("data", (chunk: Buffer | string) => { - stderr += String(chunk); - }); - - child.once("error", (error) => { - clearTimeout(timeout); - const errorCode = getErrorCode(error); - finalize({ - ok: false, - unavailable: errorCode === "ENOENT", - errorMessage: - error instanceof Error - ? error.message - : "failed to run openclaw command", - }); - }); - - child.once("close", (exitCode) => { - clearTimeout(timeout); - finalize({ - ok: exitCode === 0, - exitCode: typeof exitCode === "number" ? exitCode : undefined, - }); - }); - }, - ); -} - -async function runOpenclawGatewayDeviceApproval( - input: OpenclawGatewayDeviceApprovalInput, -): Promise { - const command = resolveOpenclawGatewayApprovalCommand(); - return await runOpenclawGatewayApprovalCommand({ - command, - args: ["devices", "approve", input.requestId, "--json"], - openclawDir: input.openclawDir, - openclawConfigPath: input.openclawConfigPath, - }); -} - -async function autoApproveOpenclawGatewayDevices(input: { - homeDir: string; - openclawDir: string; - runner?: OpenclawGatewayDeviceApprovalRunner; -}): Promise { - const pendingState = await readOpenclawGatewayPendingState(input.openclawDir); - if ( - pendingState.status !== "ok" || - pendingState.pendingRequestIds.length === 0 - ) { - return undefined; - } - - const openclawConfigPath = resolveOpenclawConfigPath( - input.openclawDir, - input.homeDir, - ); - const approvalRunner = input.runner ?? 
runOpenclawGatewayDeviceApproval; - const attempts: OpenclawGatewayDeviceApprovalAttempt[] = []; - - for (const requestId of pendingState.pendingRequestIds) { - const execution = await approvalRunner({ - requestId, - openclawDir: input.openclawDir, - openclawConfigPath, - }); - attempts.push({ - requestId, - ok: execution.ok, - unavailable: execution.unavailable === true, - reason: - execution.errorMessage ?? - (execution.stderr && execution.stderr.length > 0 - ? execution.stderr - : execution.stdout && execution.stdout.length > 0 - ? execution.stdout - : undefined), - exitCode: execution.exitCode, - }); - if (execution.unavailable === true) { - break; - } - } - - return { - gatewayDevicePendingPath: pendingState.gatewayDevicePendingPath, - pendingRequestIds: pendingState.pendingRequestIds, - attempts, - }; -} - -async function readJsonFile(filePath: string): Promise { - const raw = await readFile(filePath, "utf8"); - - try { - return JSON.parse(raw); - } catch { - throw createCliError("CLI_OPENCLAW_INVALID_JSON", "JSON file is invalid", { - filePath, - }); - } -} - -async function ensureLocalAgentCredentials( - homeDir: string, - agentName: string, -): Promise { - const agentDir = resolveAgentDirectory(homeDir, agentName); - const requiredFiles = [ - join(agentDir, SECRET_KEY_FILE_NAME), - join(agentDir, AIT_FILE_NAME), - ]; - - for (const filePath of requiredFiles) { - let content: string; - try { - content = await readFile(filePath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - throw createCliError( - "CLI_OPENCLAW_MISSING_AGENT_CREDENTIALS", - "Local agent credentials are missing", - { agentName, filePath }, - ); - } - - throw error; - } - - if (content.trim().length === 0) { - throw createCliError( - "CLI_OPENCLAW_EMPTY_AGENT_CREDENTIALS", - "Agent credential file is empty", - { filePath }, - ); - } - } -} - -function encodeInvitePayload(payload: OpenclawInvitePayload): string { - const encoded = 
encodeBase64url(textEncoder.encode(JSON.stringify(payload))); - return `${INVITE_CODE_PREFIX}${encoded}`; -} - -function decodeInvitePayload(code: string): OpenclawInvitePayload { - const rawCode = parseNonEmptyString(code, "invite code"); - if (!rawCode.startsWith(INVITE_CODE_PREFIX)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_INVITE", - "Invite code has invalid prefix", - ); - } - - const encoded = rawCode.slice(INVITE_CODE_PREFIX.length); - if (encoded.length === 0) { - throw createCliError( - "CLI_OPENCLAW_INVALID_INVITE", - "invite code payload is empty", - ); - } - - let decodedJson: string; - try { - decodedJson = textDecoder.decode(decodeBase64url(encoded)); - } catch { - throw createCliError( - "CLI_OPENCLAW_INVALID_INVITE", - "invite code payload is not valid base64url", - ); - } - - let parsedPayload: unknown; - try { - parsedPayload = JSON.parse(decodedJson); - } catch { - throw createCliError( - "CLI_OPENCLAW_INVALID_INVITE", - "invite code payload is not valid JSON", - ); - } - - return parseInvitePayload(parsedPayload); -} - -async function writeSecureFile( - filePath: string, - content: string, -): Promise { - await mkdir(dirname(filePath), { recursive: true }); - await writeFile(filePath, content, "utf8"); - await chmod(filePath, FILE_MODE); -} - -async function loadPeersConfig(peersPath: string): Promise { - let parsed: unknown; - - try { - parsed = await readJsonFile(peersPath); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return { peers: {} }; - } - - throw error; - } - - if (!isRecord(parsed)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_PEERS_CONFIG", - "Peer config root must be a JSON object", - { peersPath }, - ); - } - - const peersValue = parsed.peers; - if (peersValue === undefined) { - return { peers: {} }; - } - - if (!isRecord(peersValue)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_PEERS_CONFIG", - "Peer config peers field must be an object", - { peersPath }, - ); - } - - const peers: Record = {}; 
- for (const [alias, value] of Object.entries(peersValue)) { - const normalizedAlias = parsePeerAlias(alias); - if (!isRecord(value)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_PEERS_CONFIG", - "Peer entry must be an object", - { alias: normalizedAlias }, - ); - } - - const did = parseAgentDid(value.did, `Peer ${normalizedAlias} did`); - const proxyUrl = parseProxyUrl(value.proxyUrl); - const agentName = parseOptionalProfileName(value.agentName, "agentName"); - const humanName = parseOptionalProfileName(value.humanName, "humanName"); - - if (agentName === undefined && humanName === undefined) { - peers[normalizedAlias] = { did, proxyUrl }; - continue; - } - - peers[normalizedAlias] = { did, proxyUrl, agentName, humanName }; - } - - return { peers }; -} - -async function savePeersConfig( - peersPath: string, - config: PeersConfig, -): Promise { - await writeSecureFile(peersPath, `${JSON.stringify(config, null, 2)}\n`); -} - -function parseConnectorBaseUrlForAssignment( - value: unknown, - label: string, -): string { - return parseHttpUrl(value, { - label, - code: "CLI_OPENCLAW_INVALID_CONNECTOR_BASE_URL", - message: "Connector base URL must be a valid URL", - }); -} - -function parseConnectorAssignments( - value: unknown, - connectorAssignmentsPath: string, -): ConnectorAssignmentsConfig { - if (!isRecord(value)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_CONNECTOR_ASSIGNMENTS", - "Connector assignments config must be an object", - { connectorAssignmentsPath }, - ); - } - - const agentsRaw = value.agents; - if (!isRecord(agentsRaw)) { - return { agents: {} }; - } - - const agents: Record = {}; - for (const [agentName, entryValue] of Object.entries(agentsRaw)) { - if (!isRecord(entryValue)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_CONNECTOR_ASSIGNMENTS", - "Connector assignment entry must be an object", - { connectorAssignmentsPath, agentName }, - ); - } - - const connectorBaseUrl = parseConnectorBaseUrlForAssignment( - 
entryValue.connectorBaseUrl, - "connectorBaseUrl", - ); - const updatedAt = - typeof entryValue.updatedAt === "string" && - entryValue.updatedAt.trim().length > 0 - ? entryValue.updatedAt.trim() - : nowIso(); - - agents[assertValidAgentName(agentName)] = { - connectorBaseUrl, - updatedAt, - }; - } - - return { agents }; -} - -async function loadConnectorAssignments( - connectorAssignmentsPath: string, -): Promise { - let parsed: unknown; - try { - parsed = await readJsonFile(connectorAssignmentsPath); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return { agents: {} }; - } - throw error; - } - - return parseConnectorAssignments(parsed, connectorAssignmentsPath); -} - -async function saveConnectorAssignments( - connectorAssignmentsPath: string, - config: ConnectorAssignmentsConfig, -): Promise { - await writeSecureFile( - connectorAssignmentsPath, - `${JSON.stringify(config, null, 2)}\n`, - ); -} - -function parseConnectorPortFromBaseUrl(baseUrl: string): number { - const parsed = new URL(baseUrl); - if (parsed.port) { - return Number(parsed.port); - } - return parsed.protocol === "https:" ? 
443 : 80; -} - -function allocateConnectorPort( - assignments: ConnectorAssignmentsConfig, - agentName: string, -): number { - const existing = assignments.agents[agentName]; - if (existing) { - return parseConnectorPortFromBaseUrl(existing.connectorBaseUrl); - } - - const usedPorts = new Set(); - for (const entry of Object.values(assignments.agents)) { - usedPorts.add(parseConnectorPortFromBaseUrl(entry.connectorBaseUrl)); - } - - let nextPort = DEFAULT_CONNECTOR_PORT; - while (usedPorts.has(nextPort)) { - nextPort += 1; - } - - return nextPort; -} - -function buildConnectorBaseUrl(host: string, port: number): string { - return `http://${host}:${port}`; -} - -function buildRelayConnectorBaseUrls(port: number): string[] { - return [ - buildConnectorBaseUrl(CONNECTOR_HOST_DOCKER, port), - buildConnectorBaseUrl(CONNECTOR_HOST_DOCKER_GATEWAY, port), - buildConnectorBaseUrl(CONNECTOR_HOST_LINUX_BRIDGE, port), - buildConnectorBaseUrl(CONNECTOR_HOST_LOOPBACK, port), - ]; -} - -function parseOpenclawRuntimeMode(value: unknown): OpenclawRuntimeMode { - if (typeof value !== "string" || value.trim().length === 0) { - return "auto"; - } - - const normalized = value.trim().toLowerCase(); - if ( - normalized === "auto" || - normalized === "service" || - normalized === "detached" - ) { - return normalized; - } - - throw createCliError( - "CLI_OPENCLAW_SETUP_RUNTIME_MODE_INVALID", - "runtimeMode must be one of: auto, service, detached", - ); -} - -function parseWaitTimeoutSeconds(value: unknown): number { - if (typeof value !== "string" || value.trim().length === 0) { - return DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS; - } - - const parsed = Number.parseInt(value, 10); - if (!Number.isInteger(parsed) || parsed < 1) { - throw createCliError( - "CLI_OPENCLAW_SETUP_TIMEOUT_INVALID", - "waitTimeoutSeconds must be a positive integer", - ); - } - - return parsed; -} - -function resolveConnectorStatusUrl(connectorBaseUrl: string): string { - const normalizedBase = 
connectorBaseUrl.endsWith("/") - ? connectorBaseUrl - : `${connectorBaseUrl}/`; - return new URL( - DEFAULT_CONNECTOR_STATUS_PATH.slice(1), - normalizedBase, - ).toString(); -} - -type ConnectorHealthStatus = { - connected: boolean; - inboundInbox?: { - deadLetterBytes?: number; - deadLetterCount?: number; - oldestDeadLetterAt?: string; - lastReplayAt?: string; - lastReplayError?: string; - nextAttemptAt?: string; - oldestPendingAt?: string; - pendingBytes?: number; - pendingCount?: number; - replayerActive?: boolean; - }; - openclawHook?: { - lastAttemptAt?: string; - lastAttemptStatus?: "ok" | "failed"; - url?: string; - }; - reachable: boolean; - statusUrl: string; - reason?: string; -}; - -function parseConnectorStatusPayload(payload: unknown): { - inboundInbox?: { - deadLetterBytes?: number; - deadLetterCount?: number; - oldestDeadLetterAt?: string; - lastReplayAt?: string; - lastReplayError?: string; - nextAttemptAt?: string; - oldestPendingAt?: string; - pendingBytes?: number; - pendingCount?: number; - replayerActive?: boolean; - }; - openclawHook?: { - lastAttemptAt?: string; - lastAttemptStatus?: "ok" | "failed"; - url?: string; - }; - websocketConnected: boolean; -} { - if ( - !isRecord(payload) || - !isRecord(payload.websocket) || - typeof payload.websocket.connected !== "boolean" - ) { - throw createCliError( - "CLI_OPENCLAW_SETUP_CONNECTOR_STATUS_INVALID", - "Connector status response is invalid", - ); - } - - const inboundRoot = isRecord(payload.inbound) ? payload.inbound : undefined; - const pending = - inboundRoot && isRecord(inboundRoot.pending) - ? inboundRoot.pending - : undefined; - const deadLetter = - inboundRoot && isRecord(inboundRoot.deadLetter) - ? inboundRoot.deadLetter - : undefined; - const replay = - inboundRoot && isRecord(inboundRoot.replay) - ? inboundRoot.replay - : undefined; - const hook = - inboundRoot && isRecord(inboundRoot.openclawHook) - ? 
inboundRoot.openclawHook - : undefined; - - return { - websocketConnected: payload.websocket.connected, - inboundInbox: - pending || deadLetter || replay - ? { - pendingCount: - pending && typeof pending.pendingCount === "number" - ? pending.pendingCount - : undefined, - pendingBytes: - pending && typeof pending.pendingBytes === "number" - ? pending.pendingBytes - : undefined, - oldestPendingAt: - pending && typeof pending.oldestPendingAt === "string" - ? pending.oldestPendingAt - : undefined, - nextAttemptAt: - pending && typeof pending.nextAttemptAt === "string" - ? pending.nextAttemptAt - : undefined, - lastReplayAt: - replay && typeof replay.lastReplayAt === "string" - ? replay.lastReplayAt - : undefined, - lastReplayError: - replay && typeof replay.lastReplayError === "string" - ? replay.lastReplayError - : undefined, - replayerActive: - replay && typeof replay.replayerActive === "boolean" - ? replay.replayerActive - : undefined, - deadLetterCount: - deadLetter && typeof deadLetter.deadLetterCount === "number" - ? deadLetter.deadLetterCount - : undefined, - deadLetterBytes: - deadLetter && typeof deadLetter.deadLetterBytes === "number" - ? deadLetter.deadLetterBytes - : undefined, - oldestDeadLetterAt: - deadLetter && typeof deadLetter.oldestDeadLetterAt === "string" - ? deadLetter.oldestDeadLetterAt - : undefined, - } - : undefined, - openclawHook: hook - ? { - url: typeof hook.url === "string" ? hook.url : undefined, - lastAttemptAt: - typeof hook.lastAttemptAt === "string" - ? hook.lastAttemptAt - : undefined, - lastAttemptStatus: - hook.lastAttemptStatus === "ok" || - hook.lastAttemptStatus === "failed" - ? 
hook.lastAttemptStatus - : undefined, - } - : undefined, - }; -} - -async function fetchConnectorHealthStatus(input: { - connectorBaseUrl: string; - fetchImpl: typeof fetch; -}): Promise { - const statusUrl = resolveConnectorStatusUrl(input.connectorBaseUrl); - try { - const response = await input.fetchImpl(statusUrl, { - method: "GET", - headers: { - accept: "application/json", - }, - }); - if (!response.ok) { - return { - connected: false, - reachable: false, - statusUrl, - reason: `HTTP ${response.status}`, - }; - } - - let payload: unknown; - try { - payload = await response.json(); - } catch { - return { - connected: false, - reachable: false, - statusUrl, - reason: "invalid JSON payload", - }; - } - - const parsed = parseConnectorStatusPayload(payload); - return { - connected: parsed.websocketConnected, - inboundInbox: parsed.inboundInbox, - openclawHook: parsed.openclawHook, - reachable: true, - statusUrl, - reason: parsed.websocketConnected - ? undefined - : "connector websocket is disconnected", - }; - } catch { - return { - connected: false, - reachable: false, - statusUrl, - reason: "connector status endpoint is unreachable", - }; - } -} - -async function waitForConnectorConnected(input: { - connectorBaseUrl: string; - fetchImpl: typeof fetch; - waitTimeoutSeconds: number; -}): Promise { - const deadline = nowUtcMs() + input.waitTimeoutSeconds * 1000; - let latest = await fetchConnectorHealthStatus({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - }); - - while (!latest.connected && nowUtcMs() < deadline) { - await new Promise((resolve) => { - setTimeout(resolve, 1000); - }); - latest = await fetchConnectorHealthStatus({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - }); - } - - if (!latest.connected) { - throw createCliError( - "CLI_OPENCLAW_SETUP_CONNECTOR_NOT_READY", - `Connector runtime is not websocket-connected after ${input.waitTimeoutSeconds} seconds`, - { - connectorBaseUrl: 
input.connectorBaseUrl, - connectorStatusUrl: latest.statusUrl, - reason: latest.reason, - }, - ); - } - - return latest; -} - -function sleepMilliseconds(durationMs: number): Promise { - return new Promise((resolve) => { - setTimeout(resolve, durationMs); - }); -} - -async function monitorConnectorStabilityWindow(input: { - connectorBaseUrl: string; - fetchImpl: typeof fetch; - durationSeconds: number; - pollIntervalMs: number; -}): Promise { - if (input.durationSeconds <= 0) { - return fetchConnectorHealthStatus({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - }); - } - - const deadline = nowUtcMs() + input.durationSeconds * 1000; - let latest = await fetchConnectorHealthStatus({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - }); - if (!latest.connected) { - return latest; - } - - while (nowUtcMs() < deadline) { - await sleepMilliseconds(input.pollIntervalMs); - latest = await fetchConnectorHealthStatus({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - }); - if (!latest.connected) { - return latest; - } - } - - return latest; -} - -function resolveConnectorRunDir(homeDir: string): string { - return join(getConfigDir({ homeDir }), CONNECTOR_RUN_DIR_NAME); -} - -function resolveConnectorPidPath(homeDir: string, agentName: string): string { - return join(resolveConnectorRunDir(homeDir), `connector-${agentName}.pid`); -} - -function resolveDetachedConnectorLogPath( - homeDir: string, - agentName: string, - stream: "stdout" | "stderr", -): string { - const suffix = - stream === "stdout" - ? 
CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX - : CONNECTOR_DETACHED_STDERR_FILE_SUFFIX; - return join( - resolveConnectorRunDir(homeDir), - `connector-${agentName}.${suffix}`, - ); -} - -async function readConnectorPidFile( - pidPath: string, -): Promise { - try { - const raw = (await readFile(pidPath, "utf8")).trim(); - if (raw.length === 0) { - return undefined; - } - - const parsed = Number.parseInt(raw, 10); - if (!Number.isInteger(parsed) || parsed <= 0) { - return undefined; - } - - return parsed; - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return undefined; - } - throw error; - } -} - -function isPidRunning(pid: number): boolean { - try { - process.kill(pid, 0); - return true; - } catch { - return false; - } -} - -async function stopDetachedConnectorIfRunning(input: { - homeDir: string; - agentName: string; -}): Promise { - const pidPath = resolveConnectorPidPath(input.homeDir, input.agentName); - const pid = await readConnectorPidFile(pidPath); - if (pid === undefined || !isPidRunning(pid)) { - return; - } - - try { - process.kill(pid, "SIGTERM"); - } catch { - // Ignore stale pid races; setup health checks will verify readiness. - } -} - -function resolveCliEntryPathForDetachedStart(): string { - const argvEntry = typeof process.argv[1] === "string" ? 
process.argv[1] : ""; - if (argvEntry.length > 0 && existsSync(argvEntry)) { - return argvEntry; - } - - const modulePath = fileURLToPath(import.meta.url); - return join(dirname(modulePath), "..", "bin.js"); -} - -async function startDetachedConnectorRuntime(input: { - agentName: string; - homeDir: string; - openclawBaseUrl: string; -}): Promise { - await stopDetachedConnectorIfRunning({ - homeDir: input.homeDir, - agentName: input.agentName, - }); - const runDir = resolveConnectorRunDir(input.homeDir); - await mkdir(runDir, { recursive: true }); - - const cliEntryPath = resolveCliEntryPathForDetachedStart(); - const args = [ - cliEntryPath, - "connector", - "start", - input.agentName, - "--openclaw-base-url", - input.openclawBaseUrl, - ]; - const stdoutLogPath = resolveDetachedConnectorLogPath( - input.homeDir, - input.agentName, - "stdout", - ); - const stderrLogPath = resolveDetachedConnectorLogPath( - input.homeDir, - input.agentName, - "stderr", - ); - const stdoutFd = openSync(stdoutLogPath, "a"); - const stderrFd = openSync(stderrLogPath, "a"); - - try { - const child = spawn(process.execPath, args, { - detached: true, - stdio: ["ignore", stdoutFd, stderrFd], - env: process.env, - }); - child.unref(); - await writeSecureFile( - resolveConnectorPidPath(input.homeDir, input.agentName), - `${child.pid}\n`, - ); - logger.info("cli.openclaw.setup.detached_runtime_started", { - agentName: input.agentName, - pid: child.pid, - stdoutLogPath, - stderrLogPath, - }); - } finally { - closeSync(stdoutFd); - closeSync(stderrFd); - } -} - -async function startSetupConnectorRuntime(input: { - agentName: string; - homeDir: string; - openclawBaseUrl: string; - connectorBaseUrl: string; - mode: OpenclawRuntimeMode; - waitTimeoutSeconds: number; - fetchImpl: typeof fetch; -}): Promise { - if (input.mode !== "service") { - const existingStatus = await fetchConnectorHealthStatus({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - }); - if 
(existingStatus.connected) { - return { - runtimeMode: "existing", - runtimeStatus: "running", - websocketStatus: "connected", - connectorStatusUrl: existingStatus.statusUrl, - }; - } - } - - let runtimeMode: "service" | "detached" = "service"; - if (input.mode === "detached") { - runtimeMode = "detached"; - } else { - try { - await installConnectorServiceForAgent(input.agentName, { - platform: "auto", - openclawBaseUrl: input.openclawBaseUrl, - }); - runtimeMode = "service"; - } catch (error) { - if (input.mode === "service") { - throw error; - } - runtimeMode = "detached"; - logger.warn("cli.openclaw.setup.service_fallback_detached", { - agentName: input.agentName, - reason: error instanceof Error ? error.message : "unknown", - }); - } - } - - if (runtimeMode === "detached") { - await startDetachedConnectorRuntime({ - agentName: input.agentName, - homeDir: input.homeDir, - openclawBaseUrl: input.openclawBaseUrl, - }); - } - - const connectedStatus = await waitForConnectorConnected({ - connectorBaseUrl: input.connectorBaseUrl, - fetchImpl: input.fetchImpl, - waitTimeoutSeconds: input.waitTimeoutSeconds, - }); - - return { - runtimeMode, - runtimeStatus: "running", - websocketStatus: "connected", - connectorStatusUrl: connectedStatus.statusUrl, - }; -} - -function parseRelayRuntimeConfig( - value: unknown, - relayRuntimeConfigPath: string, -): OpenclawRelayRuntimeConfig { - if (!isRecord(value)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_RELAY_RUNTIME_CONFIG", - "Relay runtime config must be an object", - { relayRuntimeConfigPath }, - ); - } - - const updatedAt = - typeof value.updatedAt === "string" && value.updatedAt.trim().length > 0 - ? value.updatedAt.trim() - : undefined; - const openclawHookToken = - typeof value.openclawHookToken === "string" && - value.openclawHookToken.trim().length > 0 - ? 
value.openclawHookToken.trim() - : undefined; - const relayTransformPeersPath = - typeof value.relayTransformPeersPath === "string" && - value.relayTransformPeersPath.trim().length > 0 - ? value.relayTransformPeersPath.trim() - : undefined; - - return { - openclawBaseUrl: parseOpenclawBaseUrl(value.openclawBaseUrl), - openclawHookToken, - relayTransformPeersPath, - updatedAt, - }; -} - -async function loadRelayRuntimeConfig( - relayRuntimeConfigPath: string, -): Promise { - let parsed: unknown; - try { - parsed = await readJsonFile(relayRuntimeConfigPath); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return undefined; - } - - throw error; - } - - return parseRelayRuntimeConfig(parsed, relayRuntimeConfigPath); -} - -async function saveRelayRuntimeConfig( - relayRuntimeConfigPath: string, - openclawBaseUrl: string, - openclawHookToken?: string, - relayTransformPeersPath?: string, -): Promise { - const config: OpenclawRelayRuntimeConfig = { - openclawBaseUrl, - ...(openclawHookToken ? { openclawHookToken } : {}), - ...(relayTransformPeersPath ? 
{ relayTransformPeersPath } : {}), - updatedAt: nowIso(), - }; - - await writeSecureFile( - relayRuntimeConfigPath, - `${JSON.stringify(config, null, 2)}\n`, - ); -} - -async function resolveOpenclawBaseUrl(input: { - optionValue?: string; - relayRuntimeConfigPath: string; -}): Promise { - if ( - typeof input.optionValue === "string" && - input.optionValue.trim().length > 0 - ) { - return parseOpenclawBaseUrl(input.optionValue); - } - - const envOpenclawBaseUrl = process.env.OPENCLAW_BASE_URL; - if ( - typeof envOpenclawBaseUrl === "string" && - envOpenclawBaseUrl.trim().length > 0 - ) { - return parseOpenclawBaseUrl(envOpenclawBaseUrl); - } - - const existingConfig = await loadRelayRuntimeConfig( - input.relayRuntimeConfigPath, - ); - if (existingConfig !== undefined) { - return existingConfig.openclawBaseUrl; - } - - return DEFAULT_OPENCLAW_BASE_URL; -} - -function normalizeStringArrayWithValues( - value: unknown, - requiredValues: readonly string[], -): string[] { - const normalized = new Set(); - - if (Array.isArray(value)) { - for (const item of value) { - if (typeof item !== "string") { - continue; - } - - const trimmed = item.trim(); - if (trimmed.length > 0) { - normalized.add(trimmed); - } - } - } - - for (const requiredValue of requiredValues) { - const trimmed = requiredValue.trim(); - if (trimmed.length > 0) { - normalized.add(trimmed); - } - } - - return Array.from(normalized); -} - -function resolveHookDefaultSessionKey( - config: Record, - hooks: Record, -): string { - const session = isRecord(config.session) ? config.session : {}; - const scope = - typeof session.scope === "string" ? 
session.scope.trim().toLowerCase() : ""; - const configuredMainSessionKey = - resolveConfiguredOpenclawMainSessionKey(session); - - if ( - typeof hooks.defaultSessionKey === "string" && - hooks.defaultSessionKey.trim().length > 0 - ) { - return normalizeLegacyHookDefaultSessionKey( - hooks.defaultSessionKey, - configuredMainSessionKey, - ); - } - - if (scope === "global") { - return "global"; - } - - return configuredMainSessionKey; -} - -function resolveConfiguredOpenclawMainSessionKey( - session: Record, -): string { - if ( - typeof session.mainKey === "string" && - session.mainKey.trim().length > 0 - ) { - return session.mainKey.trim(); - } - - return DEFAULT_OPENCLAW_MAIN_SESSION_KEY; -} - -function normalizeLegacyHookDefaultSessionKey( - value: string, - fallbackSessionKey: string, -): string { - const trimmed = value.trim(); - const legacyMatch = /^agent:[^:]+:(.+)$/i.exec(trimmed); - if (!legacyMatch) { - return trimmed; - } - const routedSessionKey = legacyMatch[1]?.trim(); - if (typeof routedSessionKey === "string" && routedSessionKey.length > 0) { - return routedSessionKey; - } - - return fallbackSessionKey; -} - -function isCanonicalAgentSessionKey(value: string): boolean { - return /^agent:[^:]+:.+/i.test(value.trim()); -} - -function generateOpenclawHookToken(): string { - return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); -} - -function generateOpenclawGatewayToken(): string { - return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); -} - -function parseGatewayAuthMode( - value: unknown, -): "token" | "password" | "trusted-proxy" | undefined { - if (typeof value !== "string") { - return undefined; - } - const normalized = value.trim().toLowerCase(); - if ( - normalized === "token" || - normalized === "password" || - normalized === "trusted-proxy" - ) { - return normalized; - } - return undefined; -} - -function resolveEnvOpenclawGatewayToken(): string | undefined { - if ( - typeof process.env.OPENCLAW_GATEWAY_TOKEN === "string" && - 
process.env.OPENCLAW_GATEWAY_TOKEN.trim().length > 0 - ) { - return process.env.OPENCLAW_GATEWAY_TOKEN.trim(); - } - return undefined; -} - -function resolveGatewayAuthToken(existingToken?: string): string { - return ( - resolveEnvOpenclawGatewayToken() ?? - existingToken ?? - generateOpenclawGatewayToken() - ); -} - -function upsertRelayHookMapping( - mappingsValue: unknown, -): Record[] { - const mappings = Array.isArray(mappingsValue) - ? mappingsValue.filter(isRecord).map((mapping) => ({ ...mapping })) - : []; - - const existingIndex = mappings.findIndex((mapping) => { - if (mapping.id === HOOK_MAPPING_ID) { - return true; - } - - if (!isRecord(mapping.match)) { - return false; - } - - return mapping.match.path === HOOK_PATH_SEND_TO_PEER; - }); - - const baseMapping = - existingIndex >= 0 && isRecord(mappings[existingIndex]) - ? mappings[existingIndex] - : {}; - - const nextMatch = isRecord(baseMapping.match) ? { ...baseMapping.match } : {}; - nextMatch.path = HOOK_PATH_SEND_TO_PEER; - - const nextTransform = isRecord(baseMapping.transform) - ? 
{ ...baseMapping.transform } - : {}; - nextTransform.module = RELAY_MODULE_FILE_NAME; - - const relayMapping: Record = { - ...baseMapping, - id: HOOK_MAPPING_ID, - match: nextMatch, - action: "agent", - wakeMode: "now", - transform: nextTransform, - }; - - if (existingIndex >= 0) { - mappings[existingIndex] = relayMapping; - return mappings; - } - - mappings.push(relayMapping); - return mappings; -} - -async function patchOpenclawConfig( - openclawConfigPath: string, - hookToken?: string, -): Promise<{ hookToken: string; configChanged: boolean }> { - let config: unknown; - try { - config = await readJsonFile(openclawConfigPath); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - throw createCliError( - "CLI_OPENCLAW_CONFIG_NOT_FOUND", - "OpenClaw config file was not found", - { openclawConfigPath }, - ); - } - - throw error; - } - - if (!isRecord(config)) { - throw createCliError( - "CLI_OPENCLAW_INVALID_CONFIG", - "OpenClaw config root must be an object", - { openclawConfigPath }, - ); - } - - const hooks = isRecord(config.hooks) ? { ...config.hooks } : {}; - const existingHookToken = - typeof hooks.token === "string" && hooks.token.trim().length > 0 - ? hooks.token.trim() - : undefined; - const preferredHookToken = - typeof hookToken === "string" && hookToken.trim().length > 0 - ? hookToken.trim() - : undefined; - const resolvedHookToken = - existingHookToken ?? preferredHookToken ?? generateOpenclawHookToken(); - const defaultSessionKey = resolveHookDefaultSessionKey(config, hooks); - - hooks.enabled = true; - hooks.token = resolvedHookToken; - hooks.defaultSessionKey = defaultSessionKey; - hooks.allowRequestSessionKey = false; - hooks.allowedSessionKeyPrefixes = normalizeStringArrayWithValues( - hooks.allowedSessionKeyPrefixes, - ["hook:", defaultSessionKey], - ); - hooks.mappings = upsertRelayHookMapping(hooks.mappings); - - const gateway = isRecord(config.gateway) ? { ...config.gateway } : {}; - const gatewayAuth = isRecord(gateway.auth) ? 
{ ...gateway.auth } : {}; - const configuredGatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); - if (configuredGatewayAuthMode === undefined) { - gatewayAuth.mode = "token"; - } - - const effectiveGatewayAuthMode = - parseGatewayAuthMode(gatewayAuth.mode) ?? "token"; - if (effectiveGatewayAuthMode === "token") { - const existingGatewayAuthToken = - typeof gatewayAuth.token === "string" && - gatewayAuth.token.trim().length > 0 - ? gatewayAuth.token.trim() - : undefined; - gatewayAuth.token = resolveGatewayAuthToken(existingGatewayAuthToken); - } - gateway.auth = gatewayAuth; - - const nextConfig = { - ...config, - hooks, - gateway, - }; - const configChanged = JSON.stringify(config) !== JSON.stringify(nextConfig); - if (configChanged) { - await writeFile( - openclawConfigPath, - `${JSON.stringify(nextConfig, null, 2)}\n`, - "utf8", - ); - } - - return { - hookToken: resolvedHookToken, - configChanged, - }; -} - -function toDoctorCheck( - input: OpenclawDoctorCheckResult, -): OpenclawDoctorCheckResult { - return input; -} - -function toDoctorResult( - checks: OpenclawDoctorCheckResult[], -): OpenclawDoctorResult { - return { - status: checks.every((check) => check.status === "pass") - ? 
"healthy" - : "unhealthy", - checkedAt: nowIso(), - checks, - }; -} - -function isRelayHookMapping(value: unknown): boolean { - if (!isRecord(value)) { - return false; - } - - if (!isRecord(value.match) || value.match.path !== HOOK_PATH_SEND_TO_PEER) { - return false; - } - - if (typeof value.id === "string" && value.id !== HOOK_MAPPING_ID) { - return false; - } - - return true; -} - -function hasRelayTransformModule(value: unknown): boolean { - if (!isRecord(value) || !isRecord(value.transform)) { - return false; - } - - return value.transform.module === RELAY_MODULE_FILE_NAME; -} - -function parseDoctorPeerAlias(peerAlias?: string): string | undefined { - if (peerAlias === undefined) { - return undefined; - } - - return parsePeerAlias(peerAlias); -} - -async function resolveHookToken(input: { - optionValue?: string; - relayRuntimeConfigPath: string; -}): Promise { - const trimmedOption = input.optionValue?.trim(); - if (trimmedOption !== undefined && trimmedOption.length > 0) { - return trimmedOption; - } - - const envValue = process.env.OPENCLAW_HOOK_TOKEN?.trim(); - if (envValue !== undefined && envValue.length > 0) { - return envValue; - } - - const existingConfig = await loadRelayRuntimeConfig( - input.relayRuntimeConfigPath, - ); - if (existingConfig?.openclawHookToken) { - return existingConfig.openclawHookToken; - } - - return undefined; -} - -function resolveProbeMessage(optionValue?: string): string { - const trimmed = optionValue?.trim(); - if (trimmed !== undefined && trimmed.length > 0) { - return trimmed; - } - - return "clawdentity relay probe"; -} - -function resolveProbeSessionId(optionValue?: string): string { - const trimmed = optionValue?.trim(); - if (trimmed !== undefined && trimmed.length > 0) { - return trimmed; - } - - return "clawdentity-relay-test"; -} - -function formatDoctorCheckLine(check: OpenclawDoctorCheckResult): string { - const icon = check.status === "pass" ? 
"✅" : "❌"; - return `${icon} ${check.label}: ${check.message}`; -} - -function printDoctorResult(result: OpenclawDoctorResult): void { - writeStdoutLine(`OpenClaw doctor status: ${result.status}`); - for (const check of result.checks) { - writeStdoutLine(formatDoctorCheckLine(check)); - if (check.status === "fail" && check.remediationHint) { - writeStdoutLine(`Fix: ${check.remediationHint}`); - } - } -} - -function printRelayTestResult(result: OpenclawRelayTestResult): void { - writeStdoutLine(`Relay test status: ${result.status}`); - writeStdoutLine(`Peer alias: ${result.peerAlias}`); - writeStdoutLine(`Endpoint: ${result.endpoint}`); - if (typeof result.httpStatus === "number") { - writeStdoutLine(`HTTP status: ${result.httpStatus}`); - } - writeStdoutLine(`Message: ${result.message}`); - if (result.remediationHint) { - writeStdoutLine(`Fix: ${result.remediationHint}`); - } -} - -function printRelayWebsocketTestResult( - result: OpenclawRelayWebsocketTestResult, -): void { - writeStdoutLine(`Relay websocket test status: ${result.status}`); - writeStdoutLine(`Peer alias: ${result.peerAlias}`); - if (typeof result.connectorBaseUrl === "string") { - writeStdoutLine(`Connector base URL: ${result.connectorBaseUrl}`); - } - if (typeof result.connectorStatusUrl === "string") { - writeStdoutLine(`Connector status URL: ${result.connectorStatusUrl}`); - } - writeStdoutLine(`Message: ${result.message}`); - if (result.remediationHint) { - writeStdoutLine(`Fix: ${result.remediationHint}`); - } -} - -function toSendToPeerEndpoint(openclawBaseUrl: string): string { - const normalizedBase = openclawBaseUrl.endsWith("/") - ? 
openclawBaseUrl - : `${openclawBaseUrl}/`; - return new URL(OPENCLAW_SEND_TO_PEER_HOOK_PATH, normalizedBase).toString(); -} - -async function resolveSelectedAgentName(input: { - homeDir: string; -}): Promise<{ agentName: string; selectedAgentPath: string }> { - const selectedAgentPath = resolveOpenclawAgentNamePath(input.homeDir); - let selectedAgentRaw: string; - try { - selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - throw createCliError( - "CLI_OPENCLAW_SELECTED_AGENT_MISSING", - "Selected agent marker is missing", - { selectedAgentPath }, - ); - } - throw createCliError( - "CLI_OPENCLAW_SELECTED_AGENT_INVALID", - "Selected agent marker is invalid", - { selectedAgentPath }, - ); - } - - try { - return { - agentName: assertValidAgentName(selectedAgentRaw.trim()), - selectedAgentPath, - }; - } catch { - throw createCliError( - "CLI_OPENCLAW_SELECTED_AGENT_INVALID", - "Selected agent marker is invalid", - { selectedAgentPath }, - ); - } -} - -async function resolveConnectorAssignment(input: { - homeDir: string; - agentName: string; -}): Promise<{ - connectorAssignmentsPath: string; - connectorBaseUrl: string; - connectorStatusUrl: string; -}> { - const connectorAssignmentsPath = resolveConnectorAssignmentsPath( - input.homeDir, - ); - const connectorAssignments = await loadConnectorAssignments( - connectorAssignmentsPath, - ); - const assignment = connectorAssignments.agents[input.agentName]; - if (assignment === undefined) { - throw createCliError( - "CLI_OPENCLAW_CONNECTOR_ASSIGNMENT_MISSING", - "Connector assignment is missing for selected agent", - { - connectorAssignmentsPath, - agentName: input.agentName, - }, - ); - } - - return { - connectorAssignmentsPath, - connectorBaseUrl: assignment.connectorBaseUrl, - connectorStatusUrl: resolveConnectorStatusUrl(assignment.connectorBaseUrl), - }; -} - -export async function runOpenclawDoctor( - options: OpenclawDoctorOptions = {}, -): 
Promise { - const homeDir = resolveHomeDir(options.homeDir); - const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); - const peerAlias = parseDoctorPeerAlias(options.peerAlias); - const checks: OpenclawDoctorCheckResult[] = []; - - if (options.includeConfigCheck !== false) { - const resolveConfigImpl = options.resolveConfigImpl ?? resolveConfig; - try { - const resolvedConfig = await resolveConfigImpl(); - const envProxyUrl = - typeof process.env.CLAWDENTITY_PROXY_URL === "string" - ? process.env.CLAWDENTITY_PROXY_URL.trim() - : ""; - if ( - typeof resolvedConfig.registryUrl !== "string" || - resolvedConfig.registryUrl.trim().length === 0 - ) { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "registryUrl is missing", - remediationHint: - "Run: clawdentity config set registryUrl ", - }), - ); - } else if ( - typeof resolvedConfig.apiKey !== "string" || - resolvedConfig.apiKey.trim().length === 0 - ) { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "apiKey is missing", - remediationHint: "Run: clawdentity config set apiKey ", - }), - ); - } else if (envProxyUrl.length > 0) { - let hasValidEnvProxyUrl = true; - try { - parseProxyUrl(envProxyUrl); - } catch { - hasValidEnvProxyUrl = false; - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "CLAWDENTITY_PROXY_URL is invalid", - remediationHint: - "Set CLAWDENTITY_PROXY_URL to a valid http(s) URL or unset it", - }), - ); - } - - if (hasValidEnvProxyUrl) { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "pass", - message: - "registryUrl and apiKey are configured (proxy URL override is active via CLAWDENTITY_PROXY_URL)", - }), - ); - } - } else if ( - typeof resolvedConfig.proxyUrl !== "string" || - resolvedConfig.proxyUrl.trim().length === 0 - ) { - checks.push( - toDoctorCheck({ - 
id: "config.registry", - label: "CLI config", - status: "fail", - message: "proxyUrl is missing", - remediationHint: - "Run: clawdentity invite redeem or clawdentity config init", - }), - ); - } else { - let hasValidConfigProxyUrl = true; - try { - parseProxyUrl(resolvedConfig.proxyUrl); - } catch { - hasValidConfigProxyUrl = false; - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "proxyUrl is invalid", - remediationHint: - "Run: clawdentity invite redeem or clawdentity config init", - }), - ); - } - - if (hasValidConfigProxyUrl) { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "pass", - message: "registryUrl, apiKey, and proxyUrl are configured", - }), - ); - } - } - } catch { - checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "unable to resolve CLI config", - remediationHint: - "Run: clawdentity config init (or fix your CLI state config file)", - }), - ); - } - } - - const selectedAgentPath = resolveOpenclawAgentNamePath(homeDir); - let selectedAgentName: string | undefined; - try { - const selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); - selectedAgentName = assertValidAgentName(selectedAgentRaw.trim()); - checks.push( - toDoctorCheck({ - id: "state.selectedAgent", - label: "Selected agent marker", - status: "pass", - message: `selected agent is ${selectedAgentName}`, - }), - ); - } catch (error) { - const missing = getErrorCode(error) === "ENOENT"; - checks.push( - toDoctorCheck({ - id: "state.selectedAgent", - label: "Selected agent marker", - status: "fail", - message: missing - ? 
`missing ${selectedAgentPath}` - : "selected agent marker is invalid", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - } - - if (selectedAgentName === undefined) { - checks.push( - toDoctorCheck({ - id: "state.credentials", - label: "Local agent credentials", - status: "fail", - message: "cannot validate credentials without selected agent marker", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - } else { - try { - await ensureLocalAgentCredentials(homeDir, selectedAgentName); - checks.push( - toDoctorCheck({ - id: "state.credentials", - label: "Local agent credentials", - status: "pass", - message: "ait.jwt and secret.key are present", - }), - ); - } catch (error) { - const details = error instanceof AppError ? error.details : undefined; - const filePath = - details && typeof details.filePath === "string" - ? details.filePath - : undefined; - checks.push( - toDoctorCheck({ - id: "state.credentials", - label: "Local agent credentials", - status: "fail", - message: - filePath === undefined - ? "agent credentials are missing or invalid" - : `credential file missing or empty: ${filePath}`, - remediationHint: - "Run: clawdentity agent create --framework openclaw", - details: - filePath === undefined - ? 
undefined - : { filePath, selectedAgentName }, - }), - ); - } - } - - const peersPath = resolvePeersPath(homeDir); - let peersConfig: PeersConfig | undefined; - try { - peersConfig = await loadPeersConfig(peersPath); - const peerAliases = Object.keys(peersConfig.peers); - if (peerAlias !== undefined) { - if (peersConfig.peers[peerAlias] === undefined) { - checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "fail", - message: `peer alias is missing: ${peerAlias}`, - remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, - details: { peersPath, peerAlias }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "pass", - message: `peer alias exists: ${peerAlias}`, - details: { peersPath, peerAlias }, - }), - ); - } - } else if (peerAliases.length === 0) { - checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "pass", - message: "no peers are configured yet (optional until pairing)", - details: { peersPath }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "pass", - message: `configured peers: ${peerAliases.length}`, - details: { peersPath }, - }), - ); - } - } catch { - checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "fail", - message: `invalid peers config at ${peersPath}`, - remediationHint: `Fix JSON in ${peersPath} or rerun openclaw setup`, - details: { peersPath }, - }), - ); - } - - const transformTargetPath = resolveTransformTargetPath(openclawDir); - const relayTransformRuntimePath = resolveTransformRuntimePath(openclawDir); - const relayTransformPeersPath = resolveTransformPeersPath(openclawDir); - try { - const transformContents = await readFile(transformTargetPath, "utf8"); - const runtimeContents = await readFile(relayTransformRuntimePath, "utf8"); - const peersSnapshotContents = await readFile( - relayTransformPeersPath, - "utf8", - ); - - if ( - 
transformContents.trim().length === 0 || - runtimeContents.trim().length === 0 || - peersSnapshotContents.trim().length === 0 - ) { - checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "fail", - message: "relay transform artifacts are missing or empty", - remediationHint: "Run: clawdentity skill install", - details: { - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "pass", - message: "relay transform artifacts are present", - details: { - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - }, - }), - ); - } - } catch { - checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "fail", - message: "missing relay transform artifacts", - remediationHint: "Run: clawdentity skill install", - details: { - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - }, - }), - ); - } - - const openclawConfigPath = resolveOpenclawConfigPath(openclawDir, homeDir); - try { - const openclawConfig = await readJsonFile(openclawConfigPath); - if (!isRecord(openclawConfig)) { - throw new Error("root"); - } - const hooks = isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; - const hooksEnabled = hooks.enabled === true; - const hookToken = - typeof hooks.token === "string" && hooks.token.trim().length > 0 - ? hooks.token.trim() - : undefined; - const defaultSessionKey = - typeof hooks.defaultSessionKey === "string" && - hooks.defaultSessionKey.trim().length > 0 - ? hooks.defaultSessionKey.trim() - : undefined; - const allowRequestSessionKey = hooks.allowRequestSessionKey === false; - const allowedSessionKeyPrefixes = normalizeStringArrayWithValues( - hooks.allowedSessionKeyPrefixes, - [], - ); - const missingRequiredSessionPrefixes = - defaultSessionKey === undefined - ? 
["hook:"] - : ["hook:", defaultSessionKey].filter( - (prefix) => !allowedSessionKeyPrefixes.includes(prefix), - ); - const mappings = Array.isArray(hooks.mappings) - ? hooks.mappings.filter(isRecord) - : []; - const relayMapping = mappings.find((mapping) => - isRelayHookMapping(mapping), - ); - if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { - checks.push( - toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", - status: "fail", - message: `missing send-to-peer mapping in ${openclawConfigPath}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", - status: "pass", - message: "send-to-peer mapping is configured", - details: { openclawConfigPath }, - }), - ); - } - - if (!hooksEnabled) { - checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "fail", - message: `hooks.enabled is not true in ${openclawConfigPath}`, - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else if (hookToken === undefined) { - checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "fail", - message: `hooks.token is missing in ${openclawConfigPath}`, - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "pass", - message: "hooks token is configured", - details: { openclawConfigPath }, - }), - ); - } - - const sessionRoutingIssues: string[] = []; - if (defaultSessionKey === undefined) { - sessionRoutingIssues.push("hooks.defaultSessionKey is missing"); - } - if (!allowRequestSessionKey) { - sessionRoutingIssues.push("hooks.allowRequestSessionKey is not false"); - } - if (missingRequiredSessionPrefixes.length 
> 0) { - sessionRoutingIssues.push( - `hooks.allowedSessionKeyPrefixes is missing: ${missingRequiredSessionPrefixes.join(", ")}`, - ); - } - if ( - defaultSessionKey !== undefined && - isCanonicalAgentSessionKey(defaultSessionKey) - ) { - sessionRoutingIssues.push( - "hooks.defaultSessionKey uses canonical agent format (agent::...); use OpenClaw request session keys like main, global, or subagent:*", - ); - } - - if (sessionRoutingIssues.length > 0) { - checks.push( - toDoctorCheck({ - id: "state.hookSessionRouting", - label: "OpenClaw hook session routing", - status: "fail", - message: sessionRoutingIssues.join("; "), - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.hookSessionRouting", - label: "OpenClaw hook session routing", - status: "pass", - message: - "hooks default session and allowed session prefixes are configured", - details: { openclawConfigPath }, - }), - ); - } - - const gateway = isRecord(openclawConfig.gateway) - ? openclawConfig.gateway - : {}; - const gatewayAuth = isRecord(gateway.auth) ? gateway.auth : {}; - const gatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); - const gatewayAuthToken = - typeof gatewayAuth.token === "string" && - gatewayAuth.token.trim().length > 0 - ? gatewayAuth.token.trim() - : undefined; - const gatewayAuthPassword = - typeof gatewayAuth.password === "string" && - gatewayAuth.password.trim().length > 0 - ? 
gatewayAuth.password.trim() - : undefined; - - if (gatewayAuthMode === "token") { - if (gatewayAuthToken === undefined) { - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `gateway.auth.token is missing in ${openclawConfigPath}`, - remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "pass", - message: "gateway auth is configured with token mode", - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } - } else if (gatewayAuthMode === "password") { - if (gatewayAuthPassword === undefined) { - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `gateway.auth.password is missing in ${openclawConfigPath}`, - remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "pass", - message: "gateway auth is configured with password mode", - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } - } else if (gatewayAuthMode === "trusted-proxy") { - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "pass", - message: "gateway auth is configured with trusted-proxy mode", - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `gateway.auth.mode is missing or unsupported in ${openclawConfigPath}`, - remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - details: { openclawConfigPath }, - }), - ); - } - } catch { - checks.push( - toDoctorCheck({ - id: "state.hookMapping", - label: 
"OpenClaw hook mapping", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.hookSessionRouting", - label: "OpenClaw hook session routing", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - } - - const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); - try { - const openclawBaseUrl = await resolveOpenclawBaseUrl({ - relayRuntimeConfigPath, - }); - checks.push( - toDoctorCheck({ - id: "state.openclawBaseUrl", - label: "OpenClaw base URL", - status: "pass", - message: `resolved to ${openclawBaseUrl}`, - }), - ); - } catch { - checks.push( - toDoctorCheck({ - id: "state.openclawBaseUrl", - label: "OpenClaw base URL", - status: "fail", - message: `unable to resolve OpenClaw base URL from ${relayRuntimeConfigPath}`, - remediationHint: OPENCLAW_SETUP_WITH_BASE_URL_HINT, - }), - ); - } - - const 
gatewayPendingState = - await readOpenclawGatewayPendingState(openclawDir); - if (gatewayPendingState.status === "missing") { - checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "pass", - message: "no pending gateway device approvals file was found", - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else if (gatewayPendingState.status === "invalid") { - checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "fail", - message: `invalid pending device approvals file: ${gatewayPendingState.gatewayDevicePendingPath}`, - remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else if (gatewayPendingState.status === "unreadable") { - checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "fail", - message: `unable to read pending device approvals at ${gatewayPendingState.gatewayDevicePendingPath}`, - remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else if (gatewayPendingState.pendingRequestIds.length === 0) { - checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "pass", - message: "no pending gateway device approvals", - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else { - checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "fail", - message: `pending gateway device approvals: ${gatewayPendingState.pendingRequestIds.length}`, - remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - 
details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - pendingRequestIds: gatewayPendingState.pendingRequestIds, - }, - }), - ); - } - - if (options.includeConnectorRuntimeCheck !== false) { - if (selectedAgentName === undefined) { - checks.push( - toDoctorCheck({ - id: "state.connectorRuntime", - label: "Connector runtime", - status: "fail", - message: - "cannot validate connector runtime without selected agent marker", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.connectorInboundInbox", - label: "Connector inbound inbox", - status: "fail", - message: - "cannot validate connector inbound inbox without selected agent marker", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.openclawHookHealth", - label: "OpenClaw hook health", - status: "fail", - message: - "cannot validate OpenClaw hook health without selected agent marker", - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - }), - ); - } else { - const connectorAssignmentsPath = resolveConnectorAssignmentsPath(homeDir); - try { - const connectorAssignments = await loadConnectorAssignments( - connectorAssignmentsPath, - ); - const assignment = connectorAssignments.agents[selectedAgentName]; - if (assignment === undefined) { - checks.push( - toDoctorCheck({ - id: "state.connectorRuntime", - label: "Connector runtime", - status: "fail", - message: `no connector assignment found for ${selectedAgentName}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { connectorAssignmentsPath, selectedAgentName }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.connectorInboundInbox", - label: "Connector inbound inbox", - status: "fail", - message: `no connector assignment found for ${selectedAgentName}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { connectorAssignmentsPath, selectedAgentName }, - }), - ); - checks.push( - toDoctorCheck({ - 
id: "state.openclawHookHealth", - label: "OpenClaw hook health", - status: "fail", - message: `no connector assignment found for ${selectedAgentName}`, - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { connectorAssignmentsPath, selectedAgentName }, - }), - ); - } else { - const fetchImpl = options.fetchImpl ?? globalThis.fetch; - if (typeof fetchImpl !== "function") { - checks.push( - toDoctorCheck({ - id: "state.connectorRuntime", - label: "Connector runtime", - status: "fail", - message: - "fetch implementation is unavailable for connector checks", - remediationHint: - "Run doctor in a Node runtime with fetch support, or rerun openclaw setup", - }), - ); - checks.push( - toDoctorCheck({ - id: "state.connectorInboundInbox", - label: "Connector inbound inbox", - status: "fail", - message: - "fetch implementation is unavailable for connector inbox checks", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.openclawHookHealth", - label: "OpenClaw hook health", - status: "fail", - message: - "fetch implementation is unavailable for OpenClaw hook health checks", - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - }), - ); - } else { - const connectorStatus = await fetchConnectorHealthStatus({ - connectorBaseUrl: assignment.connectorBaseUrl, - fetchImpl, - }); - if (connectorStatus.connected) { - checks.push( - toDoctorCheck({ - id: "state.connectorRuntime", - label: "Connector runtime", - status: "pass", - message: `connector websocket is connected (${assignment.connectorBaseUrl})`, - details: { - connectorStatusUrl: connectorStatus.statusUrl, - connectorBaseUrl: assignment.connectorBaseUrl, - }, - }), - ); - const inboxPendingCount = - connectorStatus.inboundInbox?.pendingCount ?? 
0; - const replayError = connectorStatus.inboundInbox?.lastReplayError; - checks.push( - toDoctorCheck({ - id: "state.connectorInboundInbox", - label: "Connector inbound inbox", - status: "pass", - message: - inboxPendingCount === 0 - ? "connector inbound inbox is empty" - : `connector inbound inbox has ${inboxPendingCount} pending message(s)`, - details: { - connectorStatusUrl: connectorStatus.statusUrl, - connectorBaseUrl: assignment.connectorBaseUrl, - ...connectorStatus.inboundInbox, - }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.openclawHookHealth", - label: "OpenClaw hook health", - status: - connectorStatus.openclawHook?.lastAttemptStatus === - "failed" && inboxPendingCount > 0 - ? "fail" - : "pass", - message: - connectorStatus.openclawHook?.lastAttemptStatus === - "failed" && inboxPendingCount > 0 - ? `connector replay to local OpenClaw hook is failing: ${replayError ?? "unknown error"}` - : "connector replay to local OpenClaw hook is healthy", - remediationHint: - connectorStatus.openclawHook?.lastAttemptStatus === - "failed" && inboxPendingCount > 0 - ? OPENCLAW_SETUP_RESTART_COMMAND_HINT - : undefined, - details: { - connectorStatusUrl: connectorStatus.statusUrl, - connectorBaseUrl: assignment.connectorBaseUrl, - ...connectorStatus.openclawHook, - inboxPendingCount, - }, - }), - ); - } else { - const reason = - connectorStatus.reason ?? 
"connector runtime is unavailable"; - checks.push( - toDoctorCheck({ - id: "state.connectorRuntime", - label: "Connector runtime", - status: "fail", - message: `connector runtime is not ready: ${reason}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { - connectorStatusUrl: connectorStatus.statusUrl, - connectorBaseUrl: assignment.connectorBaseUrl, - }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.connectorInboundInbox", - label: "Connector inbound inbox", - status: "fail", - message: `unable to read connector inbound inbox status: ${reason}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { - connectorStatusUrl: connectorStatus.statusUrl, - connectorBaseUrl: assignment.connectorBaseUrl, - }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.openclawHookHealth", - label: "OpenClaw hook health", - status: "fail", - message: `unable to verify OpenClaw hook health: ${reason}`, - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { - connectorStatusUrl: connectorStatus.statusUrl, - connectorBaseUrl: assignment.connectorBaseUrl, - }, - }), - ); - } - } - } - } catch { - checks.push( - toDoctorCheck({ - id: "state.connectorRuntime", - label: "Connector runtime", - status: "fail", - message: `unable to read connector assignments at ${connectorAssignmentsPath}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { connectorAssignmentsPath }, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.connectorInboundInbox", - label: "Connector inbound inbox", - status: "fail", - message: - "cannot validate connector inbound inbox without connector assignment", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - checks.push( - toDoctorCheck({ - id: "state.openclawHookHealth", - label: "OpenClaw hook health", - status: "fail", - message: - "cannot validate OpenClaw hook health without connector assignment", - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - }), - ); - } - } - } - - return 
toDoctorResult(checks); -} - -function parseRelayProbeFailure(input: { - status: number; - responseBody: string; -}): Pick { - if (input.status === 401 || input.status === 403) { - return { - message: "OpenClaw hook token was rejected", - remediationHint: - "Pass a valid token with --hook-token or set OPENCLAW_HOOK_TOKEN", - }; - } - - if (input.status === 404) { - return { - message: "OpenClaw send-to-peer hook is unavailable", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }; - } - - if (input.status === 405) { - return { - message: "OpenClaw send-to-peer hook is not enabled for POST requests", - remediationHint: `${OPENCLAW_SETUP_COMMAND_HINT}, then restart OpenClaw`, - }; - } - - if (input.status === 500) { - return { - message: "Relay probe failed inside local relay pipeline", - remediationHint: - "Check peer pairing and rerun: clawdentity openclaw setup ", - }; - } - - return { - message: `Relay probe failed with HTTP ${input.status}`, - remediationHint: - input.responseBody.trim().length > 0 - ? `Inspect response body: ${input.responseBody.trim()}` - : "Check local OpenClaw and connector logs", - }; -} - -async function resolveRelayProbePeerAlias(input: { - homeDir: string; - peerAliasOption?: string; -}): Promise { - if ( - typeof input.peerAliasOption === "string" && - input.peerAliasOption.trim().length > 0 - ) { - return parsePeerAlias(input.peerAliasOption); - } - - const peersPath = resolvePeersPath(input.homeDir); - const peersConfig = await loadPeersConfig(peersPath); - const peerAliases = Object.keys(peersConfig.peers); - - if (peerAliases.length === 1) { - return peerAliases[0]; - } - - if (peerAliases.length === 0) { - throw createCliError( - "CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED", - "No paired peer is configured yet. Complete QR pairing first.", - { peersPath }, - ); - } - - throw createCliError( - "CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED", - "Multiple peers are configured. 
Pass --peer to choose one.", - { peersPath, peerAliases }, - ); -} - -export async function runOpenclawRelayTest( - options: OpenclawRelayTestOptions, -): Promise { - const homeDir = resolveHomeDir(options.homeDir); - const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); - const checkedAt = nowIso(); - let peerAlias: string; - try { - peerAlias = await resolveRelayProbePeerAlias({ - homeDir, - peerAliasOption: options.peer, - }); - } catch (error) { - const appError = error instanceof AppError ? error : undefined; - return { - status: "failure", - checkedAt, - peerAlias: "unresolved", - endpoint: toSendToPeerEndpoint(DEFAULT_OPENCLAW_BASE_URL), - message: appError?.message ?? "Unable to resolve relay peer alias", - remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, - details: appError?.details as Record | undefined, - }; - } - - const preflight = await runOpenclawDoctor({ - homeDir, - openclawDir, - peerAlias, - resolveConfigImpl: options.resolveConfigImpl, - includeConnectorRuntimeCheck: false, - }); - - const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); - let openclawBaseUrl = DEFAULT_OPENCLAW_BASE_URL; - try { - openclawBaseUrl = await resolveOpenclawBaseUrl({ - optionValue: options.openclawBaseUrl, - relayRuntimeConfigPath, - }); - } catch { - return { - status: "failure", - checkedAt, - peerAlias, - endpoint: toSendToPeerEndpoint(DEFAULT_OPENCLAW_BASE_URL), - message: "Unable to resolve OpenClaw base URL", - remediationHint: - "Set OPENCLAW_BASE_URL or run openclaw setup with --openclaw-base-url", - preflight, - }; - } - - const endpoint = toSendToPeerEndpoint(openclawBaseUrl); - if (preflight.status === "unhealthy") { - const firstFailure = preflight.checks.find( - (check) => check.status === "fail", - ); - return { - status: "failure", - checkedAt, - peerAlias, - endpoint, - message: - firstFailure === undefined - ? 
"Preflight checks failed" - : `Preflight failed: ${firstFailure.label}`, - remediationHint: firstFailure?.remediationHint, - preflight, - }; - } - - const hookToken = await resolveHookToken({ - optionValue: options.hookToken, - relayRuntimeConfigPath, - }); - const fetchImpl = options.fetchImpl ?? globalThis.fetch; - if (typeof fetchImpl !== "function") { - return { - status: "failure", - checkedAt, - peerAlias, - endpoint, - message: "fetch implementation is unavailable", - remediationHint: "Run relay test in a Node runtime with fetch support", - preflight, - }; - } - - let response: Response; - try { - response = await fetchImpl(endpoint, { - method: "POST", - headers: { - "content-type": "application/json", - ...(hookToken === undefined ? {} : { "x-openclaw-token": hookToken }), - }, - body: JSON.stringify({ - peer: peerAlias, - sessionId: resolveProbeSessionId(options.sessionId), - message: resolveProbeMessage(options.message), - }), - }); - } catch { - return { - status: "failure", - checkedAt, - peerAlias, - endpoint, - message: "Relay probe request failed", - remediationHint: "Ensure local OpenClaw is running and reachable", - preflight, - }; - } - - if (response.ok) { - return { - status: "success", - checkedAt, - peerAlias, - endpoint, - httpStatus: response.status, - message: "Relay probe accepted", - preflight, - }; - } - - const responseBody = await response.text(); - const failure = parseRelayProbeFailure({ - status: response.status, - responseBody, - }); - return { - status: "failure", - checkedAt, - peerAlias, - endpoint, - httpStatus: response.status, - message: failure.message, - remediationHint: failure.remediationHint, - details: - responseBody.trim().length > 0 - ? 
{ responseBody: responseBody.trim() } - : undefined, - preflight, - }; -} - -export async function runOpenclawRelayWebsocketTest( - options: OpenclawRelayWebsocketTestOptions, -): Promise { - const homeDir = resolveHomeDir(options.homeDir); - const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); - const checkedAt = nowIso(); - - let peerAlias: string; - try { - peerAlias = await resolveRelayProbePeerAlias({ - homeDir, - peerAliasOption: options.peer, - }); - } catch (error) { - const appError = error instanceof AppError ? error : undefined; - return { - status: "failure", - checkedAt, - peerAlias: "unresolved", - message: appError?.message ?? "Unable to resolve relay peer alias", - remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, - details: appError?.details as Record | undefined, - }; - } - - const preflight = await runOpenclawDoctor({ - homeDir, - openclawDir, - peerAlias, - resolveConfigImpl: options.resolveConfigImpl, - includeConnectorRuntimeCheck: false, - }); - if (preflight.status === "unhealthy") { - const firstFailure = preflight.checks.find( - (check) => check.status === "fail", - ); - return { - status: "failure", - checkedAt, - peerAlias, - message: - firstFailure === undefined - ? "Preflight checks failed" - : `Preflight failed: ${firstFailure.label}`, - remediationHint: firstFailure?.remediationHint, - preflight, - }; - } - - const fetchImpl = options.fetchImpl ?? 
globalThis.fetch; - if (typeof fetchImpl !== "function") { - return { - status: "failure", - checkedAt, - peerAlias, - message: "fetch implementation is unavailable", - remediationHint: - "Run relay websocket test in a Node runtime with fetch support", - preflight, - }; - } - - let connectorBaseUrl: string | undefined; - let connectorStatusUrl: string | undefined; - try { - const selectedAgent = await resolveSelectedAgentName({ homeDir }); - const connectorAssignment = await resolveConnectorAssignment({ - homeDir, - agentName: selectedAgent.agentName, - }); - connectorBaseUrl = connectorAssignment.connectorBaseUrl; - connectorStatusUrl = connectorAssignment.connectorStatusUrl; - } catch (error) { - const appError = error instanceof AppError ? error : undefined; - return { - status: "failure", - checkedAt, - peerAlias, - connectorBaseUrl, - connectorStatusUrl, - message: - appError?.message ?? - "Unable to resolve connector assignment for websocket test", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: appError?.details as Record | undefined, - preflight, - }; - } - - const connectorStatus = await fetchConnectorHealthStatus({ - connectorBaseUrl, - fetchImpl, - }); - if (!connectorStatus.connected) { - return { - status: "failure", - checkedAt, - peerAlias, - connectorBaseUrl, - connectorStatusUrl: connectorStatus.statusUrl, - message: "Connector websocket is not connected", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: - connectorStatus.reason === undefined - ? 
undefined - : { - reason: connectorStatus.reason, - }, - preflight, - }; - } - - return { - status: "success", - checkedAt, - peerAlias, - connectorBaseUrl, - connectorStatusUrl: connectorStatus.statusUrl, - message: "Connector websocket is connected for paired relay", - preflight, - }; -} - -export function createOpenclawInviteCode( - options: OpenclawInviteOptions, -): OpenclawInviteResult { - const did = parseAgentDid(options.did, "invite did"); - const proxyUrl = parseProxyUrl(options.proxyUrl); - const peerAlias = - options.peerAlias === undefined - ? undefined - : parsePeerAlias(options.peerAlias); - const agentName = parseOptionalProfileName(options.agentName, "agentName"); - const humanName = parseOptionalProfileName(options.humanName, "humanName"); - - const payload = parseInvitePayload({ - v: 1, - issuedAt: nowIso(), - did, - proxyUrl, - alias: peerAlias, - agentName, - humanName, - }); - - const result: OpenclawInviteResult = { - code: encodeInvitePayload(payload), - did: payload.did, - proxyUrl: payload.proxyUrl, - peerAlias: payload.alias, - agentName: payload.agentName, - humanName: payload.humanName, - }; - - return result; -} - -export function decodeOpenclawInviteCode(code: string): OpenclawInvitePayload { - return decodeInvitePayload(code); -} - -export async function setupOpenclawRelay( - agentName: string, - options: OpenclawSetupOptions, -): Promise { - const normalizedAgentName = assertValidAgentName(agentName); - const homeDir = resolveHomeDir(options.homeDir); - const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); - const openclawConfigPath = resolveOpenclawConfigPath(openclawDir, homeDir); - const transformSource = - typeof options.transformSource === "string" && - options.transformSource.trim().length > 0 - ? 
options.transformSource.trim() - : resolveDefaultTransformSource(openclawDir); - const transformTargetPath = resolveTransformTargetPath(openclawDir); - const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); - const existingRelayRuntimeConfig = await loadRelayRuntimeConfig( - relayRuntimeConfigPath, - ); - const openclawBaseUrl = await resolveOpenclawBaseUrl({ - optionValue: options.openclawBaseUrl, - relayRuntimeConfigPath, - }); - - await ensureLocalAgentCredentials(homeDir, normalizedAgentName); - await mkdir(dirname(transformTargetPath), { recursive: true }); - try { - await copyFile(transformSource, transformTargetPath); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - throw createCliError( - "CLI_OPENCLAW_TRANSFORM_NOT_FOUND", - "Relay transform source file was not found", - { transformSource }, - ); - } - - throw error; - } - - const patchedOpenclawConfig = await patchOpenclawConfig( - openclawConfigPath, - existingRelayRuntimeConfig?.openclawHookToken, - ); - - const peersPath = resolvePeersPath(homeDir); - const peers = await loadPeersConfig(peersPath); - await savePeersConfig(peersPath, peers); - - const relayTransformPeersPath = resolveTransformPeersPath(openclawDir); - await writeSecureFile( - relayTransformPeersPath, - `${JSON.stringify(peers, null, 2)}\n`, - ); - - const connectorAssignmentsPath = resolveConnectorAssignmentsPath(homeDir); - const connectorAssignments = await loadConnectorAssignments( - connectorAssignmentsPath, - ); - const connectorPort = allocateConnectorPort( - connectorAssignments, - normalizedAgentName, - ); - const connectorBaseUrl = buildConnectorBaseUrl( - CONNECTOR_HOST_LOOPBACK, - connectorPort, - ); - connectorAssignments.agents[normalizedAgentName] = { - connectorBaseUrl, - updatedAt: nowIso(), - }; - await saveConnectorAssignments( - connectorAssignmentsPath, - connectorAssignments, - ); - - const relayTransformRuntimePath = resolveTransformRuntimePath(openclawDir); - await writeSecureFile( - 
relayTransformRuntimePath, - `${JSON.stringify( - { - version: 1, - connectorBaseUrl: buildRelayConnectorBaseUrls(connectorPort)[0], - connectorBaseUrls: buildRelayConnectorBaseUrls(connectorPort), - connectorPath: DEFAULT_CONNECTOR_OUTBOUND_PATH, - peersConfigPath: RELAY_PEERS_FILE_NAME, - updatedAt: nowIso(), - }, - null, - 2, - )}\n`, - ); - - const agentNamePath = resolveOpenclawAgentNamePath(homeDir); - await writeSecureFile(agentNamePath, `${normalizedAgentName}\n`); - await saveRelayRuntimeConfig( - relayRuntimeConfigPath, - openclawBaseUrl, - patchedOpenclawConfig.hookToken, - relayTransformPeersPath, - ); - - logger.info("cli.openclaw_setup_completed", { - agentName: normalizedAgentName, - openclawConfigPath, - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - openclawBaseUrl, - connectorBaseUrl, - relayRuntimeConfigPath, - }); - - return { - openclawConfigPath, - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - openclawBaseUrl, - connectorBaseUrl, - relayRuntimeConfigPath, - openclawConfigChanged: patchedOpenclawConfig.configChanged, - }; -} - -async function assertSetupChecklistHealthy(input: { - homeDir: string; - openclawDir: string; - includeConnectorRuntimeCheck: boolean; - gatewayDeviceApprovalRunner?: OpenclawGatewayDeviceApprovalRunner; -}): Promise { - let checklist = await runOpenclawDoctor({ - homeDir: input.homeDir, - openclawDir: input.openclawDir, - includeConfigCheck: false, - includeConnectorRuntimeCheck: input.includeConnectorRuntimeCheck, - }); - - if (checklist.status === "healthy") { - return; - } - - let gatewayApprovalSummary: OpenclawGatewayDeviceApprovalSummary | undefined; - const gatewayPairingFailure = checklist.checks.find( - (check) => - check.id === "state.gatewayDevicePairing" && check.status === "fail", - ); - if (gatewayPairingFailure !== undefined) { - gatewayApprovalSummary = await autoApproveOpenclawGatewayDevices({ - homeDir: input.homeDir, - openclawDir: 
input.openclawDir, - runner: input.gatewayDeviceApprovalRunner, - }); - if (gatewayApprovalSummary !== undefined) { - const successfulAttempts = gatewayApprovalSummary.attempts.filter( - (attempt) => attempt.ok, - ).length; - const failedAttempts = gatewayApprovalSummary.attempts.filter( - (attempt) => !attempt.ok, - ); - logger.info("cli.openclaw_setup_gateway_device_recovery_attempted", { - openclawDir: input.openclawDir, - pendingCount: gatewayApprovalSummary.pendingRequestIds.length, - successfulAttempts, - failedAttempts: failedAttempts.length, - commandUnavailable: failedAttempts.some( - (attempt) => attempt.unavailable, - ), - }); - checklist = await runOpenclawDoctor({ - homeDir: input.homeDir, - openclawDir: input.openclawDir, - includeConfigCheck: false, - includeConnectorRuntimeCheck: input.includeConnectorRuntimeCheck, - }); - if (checklist.status === "healthy") { - return; - } - } - } - - const firstFailure = checklist.checks.find( - (check) => check.status === "fail", - ); - const unavailableGatewayApprovalAttempt = - gatewayApprovalSummary?.attempts.find((attempt) => attempt.unavailable); - const remediationHint = - unavailableGatewayApprovalAttempt !== undefined && - firstFailure?.id === "state.gatewayDevicePairing" - ? `${OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT}. 
Ensure the \`${resolveOpenclawGatewayApprovalCommand()}\` command is available.` - : firstFailure?.remediationHint; - throw createCliError( - "CLI_OPENCLAW_SETUP_CHECKLIST_FAILED", - "OpenClaw setup checklist failed", - { - firstFailedCheckId: firstFailure?.id, - firstFailedCheckMessage: firstFailure?.message, - remediationHint, - gatewayDeviceApproval: gatewayApprovalSummary, - checks: checklist.checks, - }, - ); -} - -export async function setupOpenclawSelfReady( - agentName: string, - options: OpenclawSetupOptions, -): Promise { - const normalizedAgentName = assertValidAgentName(agentName); - const resolvedHomeDir = resolveHomeDir(options.homeDir); - const resolvedOpenclawDir = resolveOpenclawDir( - options.openclawDir, - resolvedHomeDir, - ); - const setup = await setupOpenclawRelay(normalizedAgentName, { - ...options, - homeDir: resolvedHomeDir, - openclawDir: resolvedOpenclawDir, - }); - if (options.noRuntimeStart === true) { - await assertSetupChecklistHealthy({ - homeDir: resolvedHomeDir, - openclawDir: resolvedOpenclawDir, - includeConnectorRuntimeCheck: false, - gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, - }); - return { - ...setup, - runtimeMode: "none", - runtimeStatus: "skipped", - websocketStatus: "skipped", - }; - } - - const fetchImpl = globalThis.fetch; - if (typeof fetchImpl !== "function") { - throw createCliError( - "CLI_OPENCLAW_SETUP_FETCH_UNAVAILABLE", - "Runtime fetch is unavailable for connector readiness checks", - ); - } - - const resolvedMode = parseOpenclawRuntimeMode(options.runtimeMode); - const waitTimeoutSeconds = parseWaitTimeoutSeconds( - options.waitTimeoutSeconds, - ); - let runtime = await startSetupConnectorRuntime({ - agentName: normalizedAgentName, - homeDir: resolvedHomeDir, - openclawBaseUrl: setup.openclawBaseUrl, - connectorBaseUrl: setup.connectorBaseUrl, - mode: resolvedMode, - waitTimeoutSeconds, - fetchImpl, - }); - - await assertSetupChecklistHealthy({ - homeDir: resolvedHomeDir, - 
openclawDir: resolvedOpenclawDir, - includeConnectorRuntimeCheck: true, - gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, - }); - - const requiresStabilityGuard = - setup.openclawConfigChanged && - (runtime.runtimeMode === "existing" || runtime.runtimeMode === "detached"); - if (requiresStabilityGuard) { - const stabilityWindowSeconds = Math.min( - waitTimeoutSeconds, - OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS, - ); - const stableStatus = await monitorConnectorStabilityWindow({ - connectorBaseUrl: setup.connectorBaseUrl, - fetchImpl, - durationSeconds: stabilityWindowSeconds, - pollIntervalMs: OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS, - }); - - if (!stableStatus.connected) { - logger.warn("cli.openclaw.setup.connector_dropped_post_config_change", { - agentName: normalizedAgentName, - connectorBaseUrl: setup.connectorBaseUrl, - connectorStatusUrl: stableStatus.statusUrl, - reason: stableStatus.reason, - previousRuntimeMode: runtime.runtimeMode, - stabilityWindowSeconds, - }); - runtime = await startSetupConnectorRuntime({ - agentName: normalizedAgentName, - homeDir: resolvedHomeDir, - openclawBaseUrl: setup.openclawBaseUrl, - connectorBaseUrl: setup.connectorBaseUrl, - mode: resolvedMode, - waitTimeoutSeconds, - fetchImpl, - }); - await assertSetupChecklistHealthy({ - homeDir: resolvedHomeDir, - openclawDir: resolvedOpenclawDir, - includeConnectorRuntimeCheck: true, - gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, - }); - } - } - - return { - ...setup, - ...runtime, - }; -} - -export async function setupOpenclawRelayFromInvite( - agentName: string, - options: OpenclawSetupOptions, -): Promise { - return setupOpenclawRelay(agentName, options); -} - -export const createOpenclawCommand = (): Command => { - const openclawCommand = new Command("openclaw").description( - "Manage OpenClaw relay setup", - ); - - openclawCommand - .command("setup ") - .description("Apply OpenClaw relay setup") - .option( - "--openclaw-dir ", - "OpenClaw 
state directory (default ~/.openclaw)", - ) - .option( - "--transform-source ", - "Path to relay-to-peer.mjs (default /skills/clawdentity-openclaw-relay/relay-to-peer.mjs)", - ) - .option( - "--openclaw-base-url ", - "Base URL for local OpenClaw hook API (default http://127.0.0.1:18789)", - ) - .option( - "--runtime-mode ", - "Connector runtime mode: auto | service | detached (default auto)", - ) - .option( - "--wait-timeout-seconds ", - "Seconds to wait for connector websocket readiness (default 30)", - ) - .option( - "--no-runtime-start", - "Skip connector runtime startup (advanced/manual mode)", - ) - .action( - withErrorHandling( - "openclaw setup", - async (agentName: string, options: OpenclawSetupCommandOptions) => { - const result = await setupOpenclawSelfReady(agentName, options); - writeStdoutLine("Self setup complete"); - writeStdoutLine( - `Updated OpenClaw config: ${result.openclawConfigPath}`, - ); - writeStdoutLine(`Installed transform: ${result.transformTargetPath}`); - writeStdoutLine( - `Transform runtime config: ${result.relayTransformRuntimePath}`, - ); - writeStdoutLine( - `Transform peers snapshot: ${result.relayTransformPeersPath}`, - ); - writeStdoutLine(`Connector base URL: ${result.connectorBaseUrl}`); - writeStdoutLine(`OpenClaw base URL: ${result.openclawBaseUrl}`); - writeStdoutLine( - `Relay runtime config: ${result.relayRuntimeConfigPath}`, - ); - writeStdoutLine(`Runtime mode: ${result.runtimeMode}`); - writeStdoutLine(`Runtime status: ${result.runtimeStatus}`); - writeStdoutLine(`WebSocket status: ${result.websocketStatus}`); - if (result.connectorStatusUrl) { - writeStdoutLine( - `Connector status URL: ${result.connectorStatusUrl}`, - ); - } - }, - ), - ); - - openclawCommand - .command("doctor") - .description("Validate local OpenClaw relay setup and print remediation") - .option("--peer ", "Validate that a specific peer alias exists") - .option( - "--openclaw-dir ", - "OpenClaw state directory (default ~/.openclaw)", - ) - 
.option("--json", "Print machine-readable JSON output") - .action( - withErrorHandling( - "openclaw doctor", - async (options: OpenclawDoctorCommandOptions) => { - const result = await runOpenclawDoctor({ - openclawDir: options.openclawDir, - peerAlias: options.peer, - json: options.json, - }); - if (options.json) { - writeStdoutLine(JSON.stringify(result, null, 2)); - } else { - printDoctorResult(result); - } - - if (result.status === "unhealthy") { - process.exitCode = 1; - } - }, - ), - ); - - const relayCommand = openclawCommand - .command("relay") - .description("Run OpenClaw relay diagnostics"); - - relayCommand - .command("test") - .description( - "Send a relay probe to a configured peer (auto-selects when one peer exists)", - ) - .option("--peer ", "Peer alias in local peers map") - .option( - "--openclaw-base-url ", - "Base URL for local OpenClaw hook API (default OPENCLAW_BASE_URL or relay runtime config)", - ) - .option( - "--hook-token ", - "OpenClaw hook token (default OPENCLAW_HOOK_TOKEN)", - ) - .option("--session-id ", "Session id for the probe payload") - .option("--message ", "Probe message body") - .option( - "--openclaw-dir ", - "OpenClaw state directory (default ~/.openclaw)", - ) - .option("--json", "Print machine-readable JSON output") - .action( - withErrorHandling( - "openclaw relay test", - async (options: OpenclawRelayTestOptions) => { - const result = await runOpenclawRelayTest(options); - - if (options.json) { - writeStdoutLine(JSON.stringify(result, null, 2)); - } else { - printRelayTestResult(result); - if ( - result.preflight !== undefined && - result.preflight.status === "unhealthy" - ) { - writeStdoutLine("Preflight details:"); - for (const check of result.preflight.checks) { - if (check.status === "fail") { - writeStdoutLine(formatDoctorCheckLine(check)); - if (check.remediationHint) { - writeStdoutLine(`Fix: ${check.remediationHint}`); - } - } - } - } - } - - if (result.status === "failure") { - process.exitCode = 1; - } - }, - 
), - ); - - relayCommand - .command("ws-test") - .description( - "Validate connector websocket connectivity for a paired relay peer", - ) - .option("--peer ", "Peer alias in local peers map") - .option( - "--openclaw-dir ", - "OpenClaw state directory (default ~/.openclaw)", - ) - .option("--json", "Print machine-readable JSON output") - .action( - withErrorHandling( - "openclaw relay ws-test", - async (options: OpenclawRelayWebsocketTestOptions) => { - const result = await runOpenclawRelayWebsocketTest(options); - if (options.json) { - writeStdoutLine(JSON.stringify(result, null, 2)); - } else { - printRelayWebsocketTestResult(result); - if ( - result.preflight !== undefined && - result.preflight.status === "unhealthy" - ) { - writeStdoutLine("Preflight details:"); - for (const check of result.preflight.checks) { - if (check.status === "fail") { - writeStdoutLine(formatDoctorCheckLine(check)); - if (check.remediationHint) { - writeStdoutLine(`Fix: ${check.remediationHint}`); - } - } - } - } - } - - if (result.status === "failure") { - process.exitCode = 1; - } - }, - ), - ); - - return openclawCommand; -}; +export { createOpenclawCommand } from "./openclaw/command.js"; + +export { runOpenclawDoctor } from "./openclaw/doctor.js"; +export { + runOpenclawRelayTest, + runOpenclawRelayWebsocketTest, +} from "./openclaw/relay.js"; +export { + createOpenclawInviteCode, + decodeOpenclawInviteCode, + setupOpenclawRelay, + setupOpenclawRelayFromInvite, + setupOpenclawSelfReady, +} from "./openclaw/setup.js"; +export type { + OpenclawDoctorCheckResult, + OpenclawDoctorResult, + OpenclawInviteResult, + OpenclawRelayTestResult, + OpenclawRelayWebsocketTestResult, + OpenclawSelfSetupResult, + OpenclawSetupResult, +} from "./openclaw/types.js"; diff --git a/apps/cli/src/commands/openclaw/AGENTS.md b/apps/cli/src/commands/openclaw/AGENTS.md new file mode 100644 index 0000000..4badd3b --- /dev/null +++ b/apps/cli/src/commands/openclaw/AGENTS.md @@ -0,0 +1,35 @@ +# AGENTS.md 
(openclaw command modules) + +## Purpose +- Keep OpenClaw command code modular, testable, and behavior-stable. +- Preserve CLI output/error behavior unless explicitly requested by a tracked issue. + +## Module Boundaries +- `types.ts`: shared type contracts only. +- `constants.ts`: string constants, defaults, hints, and logger. +- `common.ts`: validation/parsing/error helpers and shared generic utilities. +- `paths.ts`: all filesystem/env path resolution logic. +- `state.ts`: JSON file IO + persisted runtime/peer/assignment config access. +- `gateway.ts`: OpenClaw gateway pending-device approval flow. +- `connector.ts`: connector runtime status/probing/runtime-start helpers. +- `config.ts`: OpenClaw config patching and hook/gateway auth normalization. +- `doctor*.ts`: doctor orchestration and check groups. +- `relay.ts`: relay probe and websocket diagnostics. +- `setup.ts`: invite encode/decode and setup orchestration. +- `command.ts`: commander wiring + stdout formatting calls. +- `../openclaw.ts`: thin public facade and stable exports. + +## Guardrails +- Keep every source file under 800 LOC. +- Do not introduce circular imports. +- Put reusable logic in shared modules (`common.ts`, `state.ts`, `connector.ts`, `config.ts`) instead of duplicating. +- Keep error codes/messages and remediation hints stable; tests assert these flows. +- Keep command stdout wording and ordering stable unless tests and issue scope require change. + +## Change Workflow +- When adding behavior, add/adjust tests in `apps/cli/src/commands/openclaw.test.ts` first or in the same change. +- Run targeted checks before handoff: + - `pnpm -C apps/cli test -- openclaw` + - `pnpm -C apps/cli typecheck` + - `pnpm lint` +- If a helper is used by multiple domains, prefer promoting it to a shared module instead of cross-domain duplication. 
diff --git a/apps/cli/src/commands/openclaw/command.ts b/apps/cli/src/commands/openclaw/command.ts new file mode 100644 index 0000000..9ab51b3 --- /dev/null +++ b/apps/cli/src/commands/openclaw/command.ts @@ -0,0 +1,221 @@ +import { Command } from "commander"; +import { writeStdoutLine } from "../../io.js"; +import { withErrorHandling } from "../helpers.js"; +import { runOpenclawDoctor } from "./doctor.js"; +import { + formatDoctorCheckLine, + printDoctorResult, + printRelayTestResult, + printRelayWebsocketTestResult, +} from "./output.js"; +import { + runOpenclawRelayTest, + runOpenclawRelayWebsocketTest, +} from "./relay.js"; +import { setupOpenclawSelfReady } from "./setup.js"; +import type { + OpenclawDoctorCommandOptions, + OpenclawRelayTestOptions, + OpenclawRelayWebsocketTestOptions, + OpenclawSetupCommandOptions, +} from "./types.js"; + +export const createOpenclawCommand = (): Command => { + const openclawCommand = new Command("openclaw").description( + "Manage OpenClaw relay setup", + ); + + openclawCommand + .command("setup ") + .description("Apply OpenClaw relay setup") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option( + "--transform-source ", + "Path to relay-to-peer.mjs (default /skills/clawdentity-openclaw-relay/relay-to-peer.mjs)", + ) + .option( + "--openclaw-base-url ", + "Base URL for local OpenClaw hook API (default http://127.0.0.1:18789)", + ) + .option( + "--runtime-mode ", + "Connector runtime mode: auto | service | detached (default auto)", + ) + .option( + "--wait-timeout-seconds ", + "Seconds to wait for connector websocket readiness (default 30)", + ) + .option( + "--no-runtime-start", + "Skip connector runtime startup (advanced/manual mode)", + ) + .action( + withErrorHandling( + "openclaw setup", + async (agentName: string, options: OpenclawSetupCommandOptions) => { + const result = await setupOpenclawSelfReady(agentName, options); + writeStdoutLine("Self setup complete"); + 
writeStdoutLine( + `Updated OpenClaw config: ${result.openclawConfigPath}`, + ); + writeStdoutLine(`Installed transform: ${result.transformTargetPath}`); + writeStdoutLine( + `Transform runtime config: ${result.relayTransformRuntimePath}`, + ); + writeStdoutLine( + `Transform peers snapshot: ${result.relayTransformPeersPath}`, + ); + writeStdoutLine(`Connector base URL: ${result.connectorBaseUrl}`); + writeStdoutLine(`OpenClaw base URL: ${result.openclawBaseUrl}`); + writeStdoutLine( + `Relay runtime config: ${result.relayRuntimeConfigPath}`, + ); + writeStdoutLine(`Runtime mode: ${result.runtimeMode}`); + writeStdoutLine(`Runtime status: ${result.runtimeStatus}`); + writeStdoutLine(`WebSocket status: ${result.websocketStatus}`); + if (result.connectorStatusUrl) { + writeStdoutLine( + `Connector status URL: ${result.connectorStatusUrl}`, + ); + } + }, + ), + ); + + openclawCommand + .command("doctor") + .description("Validate local OpenClaw relay setup and print remediation") + .option("--peer ", "Validate that a specific peer alias exists") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "openclaw doctor", + async (options: OpenclawDoctorCommandOptions) => { + const result = await runOpenclawDoctor({ + openclawDir: options.openclawDir, + peerAlias: options.peer, + json: options.json, + }); + if (options.json) { + writeStdoutLine(JSON.stringify(result, null, 2)); + } else { + printDoctorResult(result); + } + + if (result.status === "unhealthy") { + process.exitCode = 1; + } + }, + ), + ); + + const relayCommand = openclawCommand + .command("relay") + .description("Run OpenClaw relay diagnostics"); + + relayCommand + .command("test") + .description( + "Send a relay probe to a configured peer (auto-selects when one peer exists)", + ) + .option("--peer ", "Peer alias in local peers map") + .option( + "--openclaw-base-url ", + "Base URL 
for local OpenClaw hook API (default OPENCLAW_BASE_URL or relay runtime config)", + ) + .option( + "--hook-token ", + "OpenClaw hook token (default OPENCLAW_HOOK_TOKEN)", + ) + .option("--session-id ", "Session id for the probe payload") + .option("--message ", "Probe message body") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "openclaw relay test", + async (options: OpenclawRelayTestOptions) => { + const result = await runOpenclawRelayTest(options); + + if (options.json) { + writeStdoutLine(JSON.stringify(result, null, 2)); + } else { + printRelayTestResult(result); + if ( + result.preflight !== undefined && + result.preflight.status === "unhealthy" + ) { + writeStdoutLine("Preflight details:"); + for (const check of result.preflight.checks) { + if (check.status === "fail") { + writeStdoutLine(formatDoctorCheckLine(check)); + if (check.remediationHint) { + writeStdoutLine(`Fix: ${check.remediationHint}`); + } + } + } + } + } + + if (result.status === "failure") { + process.exitCode = 1; + } + }, + ), + ); + + relayCommand + .command("ws-test") + .description( + "Validate connector websocket connectivity for a paired relay peer", + ) + .option("--peer ", "Peer alias in local peers map") + .option( + "--openclaw-dir ", + "OpenClaw state directory (default ~/.openclaw)", + ) + .option("--json", "Print machine-readable JSON output") + .action( + withErrorHandling( + "openclaw relay ws-test", + async (options: OpenclawRelayWebsocketTestOptions) => { + const result = await runOpenclawRelayWebsocketTest(options); + if (options.json) { + writeStdoutLine(JSON.stringify(result, null, 2)); + } else { + printRelayWebsocketTestResult(result); + if ( + result.preflight !== undefined && + result.preflight.status === "unhealthy" + ) { + writeStdoutLine("Preflight details:"); + for (const check of result.preflight.checks) { + if (check.status === 
"fail") { + writeStdoutLine(formatDoctorCheckLine(check)); + if (check.remediationHint) { + writeStdoutLine(`Fix: ${check.remediationHint}`); + } + } + } + } + } + + if (result.status === "failure") { + process.exitCode = 1; + } + }, + ), + ); + + return openclawCommand; +}; diff --git a/apps/cli/src/commands/openclaw/common.ts b/apps/cli/src/commands/openclaw/common.ts new file mode 100644 index 0000000..a49a479 --- /dev/null +++ b/apps/cli/src/commands/openclaw/common.ts @@ -0,0 +1,337 @@ +import { + decodeBase64url, + encodeBase64url, + parseDid, +} from "@clawdentity/protocol"; +import { AppError, nowIso } from "@clawdentity/sdk"; +import { INVITE_CODE_PREFIX, PEER_ALIAS_PATTERN } from "./constants.js"; +import type { + OpenclawDoctorCheckResult, + OpenclawDoctorResult, + OpenclawInvitePayload, +} from "./types.js"; + +const textEncoder = new TextEncoder(); +const textDecoder = new TextDecoder(); + +export function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function createCliError( + code: string, + message: string, + details?: Record, +): AppError { + return new AppError({ + code, + message, + status: 400, + details, + }); +} + +export function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? 
error.code : undefined; +} + +export function parseNonEmptyString(value: unknown, label: string): string { + if (typeof value !== "string") { + throw createCliError( + "CLI_OPENCLAW_INVALID_INPUT", + "Input must be a string", + { + label, + }, + ); + } + + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INPUT", + "Input must not be empty", + { + label, + }, + ); + } + + return trimmed; +} + +export function parseOptionalProfileName( + value: unknown, + label: "agentName" | "humanName", +): string | undefined { + if (value === undefined) { + return undefined; + } + + return parseNonEmptyString(value, label); +} + +export function parsePeerAlias(value: unknown): string { + const alias = parseNonEmptyString(value, "peer alias"); + if (alias.length > 128) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEER_ALIAS", + "peer alias must be at most 128 characters", + ); + } + + if (!PEER_ALIAS_PATTERN.test(alias)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEER_ALIAS", + "peer alias must use only letters, numbers, dot, underscore, or hyphen", + ); + } + + return alias; +} + +export function parseProxyUrl(value: unknown): string { + return parseHttpUrl(value, { + label: "proxy URL", + code: "CLI_OPENCLAW_INVALID_PROXY_URL", + message: "proxy URL must be a valid URL", + }); +} + +export function parseHttpUrl( + value: unknown, + input: { + label: string; + code: string; + message: string; + }, +): string { + const candidate = parseNonEmptyString(value, input.label); + let parsedUrl: URL; + try { + parsedUrl = new URL(candidate); + } catch { + throw createCliError(input.code, input.message); + } + + if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { + throw createCliError(input.code, `${input.label} must use http or https`); + } + + if ( + parsedUrl.pathname === "/" && + parsedUrl.search.length === 0 && + parsedUrl.hash.length === 0 + ) { + return parsedUrl.origin; + } + + return 
parsedUrl.toString(); +} + +export function parseOpenclawBaseUrl(value: unknown): string { + return parseHttpUrl(value, { + label: "OpenClaw base URL", + code: "CLI_OPENCLAW_INVALID_OPENCLAW_BASE_URL", + message: "OpenClaw base URL must be a valid URL", + }); +} + +export function parseAgentDid(value: unknown, label: string): string { + const did = parseNonEmptyString(value, label); + + try { + const parsed = parseDid(did); + if (parsed.kind !== "agent") { + throw createCliError( + "CLI_OPENCLAW_INVALID_DID", + "DID is not an agent DID", + ); + } + } catch { + throw createCliError("CLI_OPENCLAW_INVALID_DID", "Agent DID is invalid", { + label, + }); + } + + return did; +} + +export function parseInvitePayload(value: unknown): OpenclawInvitePayload { + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite payload must be an object", + ); + } + + if (value.v !== 1) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite payload version is unsupported", + ); + } + + const issuedAt = parseNonEmptyString(value.issuedAt, "invite issuedAt"); + const did = parseAgentDid(value.did, "invite did"); + const proxyUrl = parseProxyUrl(value.proxyUrl); + const alias = + value.alias === undefined ? 
undefined : parsePeerAlias(value.alias); + const agentName = parseOptionalProfileName(value.agentName, "agentName"); + const humanName = parseOptionalProfileName(value.humanName, "humanName"); + + if ( + alias === undefined && + agentName === undefined && + humanName === undefined + ) { + return { + v: 1, + issuedAt, + did, + proxyUrl, + }; + } + + if (agentName === undefined && humanName === undefined) { + return { + v: 1, + issuedAt, + did, + proxyUrl, + alias, + }; + } + + return { + v: 1, + issuedAt, + did, + proxyUrl, + alias, + agentName, + humanName, + }; +} + +export function encodeInvitePayload(payload: OpenclawInvitePayload): string { + const encoded = encodeBase64url(textEncoder.encode(JSON.stringify(payload))); + return `${INVITE_CODE_PREFIX}${encoded}`; +} + +export function decodeInvitePayload(code: string): OpenclawInvitePayload { + const rawCode = parseNonEmptyString(code, "invite code"); + if (!rawCode.startsWith(INVITE_CODE_PREFIX)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "Invite code has invalid prefix", + ); + } + + const encoded = rawCode.slice(INVITE_CODE_PREFIX.length); + if (encoded.length === 0) { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite code payload is empty", + ); + } + + let decodedJson: string; + try { + decodedJson = textDecoder.decode(decodeBase64url(encoded)); + } catch { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite code payload is not valid base64url", + ); + } + + let parsedPayload: unknown; + try { + parsedPayload = JSON.parse(decodedJson); + } catch { + throw createCliError( + "CLI_OPENCLAW_INVALID_INVITE", + "invite code payload is not valid JSON", + ); + } + + return parseInvitePayload(parsedPayload); +} + +export function normalizeStringArrayWithValues( + value: unknown, + requiredValues: readonly string[], +): string[] { + const normalized = new Set(); + + if (Array.isArray(value)) { + for (const item of value) { + if (typeof item !== "string") { + continue; 
+ } + + const trimmed = item.trim(); + if (trimmed.length > 0) { + normalized.add(trimmed); + } + } + } + + for (const requiredValue of requiredValues) { + const trimmed = requiredValue.trim(); + if (trimmed.length > 0) { + normalized.add(trimmed); + } + } + + return Array.from(normalized); +} + +export function parseDoctorPeerAlias(peerAlias?: string): string | undefined { + if (peerAlias === undefined) { + return undefined; + } + + return parsePeerAlias(peerAlias); +} + +export function resolveProbeMessage(optionValue?: string): string { + const trimmed = optionValue?.trim(); + if (trimmed !== undefined && trimmed.length > 0) { + return trimmed; + } + + return "clawdentity relay probe"; +} + +export function resolveProbeSessionId(optionValue?: string): string { + const trimmed = optionValue?.trim(); + if (trimmed !== undefined && trimmed.length > 0) { + return trimmed; + } + + return "clawdentity-relay-test"; +} + +export function toDoctorCheck( + input: OpenclawDoctorCheckResult, +): OpenclawDoctorCheckResult { + return input; +} + +export function toDoctorResult( + checks: OpenclawDoctorCheckResult[], +): OpenclawDoctorResult { + return { + status: checks.every((check) => check.status === "pass") + ? 
"healthy" + : "unhealthy", + checkedAt: nowIso(), + checks, + }; +} diff --git a/apps/cli/src/commands/openclaw/config.ts b/apps/cli/src/commands/openclaw/config.ts new file mode 100644 index 0000000..501f063 --- /dev/null +++ b/apps/cli/src/commands/openclaw/config.ts @@ -0,0 +1,283 @@ +import { randomBytes } from "node:crypto"; +import { writeFile } from "node:fs/promises"; +import { + createCliError, + getErrorCode, + isRecord, + normalizeStringArrayWithValues, +} from "./common.js"; +import { + DEFAULT_OPENCLAW_MAIN_SESSION_KEY, + HOOK_MAPPING_ID, + HOOK_PATH_SEND_TO_PEER, + OPENCLAW_HOOK_TOKEN_BYTES, + RELAY_MODULE_FILE_NAME, +} from "./constants.js"; +import { readJsonFile } from "./state.js"; + +export function resolveHookDefaultSessionKey( + config: Record, + hooks: Record, +): string { + const session = isRecord(config.session) ? config.session : {}; + const scope = + typeof session.scope === "string" ? session.scope.trim().toLowerCase() : ""; + const configuredMainSessionKey = + resolveConfiguredOpenclawMainSessionKey(session); + + if ( + typeof hooks.defaultSessionKey === "string" && + hooks.defaultSessionKey.trim().length > 0 + ) { + return normalizeLegacyHookDefaultSessionKey( + hooks.defaultSessionKey, + configuredMainSessionKey, + ); + } + + if (scope === "global") { + return "global"; + } + + return configuredMainSessionKey; +} + +function resolveConfiguredOpenclawMainSessionKey( + session: Record, +): string { + if ( + typeof session.mainKey === "string" && + session.mainKey.trim().length > 0 + ) { + return session.mainKey.trim(); + } + + return DEFAULT_OPENCLAW_MAIN_SESSION_KEY; +} + +function normalizeLegacyHookDefaultSessionKey( + value: string, + fallbackSessionKey: string, +): string { + const trimmed = value.trim(); + const legacyMatch = /^agent:[^:]+:(.+)$/i.exec(trimmed); + if (!legacyMatch) { + return trimmed; + } + const routedSessionKey = legacyMatch[1]?.trim(); + if (typeof routedSessionKey === "string" && routedSessionKey.length > 0) { 
+ return routedSessionKey; + } + + return fallbackSessionKey; +} + +export function isCanonicalAgentSessionKey(value: string): boolean { + return /^agent:[^:]+:.+/i.test(value.trim()); +} + +function generateOpenclawHookToken(): string { + return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); +} + +function generateOpenclawGatewayToken(): string { + return randomBytes(OPENCLAW_HOOK_TOKEN_BYTES).toString("hex"); +} + +export function parseGatewayAuthMode( + value: unknown, +): "token" | "password" | "trusted-proxy" | undefined { + if (typeof value !== "string") { + return undefined; + } + const normalized = value.trim().toLowerCase(); + if ( + normalized === "token" || + normalized === "password" || + normalized === "trusted-proxy" + ) { + return normalized; + } + return undefined; +} + +function resolveEnvOpenclawGatewayToken(): string | undefined { + if ( + typeof process.env.OPENCLAW_GATEWAY_TOKEN === "string" && + process.env.OPENCLAW_GATEWAY_TOKEN.trim().length > 0 + ) { + return process.env.OPENCLAW_GATEWAY_TOKEN.trim(); + } + return undefined; +} + +function resolveGatewayAuthToken(existingToken?: string): string { + return ( + resolveEnvOpenclawGatewayToken() ?? + existingToken ?? + generateOpenclawGatewayToken() + ); +} + +function upsertRelayHookMapping( + mappingsValue: unknown, +): Record[] { + const mappings = Array.isArray(mappingsValue) + ? mappingsValue.filter(isRecord).map((mapping) => ({ ...mapping })) + : []; + + const existingIndex = mappings.findIndex((mapping) => { + if (mapping.id === HOOK_MAPPING_ID) { + return true; + } + + if (!isRecord(mapping.match)) { + return false; + } + + return mapping.match.path === HOOK_PATH_SEND_TO_PEER; + }); + + const baseMapping = + existingIndex >= 0 && isRecord(mappings[existingIndex]) + ? mappings[existingIndex] + : {}; + + const nextMatch = isRecord(baseMapping.match) ? 
{ ...baseMapping.match } : {}; + nextMatch.path = HOOK_PATH_SEND_TO_PEER; + + const nextTransform = isRecord(baseMapping.transform) + ? { ...baseMapping.transform } + : {}; + nextTransform.module = RELAY_MODULE_FILE_NAME; + + const relayMapping: Record = { + ...baseMapping, + id: HOOK_MAPPING_ID, + match: nextMatch, + action: "agent", + wakeMode: "now", + transform: nextTransform, + }; + + if (existingIndex >= 0) { + mappings[existingIndex] = relayMapping; + return mappings; + } + + mappings.push(relayMapping); + return mappings; +} + +export async function patchOpenclawConfig( + openclawConfigPath: string, + hookToken?: string, +): Promise<{ hookToken: string; configChanged: boolean }> { + let config: unknown; + try { + config = await readJsonFile(openclawConfigPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_CONFIG_NOT_FOUND", + "OpenClaw config file was not found", + { openclawConfigPath }, + ); + } + + throw error; + } + + if (!isRecord(config)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_CONFIG", + "OpenClaw config root must be an object", + { openclawConfigPath }, + ); + } + + const hooks = isRecord(config.hooks) ? { ...config.hooks } : {}; + const existingHookToken = + typeof hooks.token === "string" && hooks.token.trim().length > 0 + ? hooks.token.trim() + : undefined; + const preferredHookToken = + typeof hookToken === "string" && hookToken.trim().length > 0 + ? hookToken.trim() + : undefined; + const resolvedHookToken = + existingHookToken ?? preferredHookToken ?? 
generateOpenclawHookToken(); + const defaultSessionKey = resolveHookDefaultSessionKey(config, hooks); + + hooks.enabled = true; + hooks.token = resolvedHookToken; + hooks.defaultSessionKey = defaultSessionKey; + hooks.allowRequestSessionKey = false; + hooks.allowedSessionKeyPrefixes = normalizeStringArrayWithValues( + hooks.allowedSessionKeyPrefixes, + ["hook:", defaultSessionKey], + ); + hooks.mappings = upsertRelayHookMapping(hooks.mappings); + + const gateway = isRecord(config.gateway) ? { ...config.gateway } : {}; + const gatewayAuth = isRecord(gateway.auth) ? { ...gateway.auth } : {}; + const configuredGatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); + if (configuredGatewayAuthMode === undefined) { + gatewayAuth.mode = "token"; + } + + const effectiveGatewayAuthMode = + parseGatewayAuthMode(gatewayAuth.mode) ?? "token"; + if (effectiveGatewayAuthMode === "token") { + const existingGatewayAuthToken = + typeof gatewayAuth.token === "string" && + gatewayAuth.token.trim().length > 0 + ? 
gatewayAuth.token.trim() + : undefined; + gatewayAuth.token = resolveGatewayAuthToken(existingGatewayAuthToken); + } + gateway.auth = gatewayAuth; + + const nextConfig = { + ...config, + hooks, + gateway, + }; + const configChanged = JSON.stringify(config) !== JSON.stringify(nextConfig); + if (configChanged) { + await writeFile( + openclawConfigPath, + `${JSON.stringify(nextConfig, null, 2)}\n`, + "utf8", + ); + } + + return { + hookToken: resolvedHookToken, + configChanged, + }; +} + +export function isRelayHookMapping(value: unknown): boolean { + if (!isRecord(value)) { + return false; + } + + if (!isRecord(value.match) || value.match.path !== HOOK_PATH_SEND_TO_PEER) { + return false; + } + + if (typeof value.id === "string" && value.id !== HOOK_MAPPING_ID) { + return false; + } + + return true; +} + +export function hasRelayTransformModule(value: unknown): boolean { + if (!isRecord(value) || !isRecord(value.transform)) { + return false; + } + + return value.transform.module === RELAY_MODULE_FILE_NAME; +} diff --git a/apps/cli/src/commands/openclaw/connector.ts b/apps/cli/src/commands/openclaw/connector.ts new file mode 100644 index 0000000..d87be54 --- /dev/null +++ b/apps/cli/src/commands/openclaw/connector.ts @@ -0,0 +1,628 @@ +import { spawn } from "node:child_process"; +import { closeSync, existsSync, openSync } from "node:fs"; +import { mkdir, readFile } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; +import { nowUtcMs } from "@clawdentity/sdk"; +import { getConfigDir } from "../../config/manager.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { installConnectorServiceForAgent } from "../connector.js"; +import { createCliError, getErrorCode, isRecord } from "./common.js"; +import { + CONNECTOR_DETACHED_STDERR_FILE_SUFFIX, + CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX, + CONNECTOR_HOST_DOCKER, + CONNECTOR_HOST_DOCKER_GATEWAY, + CONNECTOR_HOST_LINUX_BRIDGE, + 
CONNECTOR_HOST_LOOPBACK, + CONNECTOR_RUN_DIR_NAME, + DEFAULT_CONNECTOR_PORT, + DEFAULT_CONNECTOR_STATUS_PATH, + DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS, + logger, +} from "./constants.js"; +import { + resolveConnectorAssignmentsPath, + resolveOpenclawAgentNamePath, +} from "./paths.js"; +import { loadConnectorAssignments, writeSecureFile } from "./state.js"; +import type { + ConnectorAssignmentsConfig, + ConnectorHealthStatus, + OpenclawRuntimeMode, + OpenclawRuntimeResult, + ParsedConnectorStatusPayload, +} from "./types.js"; + +export function parseConnectorPortFromBaseUrl(baseUrl: string): number { + const parsed = new URL(baseUrl); + if (parsed.port) { + return Number(parsed.port); + } + return parsed.protocol === "https:" ? 443 : 80; +} + +export function allocateConnectorPort( + assignments: ConnectorAssignmentsConfig, + agentName: string, +): number { + const existing = assignments.agents[agentName]; + if (existing) { + return parseConnectorPortFromBaseUrl(existing.connectorBaseUrl); + } + + const usedPorts = new Set(); + for (const entry of Object.values(assignments.agents)) { + usedPorts.add(parseConnectorPortFromBaseUrl(entry.connectorBaseUrl)); + } + + let nextPort = DEFAULT_CONNECTOR_PORT; + while (usedPorts.has(nextPort)) { + nextPort += 1; + } + + return nextPort; +} + +export function buildConnectorBaseUrl(host: string, port: number): string { + return `http://${host}:${port}`; +} + +export function buildRelayConnectorBaseUrls(port: number): string[] { + return [ + buildConnectorBaseUrl(CONNECTOR_HOST_DOCKER, port), + buildConnectorBaseUrl(CONNECTOR_HOST_DOCKER_GATEWAY, port), + buildConnectorBaseUrl(CONNECTOR_HOST_LINUX_BRIDGE, port), + buildConnectorBaseUrl(CONNECTOR_HOST_LOOPBACK, port), + ]; +} + +export function parseOpenclawRuntimeMode(value: unknown): OpenclawRuntimeMode { + if (typeof value !== "string" || value.trim().length === 0) { + return "auto"; + } + + const normalized = value.trim().toLowerCase(); + if ( + normalized === "auto" || + 
normalized === "service" || + normalized === "detached" + ) { + return normalized; + } + + throw createCliError( + "CLI_OPENCLAW_SETUP_RUNTIME_MODE_INVALID", + "runtimeMode must be one of: auto, service, detached", + ); +} + +export function parseWaitTimeoutSeconds(value: unknown): number { + if (typeof value !== "string" || value.trim().length === 0) { + return DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS; + } + + const parsed = Number.parseInt(value, 10); + if (!Number.isInteger(parsed) || parsed < 1) { + throw createCliError( + "CLI_OPENCLAW_SETUP_TIMEOUT_INVALID", + "waitTimeoutSeconds must be a positive integer", + ); + } + + return parsed; +} + +export function resolveConnectorStatusUrl(connectorBaseUrl: string): string { + const normalizedBase = connectorBaseUrl.endsWith("/") + ? connectorBaseUrl + : `${connectorBaseUrl}/`; + return new URL( + DEFAULT_CONNECTOR_STATUS_PATH.slice(1), + normalizedBase, + ).toString(); +} + +function parseConnectorStatusPayload( + payload: unknown, +): ParsedConnectorStatusPayload { + if ( + !isRecord(payload) || + !isRecord(payload.websocket) || + typeof payload.websocket.connected !== "boolean" + ) { + throw createCliError( + "CLI_OPENCLAW_SETUP_CONNECTOR_STATUS_INVALID", + "Connector status response is invalid", + ); + } + + const inboundRoot = isRecord(payload.inbound) ? payload.inbound : undefined; + const pending = + inboundRoot && isRecord(inboundRoot.pending) + ? inboundRoot.pending + : undefined; + const deadLetter = + inboundRoot && isRecord(inboundRoot.deadLetter) + ? inboundRoot.deadLetter + : undefined; + const replay = + inboundRoot && isRecord(inboundRoot.replay) + ? inboundRoot.replay + : undefined; + const hook = + inboundRoot && isRecord(inboundRoot.openclawHook) + ? inboundRoot.openclawHook + : undefined; + + return { + websocketConnected: payload.websocket.connected, + inboundInbox: + pending || deadLetter || replay + ? { + pendingCount: + pending && typeof pending.pendingCount === "number" + ? 
pending.pendingCount + : undefined, + pendingBytes: + pending && typeof pending.pendingBytes === "number" + ? pending.pendingBytes + : undefined, + oldestPendingAt: + pending && typeof pending.oldestPendingAt === "string" + ? pending.oldestPendingAt + : undefined, + nextAttemptAt: + pending && typeof pending.nextAttemptAt === "string" + ? pending.nextAttemptAt + : undefined, + lastReplayAt: + replay && typeof replay.lastReplayAt === "string" + ? replay.lastReplayAt + : undefined, + lastReplayError: + replay && typeof replay.lastReplayError === "string" + ? replay.lastReplayError + : undefined, + replayerActive: + replay && typeof replay.replayerActive === "boolean" + ? replay.replayerActive + : undefined, + deadLetterCount: + deadLetter && typeof deadLetter.deadLetterCount === "number" + ? deadLetter.deadLetterCount + : undefined, + deadLetterBytes: + deadLetter && typeof deadLetter.deadLetterBytes === "number" + ? deadLetter.deadLetterBytes + : undefined, + oldestDeadLetterAt: + deadLetter && typeof deadLetter.oldestDeadLetterAt === "string" + ? deadLetter.oldestDeadLetterAt + : undefined, + } + : undefined, + openclawHook: hook + ? { + url: typeof hook.url === "string" ? hook.url : undefined, + lastAttemptAt: + typeof hook.lastAttemptAt === "string" + ? hook.lastAttemptAt + : undefined, + lastAttemptStatus: + hook.lastAttemptStatus === "ok" || + hook.lastAttemptStatus === "failed" + ? 
hook.lastAttemptStatus + : undefined, + } + : undefined, + }; +} + +export async function fetchConnectorHealthStatus(input: { + connectorBaseUrl: string; + fetchImpl: typeof fetch; +}): Promise { + const statusUrl = resolveConnectorStatusUrl(input.connectorBaseUrl); + try { + const response = await input.fetchImpl(statusUrl, { + method: "GET", + headers: { + accept: "application/json", + }, + }); + if (!response.ok) { + return { + connected: false, + reachable: false, + statusUrl, + reason: `HTTP ${response.status}`, + }; + } + + let payload: unknown; + try { + payload = await response.json(); + } catch { + return { + connected: false, + reachable: false, + statusUrl, + reason: "invalid JSON payload", + }; + } + + const parsed = parseConnectorStatusPayload(payload); + return { + connected: parsed.websocketConnected, + inboundInbox: parsed.inboundInbox, + openclawHook: parsed.openclawHook, + reachable: true, + statusUrl, + reason: parsed.websocketConnected + ? undefined + : "connector websocket is disconnected", + }; + } catch { + return { + connected: false, + reachable: false, + statusUrl, + reason: "connector status endpoint is unreachable", + }; + } +} + +export async function waitForConnectorConnected(input: { + connectorBaseUrl: string; + fetchImpl: typeof fetch; + waitTimeoutSeconds: number; +}): Promise { + const deadline = nowUtcMs() + input.waitTimeoutSeconds * 1000; + let latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + + while (!latest.connected && nowUtcMs() < deadline) { + await new Promise((resolve) => { + setTimeout(resolve, 1000); + }); + latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + } + + if (!latest.connected) { + throw createCliError( + "CLI_OPENCLAW_SETUP_CONNECTOR_NOT_READY", + `Connector runtime is not websocket-connected after ${input.waitTimeoutSeconds} seconds`, + { + connectorBaseUrl: 
input.connectorBaseUrl, + connectorStatusUrl: latest.statusUrl, + reason: latest.reason, + }, + ); + } + + return latest; +} + +function sleepMilliseconds(durationMs: number): Promise { + return new Promise((resolve) => { + setTimeout(resolve, durationMs); + }); +} + +export async function monitorConnectorStabilityWindow(input: { + connectorBaseUrl: string; + fetchImpl: typeof fetch; + durationSeconds: number; + pollIntervalMs: number; +}): Promise { + if (input.durationSeconds <= 0) { + return fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + } + + const deadline = nowUtcMs() + input.durationSeconds * 1000; + let latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + if (!latest.connected) { + return latest; + } + + while (nowUtcMs() < deadline) { + await sleepMilliseconds(input.pollIntervalMs); + latest = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + if (!latest.connected) { + return latest; + } + } + + return latest; +} + +export function resolveConnectorRunDir(homeDir: string): string { + return join(getConfigDir({ homeDir }), CONNECTOR_RUN_DIR_NAME); +} + +function resolveConnectorPidPath(homeDir: string, agentName: string): string { + return join(resolveConnectorRunDir(homeDir), `connector-${agentName}.pid`); +} + +function resolveDetachedConnectorLogPath( + homeDir: string, + agentName: string, + stream: "stdout" | "stderr", +): string { + const suffix = + stream === "stdout" + ? 
        CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX
      : CONNECTOR_DETACHED_STDERR_FILE_SUFFIX;
  return join(
    resolveConnectorRunDir(homeDir),
    `connector-${agentName}.${suffix}`,
  );
}

/**
 * Reads a detached-connector pid file.
 *
 * Returns `undefined` for a missing file (ENOENT), an empty file, or content
 * that is not a positive integer; rethrows any other filesystem error.
 */
async function readConnectorPidFile(
  pidPath: string,
): Promise {
  try {
    const raw = (await readFile(pidPath, "utf8")).trim();
    if (raw.length === 0) {
      return undefined;
    }

    const parsed = Number.parseInt(raw, 10);
    if (!Number.isInteger(parsed) || parsed <= 0) {
      return undefined;
    }

    return parsed;
  } catch (error) {
    if (getErrorCode(error) === "ENOENT") {
      return undefined;
    }
    throw error;
  }
}

// Existence probe: signal 0 delivers nothing but throws if the pid is gone.
// NOTE(review): kill(pid, 0) can also throw EPERM for a live process owned by
// another user, which would read as "not running" here — confirm acceptable.
function isPidRunning(pid: number): boolean {
  try {
    process.kill(pid, 0);
    return true;
  } catch {
    return false;
  }
}

/**
 * Best-effort SIGTERM to a previously detached connector, keyed by its pid
 * file. A stale, missing, or already-dead pid is a no-op. The kill itself is
 * not awaited or verified here — later setup health checks confirm state.
 */
async function stopDetachedConnectorIfRunning(input: {
  homeDir: string;
  agentName: string;
}): Promise {
  const pidPath = resolveConnectorPidPath(input.homeDir, input.agentName);
  const pid = await readConnectorPidFile(pidPath);
  if (pid === undefined || !isPidRunning(pid)) {
    return;
  }

  try {
    process.kill(pid, "SIGTERM");
  } catch {
    // Ignore stale pid races; setup health checks will verify readiness.
  }
}

// Resolves the CLI entry script to re-spawn for a detached connector: prefer
// the script node was launched with, falling back to bin.js next to this
// module's parent directory.
function resolveCliEntryPathForDetachedStart(): string {
  const argvEntry = typeof process.argv[1] === "string" ?
process.argv[1] : ""; + if (argvEntry.length > 0 && existsSync(argvEntry)) { + return argvEntry; + } + + const modulePath = fileURLToPath(import.meta.url); + return join(dirname(modulePath), "..", "bin.js"); +} + +async function startDetachedConnectorRuntime(input: { + agentName: string; + homeDir: string; + openclawBaseUrl: string; +}): Promise { + await stopDetachedConnectorIfRunning({ + homeDir: input.homeDir, + agentName: input.agentName, + }); + const runDir = resolveConnectorRunDir(input.homeDir); + await mkdir(runDir, { recursive: true }); + + const cliEntryPath = resolveCliEntryPathForDetachedStart(); + const args = [ + cliEntryPath, + "connector", + "start", + input.agentName, + "--openclaw-base-url", + input.openclawBaseUrl, + ]; + const stdoutLogPath = resolveDetachedConnectorLogPath( + input.homeDir, + input.agentName, + "stdout", + ); + const stderrLogPath = resolveDetachedConnectorLogPath( + input.homeDir, + input.agentName, + "stderr", + ); + const stdoutFd = openSync(stdoutLogPath, "a"); + const stderrFd = openSync(stderrLogPath, "a"); + + try { + const child = spawn(process.execPath, args, { + detached: true, + stdio: ["ignore", stdoutFd, stderrFd], + env: process.env, + }); + child.unref(); + await writeSecureFile( + resolveConnectorPidPath(input.homeDir, input.agentName), + `${child.pid}\n`, + ); + logger.info("cli.openclaw.setup.detached_runtime_started", { + agentName: input.agentName, + pid: child.pid, + stdoutLogPath, + stderrLogPath, + }); + } finally { + closeSync(stdoutFd); + closeSync(stderrFd); + } +} + +export async function startSetupConnectorRuntime(input: { + agentName: string; + homeDir: string; + openclawBaseUrl: string; + connectorBaseUrl: string; + mode: OpenclawRuntimeMode; + waitTimeoutSeconds: number; + fetchImpl: typeof fetch; +}): Promise { + if (input.mode !== "service") { + const existingStatus = await fetchConnectorHealthStatus({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + }); + if 
(existingStatus.connected) { + return { + runtimeMode: "existing", + runtimeStatus: "running", + websocketStatus: "connected", + connectorStatusUrl: existingStatus.statusUrl, + }; + } + } + + let runtimeMode: "service" | "detached" = "service"; + if (input.mode === "detached") { + runtimeMode = "detached"; + } else { + try { + await installConnectorServiceForAgent(input.agentName, { + platform: "auto", + openclawBaseUrl: input.openclawBaseUrl, + }); + runtimeMode = "service"; + } catch (error) { + if (input.mode === "service") { + throw error; + } + runtimeMode = "detached"; + logger.warn("cli.openclaw.setup.service_fallback_detached", { + agentName: input.agentName, + reason: error instanceof Error ? error.message : "unknown", + }); + } + } + + if (runtimeMode === "detached") { + await startDetachedConnectorRuntime({ + agentName: input.agentName, + homeDir: input.homeDir, + openclawBaseUrl: input.openclawBaseUrl, + }); + } + + const connectedStatus = await waitForConnectorConnected({ + connectorBaseUrl: input.connectorBaseUrl, + fetchImpl: input.fetchImpl, + waitTimeoutSeconds: input.waitTimeoutSeconds, + }); + + return { + runtimeMode, + runtimeStatus: "running", + websocketStatus: "connected", + connectorStatusUrl: connectedStatus.statusUrl, + }; +} + +export async function resolveSelectedAgentName(input: { + homeDir: string; +}): Promise<{ agentName: string; selectedAgentPath: string }> { + const selectedAgentPath = resolveOpenclawAgentNamePath(input.homeDir); + let selectedAgentRaw: string; + try { + selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_SELECTED_AGENT_MISSING", + "Selected agent marker is missing", + { selectedAgentPath }, + ); + } + throw createCliError( + "CLI_OPENCLAW_SELECTED_AGENT_INVALID", + "Selected agent marker is invalid", + { selectedAgentPath }, + ); + } + + try { + return { + agentName: 
assertValidAgentName(selectedAgentRaw.trim()), + selectedAgentPath, + }; + } catch { + throw createCliError( + "CLI_OPENCLAW_SELECTED_AGENT_INVALID", + "Selected agent marker is invalid", + { selectedAgentPath }, + ); + } +} + +export async function resolveConnectorAssignment(input: { + homeDir: string; + agentName: string; +}): Promise<{ + connectorAssignmentsPath: string; + connectorBaseUrl: string; + connectorStatusUrl: string; +}> { + const connectorAssignmentsPath = resolveConnectorAssignmentsPath( + input.homeDir, + ); + const connectorAssignments = await loadConnectorAssignments( + connectorAssignmentsPath, + ); + const assignment = connectorAssignments.agents[input.agentName]; + if (assignment === undefined) { + throw createCliError( + "CLI_OPENCLAW_CONNECTOR_ASSIGNMENT_MISSING", + "Connector assignment is missing for selected agent", + { + connectorAssignmentsPath, + agentName: input.agentName, + }, + ); + } + + return { + connectorAssignmentsPath, + connectorBaseUrl: assignment.connectorBaseUrl, + connectorStatusUrl: resolveConnectorStatusUrl(assignment.connectorBaseUrl), + }; +} diff --git a/apps/cli/src/commands/openclaw/constants.ts b/apps/cli/src/commands/openclaw/constants.ts new file mode 100644 index 0000000..7249f1f --- /dev/null +++ b/apps/cli/src/commands/openclaw/constants.ts @@ -0,0 +1,61 @@ +import { createLogger } from "@clawdentity/sdk"; + +export const logger = createLogger({ service: "cli", module: "openclaw" }); + +export const AGENTS_DIR_NAME = "agents"; +export const AIT_FILE_NAME = "ait.jwt"; +export const SECRET_KEY_FILE_NAME = "secret.key"; +export const PEERS_FILE_NAME = "peers.json"; +export const OPENCLAW_DIR_NAME = ".openclaw"; +export const OPENCLAW_CONFIG_FILE_NAME = "openclaw.json"; +export const LEGACY_OPENCLAW_STATE_DIR_NAMES = [ + ".clawdbot", + ".moldbot", + ".moltbot", +] as const; +export const LEGACY_OPENCLAW_CONFIG_FILE_NAMES = [ + "clawdbot.json", + "moldbot.json", + "moltbot.json", +] as const; +export const 
OPENCLAW_AGENT_FILE_NAME = "openclaw-agent-name"; +export const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; +export const OPENCLAW_CONNECTORS_FILE_NAME = "openclaw-connectors.json"; +export const SKILL_DIR_NAME = "clawdentity-openclaw-relay"; +export const RELAY_MODULE_FILE_NAME = "relay-to-peer.mjs"; +export const RELAY_RUNTIME_FILE_NAME = "clawdentity-relay.json"; +export const RELAY_PEERS_FILE_NAME = "clawdentity-peers.json"; +export const HOOK_MAPPING_ID = "clawdentity-send-to-peer"; +export const HOOK_PATH_SEND_TO_PEER = "send-to-peer"; +export const OPENCLAW_SEND_TO_PEER_HOOK_PATH = "hooks/send-to-peer"; +export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; +export const DEFAULT_OPENCLAW_MAIN_SESSION_KEY = "main"; +export const DEFAULT_CONNECTOR_PORT = 19400; +export const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; +export const DEFAULT_CONNECTOR_STATUS_PATH = "/v1/status"; +export const DEFAULT_SETUP_WAIT_TIMEOUT_SECONDS = 30; +export const CONNECTOR_HOST_LOOPBACK = "127.0.0.1"; +export const CONNECTOR_HOST_DOCKER = "host.docker.internal"; +export const CONNECTOR_HOST_DOCKER_GATEWAY = "gateway.docker.internal"; +export const CONNECTOR_HOST_LINUX_BRIDGE = "172.17.0.1"; +export const CONNECTOR_RUN_DIR_NAME = "run"; +export const CONNECTOR_DETACHED_STDOUT_FILE_SUFFIX = "stdout.log"; +export const CONNECTOR_DETACHED_STDERR_FILE_SUFFIX = "stderr.log"; +export const INVITE_CODE_PREFIX = "clawd1_"; +export const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; +export const FILE_MODE = 0o600; +export const OPENCLAW_HOOK_TOKEN_BYTES = 32; +export const OPENCLAW_SETUP_COMMAND_HINT = + "Run: clawdentity openclaw setup "; +export const OPENCLAW_SETUP_RESTART_COMMAND_HINT = `${OPENCLAW_SETUP_COMMAND_HINT} and restart OpenClaw`; +export const OPENCLAW_SETUP_WITH_BASE_URL_HINT = `${OPENCLAW_SETUP_COMMAND_HINT} --openclaw-base-url `; +export const OPENCLAW_PAIRING_COMMAND_HINT = + "Run QR pairing first: clawdentity pair start --qr and 
clawdentity pair confirm --qr-file "; +export const OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT = + "Run: clawdentity openclaw setup (auto-recovers pending OpenClaw gateway device approvals)"; +export const OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT = + "Run: clawdentity openclaw setup (ensures gateway auth mode/token are configured)"; +export const OPENCLAW_GATEWAY_APPROVAL_COMMAND = "openclaw"; +export const OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS = 10_000; +export const OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS = 20; +export const OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS = 1_000; diff --git a/apps/cli/src/commands/openclaw/doctor-connector-checks.ts b/apps/cli/src/commands/openclaw/doctor-connector-checks.ts new file mode 100644 index 0000000..ca5e80d --- /dev/null +++ b/apps/cli/src/commands/openclaw/doctor-connector-checks.ts @@ -0,0 +1,274 @@ +import { toDoctorCheck } from "./common.js"; +import { fetchConnectorHealthStatus } from "./connector.js"; +import { + OPENCLAW_SETUP_COMMAND_HINT, + OPENCLAW_SETUP_RESTART_COMMAND_HINT, +} from "./constants.js"; +import { resolveConnectorAssignmentsPath } from "./paths.js"; +import { loadConnectorAssignments } from "./state.js"; +import type { OpenclawDoctorCheckResult } from "./types.js"; + +export async function runDoctorConnectorRuntimeChecks(input: { + homeDir: string; + selectedAgentName?: string; + fetchImpl?: typeof fetch; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + if (input.selectedAgentName === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: + "cannot validate connector runtime without selected agent marker", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: + "cannot validate connector inbound inbox without selected agent marker", + remediationHint: 
OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: + "cannot validate OpenClaw hook health without selected agent marker", + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + }), + ); + return; + } + + const connectorAssignmentsPath = resolveConnectorAssignmentsPath( + input.homeDir, + ); + try { + const connectorAssignments = await loadConnectorAssignments( + connectorAssignmentsPath, + ); + const assignment = connectorAssignments.agents[input.selectedAgentName]; + if (assignment === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: `no connector assignment found for ${input.selectedAgentName}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { + connectorAssignmentsPath, + selectedAgentName: input.selectedAgentName, + }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: `no connector assignment found for ${input.selectedAgentName}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { + connectorAssignmentsPath, + selectedAgentName: input.selectedAgentName, + }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: `no connector assignment found for ${input.selectedAgentName}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { + connectorAssignmentsPath, + selectedAgentName: input.selectedAgentName, + }, + }), + ); + return; + } + + const fetchImpl = input.fetchImpl ?? 
globalThis.fetch; + if (typeof fetchImpl !== "function") { + input.checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: "fetch implementation is unavailable for connector checks", + remediationHint: + "Run doctor in a Node runtime with fetch support, or rerun openclaw setup", + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: + "fetch implementation is unavailable for connector inbox checks", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: + "fetch implementation is unavailable for OpenClaw hook health checks", + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + }), + ); + return; + } + + const connectorStatus = await fetchConnectorHealthStatus({ + connectorBaseUrl: assignment.connectorBaseUrl, + fetchImpl, + }); + if (connectorStatus.connected) { + input.checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "pass", + message: `connector websocket is connected (${assignment.connectorBaseUrl})`, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + const inboxPendingCount = connectorStatus.inboundInbox?.pendingCount ?? 0; + const replayError = connectorStatus.inboundInbox?.lastReplayError; + input.checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "pass", + message: + inboxPendingCount === 0 + ? 
"connector inbound inbox is empty" + : `connector inbound inbox has ${inboxPendingCount} pending message(s)`, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + ...connectorStatus.inboundInbox, + }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: + connectorStatus.openclawHook?.lastAttemptStatus === "failed" && + inboxPendingCount > 0 + ? "fail" + : "pass", + message: + connectorStatus.openclawHook?.lastAttemptStatus === "failed" && + inboxPendingCount > 0 + ? `connector replay to local OpenClaw hook is failing: ${replayError ?? "unknown error"}` + : "connector replay to local OpenClaw hook is healthy", + remediationHint: + connectorStatus.openclawHook?.lastAttemptStatus === "failed" && + inboxPendingCount > 0 + ? OPENCLAW_SETUP_RESTART_COMMAND_HINT + : undefined, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + ...connectorStatus.openclawHook, + inboxPendingCount, + }, + }), + ); + return; + } + + const reason = connectorStatus.reason ?? 
"connector runtime is unavailable"; + input.checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: `connector runtime is not ready: ${reason}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: `unable to read connector inbound inbox status: ${reason}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: `unable to verify OpenClaw hook health: ${reason}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { + connectorStatusUrl: connectorStatus.statusUrl, + connectorBaseUrl: assignment.connectorBaseUrl, + }, + }), + ); + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.connectorRuntime", + label: "Connector runtime", + status: "fail", + message: `unable to read connector assignments at ${connectorAssignmentsPath}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { connectorAssignmentsPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.connectorInboundInbox", + label: "Connector inbound inbox", + status: "fail", + message: + "cannot validate connector inbound inbox without connector assignment", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.openclawHookHealth", + label: "OpenClaw hook health", + status: "fail", + message: + "cannot validate OpenClaw hook health without connector assignment", + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + }), + ); + } +} 
diff --git a/apps/cli/src/commands/openclaw/doctor-static-checks.ts b/apps/cli/src/commands/openclaw/doctor-static-checks.ts new file mode 100644 index 0000000..a74ab43 --- /dev/null +++ b/apps/cli/src/commands/openclaw/doctor-static-checks.ts @@ -0,0 +1,787 @@ +import { readFile } from "node:fs/promises"; +import { AppError } from "@clawdentity/sdk"; +import { resolveConfig } from "../../config/manager.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { + getErrorCode, + isRecord, + normalizeStringArrayWithValues, + parseProxyUrl, + toDoctorCheck, +} from "./common.js"; +import { + hasRelayTransformModule, + isCanonicalAgentSessionKey, + isRelayHookMapping, + parseGatewayAuthMode, +} from "./config.js"; +import { + OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + OPENCLAW_PAIRING_COMMAND_HINT, + OPENCLAW_SETUP_COMMAND_HINT, + OPENCLAW_SETUP_RESTART_COMMAND_HINT, + OPENCLAW_SETUP_WITH_BASE_URL_HINT, +} from "./constants.js"; +import { readOpenclawGatewayPendingState } from "./gateway.js"; +import { + resolveOpenclawAgentNamePath, + resolveOpenclawConfigPath, + resolvePeersPath, + resolveRelayRuntimeConfigPath, + resolveTransformPeersPath, + resolveTransformRuntimePath, + resolveTransformTargetPath, +} from "./paths.js"; +import { + ensureLocalAgentCredentials, + loadPeersConfig, + readJsonFile, + resolveOpenclawBaseUrl, +} from "./state.js"; +import type { + OpenclawDoctorCheckResult, + OpenclawDoctorOptions, + PeersConfig, +} from "./types.js"; + +export async function runDoctorConfigCheck(input: { + options: OpenclawDoctorOptions; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + if (input.options.includeConfigCheck === false) { + return; + } + + const resolveConfigImpl = input.options.resolveConfigImpl ?? resolveConfig; + try { + const resolvedConfig = await resolveConfigImpl(); + const envProxyUrl = + typeof process.env.CLAWDENTITY_PROXY_URL === "string" + ? 
process.env.CLAWDENTITY_PROXY_URL.trim() + : ""; + if ( + typeof resolvedConfig.registryUrl !== "string" || + resolvedConfig.registryUrl.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "registryUrl is missing", + remediationHint: + "Run: clawdentity config set registryUrl ", + }), + ); + } else if ( + typeof resolvedConfig.apiKey !== "string" || + resolvedConfig.apiKey.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "apiKey is missing", + remediationHint: "Run: clawdentity config set apiKey ", + }), + ); + } else if (envProxyUrl.length > 0) { + let hasValidEnvProxyUrl = true; + try { + parseProxyUrl(envProxyUrl); + } catch { + hasValidEnvProxyUrl = false; + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "CLAWDENTITY_PROXY_URL is invalid", + remediationHint: + "Set CLAWDENTITY_PROXY_URL to a valid http(s) URL or unset it", + }), + ); + } + + if (hasValidEnvProxyUrl) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: + "registryUrl and apiKey are configured (proxy URL override is active via CLAWDENTITY_PROXY_URL)", + }), + ); + } + } else if ( + typeof resolvedConfig.proxyUrl !== "string" || + resolvedConfig.proxyUrl.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "proxyUrl is missing", + remediationHint: + "Run: clawdentity invite redeem or clawdentity config init", + }), + ); + } else { + let hasValidConfigProxyUrl = true; + try { + parseProxyUrl(resolvedConfig.proxyUrl); + } catch { + hasValidConfigProxyUrl = false; + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "proxyUrl is invalid", + 
remediationHint: + "Run: clawdentity invite redeem or clawdentity config init", + }), + ); + } + + if (hasValidConfigProxyUrl) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: "registryUrl, apiKey, and proxyUrl are configured", + }), + ); + } + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "unable to resolve CLI config", + remediationHint: + "Run: clawdentity config init (or fix your CLI state config file)", + }), + ); + } +} + +export async function runDoctorSelectedAgentCheck(input: { + homeDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const selectedAgentPath = resolveOpenclawAgentNamePath(input.homeDir); + let selectedAgentName: string | undefined; + try { + const selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); + selectedAgentName = assertValidAgentName(selectedAgentRaw.trim()); + input.checks.push( + toDoctorCheck({ + id: "state.selectedAgent", + label: "Selected agent marker", + status: "pass", + message: `selected agent is ${selectedAgentName}`, + }), + ); + } catch (error) { + const missing = getErrorCode(error) === "ENOENT"; + input.checks.push( + toDoctorCheck({ + id: "state.selectedAgent", + label: "Selected agent marker", + status: "fail", + message: missing + ? 
`missing ${selectedAgentPath}` + : "selected agent marker is invalid", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + } + + return selectedAgentName; +} + +export async function runDoctorCredentialsCheck(input: { + homeDir: string; + selectedAgentName?: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + if (input.selectedAgentName === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "fail", + message: "cannot validate credentials without selected agent marker", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + return; + } + + try { + await ensureLocalAgentCredentials(input.homeDir, input.selectedAgentName); + input.checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "pass", + message: "ait.jwt and secret.key are present", + }), + ); + } catch (error) { + const details = error instanceof AppError ? error.details : undefined; + const filePath = + details && typeof details.filePath === "string" + ? details.filePath + : undefined; + input.checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "fail", + message: + filePath === undefined + ? "agent credentials are missing or invalid" + : `credential file missing or empty: ${filePath}`, + remediationHint: + "Run: clawdentity agent create --framework openclaw", + details: + filePath === undefined + ? 
undefined + : { filePath, selectedAgentName: input.selectedAgentName }, + }), + ); + } +} + +export async function runDoctorPeersCheck(input: { + homeDir: string; + peerAlias?: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const peersPath = resolvePeersPath(input.homeDir); + let peersConfig: PeersConfig | undefined; + try { + peersConfig = await loadPeersConfig(peersPath); + const peerAliases = Object.keys(peersConfig.peers); + if (input.peerAlias !== undefined) { + if (peersConfig.peers[input.peerAlias] === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: `peer alias is missing: ${input.peerAlias}`, + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, + details: { peersPath, peerAlias: input.peerAlias }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: `peer alias exists: ${input.peerAlias}`, + details: { peersPath, peerAlias: input.peerAlias }, + }), + ); + } + } else if (peerAliases.length === 0) { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: "no peers are configured yet (optional until pairing)", + details: { peersPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: `configured peers: ${peerAliases.length}`, + details: { peersPath }, + }), + ); + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: `invalid peers config at ${peersPath}`, + remediationHint: `Fix JSON in ${peersPath} or rerun openclaw setup`, + details: { peersPath }, + }), + ); + } + + return peersConfig; +} + +export async function runDoctorTransformCheck(input: { + openclawDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const transformTargetPath = 
resolveTransformTargetPath(input.openclawDir); + const relayTransformRuntimePath = resolveTransformRuntimePath( + input.openclawDir, + ); + const relayTransformPeersPath = resolveTransformPeersPath(input.openclawDir); + try { + const transformContents = await readFile(transformTargetPath, "utf8"); + const runtimeContents = await readFile(relayTransformRuntimePath, "utf8"); + const peersSnapshotContents = await readFile( + relayTransformPeersPath, + "utf8", + ); + + if ( + transformContents.trim().length === 0 || + runtimeContents.trim().length === 0 || + peersSnapshotContents.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: "relay transform artifacts are missing or empty", + remediationHint: "Run: clawdentity skill install", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "pass", + message: "relay transform artifacts are present", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: "missing relay transform artifacts", + remediationHint: "Run: clawdentity skill install", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } +} + +export async function runDoctorOpenclawConfigCheck(input: { + openclawDir: string; + homeDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const openclawConfigPath = resolveOpenclawConfigPath( + input.openclawDir, + input.homeDir, + ); + try { + const openclawConfig = await readJsonFile(openclawConfigPath); + if (!isRecord(openclawConfig)) { + throw new Error("root"); + } + const hooks = 
isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; + const hooksEnabled = hooks.enabled === true; + const hookToken = + typeof hooks.token === "string" && hooks.token.trim().length > 0 + ? hooks.token.trim() + : undefined; + const defaultSessionKey = + typeof hooks.defaultSessionKey === "string" && + hooks.defaultSessionKey.trim().length > 0 + ? hooks.defaultSessionKey.trim() + : undefined; + const allowRequestSessionKey = hooks.allowRequestSessionKey === false; + const allowedSessionKeyPrefixes = normalizeStringArrayWithValues( + hooks.allowedSessionKeyPrefixes, + [], + ); + const missingRequiredSessionPrefixes = + defaultSessionKey === undefined + ? ["hook:"] + : ["hook:", defaultSessionKey].filter( + (prefix) => !allowedSessionKeyPrefixes.includes(prefix), + ); + const mappings = Array.isArray(hooks.mappings) + ? hooks.mappings.filter(isRecord) + : []; + const relayMapping = mappings.find((mapping) => + isRelayHookMapping(mapping), + ); + if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { + input.checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `missing send-to-peer mapping in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "pass", + message: "send-to-peer mapping is configured", + details: { openclawConfigPath }, + }), + ); + } + + if (!hooksEnabled) { + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `hooks.enabled is not true in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else if (hookToken === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: 
"fail", + message: `hooks.token is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "pass", + message: "hooks token is configured", + details: { openclawConfigPath }, + }), + ); + } + + const sessionRoutingIssues: string[] = []; + if (defaultSessionKey === undefined) { + sessionRoutingIssues.push("hooks.defaultSessionKey is missing"); + } + if (!allowRequestSessionKey) { + sessionRoutingIssues.push("hooks.allowRequestSessionKey is not false"); + } + if (missingRequiredSessionPrefixes.length > 0) { + sessionRoutingIssues.push( + `hooks.allowedSessionKeyPrefixes is missing: ${missingRequiredSessionPrefixes.join(", ")}`, + ); + } + if ( + defaultSessionKey !== undefined && + isCanonicalAgentSessionKey(defaultSessionKey) + ) { + sessionRoutingIssues.push( + "hooks.defaultSessionKey uses canonical agent format (agent::...); use OpenClaw request session keys like main, global, or subagent:*", + ); + } + + if (sessionRoutingIssues.length > 0) { + input.checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "fail", + message: sessionRoutingIssues.join("; "), + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "pass", + message: + "hooks default session and allowed session prefixes are configured", + details: { openclawConfigPath }, + }), + ); + } + + const gateway = isRecord(openclawConfig.gateway) + ? openclawConfig.gateway + : {}; + const gatewayAuth = isRecord(gateway.auth) ? 
gateway.auth : {}; + const gatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); + const gatewayAuthToken = + typeof gatewayAuth.token === "string" && + gatewayAuth.token.trim().length > 0 + ? gatewayAuth.token.trim() + : undefined; + const gatewayAuthPassword = + typeof gatewayAuth.password === "string" && + gatewayAuth.password.trim().length > 0 + ? gatewayAuth.password.trim() + : undefined; + + if (gatewayAuthMode === "token") { + if (gatewayAuthToken === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.token is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with token mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } + } else if (gatewayAuthMode === "password") { + if (gatewayAuthPassword === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.password is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with password mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } + } else if (gatewayAuthMode === "trusted-proxy") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with trusted-proxy mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } 
else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.mode is missing or unsupported in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath }, + }), + ); + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + } +} + +export async function runDoctorOpenclawBaseUrlCheck(input: { + homeDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const relayRuntimeConfigPath = 
resolveRelayRuntimeConfigPath(input.homeDir); + try { + const openclawBaseUrl = await resolveOpenclawBaseUrl({ + relayRuntimeConfigPath, + }); + input.checks.push( + toDoctorCheck({ + id: "state.openclawBaseUrl", + label: "OpenClaw base URL", + status: "pass", + message: `resolved to ${openclawBaseUrl}`, + }), + ); + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.openclawBaseUrl", + label: "OpenClaw base URL", + status: "fail", + message: `unable to resolve OpenClaw base URL from ${relayRuntimeConfigPath}`, + remediationHint: OPENCLAW_SETUP_WITH_BASE_URL_HINT, + }), + ); + } +} + +export async function runDoctorGatewayPairingCheck(input: { + openclawDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const gatewayPendingState = await readOpenclawGatewayPendingState( + input.openclawDir, + ); + if (gatewayPendingState.status === "missing") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "pass", + message: "no pending gateway device approvals file was found", + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.status === "invalid") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `invalid pending device approvals file: ${gatewayPendingState.gatewayDevicePendingPath}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.status === "unreadable") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `unable to read pending device approvals at ${gatewayPendingState.gatewayDevicePendingPath}`, + remediationHint: 
OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.pendingRequestIds.length === 0) { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "pass", + message: "no pending gateway device approvals", + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `pending gateway device approvals: ${gatewayPendingState.pendingRequestIds.length}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + pendingRequestIds: gatewayPendingState.pendingRequestIds, + }, + }), + ); + } +} diff --git a/apps/cli/src/commands/openclaw/doctor.ts b/apps/cli/src/commands/openclaw/doctor.ts new file mode 100644 index 0000000..47376ba --- /dev/null +++ b/apps/cli/src/commands/openclaw/doctor.ts @@ -0,0 +1,52 @@ +import { parseDoctorPeerAlias, toDoctorResult } from "./common.js"; +import { runDoctorConnectorRuntimeChecks } from "./doctor-connector-checks.js"; +import { + runDoctorConfigCheck, + runDoctorCredentialsCheck, + runDoctorGatewayPairingCheck, + runDoctorOpenclawBaseUrlCheck, + runDoctorOpenclawConfigCheck, + runDoctorPeersCheck, + runDoctorSelectedAgentCheck, + runDoctorTransformCheck, +} from "./doctor-static-checks.js"; +import { resolveHomeDir, resolveOpenclawDir } from "./paths.js"; +import type { + OpenclawDoctorCheckResult, + OpenclawDoctorOptions, + OpenclawDoctorResult, +} from "./types.js"; + +export async function runOpenclawDoctor( + options: OpenclawDoctorOptions = {}, +): Promise { + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = 
resolveOpenclawDir(options.openclawDir, homeDir); + const peerAlias = parseDoctorPeerAlias(options.peerAlias); + const checks: OpenclawDoctorCheckResult[] = []; + + await runDoctorConfigCheck({ options, checks }); + + const selectedAgentName = await runDoctorSelectedAgentCheck({ + homeDir, + checks, + }); + await runDoctorCredentialsCheck({ homeDir, selectedAgentName, checks }); + + await runDoctorPeersCheck({ homeDir, peerAlias, checks }); + await runDoctorTransformCheck({ openclawDir, checks }); + await runDoctorOpenclawConfigCheck({ openclawDir, homeDir, checks }); + await runDoctorOpenclawBaseUrlCheck({ homeDir, checks }); + await runDoctorGatewayPairingCheck({ openclawDir, checks }); + + if (options.includeConnectorRuntimeCheck !== false) { + await runDoctorConnectorRuntimeChecks({ + homeDir, + selectedAgentName, + fetchImpl: options.fetchImpl, + checks, + }); + } + + return toDoctorResult(checks); +} diff --git a/apps/cli/src/commands/openclaw/gateway.ts b/apps/cli/src/commands/openclaw/gateway.ts new file mode 100644 index 0000000..1bde6e3 --- /dev/null +++ b/apps/cli/src/commands/openclaw/gateway.ts @@ -0,0 +1,195 @@ +import { spawn } from "node:child_process"; +import { join } from "node:path"; +import { getErrorCode, isRecord } from "./common.js"; +import { + OPENCLAW_GATEWAY_APPROVAL_COMMAND, + OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS, +} from "./constants.js"; +import { resolveOpenclawConfigPath } from "./paths.js"; +import { readJsonFile } from "./state.js"; +import type { + OpenclawGatewayDeviceApprovalAttempt, + OpenclawGatewayDeviceApprovalExecution, + OpenclawGatewayDeviceApprovalInput, + OpenclawGatewayDeviceApprovalRunner, + OpenclawGatewayDeviceApprovalSummary, + OpenclawGatewayPendingState, +} from "./types.js"; + +export async function readOpenclawGatewayPendingState( + openclawDir: string, +): Promise { + const gatewayDevicePendingPath = join(openclawDir, "devices", "pending.json"); + try { + const pendingPayload = await 
readJsonFile(gatewayDevicePendingPath); + if (!isRecord(pendingPayload)) { + return { + status: "invalid", + gatewayDevicePendingPath, + }; + } + return { + status: "ok", + gatewayDevicePendingPath, + pendingRequestIds: Object.keys(pendingPayload), + }; + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return { + status: "missing", + gatewayDevicePendingPath, + }; + } + return { + status: "unreadable", + gatewayDevicePendingPath, + }; + } +} + +export function resolveOpenclawGatewayApprovalCommand(): string { + const envOverride = process.env.OPENCLAW_GATEWAY_APPROVAL_COMMAND?.trim(); + if (typeof envOverride === "string" && envOverride.length > 0) { + return envOverride; + } + return OPENCLAW_GATEWAY_APPROVAL_COMMAND; +} + +async function runOpenclawGatewayApprovalCommand(input: { + command: string; + args: string[]; + openclawDir: string; + openclawConfigPath: string; +}): Promise { + return await new Promise( + (resolve) => { + const child = spawn(input.command, input.args, { + env: { + ...process.env, + OPENCLAW_STATE_DIR: input.openclawDir, + OPENCLAW_CONFIG_PATH: input.openclawConfigPath, + }, + stdio: ["ignore", "pipe", "pipe"], + }); + + let settled = false; + let stdout = ""; + let stderr = ""; + + const finalize = (result: OpenclawGatewayDeviceApprovalExecution) => { + if (settled) { + return; + } + settled = true; + resolve({ + ...result, + stdout: stdout.trim(), + stderr: stderr.trim(), + }); + }; + + const timeout = setTimeout(() => { + try { + child.kill("SIGTERM"); + } catch { + // Best-effort timeout shutdown. 
+ } + finalize({ + ok: false, + errorMessage: `command timed out after ${OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS}ms`, + }); + }, OPENCLAW_GATEWAY_APPROVAL_TIMEOUT_MS); + + child.stdout?.on("data", (chunk: Buffer | string) => { + stdout += String(chunk); + }); + child.stderr?.on("data", (chunk: Buffer | string) => { + stderr += String(chunk); + }); + + child.once("error", (error) => { + clearTimeout(timeout); + const errorCode = getErrorCode(error); + finalize({ + ok: false, + unavailable: errorCode === "ENOENT", + errorMessage: + error instanceof Error + ? error.message + : "failed to run openclaw command", + }); + }); + + child.once("close", (exitCode) => { + clearTimeout(timeout); + finalize({ + ok: exitCode === 0, + exitCode: typeof exitCode === "number" ? exitCode : undefined, + }); + }); + }, + ); +} + +async function runOpenclawGatewayDeviceApproval( + input: OpenclawGatewayDeviceApprovalInput, +): Promise { + const command = resolveOpenclawGatewayApprovalCommand(); + return await runOpenclawGatewayApprovalCommand({ + command, + args: ["devices", "approve", input.requestId, "--json"], + openclawDir: input.openclawDir, + openclawConfigPath: input.openclawConfigPath, + }); +} + +export async function autoApproveOpenclawGatewayDevices(input: { + homeDir: string; + openclawDir: string; + runner?: OpenclawGatewayDeviceApprovalRunner; +}): Promise { + const pendingState = await readOpenclawGatewayPendingState(input.openclawDir); + if ( + pendingState.status !== "ok" || + pendingState.pendingRequestIds.length === 0 + ) { + return undefined; + } + + const openclawConfigPath = resolveOpenclawConfigPath( + input.openclawDir, + input.homeDir, + ); + const approvalRunner = input.runner ?? 
runOpenclawGatewayDeviceApproval; + const attempts: OpenclawGatewayDeviceApprovalAttempt[] = []; + + for (const requestId of pendingState.pendingRequestIds) { + const execution = await approvalRunner({ + requestId, + openclawDir: input.openclawDir, + openclawConfigPath, + }); + attempts.push({ + requestId, + ok: execution.ok, + unavailable: execution.unavailable === true, + reason: + execution.errorMessage ?? + (execution.stderr && execution.stderr.length > 0 + ? execution.stderr + : execution.stdout && execution.stdout.length > 0 + ? execution.stdout + : undefined), + exitCode: execution.exitCode, + }); + if (execution.unavailable === true) { + break; + } + } + + return { + gatewayDevicePendingPath: pendingState.gatewayDevicePendingPath, + pendingRequestIds: pendingState.pendingRequestIds, + attempts, + }; +} diff --git a/apps/cli/src/commands/openclaw/output.ts b/apps/cli/src/commands/openclaw/output.ts new file mode 100644 index 0000000..653c8c8 --- /dev/null +++ b/apps/cli/src/commands/openclaw/output.ts @@ -0,0 +1,62 @@ +import { writeStdoutLine } from "../../io.js"; +import { OPENCLAW_SEND_TO_PEER_HOOK_PATH } from "./constants.js"; +import type { + OpenclawDoctorCheckResult, + OpenclawDoctorResult, + OpenclawRelayTestResult, + OpenclawRelayWebsocketTestResult, +} from "./types.js"; + +export function formatDoctorCheckLine( + check: OpenclawDoctorCheckResult, +): string { + const icon = check.status === "pass" ? 
"✅" : "❌";
  return `${icon} ${check.label}: ${check.message}`;
}

// Emits "Fix: <hint>" only when a remediation hint is present.
function writeFixHint(remediationHint: string | undefined): void {
  if (remediationHint) {
    writeStdoutLine(`Fix: ${remediationHint}`);
  }
}

/**
 * Prints the overall doctor status followed by one line per check,
 * with a remediation hint after every failed check that has one.
 */
export function printDoctorResult(result: OpenclawDoctorResult): void {
  writeStdoutLine(`OpenClaw doctor status: ${result.status}`);
  for (const check of result.checks) {
    writeStdoutLine(formatDoctorCheckLine(check));
    if (check.status === "fail" && check.remediationHint) {
      writeStdoutLine(`Fix: ${check.remediationHint}`);
    }
  }
}

/**
 * Prints a human-readable summary of a relay HTTP probe result.
 * Optional fields (HTTP status, remediation hint) are only printed
 * when present on the result.
 */
export function printRelayTestResult(result: OpenclawRelayTestResult): void {
  writeStdoutLine(`Relay test status: ${result.status}`);
  writeStdoutLine(`Peer alias: ${result.peerAlias}`);
  writeStdoutLine(`Endpoint: ${result.endpoint}`);
  if (typeof result.httpStatus === "number") {
    writeStdoutLine(`HTTP status: ${result.httpStatus}`);
  }
  writeStdoutLine(`Message: ${result.message}`);
  writeFixHint(result.remediationHint);
}

/**
 * Prints a human-readable summary of a relay websocket probe result.
 * Connector URLs are only printed when the probe resolved them.
 */
export function printRelayWebsocketTestResult(
  result: OpenclawRelayWebsocketTestResult,
): void {
  writeStdoutLine(`Relay websocket test status: ${result.status}`);
  writeStdoutLine(`Peer alias: ${result.peerAlias}`);
  if (typeof result.connectorBaseUrl === "string") {
    writeStdoutLine(`Connector base URL: ${result.connectorBaseUrl}`);
  }
  if (typeof result.connectorStatusUrl === "string") {
    writeStdoutLine(`Connector status URL: ${result.connectorStatusUrl}`);
  }
  writeStdoutLine(`Message: ${result.message}`);
  writeFixHint(result.remediationHint);
}

/**
 * Builds the absolute send-to-peer hook URL from an OpenClaw base URL,
 * normalizing a missing trailing slash so relative resolution keeps the
 * base path.
 */
export function toSendToPeerEndpoint(openclawBaseUrl: string): string {
  const normalizedBase = openclawBaseUrl.endsWith("/")
    ?
openclawBaseUrl + : `${openclawBaseUrl}/`; + return new URL(OPENCLAW_SEND_TO_PEER_HOOK_PATH, normalizedBase).toString(); +} diff --git a/apps/cli/src/commands/openclaw/paths.ts b/apps/cli/src/commands/openclaw/paths.ts new file mode 100644 index 0000000..1db7ea7 --- /dev/null +++ b/apps/cli/src/commands/openclaw/paths.ts @@ -0,0 +1,169 @@ +import { existsSync } from "node:fs"; +import { homedir } from "node:os"; +import { dirname, join, resolve as resolvePath } from "node:path"; +import { getConfigDir } from "../../config/manager.js"; +import { + AGENTS_DIR_NAME, + LEGACY_OPENCLAW_CONFIG_FILE_NAMES, + LEGACY_OPENCLAW_STATE_DIR_NAMES, + OPENCLAW_AGENT_FILE_NAME, + OPENCLAW_CONFIG_FILE_NAME, + OPENCLAW_CONNECTORS_FILE_NAME, + OPENCLAW_DIR_NAME, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + PEERS_FILE_NAME, + RELAY_MODULE_FILE_NAME, + RELAY_PEERS_FILE_NAME, + RELAY_RUNTIME_FILE_NAME, + SKILL_DIR_NAME, +} from "./constants.js"; + +export function resolveHomeDir(homeDir?: string): string { + if (typeof homeDir === "string" && homeDir.trim().length > 0) { + return homeDir.trim(); + } + + return homedir(); +} + +export function resolveHomePrefixedPath( + input: string, + homeDir: string, +): string { + const trimmed = input.trim(); + if (trimmed.startsWith("~")) { + return resolvePath(trimmed.replace(/^~(?=$|[\\/])/, homeDir)); + } + return resolvePath(trimmed); +} + +export function readNonEmptyEnvPath( + value: string | undefined, + homeDir: string, +): string | undefined { + if (typeof value !== "string" || value.trim().length === 0) { + return undefined; + } + return resolveHomePrefixedPath(value, homeDir); +} + +export function resolveOpenclawHomeDir(homeDir: string): string { + const envOpenclawHome = readNonEmptyEnvPath( + process.env.OPENCLAW_HOME, + homeDir, + ); + return envOpenclawHome ?? 
homeDir; +} + +export function resolveDefaultOpenclawStateDir( + openclawHomeDir: string, +): string { + const newStateDir = join(openclawHomeDir, OPENCLAW_DIR_NAME); + if (existsSync(newStateDir)) { + return newStateDir; + } + + for (const legacyDirName of LEGACY_OPENCLAW_STATE_DIR_NAMES) { + const legacyStateDir = join(openclawHomeDir, legacyDirName); + if (existsSync(legacyStateDir)) { + return legacyStateDir; + } + } + + return newStateDir; +} + +export function resolveOpenclawDir( + openclawDir: string | undefined, + homeDir: string, +): string { + if (typeof openclawDir === "string" && openclawDir.trim().length > 0) { + return resolveHomePrefixedPath(openclawDir, homeDir); + } + + const envStateDir = readNonEmptyEnvPath( + process.env.OPENCLAW_STATE_DIR ?? process.env.CLAWDBOT_STATE_DIR, + homeDir, + ); + if (envStateDir !== undefined) { + return envStateDir; + } + + const envConfigPath = readNonEmptyEnvPath( + process.env.OPENCLAW_CONFIG_PATH ?? process.env.CLAWDBOT_CONFIG_PATH, + homeDir, + ); + if (envConfigPath !== undefined) { + return dirname(envConfigPath); + } + + const openclawHomeDir = resolveOpenclawHomeDir(homeDir); + return resolveDefaultOpenclawStateDir(openclawHomeDir); +} + +export function resolveAgentDirectory( + homeDir: string, + agentName: string, +): string { + return join(getConfigDir({ homeDir }), AGENTS_DIR_NAME, agentName); +} + +export function resolvePeersPath(homeDir: string): string { + return join(getConfigDir({ homeDir }), PEERS_FILE_NAME); +} + +export function resolveOpenclawConfigPath( + openclawDir: string, + homeDir: string, +): string { + const envConfigPath = readNonEmptyEnvPath( + process.env.OPENCLAW_CONFIG_PATH ?? 
process.env.CLAWDBOT_CONFIG_PATH, + homeDir, + ); + if (envConfigPath !== undefined) { + return envConfigPath; + } + + const configCandidates = [ + join(openclawDir, OPENCLAW_CONFIG_FILE_NAME), + ...LEGACY_OPENCLAW_CONFIG_FILE_NAMES.map((fileName) => + join(openclawDir, fileName), + ), + ]; + + for (const candidate of configCandidates) { + if (existsSync(candidate)) { + return candidate; + } + } + + return configCandidates[0]; +} + +export function resolveDefaultTransformSource(openclawDir: string): string { + return join(openclawDir, "skills", SKILL_DIR_NAME, RELAY_MODULE_FILE_NAME); +} + +export function resolveTransformTargetPath(openclawDir: string): string { + return join(openclawDir, "hooks", "transforms", RELAY_MODULE_FILE_NAME); +} + +export function resolveOpenclawAgentNamePath(homeDir: string): string { + return join(getConfigDir({ homeDir }), OPENCLAW_AGENT_FILE_NAME); +} + +export function resolveRelayRuntimeConfigPath(homeDir: string): string { + return join(getConfigDir({ homeDir }), OPENCLAW_RELAY_RUNTIME_FILE_NAME); +} + +export function resolveConnectorAssignmentsPath(homeDir: string): string { + return join(getConfigDir({ homeDir }), OPENCLAW_CONNECTORS_FILE_NAME); +} + +export function resolveTransformRuntimePath(openclawDir: string): string { + return join(openclawDir, "hooks", "transforms", RELAY_RUNTIME_FILE_NAME); +} + +export function resolveTransformPeersPath(openclawDir: string): string { + return join(openclawDir, "hooks", "transforms", RELAY_PEERS_FILE_NAME); +} diff --git a/apps/cli/src/commands/openclaw/relay.ts b/apps/cli/src/commands/openclaw/relay.ts new file mode 100644 index 0000000..5e4bd68 --- /dev/null +++ b/apps/cli/src/commands/openclaw/relay.ts @@ -0,0 +1,384 @@ +import { AppError, nowIso } from "@clawdentity/sdk"; +import { + createCliError, + parsePeerAlias, + resolveProbeMessage, + resolveProbeSessionId, +} from "./common.js"; +import { + fetchConnectorHealthStatus, + resolveConnectorAssignment, + 
resolveSelectedAgentName, +} from "./connector.js"; +import { + DEFAULT_OPENCLAW_BASE_URL, + OPENCLAW_PAIRING_COMMAND_HINT, + OPENCLAW_SETUP_COMMAND_HINT, +} from "./constants.js"; +import { runOpenclawDoctor } from "./doctor.js"; +import { toSendToPeerEndpoint } from "./output.js"; +import { + resolveHomeDir, + resolveOpenclawDir, + resolvePeersPath, + resolveRelayRuntimeConfigPath, +} from "./paths.js"; +import { + loadPeersConfig, + resolveHookToken, + resolveOpenclawBaseUrl, +} from "./state.js"; +import type { + OpenclawRelayTestOptions, + OpenclawRelayTestResult, + OpenclawRelayWebsocketTestOptions, + OpenclawRelayWebsocketTestResult, +} from "./types.js"; + +function parseRelayProbeFailure(input: { + status: number; + responseBody: string; +}): Pick { + if (input.status === 401 || input.status === 403) { + return { + message: "OpenClaw hook token was rejected", + remediationHint: + "Pass a valid token with --hook-token or set OPENCLAW_HOOK_TOKEN", + }; + } + + if (input.status === 404) { + return { + message: "OpenClaw send-to-peer hook is unavailable", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }; + } + + if (input.status === 405) { + return { + message: "OpenClaw send-to-peer hook is not enabled for POST requests", + remediationHint: `${OPENCLAW_SETUP_COMMAND_HINT}, then restart OpenClaw`, + }; + } + + if (input.status === 500) { + return { + message: "Relay probe failed inside local relay pipeline", + remediationHint: + "Check peer pairing and rerun: clawdentity openclaw setup ", + }; + } + + return { + message: `Relay probe failed with HTTP ${input.status}`, + remediationHint: + input.responseBody.trim().length > 0 + ? 
`Inspect response body: ${input.responseBody.trim()}`
        : "Check local OpenClaw and connector logs",
  };
}

/**
 * Picks the peer alias for a relay probe: an explicit option wins;
 * otherwise exactly one configured peer is required — zero or multiple
 * peers raise CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED.
 */
async function resolveRelayProbePeerAlias(input: {
  homeDir: string;
  peerAliasOption?: string;
}): Promise<string> {
  if (
    typeof input.peerAliasOption === "string" &&
    input.peerAliasOption.trim().length > 0
  ) {
    return parsePeerAlias(input.peerAliasOption);
  }

  const peersPath = resolvePeersPath(input.homeDir);
  const peersConfig = await loadPeersConfig(peersPath);
  const peerAliases = Object.keys(peersConfig.peers);

  if (peerAliases.length === 0) {
    throw createCliError(
      "CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED",
      "No paired peer is configured yet. Complete QR pairing first.",
      { peersPath },
    );
  }
  if (peerAliases.length > 1) {
    throw createCliError(
      "CLI_OPENCLAW_RELAY_TEST_PEER_REQUIRED",
      "Multiple peers are configured. Pass --peer to choose one.",
      { peersPath, peerAliases },
    );
  }
  return peerAliases[0];
}

/**
 * Runs the relay HTTP probe end to end: resolves the peer alias, performs
 * doctor preflight checks, then POSTs a probe message to the local OpenClaw
 * send-to-peer hook and classifies the outcome.
 */
export async function runOpenclawRelayTest(
  options: OpenclawRelayTestOptions,
): Promise<OpenclawRelayTestResult> {
  const homeDir = resolveHomeDir(options.homeDir);
  const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir);
  const checkedAt = nowIso();
  let peerAlias: string;
  try {
    peerAlias = await resolveRelayProbePeerAlias({
      homeDir,
      peerAliasOption: options.peer,
    });
  } catch (error) {
    // Alias resolution failed before any network work; report with the
    // default endpoint so the failure output still shows a target URL.
    const appError = error instanceof AppError ? error : undefined;
    return {
      status: "failure",
      checkedAt,
      peerAlias: "unresolved",
      endpoint: toSendToPeerEndpoint(DEFAULT_OPENCLAW_BASE_URL),
      message: appError?.message ??
"Unable to resolve relay peer alias", + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, + details: appError?.details as Record | undefined, + }; + } + + const preflight = await runOpenclawDoctor({ + homeDir, + openclawDir, + peerAlias, + resolveConfigImpl: options.resolveConfigImpl, + includeConnectorRuntimeCheck: false, + }); + + const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); + let openclawBaseUrl = DEFAULT_OPENCLAW_BASE_URL; + try { + openclawBaseUrl = await resolveOpenclawBaseUrl({ + optionValue: options.openclawBaseUrl, + relayRuntimeConfigPath, + }); + } catch { + return { + status: "failure", + checkedAt, + peerAlias, + endpoint: toSendToPeerEndpoint(DEFAULT_OPENCLAW_BASE_URL), + message: "Unable to resolve OpenClaw base URL", + remediationHint: + "Set OPENCLAW_BASE_URL or run openclaw setup with --openclaw-base-url", + preflight, + }; + } + + const endpoint = toSendToPeerEndpoint(openclawBaseUrl); + if (preflight.status === "unhealthy") { + const firstFailure = preflight.checks.find( + (check) => check.status === "fail", + ); + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + message: + firstFailure === undefined + ? "Preflight checks failed" + : `Preflight failed: ${firstFailure.label}`, + remediationHint: firstFailure?.remediationHint, + preflight, + }; + } + + const hookToken = await resolveHookToken({ + optionValue: options.hookToken, + relayRuntimeConfigPath, + }); + const fetchImpl = options.fetchImpl ?? globalThis.fetch; + if (typeof fetchImpl !== "function") { + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + message: "fetch implementation is unavailable", + remediationHint: "Run relay test in a Node runtime with fetch support", + preflight, + }; + } + + let response: Response; + try { + response = await fetchImpl(endpoint, { + method: "POST", + headers: { + "content-type": "application/json", + ...(hookToken === undefined ? 
{} : { "x-openclaw-token": hookToken }), + }, + body: JSON.stringify({ + peer: peerAlias, + sessionId: resolveProbeSessionId(options.sessionId), + message: resolveProbeMessage(options.message), + }), + }); + } catch { + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + message: "Relay probe request failed", + remediationHint: "Ensure local OpenClaw is running and reachable", + preflight, + }; + } + + if (response.ok) { + return { + status: "success", + checkedAt, + peerAlias, + endpoint, + httpStatus: response.status, + message: "Relay probe accepted", + preflight, + }; + } + + const responseBody = await response.text(); + const failure = parseRelayProbeFailure({ + status: response.status, + responseBody, + }); + return { + status: "failure", + checkedAt, + peerAlias, + endpoint, + httpStatus: response.status, + message: failure.message, + remediationHint: failure.remediationHint, + details: + responseBody.trim().length > 0 + ? { responseBody: responseBody.trim() } + : undefined, + preflight, + }; +} + +export async function runOpenclawRelayWebsocketTest( + options: OpenclawRelayWebsocketTestOptions, +): Promise { + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); + const checkedAt = nowIso(); + + let peerAlias: string; + try { + peerAlias = await resolveRelayProbePeerAlias({ + homeDir, + peerAliasOption: options.peer, + }); + } catch (error) { + const appError = error instanceof AppError ? error : undefined; + return { + status: "failure", + checkedAt, + peerAlias: "unresolved", + message: appError?.message ?? 
"Unable to resolve relay peer alias", + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, + details: appError?.details as Record | undefined, + }; + } + + const preflight = await runOpenclawDoctor({ + homeDir, + openclawDir, + peerAlias, + resolveConfigImpl: options.resolveConfigImpl, + includeConnectorRuntimeCheck: false, + }); + if (preflight.status === "unhealthy") { + const firstFailure = preflight.checks.find( + (check) => check.status === "fail", + ); + return { + status: "failure", + checkedAt, + peerAlias, + message: + firstFailure === undefined + ? "Preflight checks failed" + : `Preflight failed: ${firstFailure.label}`, + remediationHint: firstFailure?.remediationHint, + preflight, + }; + } + + const fetchImpl = options.fetchImpl ?? globalThis.fetch; + if (typeof fetchImpl !== "function") { + return { + status: "failure", + checkedAt, + peerAlias, + message: "fetch implementation is unavailable", + remediationHint: + "Run relay websocket test in a Node runtime with fetch support", + preflight, + }; + } + + let connectorBaseUrl: string | undefined; + let connectorStatusUrl: string | undefined; + try { + const selectedAgent = await resolveSelectedAgentName({ homeDir }); + const connectorAssignment = await resolveConnectorAssignment({ + homeDir, + agentName: selectedAgent.agentName, + }); + connectorBaseUrl = connectorAssignment.connectorBaseUrl; + connectorStatusUrl = connectorAssignment.connectorStatusUrl; + } catch (error) { + const appError = error instanceof AppError ? error : undefined; + return { + status: "failure", + checkedAt, + peerAlias, + connectorBaseUrl, + connectorStatusUrl, + message: + appError?.message ?? 
+ "Unable to resolve connector assignment for websocket test", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: appError?.details as Record | undefined, + preflight, + }; + } + + const connectorStatus = await fetchConnectorHealthStatus({ + connectorBaseUrl, + fetchImpl, + }); + if (!connectorStatus.connected) { + return { + status: "failure", + checkedAt, + peerAlias, + connectorBaseUrl, + connectorStatusUrl: connectorStatus.statusUrl, + message: "Connector websocket is not connected", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: + connectorStatus.reason === undefined + ? undefined + : { + reason: connectorStatus.reason, + }, + preflight, + }; + } + + return { + status: "success", + checkedAt, + peerAlias, + connectorBaseUrl, + connectorStatusUrl: connectorStatus.statusUrl, + message: "Connector websocket is connected for paired relay", + preflight, + }; +} diff --git a/apps/cli/src/commands/openclaw/setup.ts b/apps/cli/src/commands/openclaw/setup.ts new file mode 100644 index 0000000..1c8581b --- /dev/null +++ b/apps/cli/src/commands/openclaw/setup.ts @@ -0,0 +1,427 @@ +import { copyFile, mkdir } from "node:fs/promises"; +import { dirname } from "node:path"; +import { nowIso } from "@clawdentity/sdk"; +import { assertValidAgentName } from "../agent-name.js"; +import { + createCliError, + decodeInvitePayload, + encodeInvitePayload, + getErrorCode, + parseAgentDid, + parseInvitePayload, + parseOptionalProfileName, + parsePeerAlias, + parseProxyUrl, +} from "./common.js"; +import { patchOpenclawConfig } from "./config.js"; +import { + allocateConnectorPort, + buildConnectorBaseUrl, + buildRelayConnectorBaseUrls, + monitorConnectorStabilityWindow, + parseOpenclawRuntimeMode, + parseWaitTimeoutSeconds, + startSetupConnectorRuntime, +} from "./connector.js"; +import { + CONNECTOR_HOST_LOOPBACK, + DEFAULT_CONNECTOR_OUTBOUND_PATH, + logger, + OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS, + 
OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS, + RELAY_PEERS_FILE_NAME, +} from "./constants.js"; +import { runOpenclawDoctor } from "./doctor.js"; +import { + autoApproveOpenclawGatewayDevices, + resolveOpenclawGatewayApprovalCommand, +} from "./gateway.js"; +import { + resolveConnectorAssignmentsPath, + resolveDefaultTransformSource, + resolveHomeDir, + resolveOpenclawAgentNamePath, + resolveOpenclawConfigPath, + resolveOpenclawDir, + resolvePeersPath, + resolveRelayRuntimeConfigPath, + resolveTransformPeersPath, + resolveTransformRuntimePath, + resolveTransformTargetPath, +} from "./paths.js"; +import { + ensureLocalAgentCredentials, + loadConnectorAssignments, + loadPeersConfig, + loadRelayRuntimeConfig, + resolveOpenclawBaseUrl, + saveConnectorAssignments, + savePeersConfig, + saveRelayRuntimeConfig, + writeSecureFile, +} from "./state.js"; +import type { + OpenclawGatewayDeviceApprovalSummary, + OpenclawInviteOptions, + OpenclawInvitePayload, + OpenclawInviteResult, + OpenclawSelfSetupResult, + OpenclawSetupOptions, + OpenclawSetupResult, +} from "./types.js"; + +export function createOpenclawInviteCode( + options: OpenclawInviteOptions, +): OpenclawInviteResult { + const did = parseAgentDid(options.did, "invite did"); + const proxyUrl = parseProxyUrl(options.proxyUrl); + const peerAlias = + options.peerAlias === undefined + ? 
undefined + : parsePeerAlias(options.peerAlias); + const agentName = parseOptionalProfileName(options.agentName, "agentName"); + const humanName = parseOptionalProfileName(options.humanName, "humanName"); + + const payload = parseInvitePayload({ + v: 1, + issuedAt: nowIso(), + did, + proxyUrl, + alias: peerAlias, + agentName, + humanName, + }); + + const result: OpenclawInviteResult = { + code: encodeInvitePayload(payload), + did: payload.did, + proxyUrl: payload.proxyUrl, + peerAlias: payload.alias, + agentName: payload.agentName, + humanName: payload.humanName, + }; + + return result; +} + +export function decodeOpenclawInviteCode(code: string): OpenclawInvitePayload { + return decodeInvitePayload(code); +} + +export async function setupOpenclawRelay( + agentName: string, + options: OpenclawSetupOptions, +): Promise { + const normalizedAgentName = assertValidAgentName(agentName); + const homeDir = resolveHomeDir(options.homeDir); + const openclawDir = resolveOpenclawDir(options.openclawDir, homeDir); + const openclawConfigPath = resolveOpenclawConfigPath(openclawDir, homeDir); + const transformSource = + typeof options.transformSource === "string" && + options.transformSource.trim().length > 0 + ? 
options.transformSource.trim() + : resolveDefaultTransformSource(openclawDir); + const transformTargetPath = resolveTransformTargetPath(openclawDir); + const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath(homeDir); + const existingRelayRuntimeConfig = await loadRelayRuntimeConfig( + relayRuntimeConfigPath, + ); + const openclawBaseUrl = await resolveOpenclawBaseUrl({ + optionValue: options.openclawBaseUrl, + relayRuntimeConfigPath, + }); + + await ensureLocalAgentCredentials(homeDir, normalizedAgentName); + await mkdir(dirname(transformTargetPath), { recursive: true }); + try { + await copyFile(transformSource, transformTargetPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_TRANSFORM_NOT_FOUND", + "Relay transform source file was not found", + { transformSource }, + ); + } + + throw error; + } + + const patchedOpenclawConfig = await patchOpenclawConfig( + openclawConfigPath, + existingRelayRuntimeConfig?.openclawHookToken, + ); + + const peersPath = resolvePeersPath(homeDir); + const peers = await loadPeersConfig(peersPath); + await savePeersConfig(peersPath, peers); + + const relayTransformPeersPath = resolveTransformPeersPath(openclawDir); + await writeSecureFile( + relayTransformPeersPath, + `${JSON.stringify(peers, null, 2)}\n`, + ); + + const connectorAssignmentsPath = resolveConnectorAssignmentsPath(homeDir); + const connectorAssignments = await loadConnectorAssignments( + connectorAssignmentsPath, + ); + const connectorPort = allocateConnectorPort( + connectorAssignments, + normalizedAgentName, + ); + const connectorBaseUrl = buildConnectorBaseUrl( + CONNECTOR_HOST_LOOPBACK, + connectorPort, + ); + connectorAssignments.agents[normalizedAgentName] = { + connectorBaseUrl, + updatedAt: nowIso(), + }; + await saveConnectorAssignments( + connectorAssignmentsPath, + connectorAssignments, + ); + + const relayTransformRuntimePath = resolveTransformRuntimePath(openclawDir); + await writeSecureFile( + 
relayTransformRuntimePath, + `${JSON.stringify( + { + version: 1, + connectorBaseUrl: buildRelayConnectorBaseUrls(connectorPort)[0], + connectorBaseUrls: buildRelayConnectorBaseUrls(connectorPort), + connectorPath: DEFAULT_CONNECTOR_OUTBOUND_PATH, + peersConfigPath: RELAY_PEERS_FILE_NAME, + updatedAt: nowIso(), + }, + null, + 2, + )}\n`, + ); + + const agentNamePath = resolveOpenclawAgentNamePath(homeDir); + await writeSecureFile(agentNamePath, `${normalizedAgentName}\n`); + await saveRelayRuntimeConfig( + relayRuntimeConfigPath, + openclawBaseUrl, + patchedOpenclawConfig.hookToken, + relayTransformPeersPath, + ); + + logger.info("cli.openclaw_setup_completed", { + agentName: normalizedAgentName, + openclawConfigPath, + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + openclawBaseUrl, + connectorBaseUrl, + relayRuntimeConfigPath, + }); + + return { + openclawConfigPath, + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + openclawBaseUrl, + connectorBaseUrl, + relayRuntimeConfigPath, + openclawConfigChanged: patchedOpenclawConfig.configChanged, + }; +} + +async function assertSetupChecklistHealthy(input: { + homeDir: string; + openclawDir: string; + includeConnectorRuntimeCheck: boolean; + gatewayDeviceApprovalRunner?: OpenclawSetupOptions["gatewayDeviceApprovalRunner"]; +}): Promise { + let checklist = await runOpenclawDoctor({ + homeDir: input.homeDir, + openclawDir: input.openclawDir, + includeConfigCheck: false, + includeConnectorRuntimeCheck: input.includeConnectorRuntimeCheck, + }); + + if (checklist.status === "healthy") { + return; + } + + let gatewayApprovalSummary: OpenclawGatewayDeviceApprovalSummary | undefined; + const gatewayPairingFailure = checklist.checks.find( + (check) => + check.id === "state.gatewayDevicePairing" && check.status === "fail", + ); + if (gatewayPairingFailure !== undefined) { + gatewayApprovalSummary = await autoApproveOpenclawGatewayDevices({ + homeDir: input.homeDir, + 
openclawDir: input.openclawDir, + runner: input.gatewayDeviceApprovalRunner, + }); + if (gatewayApprovalSummary !== undefined) { + const successfulAttempts = gatewayApprovalSummary.attempts.filter( + (attempt) => attempt.ok, + ).length; + const failedAttempts = gatewayApprovalSummary.attempts.filter( + (attempt) => !attempt.ok, + ); + logger.info("cli.openclaw_setup_gateway_device_recovery_attempted", { + openclawDir: input.openclawDir, + pendingCount: gatewayApprovalSummary.pendingRequestIds.length, + successfulAttempts, + failedAttempts: failedAttempts.length, + commandUnavailable: failedAttempts.some( + (attempt) => attempt.unavailable, + ), + }); + checklist = await runOpenclawDoctor({ + homeDir: input.homeDir, + openclawDir: input.openclawDir, + includeConfigCheck: false, + includeConnectorRuntimeCheck: input.includeConnectorRuntimeCheck, + }); + if (checklist.status === "healthy") { + return; + } + } + } + + const firstFailure = checklist.checks.find( + (check) => check.status === "fail", + ); + const unavailableGatewayApprovalAttempt = + gatewayApprovalSummary?.attempts.find((attempt) => attempt.unavailable); + const remediationHint = + unavailableGatewayApprovalAttempt !== undefined && + firstFailure?.id === "state.gatewayDevicePairing" + ? `${OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT}. 
Ensure the \`${resolveOpenclawGatewayApprovalCommand()}\` command is available.` + : firstFailure?.remediationHint; + throw createCliError( + "CLI_OPENCLAW_SETUP_CHECKLIST_FAILED", + "OpenClaw setup checklist failed", + { + firstFailedCheckId: firstFailure?.id, + firstFailedCheckMessage: firstFailure?.message, + remediationHint, + gatewayDeviceApproval: gatewayApprovalSummary, + checks: checklist.checks, + }, + ); +} + +export async function setupOpenclawSelfReady( + agentName: string, + options: OpenclawSetupOptions, +): Promise { + const normalizedAgentName = assertValidAgentName(agentName); + const resolvedHomeDir = resolveHomeDir(options.homeDir); + const resolvedOpenclawDir = resolveOpenclawDir( + options.openclawDir, + resolvedHomeDir, + ); + const setup = await setupOpenclawRelay(normalizedAgentName, { + ...options, + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + }); + if (options.noRuntimeStart === true) { + await assertSetupChecklistHealthy({ + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + includeConnectorRuntimeCheck: false, + gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, + }); + return { + ...setup, + runtimeMode: "none", + runtimeStatus: "skipped", + websocketStatus: "skipped", + }; + } + + const fetchImpl = globalThis.fetch; + if (typeof fetchImpl !== "function") { + throw createCliError( + "CLI_OPENCLAW_SETUP_FETCH_UNAVAILABLE", + "Runtime fetch is unavailable for connector readiness checks", + ); + } + + const resolvedMode = parseOpenclawRuntimeMode(options.runtimeMode); + const waitTimeoutSeconds = parseWaitTimeoutSeconds( + options.waitTimeoutSeconds, + ); + let runtime = await startSetupConnectorRuntime({ + agentName: normalizedAgentName, + homeDir: resolvedHomeDir, + openclawBaseUrl: setup.openclawBaseUrl, + connectorBaseUrl: setup.connectorBaseUrl, + mode: resolvedMode, + waitTimeoutSeconds, + fetchImpl, + }); + + await assertSetupChecklistHealthy({ + homeDir: resolvedHomeDir, + 
openclawDir: resolvedOpenclawDir, + includeConnectorRuntimeCheck: true, + gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, + }); + + const requiresStabilityGuard = + setup.openclawConfigChanged && + (runtime.runtimeMode === "existing" || runtime.runtimeMode === "detached"); + if (requiresStabilityGuard) { + const stabilityWindowSeconds = Math.min( + waitTimeoutSeconds, + OPENCLAW_SETUP_STABILITY_WINDOW_SECONDS, + ); + const stableStatus = await monitorConnectorStabilityWindow({ + connectorBaseUrl: setup.connectorBaseUrl, + fetchImpl, + durationSeconds: stabilityWindowSeconds, + pollIntervalMs: OPENCLAW_SETUP_STABILITY_POLL_INTERVAL_MS, + }); + + if (!stableStatus.connected) { + logger.warn("cli.openclaw.setup.connector_dropped_post_config_change", { + agentName: normalizedAgentName, + connectorBaseUrl: setup.connectorBaseUrl, + connectorStatusUrl: stableStatus.statusUrl, + reason: stableStatus.reason, + previousRuntimeMode: runtime.runtimeMode, + stabilityWindowSeconds, + }); + runtime = await startSetupConnectorRuntime({ + agentName: normalizedAgentName, + homeDir: resolvedHomeDir, + openclawBaseUrl: setup.openclawBaseUrl, + connectorBaseUrl: setup.connectorBaseUrl, + mode: resolvedMode, + waitTimeoutSeconds, + fetchImpl, + }); + await assertSetupChecklistHealthy({ + homeDir: resolvedHomeDir, + openclawDir: resolvedOpenclawDir, + includeConnectorRuntimeCheck: true, + gatewayDeviceApprovalRunner: options.gatewayDeviceApprovalRunner, + }); + } + } + + return { + ...setup, + ...runtime, + }; +} + +export async function setupOpenclawRelayFromInvite( + agentName: string, + options: OpenclawSetupOptions, +): Promise { + return setupOpenclawRelay(agentName, options); +} diff --git a/apps/cli/src/commands/openclaw/state.ts b/apps/cli/src/commands/openclaw/state.ts new file mode 100644 index 0000000..e80c137 --- /dev/null +++ b/apps/cli/src/commands/openclaw/state.ts @@ -0,0 +1,361 @@ +import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; 
+import { dirname, join } from "node:path"; +import { nowIso } from "@clawdentity/sdk"; +import { assertValidAgentName } from "../agent-name.js"; +import { + createCliError, + getErrorCode, + isRecord, + parseAgentDid, + parseHttpUrl, + parseOpenclawBaseUrl, + parseOptionalProfileName, + parsePeerAlias, + parseProxyUrl, +} from "./common.js"; +import { + AIT_FILE_NAME, + DEFAULT_OPENCLAW_BASE_URL, + FILE_MODE, + SECRET_KEY_FILE_NAME, +} from "./constants.js"; +import { resolveAgentDirectory } from "./paths.js"; +import type { + ConnectorAssignmentEntry, + ConnectorAssignmentsConfig, + OpenclawRelayRuntimeConfig, + PeerEntry, + PeersConfig, +} from "./types.js"; + +export async function readJsonFile(filePath: string): Promise { + const raw = await readFile(filePath, "utf8"); + + try { + return JSON.parse(raw); + } catch { + throw createCliError("CLI_OPENCLAW_INVALID_JSON", "JSON file is invalid", { + filePath, + }); + } +} + +export async function writeSecureFile( + filePath: string, + content: string, +): Promise { + await mkdir(dirname(filePath), { recursive: true }); + await writeFile(filePath, content, "utf8"); + await chmod(filePath, FILE_MODE); +} + +export async function ensureLocalAgentCredentials( + homeDir: string, + agentName: string, +): Promise { + const agentDir = resolveAgentDirectory(homeDir, agentName); + const requiredFiles = [ + join(agentDir, SECRET_KEY_FILE_NAME), + join(agentDir, AIT_FILE_NAME), + ]; + + for (const filePath of requiredFiles) { + let content: string; + try { + content = await readFile(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_OPENCLAW_MISSING_AGENT_CREDENTIALS", + "Local agent credentials are missing", + { agentName, filePath }, + ); + } + + throw error; + } + + if (content.trim().length === 0) { + throw createCliError( + "CLI_OPENCLAW_EMPTY_AGENT_CREDENTIALS", + "Agent credential file is empty", + { filePath }, + ); + } + } +} + +export async function 
loadPeersConfig(peersPath: string): Promise { + let parsed: unknown; + + try { + parsed = await readJsonFile(peersPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return { peers: {} }; + } + + throw error; + } + + if (!isRecord(parsed)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEERS_CONFIG", + "Peer config root must be a JSON object", + { peersPath }, + ); + } + + const peersValue = parsed.peers; + if (peersValue === undefined) { + return { peers: {} }; + } + + if (!isRecord(peersValue)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEERS_CONFIG", + "Peer config peers field must be an object", + { peersPath }, + ); + } + + const peers: Record = {}; + for (const [alias, value] of Object.entries(peersValue)) { + const normalizedAlias = parsePeerAlias(alias); + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_PEERS_CONFIG", + "Peer entry must be an object", + { alias: normalizedAlias }, + ); + } + + const did = parseAgentDid(value.did, `Peer ${normalizedAlias} did`); + const proxyUrl = parseProxyUrl(value.proxyUrl); + const agentName = parseOptionalProfileName(value.agentName, "agentName"); + const humanName = parseOptionalProfileName(value.humanName, "humanName"); + + if (agentName === undefined && humanName === undefined) { + peers[normalizedAlias] = { did, proxyUrl }; + continue; + } + + peers[normalizedAlias] = { did, proxyUrl, agentName, humanName }; + } + + return { peers }; +} + +export async function savePeersConfig( + peersPath: string, + config: PeersConfig, +): Promise { + await writeSecureFile(peersPath, `${JSON.stringify(config, null, 2)}\n`); +} + +function parseConnectorBaseUrlForAssignment( + value: unknown, + label: string, +): string { + return parseHttpUrl(value, { + label, + code: "CLI_OPENCLAW_INVALID_CONNECTOR_BASE_URL", + message: "Connector base URL must be a valid URL", + }); +} + +function parseConnectorAssignments( + value: unknown, + connectorAssignmentsPath: string, +): 
ConnectorAssignmentsConfig { + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_CONNECTOR_ASSIGNMENTS", + "Connector assignments config must be an object", + { connectorAssignmentsPath }, + ); + } + + const agentsRaw = value.agents; + if (!isRecord(agentsRaw)) { + return { agents: {} }; + } + + const agents: Record = {}; + for (const [agentName, entryValue] of Object.entries(agentsRaw)) { + if (!isRecord(entryValue)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_CONNECTOR_ASSIGNMENTS", + "Connector assignment entry must be an object", + { connectorAssignmentsPath, agentName }, + ); + } + + const connectorBaseUrl = parseConnectorBaseUrlForAssignment( + entryValue.connectorBaseUrl, + "connectorBaseUrl", + ); + const updatedAt = + typeof entryValue.updatedAt === "string" && + entryValue.updatedAt.trim().length > 0 + ? entryValue.updatedAt.trim() + : nowIso(); + + agents[assertValidAgentName(agentName)] = { + connectorBaseUrl, + updatedAt, + }; + } + + return { agents }; +} + +export async function loadConnectorAssignments( + connectorAssignmentsPath: string, +): Promise { + let parsed: unknown; + try { + parsed = await readJsonFile(connectorAssignmentsPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return { agents: {} }; + } + throw error; + } + + return parseConnectorAssignments(parsed, connectorAssignmentsPath); +} + +export async function saveConnectorAssignments( + connectorAssignmentsPath: string, + config: ConnectorAssignmentsConfig, +): Promise { + await writeSecureFile( + connectorAssignmentsPath, + `${JSON.stringify(config, null, 2)}\n`, + ); +} + +function parseRelayRuntimeConfig( + value: unknown, + relayRuntimeConfigPath: string, +): OpenclawRelayRuntimeConfig { + if (!isRecord(value)) { + throw createCliError( + "CLI_OPENCLAW_INVALID_RELAY_RUNTIME_CONFIG", + "Relay runtime config must be an object", + { relayRuntimeConfigPath }, + ); + } + + const updatedAt = + typeof value.updatedAt === "string" && 
value.updatedAt.trim().length > 0 + ? value.updatedAt.trim() + : undefined; + const openclawHookToken = + typeof value.openclawHookToken === "string" && + value.openclawHookToken.trim().length > 0 + ? value.openclawHookToken.trim() + : undefined; + const relayTransformPeersPath = + typeof value.relayTransformPeersPath === "string" && + value.relayTransformPeersPath.trim().length > 0 + ? value.relayTransformPeersPath.trim() + : undefined; + + return { + openclawBaseUrl: parseOpenclawBaseUrl(value.openclawBaseUrl), + openclawHookToken, + relayTransformPeersPath, + updatedAt, + }; +} + +export async function loadRelayRuntimeConfig( + relayRuntimeConfigPath: string, +): Promise { + let parsed: unknown; + try { + parsed = await readJsonFile(relayRuntimeConfigPath); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + + throw error; + } + + return parseRelayRuntimeConfig(parsed, relayRuntimeConfigPath); +} + +export async function saveRelayRuntimeConfig( + relayRuntimeConfigPath: string, + openclawBaseUrl: string, + openclawHookToken?: string, + relayTransformPeersPath?: string, +): Promise { + const config: OpenclawRelayRuntimeConfig = { + openclawBaseUrl, + ...(openclawHookToken ? { openclawHookToken } : {}), + ...(relayTransformPeersPath ? 
{ relayTransformPeersPath } : {}), + updatedAt: nowIso(), + }; + + await writeSecureFile( + relayRuntimeConfigPath, + `${JSON.stringify(config, null, 2)}\n`, + ); +} + +export async function resolveOpenclawBaseUrl(input: { + optionValue?: string; + relayRuntimeConfigPath: string; +}): Promise { + if ( + typeof input.optionValue === "string" && + input.optionValue.trim().length > 0 + ) { + return parseOpenclawBaseUrl(input.optionValue); + } + + const envOpenclawBaseUrl = process.env.OPENCLAW_BASE_URL; + if ( + typeof envOpenclawBaseUrl === "string" && + envOpenclawBaseUrl.trim().length > 0 + ) { + return parseOpenclawBaseUrl(envOpenclawBaseUrl); + } + + const existingConfig = await loadRelayRuntimeConfig( + input.relayRuntimeConfigPath, + ); + if (existingConfig !== undefined) { + return existingConfig.openclawBaseUrl; + } + + return DEFAULT_OPENCLAW_BASE_URL; +} + +export async function resolveHookToken(input: { + optionValue?: string; + relayRuntimeConfigPath: string; +}): Promise { + const trimmedOption = input.optionValue?.trim(); + if (trimmedOption !== undefined && trimmedOption.length > 0) { + return trimmedOption; + } + + const envValue = process.env.OPENCLAW_HOOK_TOKEN?.trim(); + if (envValue !== undefined && envValue.length > 0) { + return envValue; + } + + const existingConfig = await loadRelayRuntimeConfig( + input.relayRuntimeConfigPath, + ); + if (existingConfig?.openclawHookToken) { + return existingConfig.openclawHookToken; + } + + return undefined; +} diff --git a/apps/cli/src/commands/openclaw/types.ts b/apps/cli/src/commands/openclaw/types.ts new file mode 100644 index 0000000..0f5b829 --- /dev/null +++ b/apps/cli/src/commands/openclaw/types.ts @@ -0,0 +1,292 @@ +import type { resolveConfig } from "../../config/manager.js"; + +export type OpenclawInvitePayload = { + v: 1; + issuedAt: string; + did: string; + proxyUrl: string; + alias?: string; + agentName?: string; + humanName?: string; +}; + +export type OpenclawInviteOptions = { + did: string; + 
proxyUrl: string; + peerAlias?: string; + agentName?: string; + humanName?: string; +}; + +export type OpenclawSetupOptions = { + inviteCode?: string; + openclawDir?: string; + transformSource?: string; + openclawBaseUrl?: string; + runtimeMode?: string; + waitTimeoutSeconds?: string; + noRuntimeStart?: boolean; + homeDir?: string; + gatewayDeviceApprovalRunner?: OpenclawGatewayDeviceApprovalRunner; +}; + +export type OpenclawDoctorOptions = { + homeDir?: string; + openclawDir?: string; + peerAlias?: string; + resolveConfigImpl?: typeof resolveConfig; + fetchImpl?: typeof fetch; + includeConfigCheck?: boolean; + includeConnectorRuntimeCheck?: boolean; + json?: boolean; +}; + +export type OpenclawDoctorCommandOptions = { + peer?: string; + openclawDir?: string; + json?: boolean; +}; + +export type OpenclawSetupCommandOptions = { + openclawDir?: string; + transformSource?: string; + openclawBaseUrl?: string; + runtimeMode?: string; + waitTimeoutSeconds?: string; + noRuntimeStart?: boolean; +}; + +export type OpenclawRelayTestOptions = { + peer?: string; + homeDir?: string; + openclawDir?: string; + openclawBaseUrl?: string; + hookToken?: string; + sessionId?: string; + message?: string; + fetchImpl?: typeof fetch; + resolveConfigImpl?: typeof resolveConfig; + json?: boolean; +}; + +export type OpenclawRelayWebsocketTestOptions = { + peer?: string; + homeDir?: string; + openclawDir?: string; + fetchImpl?: typeof fetch; + resolveConfigImpl?: typeof resolveConfig; + json?: boolean; +}; + +export type OpenclawGatewayDeviceApprovalInput = { + requestId: string; + openclawDir: string; + openclawConfigPath: string; +}; + +export type OpenclawGatewayDeviceApprovalExecution = { + ok: boolean; + unavailable?: boolean; + exitCode?: number; + stdout?: string; + stderr?: string; + errorMessage?: string; +}; + +export type OpenclawGatewayDeviceApprovalRunner = ( + input: OpenclawGatewayDeviceApprovalInput, +) => Promise; + +export type OpenclawGatewayDeviceApprovalAttempt = { + 
requestId: string; + ok: boolean; + unavailable: boolean; + reason?: string; + exitCode?: number; +}; + +export type OpenclawGatewayDeviceApprovalSummary = { + gatewayDevicePendingPath: string; + pendingRequestIds: string[]; + attempts: OpenclawGatewayDeviceApprovalAttempt[]; +}; + +export type PeerEntry = { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; +}; + +export type PeersConfig = { + peers: Record; +}; + +export type OpenclawInviteResult = { + code: string; + did: string; + proxyUrl: string; + peerAlias?: string; + agentName?: string; + humanName?: string; +}; + +export type OpenclawSetupResult = { + openclawConfigPath: string; + transformTargetPath: string; + relayTransformRuntimePath: string; + relayTransformPeersPath: string; + openclawBaseUrl: string; + connectorBaseUrl: string; + relayRuntimeConfigPath: string; + openclawConfigChanged: boolean; +}; + +export type OpenclawRuntimeMode = "auto" | "service" | "detached"; + +export type OpenclawRuntimeResult = { + runtimeMode: "none" | "service" | "detached" | "existing"; + runtimeStatus: "running" | "skipped"; + websocketStatus: "connected" | "skipped"; + connectorStatusUrl?: string; +}; + +export type OpenclawSelfSetupResult = OpenclawSetupResult & + OpenclawRuntimeResult; + +export type OpenclawRelayRuntimeConfig = { + openclawBaseUrl: string; + openclawHookToken?: string; + relayTransformPeersPath?: string; + updatedAt?: string; +}; + +export type ConnectorAssignmentEntry = { + connectorBaseUrl: string; + updatedAt: string; +}; + +export type ConnectorAssignmentsConfig = { + agents: Record; +}; + +export type OpenclawDoctorCheckId = + | "config.registry" + | "state.selectedAgent" + | "state.credentials" + | "state.peers" + | "state.transform" + | "state.hookMapping" + | "state.hookToken" + | "state.hookSessionRouting" + | "state.gatewayAuth" + | "state.gatewayDevicePairing" + | "state.openclawBaseUrl" + | "state.connectorRuntime" + | "state.connectorInboundInbox" + | 
"state.openclawHookHealth"; + +export type OpenclawDoctorCheckStatus = "pass" | "fail"; + +export type OpenclawDoctorCheckResult = { + id: OpenclawDoctorCheckId; + label: string; + status: OpenclawDoctorCheckStatus; + message: string; + remediationHint?: string; + details?: Record; +}; + +export type OpenclawDoctorResult = { + status: "healthy" | "unhealthy"; + checkedAt: string; + checks: OpenclawDoctorCheckResult[]; +}; + +export type OpenclawRelayTestResult = { + status: "success" | "failure"; + checkedAt: string; + peerAlias: string; + endpoint: string; + message: string; + httpStatus?: number; + remediationHint?: string; + details?: Record; + preflight?: OpenclawDoctorResult; +}; + +export type OpenclawRelayWebsocketTestResult = { + status: "success" | "failure"; + checkedAt: string; + peerAlias: string; + message: string; + connectorBaseUrl?: string; + connectorStatusUrl?: string; + remediationHint?: string; + details?: Record; + preflight?: OpenclawDoctorResult; +}; + +export type OpenclawGatewayPendingState = + | { + status: "missing"; + gatewayDevicePendingPath: string; + } + | { + status: "invalid"; + gatewayDevicePendingPath: string; + } + | { + status: "unreadable"; + gatewayDevicePendingPath: string; + } + | { + status: "ok"; + gatewayDevicePendingPath: string; + pendingRequestIds: string[]; + }; + +export type ConnectorHealthStatus = { + connected: boolean; + inboundInbox?: { + deadLetterBytes?: number; + deadLetterCount?: number; + oldestDeadLetterAt?: string; + lastReplayAt?: string; + lastReplayError?: string; + nextAttemptAt?: string; + oldestPendingAt?: string; + pendingBytes?: number; + pendingCount?: number; + replayerActive?: boolean; + }; + openclawHook?: { + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + url?: string; + }; + reachable: boolean; + statusUrl: string; + reason?: string; +}; + +export type ParsedConnectorStatusPayload = { + inboundInbox?: { + deadLetterBytes?: number; + deadLetterCount?: number; + 
oldestDeadLetterAt?: string; + lastReplayAt?: string; + lastReplayError?: string; + nextAttemptAt?: string; + oldestPendingAt?: string; + pendingBytes?: number; + pendingCount?: number; + replayerActive?: boolean; + }; + openclawHook?: { + lastAttemptAt?: string; + lastAttemptStatus?: "ok" | "failed"; + url?: string; + }; + websocketConnected: boolean; +}; From 39ded327adfc48a42a8e23b8cd32bd982bdb1839 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 16:05:31 +0530 Subject: [PATCH 125/190] refactor(proxy): split agent relay session into modules --- apps/proxy/src/AGENTS.md | 6 + apps/proxy/src/agent-relay-session.ts | 1377 +---------------- .../src/agent-relay-session/constants.ts | 9 + apps/proxy/src/agent-relay-session/core.ts | 796 ++++++++++ apps/proxy/src/agent-relay-session/errors.ts | 21 + apps/proxy/src/agent-relay-session/frames.ts | 83 + apps/proxy/src/agent-relay-session/parsers.ts | 111 ++ .../agent-relay-session/pending-deliveries.ts | 12 + apps/proxy/src/agent-relay-session/policy.ts | 22 + .../src/agent-relay-session/queue-state.ts | 152 ++ apps/proxy/src/agent-relay-session/rpc.ts | 119 ++ .../src/agent-relay-session/scheduler.ts | 28 + apps/proxy/src/agent-relay-session/types.ts | 117 ++ 13 files changed, 1493 insertions(+), 1360 deletions(-) create mode 100644 apps/proxy/src/agent-relay-session/constants.ts create mode 100644 apps/proxy/src/agent-relay-session/core.ts create mode 100644 apps/proxy/src/agent-relay-session/errors.ts create mode 100644 apps/proxy/src/agent-relay-session/frames.ts create mode 100644 apps/proxy/src/agent-relay-session/parsers.ts create mode 100644 apps/proxy/src/agent-relay-session/pending-deliveries.ts create mode 100644 apps/proxy/src/agent-relay-session/policy.ts create mode 100644 apps/proxy/src/agent-relay-session/queue-state.ts create mode 100644 apps/proxy/src/agent-relay-session/rpc.ts create mode 100644 apps/proxy/src/agent-relay-session/scheduler.ts create mode 100644 
apps/proxy/src/agent-relay-session/types.ts diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 21bb710..fa25e60 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -93,3 +93,9 @@ - Index pairing tickets by ticket `kid` in both in-memory and Durable Object stores; persist the original full ticket string alongside each entry and require exact ticket match on confirm. - Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. - When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. + +## Agent Relay Session Modularization +- Keep `agent-relay-session.ts` focused on Durable Object state machine orchestration; move helpers, parsers, and queue helpers into `apps/proxy/src/agent-relay-session/` so the entry file stays below 800 lines. +- Name helper modules by concern (`types`, `errors`, `frames`, `parsers`, `queue`, `policy`, `rpc`) and avoid importing back from `agent-relay-session.ts` to prevent cycles. +- Re-export the public API (`Relay*` types, `AgentRelaySession`, and RPC helpers) from `agent-relay-session.ts` so existing imports in routes/tests stay untouched. +- When introducing a new helper, document it here so future splits keep the Durable Object surface lean and test coverage aware. 
diff --git a/apps/proxy/src/agent-relay-session.ts b/apps/proxy/src/agent-relay-session.ts index a0a966d..40bfb29 100644 --- a/apps/proxy/src/agent-relay-session.ts +++ b/apps/proxy/src/agent-relay-session.ts @@ -1,1360 +1,17 @@ -import { - CONNECTOR_FRAME_VERSION, - DEFAULT_RELAY_DELIVER_TIMEOUT_MS, - type DeliverFrame, - type HeartbeatAckFrame, - parseFrame, - serializeFrame, -} from "@clawdentity/connector"; -import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; -import { nowUtcMs, toIso } from "@clawdentity/sdk"; -import { parseProxyConfig } from "./config.js"; - -const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; -const RELAY_RPC_DELIVER_PATH = "/rpc/deliver-to-connector"; -const RELAY_RPC_GET_RECEIPT_PATH = "/rpc/get-delivery-receipt"; -const RELAY_RPC_RECORD_RECEIPT_PATH = "/rpc/record-delivery-receipt"; -const RELAY_HEARTBEAT_INTERVAL_MS = 30_000; -const RELAY_HEARTBEAT_ACK_TIMEOUT_MS = 60_000; -const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; -const RELAY_SOCKET_SUPERSEDED_CLOSE_CODE = 1000; -const RELAY_SOCKET_STALE_CLOSE_CODE = 1011; - -type DurableObjectStorageLike = { - deleteAlarm?: () => Promise | void; - get?: (key: string) => Promise | unknown; - put?: (key: string, value: unknown) => Promise | void; - setAlarm: (scheduledTime: number | Date) => Promise | void; -}; - -type DurableObjectStateLike = { - acceptWebSocket: (socket: WebSocket, tags?: string[]) => void; - getWebSockets: () => WebSocket[]; - storage: DurableObjectStorageLike; -}; - -export type RelayDeliveryInput = { - conversationId?: string; - payload: unknown; - recipientAgentDid: string; - replyTo?: string; - requestId: string; - senderAgentDid: string; -}; - -export type RelayDeliveryState = - | "delivered" - | "queued" - | "processed_by_openclaw" - | "dead_lettered"; - -export type RelayDeliveryResult = { - connectedSockets: number; - delivered: boolean; - deliveryId: string; - queueDepth: number; - queued: boolean; - state: 
RelayDeliveryState; -}; - -export type RelayReceiptRecordInput = { - reason?: string; - recipientAgentDid: string; - requestId: string; - senderAgentDid: string; - status: "processed_by_openclaw" | "dead_lettered"; -}; - -export type RelayReceiptLookupInput = { - requestId: string; - senderAgentDid: string; -}; - -export type RelayReceiptLookupResult = { - found: boolean; - receipt?: RelayDeliveryReceipt; -}; - -export class RelaySessionDeliveryError extends Error { - readonly code: string; - readonly status: number; - - constructor(input: { code: string; message: string; status: number }) { - super(input.message); - this.name = "RelaySessionDeliveryError"; - this.code = input.code; - this.status = input.status; - } -} - -class RelayQueueFullError extends Error { - readonly code = "PROXY_RELAY_QUEUE_FULL"; - readonly status = 507; - - constructor() { - super("Target relay queue is full"); - this.name = "RelayQueueFullError"; - } -} - -export type AgentRelaySessionStub = { - deliverToConnector?: ( - input: RelayDeliveryInput, - ) => Promise; - getDeliveryReceipt?: ( - input: RelayReceiptLookupInput, - ) => Promise; - recordDeliveryReceipt?: (input: RelayReceiptRecordInput) => Promise; - fetch: (request: Request) => Promise; -}; - -export type AgentRelaySessionNamespace = { - get: (id: DurableObjectId) => AgentRelaySessionStub; - idFromName: (name: string) => DurableObjectId; -}; - -type PendingDelivery = { - reject: (error: unknown) => void; - resolve: (accepted: boolean) => void; - timeoutHandle: ReturnType; -}; - -type QueuedRelayDelivery = { - attemptCount: number; - createdAtMs: number; - deliveryId: string; - expiresAtMs: number; - nextAttemptAtMs: number; - payload: unknown; - recipientAgentDid: string; - replyTo?: string; - requestId: string; - senderAgentDid: string; - conversationId?: string; -}; - -type RelayDeliveryReceipt = { - deliveryId: string; - expiresAtMs: number; - recipientAgentDid: string; - reason?: string; - requestId: string; - 
senderAgentDid: string; - statusUpdatedAt: string; - state: RelayDeliveryState; -}; - -type RelayQueueState = { - deliveries: QueuedRelayDelivery[]; - receipts: Record; -}; - -type RelayDeliveryPolicy = { - maxFrameBytes: number; - maxInFlightDeliveries: number; - queueMaxMessagesPerAgent: number; - queueTtlMs: number; - retryInitialMs: number; - retryJitterRatio: number; - retryMaxAttempts: number; - retryMaxMs: number; -}; - -function toHeartbeatFrame(nowMs: number): { id: string; payload: string } { - const id = generateUlid(nowMs); - return { - id, - payload: serializeFrame({ - v: CONNECTOR_FRAME_VERSION, - type: "heartbeat", - id, - ts: toIso(nowMs), - }), - }; -} - -function toHeartbeatAckFrame(ackId: string): string { - const nowMs = nowUtcMs(); - const ackFrame: HeartbeatAckFrame = { - v: CONNECTOR_FRAME_VERSION, - type: "heartbeat_ack", - id: generateUlid(nowMs), - ts: toIso(nowMs), - ackId, - }; - - return serializeFrame(ackFrame); -} - -function toDeliverFrame(input: RelayDeliveryInput): DeliverFrame { - const nowMs = nowUtcMs(); - return { - v: CONNECTOR_FRAME_VERSION, - type: "deliver", - id: generateUlid(nowMs), - ts: toIso(nowMs), - fromAgentDid: input.senderAgentDid, - toAgentDid: input.recipientAgentDid, - payload: input.payload, - conversationId: input.conversationId, - replyTo: input.replyTo, - }; -} - -function getWebSocketMessageBytes(message: string | ArrayBuffer): number { - if (typeof message === "string") { - return new TextEncoder().encode(message).byteLength; - } - - return message.byteLength; -} - -function parseDeliveryInput(value: unknown): RelayDeliveryInput { - if (typeof value !== "object" || value === null) { - throw new TypeError("Relay delivery input must be an object"); - } - - const input = value as Partial; - if ( - typeof input.requestId !== "string" || - typeof input.senderAgentDid !== "string" || - typeof input.recipientAgentDid !== "string" - ) { - throw new TypeError("Relay delivery input is invalid"); - } - - if ( - 
input.replyTo !== undefined && - (typeof input.replyTo !== "string" || input.replyTo.trim().length === 0) - ) { - throw new TypeError("Relay delivery input is invalid"); - } - if (typeof input.replyTo === "string") { - try { - new URL(input.replyTo); - } catch { - throw new TypeError("Relay delivery input is invalid"); - } - } - - return { - requestId: input.requestId, - senderAgentDid: input.senderAgentDid, - recipientAgentDid: input.recipientAgentDid, - payload: input.payload, - conversationId: - typeof input.conversationId === "string" && - input.conversationId.trim().length > 0 - ? input.conversationId.trim() - : undefined, - replyTo: - typeof input.replyTo === "string" && input.replyTo.trim().length > 0 - ? input.replyTo.trim() - : undefined, - }; -} - -function parseReceiptRecordInput(value: unknown): RelayReceiptRecordInput { - if (typeof value !== "object" || value === null) { - throw new TypeError("Relay receipt input must be an object"); - } - - const input = value as Partial; - if ( - typeof input.requestId !== "string" || - input.requestId.trim().length === 0 || - typeof input.senderAgentDid !== "string" || - input.senderAgentDid.trim().length === 0 || - typeof input.recipientAgentDid !== "string" || - input.recipientAgentDid.trim().length === 0 - ) { - throw new TypeError("Relay receipt input is invalid"); - } - - if ( - input.status !== "processed_by_openclaw" && - input.status !== "dead_lettered" - ) { - throw new TypeError("Relay receipt input is invalid"); - } - - return { - requestId: input.requestId.trim(), - senderAgentDid: input.senderAgentDid.trim(), - recipientAgentDid: input.recipientAgentDid.trim(), - status: input.status, - reason: - typeof input.reason === "string" && input.reason.trim().length > 0 - ? 
input.reason.trim() - : undefined, - }; -} - -function parseReceiptLookupInput(value: unknown): RelayReceiptLookupInput { - if (typeof value !== "object" || value === null) { - throw new TypeError("Relay receipt lookup input must be an object"); - } - - const input = value as Partial; - if ( - typeof input.requestId !== "string" || - input.requestId.trim().length === 0 || - typeof input.senderAgentDid !== "string" || - input.senderAgentDid.trim().length === 0 - ) { - throw new TypeError("Relay receipt lookup input is invalid"); - } - - return { - requestId: input.requestId.trim(), - senderAgentDid: input.senderAgentDid.trim(), - }; -} - -function toRelayDeliveryResult(input: { - connectedSockets: number; - deliveryId: string; - queueDepth: number; - state: RelayDeliveryState; -}): RelayDeliveryResult { - return { - deliveryId: input.deliveryId, - state: input.state, - delivered: input.state === "delivered", - queued: input.state === "queued", - connectedSockets: input.connectedSockets, - queueDepth: input.queueDepth, - }; -} - -function toErrorResponse(input: { - code: string; - message: string; - status: number; -}): Response { - return Response.json( - { - error: { - code: input.code, - message: input.message, - }, - }, - { status: input.status }, - ); -} - -export async function deliverToRelaySession( - relaySession: AgentRelaySessionStub, - input: RelayDeliveryInput, -): Promise { - const response = await relaySession.fetch( - new Request(`https://agent-relay-session${RELAY_RPC_DELIVER_PATH}`, { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify(input), - }), - ); - - if (!response.ok) { - let code = "PROXY_RELAY_DELIVERY_FAILED"; - let message = "Relay session delivery RPC failed"; - try { - const body = (await response.json()) as { - error?: { code?: unknown; message?: unknown }; - }; - if (typeof body.error?.code === "string") { - code = body.error.code; - } - if (typeof body.error?.message === "string") { - 
message = body.error.message; - } - } catch { - // Ignore parse failures and keep defaults. - } - - throw new RelaySessionDeliveryError({ - code, - message, - status: response.status, - }); - } - - return (await response.json()) as RelayDeliveryResult; -} - -export async function recordRelayDeliveryReceipt( - relaySession: AgentRelaySessionStub, - input: RelayReceiptRecordInput, -): Promise { - const response = await relaySession.fetch( - new Request(`https://agent-relay-session${RELAY_RPC_RECORD_RECEIPT_PATH}`, { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify(input), - }), - ); - - if (!response.ok) { - throw new RelaySessionDeliveryError({ - code: "PROXY_RELAY_RECEIPT_WRITE_FAILED", - message: "Relay delivery receipt write RPC failed", - status: response.status, - }); - } -} - -export async function getRelayDeliveryReceipt( - relaySession: AgentRelaySessionStub, - input: RelayReceiptLookupInput, -): Promise { - const response = await relaySession.fetch( - new Request(`https://agent-relay-session${RELAY_RPC_GET_RECEIPT_PATH}`, { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify(input), - }), - ); - - if (!response.ok) { - throw new RelaySessionDeliveryError({ - code: "PROXY_RELAY_RECEIPT_READ_FAILED", - message: "Relay delivery receipt read RPC failed", - status: response.status, - }); - } - - return (await response.json()) as RelayReceiptLookupResult; -} - -export class AgentRelaySession { - private readonly deliveryPolicy: RelayDeliveryPolicy; - private readonly heartbeatAckSockets = new Map(); - private readonly pendingDeliveries = new Map(); - private readonly socketLastAckAtMs = new Map(); - private readonly socketsPendingClose = new Set(); - private readonly state: DurableObjectStateLike; - private inMemoryQueueState: RelayQueueState = { - deliveries: [], - receipts: {}, - }; - - constructor(state: DurableObjectStateLike, env?: unknown) { - this.state = state; - 
const config = parseProxyConfig(env ?? {}); - this.deliveryPolicy = { - maxFrameBytes: config.relayMaxFrameBytes, - maxInFlightDeliveries: config.relayMaxInFlightDeliveries, - queueMaxMessagesPerAgent: config.relayQueueMaxMessagesPerAgent, - queueTtlMs: config.relayQueueTtlSeconds * 1000, - retryInitialMs: config.relayRetryInitialMs, - retryJitterRatio: config.relayRetryJitterRatio, - retryMaxAttempts: config.relayRetryMaxAttempts, - retryMaxMs: config.relayRetryMaxMs, - }; - } - - async fetch(request: Request): Promise { - const url = new URL(request.url); - - if (url.pathname === RELAY_CONNECT_PATH) { - return this.handleConnect(request); - } - - if (request.method === "POST" && url.pathname === RELAY_RPC_DELIVER_PATH) { - let input: RelayDeliveryInput; - try { - input = parseDeliveryInput(await request.json()); - } catch { - return new Response("Invalid relay delivery input", { status: 400 }); - } - - try { - const result = await this.deliverToConnector(input); - return Response.json(result, { status: 202 }); - } catch (error) { - if (error instanceof RelayQueueFullError) { - return toErrorResponse({ - code: error.code, - message: error.message, - status: error.status, - }); - } - - return new Response("Relay delivery failed", { status: 502 }); - } - } - - if ( - request.method === "POST" && - url.pathname === RELAY_RPC_RECORD_RECEIPT_PATH - ) { - let input: RelayReceiptRecordInput; - try { - input = parseReceiptRecordInput(await request.json()); - } catch { - return new Response("Invalid relay receipt input", { status: 400 }); - } - - await this.recordDeliveryReceipt(input); - return Response.json({ accepted: true }, { status: 202 }); - } - - if ( - request.method === "POST" && - url.pathname === RELAY_RPC_GET_RECEIPT_PATH - ) { - let input: RelayReceiptLookupInput; - try { - input = parseReceiptLookupInput(await request.json()); - } catch { - return new Response("Invalid relay receipt lookup input", { - status: 400, - }); - } - - const receipt = await 
this.getDeliveryReceipt(input); - return Response.json(receipt, { status: 200 }); - } - - return new Response("Not found", { status: 404 }); - } - - async alarm(): Promise { - const nowMs = nowUtcMs(); - const sockets = this.getActiveSockets(nowMs); - - if (sockets.length > 0) { - for (const socket of sockets) { - this.sendHeartbeatFrame(socket, nowMs); - } - } - - const queueState = await this.loadQueueState(nowMs); - const queueMutated = await this.processQueueDeliveries(queueState, nowMs); - if (queueMutated) { - await this.saveQueueState(queueState); - } - - await this.scheduleNextAlarm(queueState, nowMs); - } - - async deliverToConnector( - input: RelayDeliveryInput, - ): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - const existingReceipt = queueState.receipts[input.requestId]; - - if ( - existingReceipt !== undefined && - existingReceipt.expiresAtMs > nowMs && - existingReceipt.senderAgentDid === input.senderAgentDid && - existingReceipt.recipientAgentDid === input.recipientAgentDid - ) { - return toRelayDeliveryResult({ - deliveryId: existingReceipt.deliveryId, - state: existingReceipt.state, - connectedSockets: this.getActiveSockets(nowMs).length, - queueDepth: queueState.deliveries.length, - }); - } - - const sockets = this.getActiveSockets(nowMs); - const deliveryId = generateUlid(nowMs); - const deliveryTtlExpiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; - let priorAttempts = 0; - - if ( - sockets.length > 0 && - this.pendingDeliveries.size < this.deliveryPolicy.maxInFlightDeliveries - ) { - priorAttempts = 1; - try { - const accepted = await this.sendDeliverFrame(sockets[0], input); - if (accepted) { - this.upsertReceipt(queueState, { - requestId: input.requestId, - deliveryId, - state: "delivered", - expiresAtMs: deliveryTtlExpiresAtMs, - senderAgentDid: input.senderAgentDid, - recipientAgentDid: input.recipientAgentDid, - statusUpdatedAt: toIso(nowMs), - }); - await this.saveQueueState(queueState); 
- await this.scheduleNextAlarm(queueState, nowMs); - - return toRelayDeliveryResult({ - deliveryId, - state: "delivered", - connectedSockets: sockets.length, - queueDepth: queueState.deliveries.length, - }); - } - } catch { - // Fall through to durable queueing below. - } - } - - if (priorAttempts >= this.deliveryPolicy.retryMaxAttempts) { - throw new Error("Relay delivery exhausted retry budget"); - } - - if ( - queueState.deliveries.length >= - this.deliveryPolicy.queueMaxMessagesPerAgent - ) { - throw new RelayQueueFullError(); - } - - const queuedDelivery: QueuedRelayDelivery = { - deliveryId, - requestId: input.requestId, - senderAgentDid: input.senderAgentDid, - recipientAgentDid: input.recipientAgentDid, - conversationId: input.conversationId, - replyTo: input.replyTo, - payload: input.payload, - createdAtMs: nowMs, - attemptCount: priorAttempts, - expiresAtMs: deliveryTtlExpiresAtMs, - nextAttemptAtMs: nowMs + this.computeRetryDelayMs(priorAttempts), - }; - - queueState.deliveries.push(queuedDelivery); - this.upsertReceipt(queueState, { - requestId: queuedDelivery.requestId, - deliveryId: queuedDelivery.deliveryId, - state: "queued", - expiresAtMs: queuedDelivery.expiresAtMs, - senderAgentDid: queuedDelivery.senderAgentDid, - recipientAgentDid: queuedDelivery.recipientAgentDid, - statusUpdatedAt: toIso(nowMs), - }); - - await this.saveQueueState(queueState); - await this.scheduleNextAlarm(queueState, nowMs); - - return toRelayDeliveryResult({ - deliveryId, - state: "queued", - connectedSockets: sockets.length, - queueDepth: queueState.deliveries.length, - }); - } - - async recordDeliveryReceipt(input: RelayReceiptRecordInput): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - const existing = queueState.receipts[input.requestId]; - if (existing === undefined) { - return; - } - - if ( - existing.senderAgentDid !== input.senderAgentDid || - existing.recipientAgentDid !== input.recipientAgentDid - ) { - return; - } 
- - existing.state = input.status; - existing.reason = input.reason; - existing.expiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; - existing.statusUpdatedAt = toIso(nowMs); - await this.saveQueueState(queueState); - await this.scheduleNextAlarm(queueState, nowMs); - } - - async getDeliveryReceipt( - input: RelayReceiptLookupInput, - ): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - const existing = queueState.receipts[input.requestId]; - if ( - existing === undefined || - existing.senderAgentDid !== input.senderAgentDid - ) { - return { found: false }; - } - - return { - found: true, - receipt: existing, - }; - } - - async webSocketMessage( - ws: WebSocket, - message: string | ArrayBuffer, - ): Promise { - const frameBytes = getWebSocketMessageBytes(message); - if (frameBytes > this.deliveryPolicy.maxFrameBytes) { - this.closeSocket(ws, 1009, "frame_too_large"); - await this.scheduleFromStorage(); - return; - } - - const nowMs = nowUtcMs(); - const frameResult = (() => { - try { - return parseFrame(message); - } catch { - return null; - } - })(); - - if (frameResult === null) { - await this.scheduleFromStorage(); - return; - } - - const frame = frameResult; - - if (frame.type === "heartbeat") { - this.touchSocketAck(ws, nowMs); - ws.send(toHeartbeatAckFrame(frame.id)); - await this.scheduleFromStorage(); - return; - } - - if (frame.type === "deliver_ack") { - this.touchSocketAck(ws, nowMs); - const pending = this.pendingDeliveries.get(frame.ackId); - if (pending) { - clearTimeout(pending.timeoutHandle); - this.pendingDeliveries.delete(frame.ackId); - pending.resolve(frame.accepted); - } - await this.scheduleFromStorage(); - return; - } - - if (frame.type === "heartbeat_ack") { - const ackedSocket = this.heartbeatAckSockets.get(frame.ackId); - this.heartbeatAckSockets.delete(frame.ackId); - this.touchSocketAck(ackedSocket ?? 
ws, nowMs); - await this.scheduleFromStorage(); - return; - } - - await this.scheduleFromStorage(); - } - - async webSocketClose( - ws?: WebSocket, - code?: number, - _reason?: string, - wasClean?: boolean, - ): Promise { - if (ws !== undefined) { - this.removeSocketTracking(ws); - this.socketsPendingClose.delete(ws); - } - - const gracefulClose = code === 1000 && (wasClean ?? true); - if (!gracefulClose && this.state.getWebSockets().length === 0) { - this.rejectPendingDeliveries(new Error("Connector socket closed")); - } - - await this.scheduleFromStorage(); - } - - async webSocketError(ws?: WebSocket): Promise { - await this.webSocketClose(ws, 1011, "connector_socket_error", false); - } - - private async handleConnect(request: Request): Promise { - const upgradeHeader = request.headers.get("upgrade"); - if (upgradeHeader?.toLowerCase() !== "websocket") { - return new Response("Expected websocket upgrade", { status: 426 }); - } - - const connectorAgentDid = - request.headers.get(CONNECTOR_AGENT_DID_HEADER)?.trim() ?? ""; - if (connectorAgentDid.length === 0) { - return new Response("Missing connector agent DID", { status: 400 }); - } - - const nowMs = nowUtcMs(); - const activeSockets = this.getActiveSockets(nowMs); - for (const socket of activeSockets) { - this.closeSocket( - socket, - RELAY_SOCKET_SUPERSEDED_CLOSE_CODE, - "superseded_by_new_connection", - ); - } - - const pair = new WebSocketPair(); - const client = pair[0]; - const server = pair[1]; - - this.state.acceptWebSocket(server, [connectorAgentDid]); - this.touchSocketAck(server, nowMs); - void this.drainQueueOnReconnect(); - - return new Response(null, { - status: 101, - webSocket: client, - }); - } - - private async loadQueueState(nowMs: number): Promise { - const fromStorage = this.state.storage.get - ? await this.state.storage.get(RELAY_QUEUE_STORAGE_KEY) - : this.inMemoryQueueState; - const rawState = - typeof fromStorage === "object" && fromStorage !== null - ? 
(fromStorage as Partial) - : undefined; - - const queueState: RelayQueueState = { - deliveries: Array.isArray(rawState?.deliveries) - ? rawState.deliveries.filter((entry) => this.isQueuedDelivery(entry)) - : [], - receipts: this.normalizeReceipts(rawState?.receipts), - }; - - const pruned = this.pruneExpiredQueueState(queueState, nowMs); - if (pruned) { - await this.saveQueueState(queueState); - } - - return queueState; - } - - private async saveQueueState(queueState: RelayQueueState): Promise { - const serialized: RelayQueueState = { - deliveries: [...queueState.deliveries], - receipts: { ...queueState.receipts }, - }; - - if (this.state.storage.put) { - await this.state.storage.put(RELAY_QUEUE_STORAGE_KEY, serialized); - return; - } - - this.inMemoryQueueState = serialized; - } - - private isQueuedDelivery(value: unknown): value is QueuedRelayDelivery { - if (typeof value !== "object" || value === null) { - return false; - } - - const candidate = value as Partial; - return ( - typeof candidate.deliveryId === "string" && - typeof candidate.requestId === "string" && - typeof candidate.senderAgentDid === "string" && - typeof candidate.recipientAgentDid === "string" && - (candidate.conversationId === undefined || - typeof candidate.conversationId === "string") && - (candidate.replyTo === undefined || - typeof candidate.replyTo === "string") && - typeof candidate.createdAtMs === "number" && - Number.isFinite(candidate.createdAtMs) && - typeof candidate.attemptCount === "number" && - Number.isInteger(candidate.attemptCount) && - candidate.attemptCount >= 0 && - typeof candidate.expiresAtMs === "number" && - Number.isFinite(candidate.expiresAtMs) && - typeof candidate.nextAttemptAtMs === "number" && - Number.isFinite(candidate.nextAttemptAtMs) - ); - } - - private normalizeReceipts( - input: unknown, - ): Record { - if (typeof input !== "object" || input === null) { - return {}; - } - - const normalized: Record = {}; - for (const [key, value] of Object.entries( - input 
as Record, - )) { - if (typeof value !== "object" || value === null) { - continue; - } - - const receipt = value as Partial; - if ( - typeof receipt.requestId !== "string" || - receipt.requestId !== key || - typeof receipt.deliveryId !== "string" || - typeof receipt.senderAgentDid !== "string" || - typeof receipt.recipientAgentDid !== "string" || - typeof receipt.expiresAtMs !== "number" || - !Number.isFinite(receipt.expiresAtMs) || - typeof receipt.statusUpdatedAt !== "string" || - !( - receipt.state === "queued" || - receipt.state === "delivered" || - receipt.state === "processed_by_openclaw" || - receipt.state === "dead_lettered" - ) - ) { - continue; - } - - normalized[key] = { - requestId: receipt.requestId, - deliveryId: receipt.deliveryId, - expiresAtMs: receipt.expiresAtMs, - senderAgentDid: receipt.senderAgentDid, - recipientAgentDid: receipt.recipientAgentDid, - state: receipt.state, - reason: typeof receipt.reason === "string" ? receipt.reason : undefined, - statusUpdatedAt: receipt.statusUpdatedAt, - }; - } - - return normalized; - } - - private pruneExpiredQueueState( - queueState: RelayQueueState, - nowMs: number, - ): boolean { - let mutated = false; - - const retainedDeliveries: QueuedRelayDelivery[] = []; - for (const delivery of queueState.deliveries) { - if (delivery.expiresAtMs <= nowMs) { - this.deleteQueuedReceipt( - queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - retainedDeliveries.push(delivery); - } - - if (retainedDeliveries.length !== queueState.deliveries.length) { - queueState.deliveries = retainedDeliveries; - mutated = true; - } - - for (const [requestId, receipt] of Object.entries(queueState.receipts)) { - if (receipt.expiresAtMs <= nowMs) { - delete queueState.receipts[requestId]; - mutated = true; - } - } - - return mutated; - } - - private deleteQueuedReceipt( - queueState: RelayQueueState, - requestId: string, - deliveryId: string, - ): void { - const receipt = 
queueState.receipts[requestId]; - if (receipt === undefined) { - return; - } - - if (receipt.deliveryId !== deliveryId || receipt.state !== "queued") { - return; - } - - delete queueState.receipts[requestId]; - } - - private upsertReceipt( - queueState: RelayQueueState, - receipt: RelayDeliveryReceipt, - ): void { - queueState.receipts[receipt.requestId] = receipt; - } - - private async processQueueDeliveries( - queueState: RelayQueueState, - nowMs: number, - ): Promise { - if (queueState.deliveries.length === 0) { - return false; - } - - const sockets = this.getActiveSockets(nowMs); - if (sockets.length === 0) { - let mutated = false; - for (const delivery of queueState.deliveries) { - if (delivery.nextAttemptAtMs <= nowMs) { - delivery.nextAttemptAtMs = - nowMs + this.computeRetryDelayMs(delivery.attemptCount); - mutated = true; - } - } - - return mutated; - } - - queueState.deliveries.sort((left, right) => { - if (left.nextAttemptAtMs !== right.nextAttemptAtMs) { - return left.nextAttemptAtMs - right.nextAttemptAtMs; - } - - return left.createdAtMs - right.createdAtMs; - }); - - let mutated = false; - const socket = sockets[0]; - - for (let index = 0; index < queueState.deliveries.length; ) { - if ( - this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries - ) { - break; - } - - const delivery = queueState.deliveries[index]; - - if (delivery.expiresAtMs <= nowMs) { - queueState.deliveries.splice(index, 1); - this.deleteQueuedReceipt( - queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - if (delivery.attemptCount >= this.deliveryPolicy.retryMaxAttempts) { - queueState.deliveries.splice(index, 1); - this.deleteQueuedReceipt( - queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - if (delivery.nextAttemptAtMs > nowMs) { - index += 1; - continue; - } - - let accepted = false; - let deliveryError = false; - try { - accepted = await 
this.sendDeliverFrame(socket, { - requestId: delivery.requestId, - senderAgentDid: delivery.senderAgentDid, - recipientAgentDid: delivery.recipientAgentDid, - conversationId: delivery.conversationId, - replyTo: delivery.replyTo, - payload: delivery.payload, - }); - } catch { - deliveryError = true; - } - - if (accepted) { - queueState.deliveries.splice(index, 1); - this.upsertReceipt(queueState, { - requestId: delivery.requestId, - deliveryId: delivery.deliveryId, - state: "delivered", - expiresAtMs: nowMs + this.deliveryPolicy.queueTtlMs, - senderAgentDid: delivery.senderAgentDid, - recipientAgentDid: delivery.recipientAgentDid, - statusUpdatedAt: toIso(nowMs), - }); - mutated = true; - continue; - } - - const nextAttemptCount = delivery.attemptCount + 1; - if (nextAttemptCount >= this.deliveryPolicy.retryMaxAttempts) { - queueState.deliveries.splice(index, 1); - this.deleteQueuedReceipt( - queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - delivery.attemptCount = nextAttemptCount; - delivery.nextAttemptAtMs = - nowMs + this.computeRetryDelayMs(delivery.attemptCount); - mutated = true; - index += 1; - - if (deliveryError) { - for ( - let remaining = index; - remaining < queueState.deliveries.length; - remaining += 1 - ) { - if (queueState.deliveries[remaining].nextAttemptAtMs <= nowMs) { - queueState.deliveries[remaining].nextAttemptAtMs = - nowMs + - this.computeRetryDelayMs( - queueState.deliveries[remaining].attemptCount, - ); - } - } - break; - } - } - - return mutated; - } - - private computeRetryDelayMs(priorAttempts: number): number { - const exponent = Math.max(0, priorAttempts - 1); - const baseDelay = Math.min( - this.deliveryPolicy.retryMaxMs, - this.deliveryPolicy.retryInitialMs * 2 ** exponent, - ); - - if (this.deliveryPolicy.retryJitterRatio <= 0) { - return baseDelay; - } - - const jitterSpan = baseDelay * this.deliveryPolicy.retryJitterRatio; - const lowerBound = Math.max(1, Math.floor(baseDelay - 
jitterSpan)); - const upperBound = Math.ceil(baseDelay + jitterSpan); - const sample = lowerBound + Math.random() * (upperBound - lowerBound); - return Math.min(this.deliveryPolicy.retryMaxMs, Math.floor(sample)); - } - - private async sendDeliverFrame( - socket: WebSocket, - input: RelayDeliveryInput, - ): Promise { - if ( - this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries - ) { - throw new Error("Relay connector in-flight window is full"); - } - - const frame = toDeliverFrame(input); - const framePayload = serializeFrame(frame); - const frameBytes = new TextEncoder().encode(framePayload).byteLength; - if (frameBytes > this.deliveryPolicy.maxFrameBytes) { - throw new Error("Relay connector frame exceeds max allowed size"); - } - - return new Promise((resolve, reject) => { - const timeoutHandle = setTimeout(() => { - this.pendingDeliveries.delete(frame.id); - reject(new Error("Relay connector acknowledgement timed out")); - }, DEFAULT_RELAY_DELIVER_TIMEOUT_MS); - - this.pendingDeliveries.set(frame.id, { - resolve, - reject, - timeoutHandle, - }); - - try { - socket.send(framePayload); - } catch (error) { - clearTimeout(timeoutHandle); - this.pendingDeliveries.delete(frame.id); - reject(error); - } - }); - } - - private rejectPendingDeliveries(error: Error): void { - for (const [deliveryId, pending] of this.pendingDeliveries) { - clearTimeout(pending.timeoutHandle); - pending.reject(error); - this.pendingDeliveries.delete(deliveryId); - } - } - - private getActiveSockets(nowMs: number): WebSocket[] { - const sockets = this.state.getWebSockets(); - this.pruneSocketTracking(sockets); - const activeSockets: WebSocket[] = []; - - for (const socket of sockets) { - if (this.socketsPendingClose.has(socket)) { - continue; - } - - const lastAckAtMs = this.resolveSocketLastAckAtMs(socket, nowMs); - if (nowMs - lastAckAtMs > RELAY_HEARTBEAT_ACK_TIMEOUT_MS) { - this.closeSocket( - socket, - RELAY_SOCKET_STALE_CLOSE_CODE, - "heartbeat_ack_timeout", - ); - 
continue; - } - - activeSockets.push(socket); - } - - return activeSockets; - } - - private resolveSocketLastAckAtMs(socket: WebSocket, nowMs: number): number { - const existing = this.socketLastAckAtMs.get(socket); - if (existing !== undefined) { - return existing; - } - - this.socketLastAckAtMs.set(socket, nowMs); - return nowMs; - } - - private touchSocketAck(socket: WebSocket, nowMs: number): void { - if (this.socketsPendingClose.has(socket)) { - return; - } - this.socketLastAckAtMs.set(socket, nowMs); - } - - private sendHeartbeatFrame(socket: WebSocket, nowMs: number): void { - const heartbeatFrame = toHeartbeatFrame(nowMs); - this.clearSocketHeartbeatAcks(socket); - this.heartbeatAckSockets.set(heartbeatFrame.id, socket); - - try { - socket.send(heartbeatFrame.payload); - } catch { - this.heartbeatAckSockets.delete(heartbeatFrame.id); - this.closeSocket( - socket, - RELAY_SOCKET_STALE_CLOSE_CODE, - "heartbeat_send_failed", - ); - } - } - - private clearSocketHeartbeatAcks(socket: WebSocket): void { - for (const [ackId, ackSocket] of this.heartbeatAckSockets) { - if (ackSocket === socket) { - this.heartbeatAckSockets.delete(ackId); - } - } - } - - private closeSocket(socket: WebSocket, code: number, reason: string): void { - this.socketsPendingClose.add(socket); - this.removeSocketTracking(socket); - try { - socket.close(code, reason); - } catch { - // Ignore close errors for already-closed sockets. 
- } - } - - private removeSocketTracking(socket: WebSocket): void { - this.socketLastAckAtMs.delete(socket); - this.clearSocketHeartbeatAcks(socket); - } - - private pruneSocketTracking(activeSockets: WebSocket[]): void { - const activeSocketSet = new Set(activeSockets); - - for (const socket of this.socketLastAckAtMs.keys()) { - if (!activeSocketSet.has(socket)) { - this.socketLastAckAtMs.delete(socket); - } - } - - for (const socket of this.socketsPendingClose) { - if (!activeSocketSet.has(socket)) { - this.socketsPendingClose.delete(socket); - } - } - - for (const [ackId, socket] of this.heartbeatAckSockets.entries()) { - if (!activeSocketSet.has(socket)) { - this.heartbeatAckSockets.delete(ackId); - } - } - } - - private async drainQueueOnReconnect(): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - let queueMutated = false; - - for (const delivery of queueState.deliveries) { - if (delivery.nextAttemptAtMs > nowMs) { - delivery.nextAttemptAtMs = nowMs; - queueMutated = true; - } - } - - if (await this.processQueueDeliveries(queueState, nowMs)) { - queueMutated = true; - } - - if (queueMutated) { - await this.saveQueueState(queueState); - } - - await this.scheduleNextAlarm(queueState, nowMs); - } - - private async scheduleFromStorage(): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - await this.scheduleNextAlarm(queueState, nowMs); - } - - private async scheduleNextAlarm( - queueState: RelayQueueState, - nowMs: number, - ): Promise { - const candidates: number[] = []; - - const queueWakeAtMs = this.findNextQueueWakeMs(queueState, nowMs); - if (queueWakeAtMs !== undefined) { - candidates.push(queueWakeAtMs); - } - - if (this.getActiveSockets(nowMs).length > 0) { - candidates.push(nowMs + RELAY_HEARTBEAT_INTERVAL_MS); - } - - if (candidates.length === 0) { - await this.state.storage.deleteAlarm?.(); - return; - } - - await 
this.state.storage.setAlarm(Math.min(...candidates)); - } - - private findNextQueueWakeMs( - queueState: RelayQueueState, - nowMs: number, - ): number | undefined { - let earliest: number | undefined; - - for (const delivery of queueState.deliveries) { - const candidate = Math.max(nowMs + 1, delivery.nextAttemptAtMs); - if (earliest === undefined || candidate < earliest) { - earliest = candidate; - } - } - - return earliest; - } -} +export { AgentRelaySession } from "./agent-relay-session/core.js"; +export { RelaySessionDeliveryError } from "./agent-relay-session/errors.js"; +export { + deliverToRelaySession, + getRelayDeliveryReceipt, + recordRelayDeliveryReceipt, +} from "./agent-relay-session/rpc.js"; +export type { + AgentRelaySessionNamespace, + AgentRelaySessionStub, + RelayDeliveryInput, + RelayDeliveryResult, + RelayDeliveryState, + RelayReceiptLookupInput, + RelayReceiptLookupResult, + RelayReceiptRecordInput, +} from "./agent-relay-session/types.js"; diff --git a/apps/proxy/src/agent-relay-session/constants.ts b/apps/proxy/src/agent-relay-session/constants.ts new file mode 100644 index 0000000..7877806 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/constants.ts @@ -0,0 +1,9 @@ +export const CONNECTOR_AGENT_DID_HEADER = "x-claw-connector-agent-did"; +export const RELAY_RPC_DELIVER_PATH = "/rpc/deliver-to-connector"; +export const RELAY_RPC_GET_RECEIPT_PATH = "/rpc/get-delivery-receipt"; +export const RELAY_RPC_RECORD_RECEIPT_PATH = "/rpc/record-delivery-receipt"; +export const RELAY_HEARTBEAT_INTERVAL_MS = 30_000; +export const RELAY_HEARTBEAT_ACK_TIMEOUT_MS = 60_000; +export const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; +export const RELAY_SOCKET_SUPERSEDED_CLOSE_CODE = 1000; +export const RELAY_SOCKET_STALE_CLOSE_CODE = 1011; diff --git a/apps/proxy/src/agent-relay-session/core.ts b/apps/proxy/src/agent-relay-session/core.ts new file mode 100644 index 0000000..ea1f8f0 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/core.ts @@ 
-0,0 +1,796 @@ +import { + DEFAULT_RELAY_DELIVER_TIMEOUT_MS, + parseFrame, + serializeFrame, +} from "@clawdentity/connector"; +import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { nowUtcMs, toIso } from "@clawdentity/sdk"; +import { parseProxyConfig } from "../config.js"; +import { + CONNECTOR_AGENT_DID_HEADER, + RELAY_HEARTBEAT_ACK_TIMEOUT_MS, + RELAY_QUEUE_STORAGE_KEY, + RELAY_RPC_DELIVER_PATH, + RELAY_RPC_GET_RECEIPT_PATH, + RELAY_RPC_RECORD_RECEIPT_PATH, + RELAY_SOCKET_STALE_CLOSE_CODE, + RELAY_SOCKET_SUPERSEDED_CLOSE_CODE, +} from "./constants.js"; +import { RelayQueueFullError } from "./errors.js"; +import { + getWebSocketMessageBytes, + toDeliverFrame, + toHeartbeatAckFrame, + toHeartbeatFrame, + toRelayDeliveryResult, +} from "./frames.js"; +import { + parseDeliveryInput, + parseReceiptLookupInput, + parseReceiptRecordInput, +} from "./parsers.js"; +import { rejectPendingDeliveries } from "./pending-deliveries.js"; +import { computeRetryDelayMs } from "./policy.js"; +import { + deleteQueuedReceipt, + isQueuedDelivery, + normalizeReceipts, + pruneExpiredQueueState, + upsertReceipt, +} from "./queue-state.js"; +import { toErrorResponse } from "./rpc.js"; +import { scheduleNextRelayAlarm } from "./scheduler.js"; +import type { + DurableObjectStateLike, + PendingDelivery, + QueuedRelayDelivery, + RelayDeliveryInput, + RelayDeliveryPolicy, + RelayDeliveryResult, + RelayQueueState, + RelayReceiptLookupInput, + RelayReceiptLookupResult, + RelayReceiptRecordInput, +} from "./types.js"; + +export class AgentRelaySession { + private readonly deliveryPolicy: RelayDeliveryPolicy; + private readonly heartbeatAckSockets = new Map(); + private readonly pendingDeliveries = new Map(); + private readonly socketLastAckAtMs = new Map(); + private readonly socketsPendingClose = new Set(); + private readonly state: DurableObjectStateLike; + private inMemoryQueueState: RelayQueueState = { + deliveries: [], + receipts: {}, + }; + + 
constructor(state: DurableObjectStateLike, env?: unknown) { + this.state = state; + const config = parseProxyConfig(env ?? {}); + this.deliveryPolicy = { + maxFrameBytes: config.relayMaxFrameBytes, + maxInFlightDeliveries: config.relayMaxInFlightDeliveries, + queueMaxMessagesPerAgent: config.relayQueueMaxMessagesPerAgent, + queueTtlMs: config.relayQueueTtlSeconds * 1000, + retryInitialMs: config.relayRetryInitialMs, + retryJitterRatio: config.relayRetryJitterRatio, + retryMaxAttempts: config.relayRetryMaxAttempts, + retryMaxMs: config.relayRetryMaxMs, + }; + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + if (url.pathname === RELAY_CONNECT_PATH) { + return this.handleConnect(request); + } + + if (request.method === "POST" && url.pathname === RELAY_RPC_DELIVER_PATH) { + let input: RelayDeliveryInput; + try { + input = parseDeliveryInput(await request.json()); + } catch { + return new Response("Invalid relay delivery input", { status: 400 }); + } + + try { + const result = await this.deliverToConnector(input); + return Response.json(result, { status: 202 }); + } catch (error) { + if (error instanceof RelayQueueFullError) { + return toErrorResponse({ + code: error.code, + message: error.message, + status: error.status, + }); + } + + return new Response("Relay delivery failed", { status: 502 }); + } + } + + if ( + request.method === "POST" && + url.pathname === RELAY_RPC_RECORD_RECEIPT_PATH + ) { + let input: RelayReceiptRecordInput; + try { + input = parseReceiptRecordInput(await request.json()); + } catch { + return new Response("Invalid relay receipt input", { status: 400 }); + } + + await this.recordDeliveryReceipt(input); + return Response.json({ accepted: true }, { status: 202 }); + } + + if ( + request.method === "POST" && + url.pathname === RELAY_RPC_GET_RECEIPT_PATH + ) { + let input: RelayReceiptLookupInput; + try { + input = parseReceiptLookupInput(await request.json()); + } catch { + return new Response("Invalid relay 
receipt lookup input", { + status: 400, + }); + } + + const receipt = await this.getDeliveryReceipt(input); + return Response.json(receipt, { status: 200 }); + } + + return new Response("Not found", { status: 404 }); + } + + async alarm(): Promise { + const nowMs = nowUtcMs(); + const sockets = this.getActiveSockets(nowMs); + + if (sockets.length > 0) { + for (const socket of sockets) { + this.sendHeartbeatFrame(socket, nowMs); + } + } + + const queueState = await this.loadQueueState(nowMs); + const queueMutated = await this.processQueueDeliveries(queueState, nowMs); + if (queueMutated) { + await this.saveQueueState(queueState); + } + + await this.scheduleNextAlarm(queueState, nowMs); + } + + async deliverToConnector( + input: RelayDeliveryInput, + ): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + const existingReceipt = queueState.receipts[input.requestId]; + + if ( + existingReceipt !== undefined && + existingReceipt.expiresAtMs > nowMs && + existingReceipt.senderAgentDid === input.senderAgentDid && + existingReceipt.recipientAgentDid === input.recipientAgentDid + ) { + return toRelayDeliveryResult({ + deliveryId: existingReceipt.deliveryId, + state: existingReceipt.state, + connectedSockets: this.getActiveSockets(nowMs).length, + queueDepth: queueState.deliveries.length, + }); + } + + const sockets = this.getActiveSockets(nowMs); + const deliveryId = generateUlid(nowMs); + const deliveryTtlExpiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; + let priorAttempts = 0; + + if ( + sockets.length > 0 && + this.pendingDeliveries.size < this.deliveryPolicy.maxInFlightDeliveries + ) { + priorAttempts = 1; + try { + const accepted = await this.sendDeliverFrame(sockets[0], input); + if (accepted) { + upsertReceipt(queueState, { + requestId: input.requestId, + deliveryId, + state: "delivered", + expiresAtMs: deliveryTtlExpiresAtMs, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + 
statusUpdatedAt: toIso(nowMs), + }); + await this.saveQueueState(queueState); + await this.scheduleNextAlarm(queueState, nowMs); + + return toRelayDeliveryResult({ + deliveryId, + state: "delivered", + connectedSockets: sockets.length, + queueDepth: queueState.deliveries.length, + }); + } + } catch { + // Fall through to durable queueing below. + } + } + + if (priorAttempts >= this.deliveryPolicy.retryMaxAttempts) { + throw new Error("Relay delivery exhausted retry budget"); + } + + if ( + queueState.deliveries.length >= + this.deliveryPolicy.queueMaxMessagesPerAgent + ) { + throw new RelayQueueFullError(); + } + + const queuedDelivery: QueuedRelayDelivery = { + deliveryId, + requestId: input.requestId, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + conversationId: input.conversationId, + replyTo: input.replyTo, + payload: input.payload, + createdAtMs: nowMs, + attemptCount: priorAttempts, + expiresAtMs: deliveryTtlExpiresAtMs, + nextAttemptAtMs: + nowMs + computeRetryDelayMs(this.deliveryPolicy, priorAttempts), + }; + + queueState.deliveries.push(queuedDelivery); + upsertReceipt(queueState, { + requestId: queuedDelivery.requestId, + deliveryId: queuedDelivery.deliveryId, + state: "queued", + expiresAtMs: queuedDelivery.expiresAtMs, + senderAgentDid: queuedDelivery.senderAgentDid, + recipientAgentDid: queuedDelivery.recipientAgentDid, + statusUpdatedAt: toIso(nowMs), + }); + + await this.saveQueueState(queueState); + await this.scheduleNextAlarm(queueState, nowMs); + + return toRelayDeliveryResult({ + deliveryId, + state: "queued", + connectedSockets: sockets.length, + queueDepth: queueState.deliveries.length, + }); + } + + async recordDeliveryReceipt(input: RelayReceiptRecordInput): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + const existing = queueState.receipts[input.requestId]; + if (existing === undefined) { + return; + } + + if ( + existing.senderAgentDid !== 
input.senderAgentDid || + existing.recipientAgentDid !== input.recipientAgentDid + ) { + return; + } + + existing.state = input.status; + existing.reason = input.reason; + existing.expiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; + existing.statusUpdatedAt = toIso(nowMs); + await this.saveQueueState(queueState); + await this.scheduleNextAlarm(queueState, nowMs); + } + + async getDeliveryReceipt( + input: RelayReceiptLookupInput, + ): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + const existing = queueState.receipts[input.requestId]; + if ( + existing === undefined || + existing.senderAgentDid !== input.senderAgentDid + ) { + return { found: false }; + } + + return { + found: true, + receipt: existing, + }; + } + + async webSocketMessage( + ws: WebSocket, + message: string | ArrayBuffer, + ): Promise { + const frameBytes = getWebSocketMessageBytes(message); + if (frameBytes > this.deliveryPolicy.maxFrameBytes) { + this.closeSocket(ws, 1009, "frame_too_large"); + await this.scheduleFromStorage(); + return; + } + + const nowMs = nowUtcMs(); + const frameResult = (() => { + try { + return parseFrame(message); + } catch { + return null; + } + })(); + + if (frameResult === null) { + await this.scheduleFromStorage(); + return; + } + + const frame = frameResult; + + if (frame.type === "heartbeat") { + this.touchSocketAck(ws, nowMs); + ws.send(toHeartbeatAckFrame(frame.id)); + await this.scheduleFromStorage(); + return; + } + + if (frame.type === "deliver_ack") { + this.touchSocketAck(ws, nowMs); + const pending = this.pendingDeliveries.get(frame.ackId); + if (pending) { + clearTimeout(pending.timeoutHandle); + this.pendingDeliveries.delete(frame.ackId); + pending.resolve(frame.accepted); + } + await this.scheduleFromStorage(); + return; + } + + if (frame.type === "heartbeat_ack") { + const ackedSocket = this.heartbeatAckSockets.get(frame.ackId); + this.heartbeatAckSockets.delete(frame.ackId); + 
this.touchSocketAck(ackedSocket ?? ws, nowMs); + await this.scheduleFromStorage(); + return; + } + + await this.scheduleFromStorage(); + } + + async webSocketClose( + ws?: WebSocket, + code?: number, + _reason?: string, + wasClean?: boolean, + ): Promise { + if (ws !== undefined) { + this.removeSocketTracking(ws); + this.socketsPendingClose.delete(ws); + } + + const gracefulClose = code === 1000 && (wasClean ?? true); + if (!gracefulClose && this.state.getWebSockets().length === 0) { + rejectPendingDeliveries( + this.pendingDeliveries, + new Error("Connector socket closed"), + ); + } + + await this.scheduleFromStorage(); + } + + async webSocketError(ws?: WebSocket): Promise { + await this.webSocketClose(ws, 1011, "connector_socket_error", false); + } + + private async handleConnect(request: Request): Promise { + const upgradeHeader = request.headers.get("upgrade"); + if (upgradeHeader?.toLowerCase() !== "websocket") { + return new Response("Expected websocket upgrade", { status: 426 }); + } + + const connectorAgentDid = + request.headers.get(CONNECTOR_AGENT_DID_HEADER)?.trim() ?? ""; + if (connectorAgentDid.length === 0) { + return new Response("Missing connector agent DID", { status: 400 }); + } + + const nowMs = nowUtcMs(); + const activeSockets = this.getActiveSockets(nowMs); + for (const socket of activeSockets) { + this.closeSocket( + socket, + RELAY_SOCKET_SUPERSEDED_CLOSE_CODE, + "superseded_by_new_connection", + ); + } + + const pair = new WebSocketPair(); + const client = pair[0]; + const server = pair[1]; + + this.state.acceptWebSocket(server, [connectorAgentDid]); + this.touchSocketAck(server, nowMs); + void this.drainQueueOnReconnect(); + + return new Response(null, { + status: 101, + webSocket: client, + }); + } + + private async loadQueueState(nowMs: number): Promise { + const fromStorage = this.state.storage.get + ? 
  /**
   * Run one pass over the durable delivery queue, pushing due entries over the
   * first active socket.
   *
   * Iterates with a manual index because entries are removed in place
   * (`splice`) on expiry, retry exhaustion, or successful delivery; the index
   * only advances when the current entry is kept. Deliveries are processed in
   * (nextAttemptAtMs, createdAtMs) order.
   *
   * @param queueState - Mutable queue state loaded from storage.
   * @param nowMs - Current wall-clock time in ms (single snapshot for the pass).
   * @returns true when `queueState` was mutated and must be persisted.
   */
  private async processQueueDeliveries(
    queueState: RelayQueueState,
    nowMs: number,
  ): Promise<boolean> {
    if (queueState.deliveries.length === 0) {
      return false;
    }

    const sockets = this.getActiveSockets(nowMs);
    if (sockets.length === 0) {
      // No connector online: push every due entry's next attempt into the
      // future so the alarm does not spin while disconnected.
      let mutated = false;
      for (const delivery of queueState.deliveries) {
        if (delivery.nextAttemptAtMs <= nowMs) {
          delivery.nextAttemptAtMs =
            nowMs +
            computeRetryDelayMs(this.deliveryPolicy, delivery.attemptCount);
          mutated = true;
        }
      }

      return mutated;
    }

    // Oldest-due-first ordering; creation time breaks ties.
    queueState.deliveries.sort((left, right) => {
      if (left.nextAttemptAtMs !== right.nextAttemptAtMs) {
        return left.nextAttemptAtMs - right.nextAttemptAtMs;
      }

      return left.createdAtMs - right.createdAtMs;
    });

    let mutated = false;
    const socket = sockets[0];

    for (let index = 0; index < queueState.deliveries.length; ) {
      // Respect the in-flight acknowledgement window.
      if (
        this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries
      ) {
        break;
      }

      const delivery = queueState.deliveries[index];

      // Drop expired entries and their still-queued receipts.
      if (delivery.expiresAtMs <= nowMs) {
        queueState.deliveries.splice(index, 1);
        deleteQueuedReceipt(
          queueState,
          delivery.requestId,
          delivery.deliveryId,
        );
        mutated = true;
        continue;
      }

      // Drop entries that already used up the retry budget.
      if (delivery.attemptCount >= this.deliveryPolicy.retryMaxAttempts) {
        queueState.deliveries.splice(index, 1);
        deleteQueuedReceipt(
          queueState,
          delivery.requestId,
          delivery.deliveryId,
        );
        mutated = true;
        continue;
      }

      // Not due yet; keep it and move on.
      if (delivery.nextAttemptAtMs > nowMs) {
        index += 1;
        continue;
      }

      let accepted = false;
      let deliveryError = false;
      try {
        accepted = await this.sendDeliverFrame(socket, {
          requestId: delivery.requestId,
          senderAgentDid: delivery.senderAgentDid,
          recipientAgentDid: delivery.recipientAgentDid,
          conversationId: delivery.conversationId,
          replyTo: delivery.replyTo,
          payload: delivery.payload,
        });
      } catch {
        deliveryError = true;
      }

      if (accepted) {
        // Connector acked: remove from the queue and refresh the receipt TTL.
        queueState.deliveries.splice(index, 1);
        upsertReceipt(queueState, {
          requestId: delivery.requestId,
          deliveryId: delivery.deliveryId,
          state: "delivered",
          expiresAtMs: nowMs + this.deliveryPolicy.queueTtlMs,
          senderAgentDid: delivery.senderAgentDid,
          recipientAgentDid: delivery.recipientAgentDid,
          statusUpdatedAt: toIso(nowMs),
        });
        mutated = true;
        continue;
      }

      const nextAttemptCount = delivery.attemptCount + 1;
      if (nextAttemptCount >= this.deliveryPolicy.retryMaxAttempts) {
        queueState.deliveries.splice(index, 1);
        deleteQueuedReceipt(
          queueState,
          delivery.requestId,
          delivery.deliveryId,
        );
        mutated = true;
        continue;
      }

      // Reschedule this entry with backoff.
      delivery.attemptCount = nextAttemptCount;
      delivery.nextAttemptAtMs =
        nowMs + computeRetryDelayMs(this.deliveryPolicy, delivery.attemptCount);
      mutated = true;
      index += 1;

      if (deliveryError) {
        // The socket itself errored (not just a nack): back off every other
        // due entry too and stop this pass rather than hammering the socket.
        for (
          let remaining = index;
          remaining < queueState.deliveries.length;
          remaining += 1
        ) {
          if (queueState.deliveries[remaining].nextAttemptAtMs <= nowMs) {
            queueState.deliveries[remaining].nextAttemptAtMs =
              nowMs +
              computeRetryDelayMs(
                this.deliveryPolicy,
                queueState.deliveries[remaining].attemptCount,
              );
          }
        }
        break;
      }
    }

    return mutated;
  }
nowMs); + return nowMs; + } + + private touchSocketAck(socket: WebSocket, nowMs: number): void { + if (this.socketsPendingClose.has(socket)) { + return; + } + this.socketLastAckAtMs.set(socket, nowMs); + } + + private sendHeartbeatFrame(socket: WebSocket, nowMs: number): void { + const heartbeatFrame = toHeartbeatFrame(nowMs); + this.clearSocketHeartbeatAcks(socket); + this.heartbeatAckSockets.set(heartbeatFrame.id, socket); + + try { + socket.send(heartbeatFrame.payload); + } catch { + this.heartbeatAckSockets.delete(heartbeatFrame.id); + this.closeSocket( + socket, + RELAY_SOCKET_STALE_CLOSE_CODE, + "heartbeat_send_failed", + ); + } + } + + private clearSocketHeartbeatAcks(socket: WebSocket): void { + for (const [ackId, ackSocket] of this.heartbeatAckSockets) { + if (ackSocket === socket) { + this.heartbeatAckSockets.delete(ackId); + } + } + } + + private closeSocket(socket: WebSocket, code: number, reason: string): void { + this.socketsPendingClose.add(socket); + this.removeSocketTracking(socket); + try { + socket.close(code, reason); + } catch { + // Ignore close errors for already-closed sockets. 
+ } + } + + private removeSocketTracking(socket: WebSocket): void { + this.socketLastAckAtMs.delete(socket); + this.clearSocketHeartbeatAcks(socket); + } + + private pruneSocketTracking(activeSockets: WebSocket[]): void { + const activeSocketSet = new Set(activeSockets); + + for (const socket of this.socketLastAckAtMs.keys()) { + if (!activeSocketSet.has(socket)) { + this.socketLastAckAtMs.delete(socket); + } + } + + for (const socket of this.socketsPendingClose) { + if (!activeSocketSet.has(socket)) { + this.socketsPendingClose.delete(socket); + } + } + + for (const [ackId, socket] of this.heartbeatAckSockets.entries()) { + if (!activeSocketSet.has(socket)) { + this.heartbeatAckSockets.delete(ackId); + } + } + } + + private async drainQueueOnReconnect(): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + let queueMutated = false; + + for (const delivery of queueState.deliveries) { + if (delivery.nextAttemptAtMs > nowMs) { + delivery.nextAttemptAtMs = nowMs; + queueMutated = true; + } + } + + if (await this.processQueueDeliveries(queueState, nowMs)) { + queueMutated = true; + } + + if (queueMutated) { + await this.saveQueueState(queueState); + } + + await this.scheduleNextAlarm(queueState, nowMs); + } + + private async scheduleFromStorage(): Promise { + const nowMs = nowUtcMs(); + const queueState = await this.loadQueueState(nowMs); + await this.scheduleNextAlarm(queueState, nowMs); + } + + private async scheduleNextAlarm( + queueState: RelayQueueState, + nowMs: number, + ): Promise { + await scheduleNextRelayAlarm({ + storage: this.state.storage, + queueState, + nowMs, + hasActiveSockets: this.getActiveSockets(nowMs).length > 0, + }); + } +} diff --git a/apps/proxy/src/agent-relay-session/errors.ts b/apps/proxy/src/agent-relay-session/errors.ts new file mode 100644 index 0000000..bde57bb --- /dev/null +++ b/apps/proxy/src/agent-relay-session/errors.ts @@ -0,0 +1,21 @@ +export class RelaySessionDeliveryError extends Error 
{ + readonly code: string; + readonly status: number; + + constructor(input: { code: string; message: string; status: number }) { + super(input.message); + this.name = "RelaySessionDeliveryError"; + this.code = input.code; + this.status = input.status; + } +} + +export class RelayQueueFullError extends Error { + readonly code = "PROXY_RELAY_QUEUE_FULL"; + readonly status = 507; + + constructor() { + super("Target relay queue is full"); + this.name = "RelayQueueFullError"; + } +} diff --git a/apps/proxy/src/agent-relay-session/frames.ts b/apps/proxy/src/agent-relay-session/frames.ts new file mode 100644 index 0000000..2d5971d --- /dev/null +++ b/apps/proxy/src/agent-relay-session/frames.ts @@ -0,0 +1,83 @@ +import { + CONNECTOR_FRAME_VERSION, + type DeliverFrame, + type HeartbeatAckFrame, + serializeFrame, +} from "@clawdentity/connector"; +import { generateUlid } from "@clawdentity/protocol"; +import { nowUtcMs, toIso } from "@clawdentity/sdk"; +import type { + RelayDeliveryInput, + RelayDeliveryResult, + RelayDeliveryState, +} from "./types.js"; + +export function toHeartbeatFrame(nowMs: number): { + id: string; + payload: string; +} { + const id = generateUlid(nowMs); + return { + id, + payload: serializeFrame({ + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat", + id, + ts: toIso(nowMs), + }), + }; +} + +export function toHeartbeatAckFrame(ackId: string): string { + const nowMs = nowUtcMs(); + const ackFrame: HeartbeatAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat_ack", + id: generateUlid(nowMs), + ts: toIso(nowMs), + ackId, + }; + + return serializeFrame(ackFrame); +} + +export function toDeliverFrame(input: RelayDeliveryInput): DeliverFrame { + const nowMs = nowUtcMs(); + return { + v: CONNECTOR_FRAME_VERSION, + type: "deliver", + id: generateUlid(nowMs), + ts: toIso(nowMs), + fromAgentDid: input.senderAgentDid, + toAgentDid: input.recipientAgentDid, + payload: input.payload, + conversationId: input.conversationId, + replyTo: input.replyTo, + }; 
+} + +export function getWebSocketMessageBytes( + message: string | ArrayBuffer, +): number { + if (typeof message === "string") { + return new TextEncoder().encode(message).byteLength; + } + + return message.byteLength; +} + +export function toRelayDeliveryResult(input: { + connectedSockets: number; + deliveryId: string; + queueDepth: number; + state: RelayDeliveryState; +}): RelayDeliveryResult { + return { + deliveryId: input.deliveryId, + state: input.state, + delivered: input.state === "delivered", + queued: input.state === "queued", + connectedSockets: input.connectedSockets, + queueDepth: input.queueDepth, + }; +} diff --git a/apps/proxy/src/agent-relay-session/parsers.ts b/apps/proxy/src/agent-relay-session/parsers.ts new file mode 100644 index 0000000..06591e6 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/parsers.ts @@ -0,0 +1,111 @@ +import type { + RelayDeliveryInput, + RelayReceiptLookupInput, + RelayReceiptRecordInput, +} from "./types.js"; + +export function parseDeliveryInput(value: unknown): RelayDeliveryInput { + if (typeof value !== "object" || value === null) { + throw new TypeError("Relay delivery input must be an object"); + } + + const input = value as Partial; + if ( + typeof input.requestId !== "string" || + typeof input.senderAgentDid !== "string" || + typeof input.recipientAgentDid !== "string" + ) { + throw new TypeError("Relay delivery input is invalid"); + } + + if ( + input.replyTo !== undefined && + (typeof input.replyTo !== "string" || input.replyTo.trim().length === 0) + ) { + throw new TypeError("Relay delivery input is invalid"); + } + if (typeof input.replyTo === "string") { + try { + new URL(input.replyTo); + } catch { + throw new TypeError("Relay delivery input is invalid"); + } + } + + return { + requestId: input.requestId, + senderAgentDid: input.senderAgentDid, + recipientAgentDid: input.recipientAgentDid, + payload: input.payload, + conversationId: + typeof input.conversationId === "string" && + 
input.conversationId.trim().length > 0 + ? input.conversationId.trim() + : undefined, + replyTo: + typeof input.replyTo === "string" && input.replyTo.trim().length > 0 + ? input.replyTo.trim() + : undefined, + }; +} + +export function parseReceiptRecordInput( + value: unknown, +): RelayReceiptRecordInput { + if (typeof value !== "object" || value === null) { + throw new TypeError("Relay receipt input must be an object"); + } + + const input = value as Partial; + if ( + typeof input.requestId !== "string" || + input.requestId.trim().length === 0 || + typeof input.senderAgentDid !== "string" || + input.senderAgentDid.trim().length === 0 || + typeof input.recipientAgentDid !== "string" || + input.recipientAgentDid.trim().length === 0 + ) { + throw new TypeError("Relay receipt input is invalid"); + } + + if ( + input.status !== "processed_by_openclaw" && + input.status !== "dead_lettered" + ) { + throw new TypeError("Relay receipt input is invalid"); + } + + return { + requestId: input.requestId.trim(), + senderAgentDid: input.senderAgentDid.trim(), + recipientAgentDid: input.recipientAgentDid.trim(), + status: input.status, + reason: + typeof input.reason === "string" && input.reason.trim().length > 0 + ? 
input.reason.trim() + : undefined, + }; +} + +export function parseReceiptLookupInput( + value: unknown, +): RelayReceiptLookupInput { + if (typeof value !== "object" || value === null) { + throw new TypeError("Relay receipt lookup input must be an object"); + } + + const input = value as Partial; + if ( + typeof input.requestId !== "string" || + input.requestId.trim().length === 0 || + typeof input.senderAgentDid !== "string" || + input.senderAgentDid.trim().length === 0 + ) { + throw new TypeError("Relay receipt lookup input is invalid"); + } + + return { + requestId: input.requestId.trim(), + senderAgentDid: input.senderAgentDid.trim(), + }; +} diff --git a/apps/proxy/src/agent-relay-session/pending-deliveries.ts b/apps/proxy/src/agent-relay-session/pending-deliveries.ts new file mode 100644 index 0000000..dcd668c --- /dev/null +++ b/apps/proxy/src/agent-relay-session/pending-deliveries.ts @@ -0,0 +1,12 @@ +import type { PendingDelivery } from "./types.js"; + +export function rejectPendingDeliveries( + pendingDeliveries: Map, + error: Error, +): void { + for (const [deliveryId, pending] of pendingDeliveries) { + clearTimeout(pending.timeoutHandle); + pending.reject(error); + pendingDeliveries.delete(deliveryId); + } +} diff --git a/apps/proxy/src/agent-relay-session/policy.ts b/apps/proxy/src/agent-relay-session/policy.ts new file mode 100644 index 0000000..26a7a25 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/policy.ts @@ -0,0 +1,22 @@ +import type { RelayDeliveryPolicy } from "./types.js"; + +export function computeRetryDelayMs( + policy: RelayDeliveryPolicy, + priorAttempts: number, +): number { + const exponent = Math.max(0, priorAttempts - 1); + const baseDelay = Math.min( + policy.retryMaxMs, + policy.retryInitialMs * 2 ** exponent, + ); + + if (policy.retryJitterRatio <= 0) { + return baseDelay; + } + + const jitterSpan = baseDelay * policy.retryJitterRatio; + const lowerBound = Math.max(1, Math.floor(baseDelay - jitterSpan)); + const upperBound 
= Math.ceil(baseDelay + jitterSpan); + const sample = lowerBound + Math.random() * (upperBound - lowerBound); + return Math.min(policy.retryMaxMs, Math.floor(sample)); +} diff --git a/apps/proxy/src/agent-relay-session/queue-state.ts b/apps/proxy/src/agent-relay-session/queue-state.ts new file mode 100644 index 0000000..144ea27 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/queue-state.ts @@ -0,0 +1,152 @@ +import type { + QueuedRelayDelivery, + RelayDeliveryReceipt, + RelayQueueState, +} from "./types.js"; + +export function isQueuedDelivery(value: unknown): value is QueuedRelayDelivery { + if (typeof value !== "object" || value === null) { + return false; + } + + const candidate = value as Partial; + return ( + typeof candidate.deliveryId === "string" && + typeof candidate.requestId === "string" && + typeof candidate.senderAgentDid === "string" && + typeof candidate.recipientAgentDid === "string" && + (candidate.conversationId === undefined || + typeof candidate.conversationId === "string") && + (candidate.replyTo === undefined || + typeof candidate.replyTo === "string") && + typeof candidate.createdAtMs === "number" && + Number.isFinite(candidate.createdAtMs) && + typeof candidate.attemptCount === "number" && + Number.isInteger(candidate.attemptCount) && + candidate.attemptCount >= 0 && + typeof candidate.expiresAtMs === "number" && + Number.isFinite(candidate.expiresAtMs) && + typeof candidate.nextAttemptAtMs === "number" && + Number.isFinite(candidate.nextAttemptAtMs) + ); +} + +export function normalizeReceipts( + input: unknown, +): Record { + if (typeof input !== "object" || input === null) { + return {}; + } + + const normalized: Record = {}; + for (const [key, value] of Object.entries(input as Record)) { + if (typeof value !== "object" || value === null) { + continue; + } + + const receipt = value as Partial; + if ( + typeof receipt.requestId !== "string" || + receipt.requestId !== key || + typeof receipt.deliveryId !== "string" || + typeof 
receipt.senderAgentDid !== "string" || + typeof receipt.recipientAgentDid !== "string" || + typeof receipt.expiresAtMs !== "number" || + !Number.isFinite(receipt.expiresAtMs) || + typeof receipt.statusUpdatedAt !== "string" || + !( + receipt.state === "queued" || + receipt.state === "delivered" || + receipt.state === "processed_by_openclaw" || + receipt.state === "dead_lettered" + ) + ) { + continue; + } + + normalized[key] = { + requestId: receipt.requestId, + deliveryId: receipt.deliveryId, + expiresAtMs: receipt.expiresAtMs, + senderAgentDid: receipt.senderAgentDid, + recipientAgentDid: receipt.recipientAgentDid, + state: receipt.state, + reason: typeof receipt.reason === "string" ? receipt.reason : undefined, + statusUpdatedAt: receipt.statusUpdatedAt, + }; + } + + return normalized; +} + +export function deleteQueuedReceipt( + queueState: RelayQueueState, + requestId: string, + deliveryId: string, +): void { + const receipt = queueState.receipts[requestId]; + if (receipt === undefined) { + return; + } + + if (receipt.deliveryId !== deliveryId || receipt.state !== "queued") { + return; + } + + delete queueState.receipts[requestId]; +} + +export function upsertReceipt( + queueState: RelayQueueState, + receipt: RelayDeliveryReceipt, +): void { + queueState.receipts[receipt.requestId] = receipt; +} + +export function pruneExpiredQueueState( + queueState: RelayQueueState, + nowMs: number, +): boolean { + let mutated = false; + + const retainedDeliveries: QueuedRelayDelivery[] = []; + for (const delivery of queueState.deliveries) { + if (delivery.expiresAtMs <= nowMs) { + deleteQueuedReceipt(queueState, delivery.requestId, delivery.deliveryId); + mutated = true; + continue; + } + + retainedDeliveries.push(delivery); + } + + if (retainedDeliveries.length !== queueState.deliveries.length) { + queueState.deliveries = retainedDeliveries; + mutated = true; + } + + for (const [requestId, receipt] of Object.entries(queueState.receipts)) { + if (receipt.expiresAtMs <= 
nowMs) { + delete queueState.receipts[requestId]; + mutated = true; + } + } + + return mutated; +} + +export function findNextQueueWakeMs( + queueState: RelayQueueState, + nowMs: number, +): number | undefined { + let earliest: number | undefined; + + for (const delivery of queueState.deliveries) { + const candidate = Math.max(nowMs + 1, delivery.nextAttemptAtMs); + if (earliest === undefined || candidate < earliest) { + earliest = candidate; + } + } + + return earliest; +} diff --git a/apps/proxy/src/agent-relay-session/rpc.ts b/apps/proxy/src/agent-relay-session/rpc.ts new file mode 100644 index 0000000..cf82d85 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/rpc.ts @@ -0,0 +1,119 @@ +import { + RELAY_RPC_DELIVER_PATH, + RELAY_RPC_GET_RECEIPT_PATH, + RELAY_RPC_RECORD_RECEIPT_PATH, +} from "./constants.js"; +import { RelaySessionDeliveryError } from "./errors.js"; +import type { + AgentRelaySessionStub, + RelayDeliveryInput, + RelayDeliveryResult, + RelayReceiptLookupInput, + RelayReceiptLookupResult, + RelayReceiptRecordInput, +} from "./types.js"; + +export function toErrorResponse(input: { + code: string; + message: string; + status: number; +}): Response { + return Response.json( + { + error: { + code: input.code, + message: input.message, + }, + }, + { status: input.status }, + ); +} + +export async function deliverToRelaySession( + relaySession: AgentRelaySessionStub, + input: RelayDeliveryInput, +): Promise { + const response = await relaySession.fetch( + new Request(`https://agent-relay-session${RELAY_RPC_DELIVER_PATH}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(input), + }), + ); + + if (!response.ok) { + let code = "PROXY_RELAY_DELIVERY_FAILED"; + let message = "Relay session delivery RPC failed"; + try { + const body = (await response.json()) as { + error?: { code?: unknown; message?: unknown }; + }; + if (typeof body.error?.code === "string") { + code = body.error.code; + } + if (typeof 
body.error?.message === "string") { + message = body.error.message; + } + } catch { + // Ignore parse failures and keep defaults. + } + + throw new RelaySessionDeliveryError({ + code, + message, + status: response.status, + }); + } + + return (await response.json()) as RelayDeliveryResult; +} + +export async function recordRelayDeliveryReceipt( + relaySession: AgentRelaySessionStub, + input: RelayReceiptRecordInput, +): Promise { + const response = await relaySession.fetch( + new Request(`https://agent-relay-session${RELAY_RPC_RECORD_RECEIPT_PATH}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(input), + }), + ); + + if (!response.ok) { + throw new RelaySessionDeliveryError({ + code: "PROXY_RELAY_RECEIPT_WRITE_FAILED", + message: "Relay delivery receipt write RPC failed", + status: response.status, + }); + } +} + +export async function getRelayDeliveryReceipt( + relaySession: AgentRelaySessionStub, + input: RelayReceiptLookupInput, +): Promise { + const response = await relaySession.fetch( + new Request(`https://agent-relay-session${RELAY_RPC_GET_RECEIPT_PATH}`, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(input), + }), + ); + + if (!response.ok) { + throw new RelaySessionDeliveryError({ + code: "PROXY_RELAY_RECEIPT_READ_FAILED", + message: "Relay delivery receipt read RPC failed", + status: response.status, + }); + } + + return (await response.json()) as RelayReceiptLookupResult; +} diff --git a/apps/proxy/src/agent-relay-session/scheduler.ts b/apps/proxy/src/agent-relay-session/scheduler.ts new file mode 100644 index 0000000..7781030 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/scheduler.ts @@ -0,0 +1,28 @@ +import { RELAY_HEARTBEAT_INTERVAL_MS } from "./constants.js"; +import { findNextQueueWakeMs } from "./queue-state.js"; +import type { DurableObjectStorageLike, RelayQueueState } from "./types.js"; + +export async function 
scheduleNextRelayAlarm(input: { + storage: DurableObjectStorageLike; + queueState: RelayQueueState; + nowMs: number; + hasActiveSockets: boolean; +}): Promise { + const candidates: number[] = []; + + const queueWakeAtMs = findNextQueueWakeMs(input.queueState, input.nowMs); + if (queueWakeAtMs !== undefined) { + candidates.push(queueWakeAtMs); + } + + if (input.hasActiveSockets) { + candidates.push(input.nowMs + RELAY_HEARTBEAT_INTERVAL_MS); + } + + if (candidates.length === 0) { + await input.storage.deleteAlarm?.(); + return; + } + + await input.storage.setAlarm(Math.min(...candidates)); +} diff --git a/apps/proxy/src/agent-relay-session/types.ts b/apps/proxy/src/agent-relay-session/types.ts new file mode 100644 index 0000000..aaabc7d --- /dev/null +++ b/apps/proxy/src/agent-relay-session/types.ts @@ -0,0 +1,117 @@ +export type DurableObjectStorageLike = { + deleteAlarm?: () => Promise | void; + get?: (key: string) => Promise | unknown; + put?: (key: string, value: unknown) => Promise | void; + setAlarm: (scheduledTime: number | Date) => Promise | void; +}; + +export type DurableObjectStateLike = { + acceptWebSocket: (socket: WebSocket, tags?: string[]) => void; + getWebSockets: () => WebSocket[]; + storage: DurableObjectStorageLike; +}; + +export type RelayDeliveryInput = { + conversationId?: string; + payload: unknown; + recipientAgentDid: string; + replyTo?: string; + requestId: string; + senderAgentDid: string; +}; + +export type RelayDeliveryState = + | "delivered" + | "queued" + | "processed_by_openclaw" + | "dead_lettered"; + +export type RelayDeliveryResult = { + connectedSockets: number; + delivered: boolean; + deliveryId: string; + queueDepth: number; + queued: boolean; + state: RelayDeliveryState; +}; + +export type RelayReceiptRecordInput = { + reason?: string; + recipientAgentDid: string; + requestId: string; + senderAgentDid: string; + status: "processed_by_openclaw" | "dead_lettered"; +}; + +export type RelayReceiptLookupInput = { + requestId: 
string; + senderAgentDid: string; +}; + +export type RelayReceiptLookupResult = { + found: boolean; + receipt?: RelayDeliveryReceipt; +}; + +export type AgentRelaySessionStub = { + deliverToConnector?: ( + input: RelayDeliveryInput, + ) => Promise; + getDeliveryReceipt?: ( + input: RelayReceiptLookupInput, + ) => Promise; + recordDeliveryReceipt?: (input: RelayReceiptRecordInput) => Promise; + fetch: (request: Request) => Promise; +}; + +export type AgentRelaySessionNamespace = { + get: (id: DurableObjectId) => AgentRelaySessionStub; + idFromName: (name: string) => DurableObjectId; +}; + +export type PendingDelivery = { + reject: (error: unknown) => void; + resolve: (accepted: boolean) => void; + timeoutHandle: ReturnType; +}; + +export type QueuedRelayDelivery = { + attemptCount: number; + createdAtMs: number; + deliveryId: string; + expiresAtMs: number; + nextAttemptAtMs: number; + payload: unknown; + recipientAgentDid: string; + replyTo?: string; + requestId: string; + senderAgentDid: string; + conversationId?: string; +}; + +export type RelayDeliveryReceipt = { + deliveryId: string; + expiresAtMs: number; + recipientAgentDid: string; + reason?: string; + requestId: string; + senderAgentDid: string; + statusUpdatedAt: string; + state: RelayDeliveryState; +}; + +export type RelayQueueState = { + deliveries: QueuedRelayDelivery[]; + receipts: Record; +}; + +export type RelayDeliveryPolicy = { + maxFrameBytes: number; + maxInFlightDeliveries: number; + queueMaxMessagesPerAgent: number; + queueTtlMs: number; + retryInitialMs: number; + retryJitterRatio: number; + retryMaxAttempts: number; + retryMaxMs: number; +}; From d2d6dbf6fbf6b930e1a4a4fffaaf9c3cd386ae11 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 16:19:50 +0530 Subject: [PATCH 126/190] refactor(cli): split pair command into modules --- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/pair.ts | 2157 +-------------------- apps/cli/src/commands/pair/AGENTS.md | 29 + 
apps/cli/src/commands/pair/command.ts | 236 +++ apps/cli/src/commands/pair/common.ts | 502 +++++ apps/cli/src/commands/pair/persistence.ts | 290 +++ apps/cli/src/commands/pair/proxy.ts | 512 +++++ apps/cli/src/commands/pair/qr.ts | 158 ++ apps/cli/src/commands/pair/service.ts | 493 +++++ apps/cli/src/commands/pair/types.ts | 101 + 10 files changed, 2341 insertions(+), 2138 deletions(-) create mode 100644 apps/cli/src/commands/pair/AGENTS.md create mode 100644 apps/cli/src/commands/pair/command.ts create mode 100644 apps/cli/src/commands/pair/common.ts create mode 100644 apps/cli/src/commands/pair/persistence.ts create mode 100644 apps/cli/src/commands/pair/proxy.ts create mode 100644 apps/cli/src/commands/pair/qr.ts create mode 100644 apps/cli/src/commands/pair/service.ts create mode 100644 apps/cli/src/commands/pair/types.ts diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index b40ec70..849bb21 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -113,6 +113,7 @@ - `openclaw relay ws-test --peer ` must verify paired-peer selection plus connector websocket readiness using connector `/v1/status`, and return deterministic remediation when websocket connectivity is down. ## Pair Command Rules +- `pair.ts` must stay a thin public facade that re-exports pair APIs from `pair/*` modules. - `pair start ` must call proxy `/pair/start` with `Authorization: Claw ` and signed PoP headers from local agent `secret.key`. - `pair start` must rely on local Claw agent auth + PoP headers only; ownership is validated server-side via proxy-to-registry internal service auth. - `pair start --qr` must generate a one-time local PNG QR containing the returned ticket and print the filesystem path. 
diff --git a/apps/cli/src/commands/pair.ts b/apps/cli/src/commands/pair.ts index 5c52d12..a971243 100644 --- a/apps/cli/src/commands/pair.ts +++ b/apps/cli/src/commands/pair.ts @@ -1,2138 +1,19 @@ -import { randomBytes } from "node:crypto"; -import { - chmod, - mkdir, - readdir, - readFile, - unlink, - writeFile, -} from "node:fs/promises"; -import { dirname, join, resolve } from "node:path"; -import { decodeBase64url, parseDid } from "@clawdentity/protocol"; -import { - AppError, - createLogger, - nowUtcMs, - signHttpRequest, -} from "@clawdentity/sdk"; -import { Command } from "commander"; -import jsQR from "jsqr"; -import { PNG } from "pngjs"; -import QRCode from "qrcode"; -import { - type CliConfig, - getConfigDir, - resolveConfig, -} from "../config/manager.js"; -import { fetchRegistryMetadata } from "../config/registry-metadata.js"; -import { writeStdoutLine } from "../io.js"; -import { assertValidAgentName } from "./agent-name.js"; -import { withErrorHandling } from "./helpers.js"; - -const logger = createLogger({ service: "cli", module: "pair" }); - -const AGENTS_DIR_NAME = "agents"; -const AIT_FILE_NAME = "ait.jwt"; -const SECRET_KEY_FILE_NAME = "secret.key"; -const PAIRING_QR_DIR_NAME = "pairing"; -const PEERS_FILE_NAME = "peers.json"; -const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; - -const PAIR_START_PATH = "/pair/start"; -const PAIR_CONFIRM_PATH = "/pair/confirm"; -const PAIR_STATUS_PATH = "/pair/status"; -const NONCE_SIZE = 24; -const PAIRING_TICKET_PREFIX = "clwpair1_"; -const PAIRING_QR_MAX_AGE_SECONDS = 900; -const PAIRING_QR_FILENAME_PATTERN = /-pair-(\d+)\.png$/; -const FILE_MODE = 0o600; -const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; -const DEFAULT_STATUS_WAIT_SECONDS = 300; -const DEFAULT_STATUS_POLL_INTERVAL_SECONDS = 3; -const MAX_PROFILE_NAME_LENGTH = 64; - -export type PairStartOptions = { - ttlSeconds?: string; - qr?: boolean; - qrOutput?: string; - wait?: boolean; - waitSeconds?: string; - pollIntervalSeconds?: string; 
-}; - -export type PairConfirmOptions = { - qrFile?: string; - ticket?: string; -}; - -export type PairStatusOptions = { - ticket?: string; - wait?: boolean; - waitSeconds?: string; - pollIntervalSeconds?: string; -}; - -type PairRequestOptions = { - fetchImpl?: typeof fetch; - getConfigDirImpl?: typeof getConfigDir; - nowSecondsImpl?: () => number; - nonceFactoryImpl?: () => string; - readFileImpl?: typeof readFile; - writeFileImpl?: typeof writeFile; - chmodImpl?: typeof chmod; - mkdirImpl?: typeof mkdir; - readdirImpl?: typeof readdir; - unlinkImpl?: typeof unlink; - sleepImpl?: (ms: number) => Promise; - resolveConfigImpl?: () => Promise; - qrEncodeImpl?: (ticket: string) => Promise; - qrDecodeImpl?: (imageBytes: Uint8Array) => string; -}; - -type PairCommandDependencies = PairRequestOptions; - -type PairStartResult = { - initiatorAgentDid: string; - initiatorProfile: PeerProfile; - ticket: string; - expiresAt: string; - proxyUrl: string; - qrPath?: string; -}; - -type PairConfirmResult = { - paired: boolean; - initiatorAgentDid: string; - initiatorProfile: PeerProfile; - responderAgentDid: string; - responderProfile: PeerProfile; - proxyUrl: string; - peerAlias?: string; -}; - -type PairStatusResult = { - status: "pending" | "confirmed"; - initiatorAgentDid: string; - initiatorProfile: PeerProfile; - responderAgentDid?: string; - responderProfile?: PeerProfile; - expiresAt: string; - confirmedAt?: string; - proxyUrl: string; - peerAlias?: string; -}; - -type RegistryErrorEnvelope = { - error?: { - code?: string; - message?: string; - }; -}; - -type PeerEntry = { - did: string; - proxyUrl: string; - agentName?: string; - humanName?: string; -}; - -type PeersConfig = { - peers: Record; -}; - -type LocalAgentProofMaterial = { - ait: string; - secretKey: Uint8Array; -}; - -type PeerProfile = { - agentName: string; - humanName: string; - proxyOrigin?: string; -}; - -const isRecord = (value: unknown): value is Record => { - return typeof value === "object" && value 
!== null; -}; - -const nowUnixSeconds = (): number => Math.floor(nowUtcMs() / 1000); - -function createCliError(code: string, message: string): AppError { - return new AppError({ - code, - message, - status: 400, - }); -} - -function parseNonEmptyString(value: unknown): string { - if (typeof value !== "string") { - return ""; - } - - return value.trim(); -} - -function hasControlChars(value: string): boolean { - for (let index = 0; index < value.length; index += 1) { - const code = value.charCodeAt(index); - if (code <= 31 || code === 127) { - return true; - } - } - - return false; -} - -function parseProfileName( - value: unknown, - label: "agentName" | "humanName", -): string { - const candidate = parseNonEmptyString(value); - if (candidate.length === 0) { - throw createCliError( - "CLI_PAIR_PROFILE_INVALID", - `${label} is required for pairing`, - ); - } - - if (candidate.length > MAX_PROFILE_NAME_LENGTH) { - throw createCliError( - "CLI_PAIR_PROFILE_INVALID", - `${label} must be at most ${MAX_PROFILE_NAME_LENGTH} characters`, - ); - } - - if (hasControlChars(candidate)) { - throw createCliError( - "CLI_PAIR_PROFILE_INVALID", - `${label} contains control characters`, - ); - } - - return candidate; -} - -function parsePeerProfile(payload: unknown): PeerProfile { - if (!isRecord(payload)) { - throw createCliError( - "CLI_PAIR_PROFILE_INVALID", - "Pair profile must be an object", - ); - } - - const profile: PeerProfile = { - agentName: parseProfileName(payload.agentName, "agentName"), - humanName: parseProfileName(payload.humanName, "humanName"), - }; - - const proxyOrigin = parseNonEmptyString(payload.proxyOrigin); - if (proxyOrigin.length > 0) { - let parsedProxyOrigin: string; - try { - parsedProxyOrigin = new URL(parseProxyUrl(proxyOrigin)).origin; - } catch { - throw createCliError( - "CLI_PAIR_PROFILE_INVALID", - "proxyOrigin is invalid for pairing", - ); - } - profile.proxyOrigin = parsedProxyOrigin; - } - - return profile; -} - -function 
parsePairingTicket(value: unknown): string { - let ticket = parseNonEmptyString(value); - while (ticket.startsWith("`")) { - ticket = ticket.slice(1); - } - while (ticket.endsWith("`")) { - ticket = ticket.slice(0, -1); - } - ticket = ticket.trim().replace(/\s+/gu, ""); - - if (!ticket.startsWith(PAIRING_TICKET_PREFIX)) { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - const encodedPayload = ticket.slice(PAIRING_TICKET_PREFIX.length); - if (encodedPayload.length === 0) { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - try { - const payloadRaw = new TextDecoder().decode( - decodeBase64url(encodedPayload), - ); - const payload = JSON.parse(payloadRaw); - if (!isRecord(payload)) { - throw new Error("invalid payload"); - } - } catch { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - return ticket; -} - -function parsePairingTicketIssuerOrigin(ticket: string): string { - const normalizedTicket = parsePairingTicket(ticket); - const encodedPayload = normalizedTicket.slice(PAIRING_TICKET_PREFIX.length); - const payloadRaw = new TextDecoder().decode(decodeBase64url(encodedPayload)); - - let payload: unknown; - try { - payload = JSON.parse(payloadRaw); - } catch { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - if (!isRecord(payload) || typeof payload.iss !== "string") { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - let issuerUrl: URL; - try { - issuerUrl = new URL(payload.iss); - } catch { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - if (issuerUrl.protocol !== "https:" && issuerUrl.protocol !== "http:") { - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_INVALID", - "Pairing ticket is invalid", - ); - } - - return issuerUrl.origin; 
-} - -function assertTicketIssuerMatchesProxy(input: { - ticket: string; - proxyUrl: string; - context: "confirm" | "status"; -}): void { - const issuerOrigin = parsePairingTicketIssuerOrigin(input.ticket); - - let proxyOrigin: string; - try { - proxyOrigin = new URL(input.proxyUrl).origin; - } catch { - throw createCliError( - "CLI_PAIR_PROXY_URL_INVALID", - "Configured proxyUrl is invalid. Run `clawdentity config set proxyUrl ` and retry.", - ); - } - - if (issuerOrigin === proxyOrigin) { - return; - } - - const command = input.context === "confirm" ? "pair confirm" : "pair status"; - throw createCliError( - "CLI_PAIR_TICKET_ISSUER_MISMATCH", - `Pairing ticket was issued by ${issuerOrigin}, but current proxy URL is ${proxyOrigin}. Run \`clawdentity config set proxyUrl ${issuerOrigin}\` and retry \`${command}\`.`, - ); -} - -function parseAitAgentDid(ait: string): string { - const parts = ait.split("."); - if (parts.length < 2) { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - "Agent AIT is invalid. Recreate the agent before pairing.", - ); - } - - let payloadRaw: string; - try { - payloadRaw = new TextDecoder().decode(decodeBase64url(parts[1] ?? "")); - } catch { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - "Agent AIT is invalid. Recreate the agent before pairing.", - ); - } - - let payload: unknown; - try { - payload = JSON.parse(payloadRaw); - } catch { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - "Agent AIT is invalid. Recreate the agent before pairing.", - ); - } - - if (!isRecord(payload) || typeof payload.sub !== "string") { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - "Agent AIT is invalid. Recreate the agent before pairing.", - ); - } - - const candidate = payload.sub.trim(); - try { - const parsed = parseDid(candidate); - if (parsed.kind !== "agent") { - throw new Error("invalid kind"); - } - } catch { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - "Agent AIT is invalid. 
Recreate the agent before pairing.", - ); - } - - return candidate; -} - -function parsePeerAlias(value: string): string { - if (value.length === 0 || value.length > 128) { - throw createCliError( - "CLI_PAIR_PEER_ALIAS_INVALID", - "Generated peer alias is invalid", - ); - } - - if (!PEER_ALIAS_PATTERN.test(value)) { - throw createCliError( - "CLI_PAIR_PEER_ALIAS_INVALID", - "Generated peer alias is invalid", - ); - } - - return value; -} - -function derivePeerAliasBase(peerDid: string): string { - try { - const parsed = parseDid(peerDid); - if (parsed.kind === "agent") { - return parsePeerAlias(`peer-${parsed.ulid.slice(-8).toLowerCase()}`); - } - } catch { - // Fall through to generic alias. - } - - return "peer"; -} - -function resolvePeerAlias(input: { - peers: Record; - peerDid: string; -}): string { - for (const [alias, entry] of Object.entries(input.peers)) { - if (entry.did === input.peerDid) { - return alias; - } - } - - const baseAlias = derivePeerAliasBase(input.peerDid); - if (input.peers[baseAlias] === undefined) { - return baseAlias; - } - - let index = 2; - while (input.peers[`${baseAlias}-${index}`] !== undefined) { - index += 1; - } - - return `${baseAlias}-${index}`; -} - -function resolvePeersConfigPath(getConfigDirImpl: typeof getConfigDir): string { - return join(getConfigDirImpl(), PEERS_FILE_NAME); -} - -function parsePeerEntry(value: unknown): PeerEntry { - if (!isRecord(value)) { - throw createCliError( - "CLI_PAIR_PEERS_CONFIG_INVALID", - "Peer entry must be an object", - ); - } - - const did = parseNonEmptyString(value.did); - const proxyUrl = parseNonEmptyString(value.proxyUrl); - if (did.length === 0 || proxyUrl.length === 0) { - throw createCliError( - "CLI_PAIR_PEERS_CONFIG_INVALID", - "Peer entry is invalid", - ); - } - - const agentNameRaw = parseNonEmptyString(value.agentName); - const humanNameRaw = parseNonEmptyString(value.humanName); - - const entry: PeerEntry = { - did, - proxyUrl, - }; - if (agentNameRaw.length > 0) { - 
entry.agentName = parseProfileName(agentNameRaw, "agentName"); - } - if (humanNameRaw.length > 0) { - entry.humanName = parseProfileName(humanNameRaw, "humanName"); - } - return entry; -} - -async function loadPeersConfig(input: { - getConfigDirImpl: typeof getConfigDir; - readFileImpl: typeof readFile; -}): Promise { - const peersPath = resolvePeersConfigPath(input.getConfigDirImpl); - let raw: string; - try { - raw = await input.readFileImpl(peersPath, "utf8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return { peers: {} }; - } - - throw error; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - throw createCliError( - "CLI_PAIR_PEERS_CONFIG_INVALID", - "Peer config is not valid JSON", - ); - } - - if (!isRecord(parsed)) { - throw createCliError( - "CLI_PAIR_PEERS_CONFIG_INVALID", - "Peer config must be a JSON object", - ); - } - - if (parsed.peers === undefined) { - return { peers: {} }; - } - - if (!isRecord(parsed.peers)) { - throw createCliError( - "CLI_PAIR_PEERS_CONFIG_INVALID", - "Peer config peers field must be an object", - ); - } - - const peers: Record = {}; - for (const [alias, value] of Object.entries(parsed.peers)) { - peers[parsePeerAlias(alias)] = parsePeerEntry(value); - } - - return { peers }; -} - -async function savePeersConfig(input: { - config: PeersConfig; - getConfigDirImpl: typeof getConfigDir; - mkdirImpl: typeof mkdir; - writeFileImpl: typeof writeFile; - chmodImpl: typeof chmod; -}): Promise { - const peersPath = resolvePeersConfigPath(input.getConfigDirImpl); - await input.mkdirImpl(dirname(peersPath), { recursive: true }); - await input.writeFileImpl( - peersPath, - `${JSON.stringify(input.config, null, 2)}\n`, - "utf8", - ); - await input.chmodImpl(peersPath, FILE_MODE); -} - -function resolveRelayRuntimeConfigPath( - getConfigDirImpl: typeof getConfigDir, -): string { - return join(getConfigDirImpl(), OPENCLAW_RELAY_RUNTIME_FILE_NAME); 
-} - -async function loadRelayTransformPeersPath(input: { - getConfigDirImpl: typeof getConfigDir; - readFileImpl: typeof readFile; -}): Promise { - const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath( - input.getConfigDirImpl, - ); - let raw: string; - try { - raw = await input.readFileImpl(relayRuntimeConfigPath, "utf8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return undefined; - } - - logger.warn("cli.pair.relay_runtime_read_failed", { - relayRuntimeConfigPath, - reason: - error instanceof Error && error.message.length > 0 - ? error.message - : "unknown", - }); - return undefined; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - logger.warn("cli.pair.relay_runtime_invalid_json", { - relayRuntimeConfigPath, - }); - return undefined; - } - - if (!isRecord(parsed)) { - return undefined; - } - - const relayTransformPeersPath = parseNonEmptyString( - parsed.relayTransformPeersPath, - ); - if (relayTransformPeersPath.length === 0) { - return undefined; - } - - return resolve(relayTransformPeersPath); -} - -async function syncOpenclawRelayPeersSnapshot(input: { - config: PeersConfig; - getConfigDirImpl: typeof getConfigDir; - readFileImpl: typeof readFile; - mkdirImpl: typeof mkdir; - writeFileImpl: typeof writeFile; - chmodImpl: typeof chmod; -}): Promise { - const relayTransformPeersPath = await loadRelayTransformPeersPath({ - getConfigDirImpl: input.getConfigDirImpl, - readFileImpl: input.readFileImpl, - }); - if (relayTransformPeersPath === undefined) { - return; - } - - try { - await input.readFileImpl(relayTransformPeersPath, "utf8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return; - } - - logger.warn("cli.pair.relay_peers_snapshot_probe_failed", { - relayTransformPeersPath, - reason: - error instanceof Error && error.message.length > 0 - ? 
error.message - : "unknown", - }); - return; - } - - try { - await input.mkdirImpl(dirname(relayTransformPeersPath), { - recursive: true, - }); - await input.writeFileImpl( - relayTransformPeersPath, - `${JSON.stringify(input.config, null, 2)}\n`, - "utf8", - ); - await input.chmodImpl(relayTransformPeersPath, FILE_MODE); - } catch (error) { - logger.warn("cli.pair.relay_peers_snapshot_write_failed", { - relayTransformPeersPath, - reason: - error instanceof Error && error.message.length > 0 - ? error.message - : "unknown", - }); - } -} - -function parseTtlSeconds(value: string | undefined): number | undefined { - const raw = parseNonEmptyString(value); - if (raw.length === 0) { - return undefined; - } - - const parsed = Number.parseInt(raw, 10); - if (!Number.isInteger(parsed) || parsed < 1) { - throw createCliError( - "CLI_PAIR_START_INVALID_TTL", - "ttlSeconds must be a positive integer", - ); - } - - return parsed; -} - -function parsePositiveIntegerOption(input: { - value: string | undefined; - optionName: string; - defaultValue: number; -}): number { - const raw = parseNonEmptyString(input.value); - if (raw.length === 0) { - return input.defaultValue; - } - - const parsed = Number.parseInt(raw, 10); - if (!Number.isInteger(parsed) || parsed < 1) { - throw createCliError( - "CLI_PAIR_STATUS_WAIT_INVALID", - `${input.optionName} must be a positive integer`, - ); - } - - return parsed; -} - -function resolveLocalPairProfile(input: { - config: CliConfig; - agentName: string; - proxyUrl?: string; -}): PeerProfile { - const humanName = parseNonEmptyString(input.config.humanName); - if (humanName.length === 0) { - throw createCliError( - "CLI_PAIR_HUMAN_NAME_MISSING", - "Human name is missing. 
Run `clawdentity invite redeem --display-name ` or `clawdentity config set humanName `.", - ); - } - - const profile: PeerProfile = { - agentName: parseProfileName(input.agentName, "agentName"), - humanName: parseProfileName(humanName, "humanName"), - }; - const proxyUrl = parseNonEmptyString(input.proxyUrl); - if (proxyUrl.length > 0) { - profile.proxyOrigin = new URL(parseProxyUrl(proxyUrl)).origin; - } - return profile; -} - -function normalizeProxyOrigin(candidate: string): string { - return new URL(parseProxyUrl(candidate)).origin; -} - -function resolvePeerProxyUrl(input: { - ticket: string; - peerProfile: PeerProfile; - peerProxyOrigin?: string; -}): string { - const configuredPeerOrigin = parseNonEmptyString(input.peerProxyOrigin); - const profilePeerOrigin = parseNonEmptyString(input.peerProfile.proxyOrigin); - const fallbackPeerOrigin = parsePairingTicketIssuerOrigin(input.ticket); - const peerOrigin = - configuredPeerOrigin.length > 0 - ? configuredPeerOrigin - : profilePeerOrigin.length > 0 - ? 
profilePeerOrigin - : fallbackPeerOrigin; - - return new URL( - "/hooks/agent", - `${normalizeProxyOrigin(peerOrigin)}/`, - ).toString(); -} - -function toIssuerProxyUrl(ticket: string): string { - return parseProxyUrl(parsePairingTicketIssuerOrigin(ticket)); -} - -function toIssuerProxyRequestUrl(ticket: string, path: string): string { - return toProxyRequestUrl(toIssuerProxyUrl(ticket), path); -} - -function toPeerProxyOriginFromStatus(input: { - callerAgentDid: string; - initiatorAgentDid: string; - responderAgentDid: string; - initiatorProfile: PeerProfile; - responderProfile?: PeerProfile; -}): string | undefined { - if (input.callerAgentDid === input.initiatorAgentDid) { - return input.responderProfile?.proxyOrigin; - } - - if (input.callerAgentDid === input.responderAgentDid) { - return input.initiatorProfile.proxyOrigin; - } - - return undefined; -} - -function toPeerProxyOriginFromConfirm(input: { - ticket: string; - initiatorProfile: PeerProfile; -}): string { - const initiatorOrigin = parseNonEmptyString( - input.initiatorProfile.proxyOrigin, - ); - if (initiatorOrigin.length > 0) { - return initiatorOrigin; - } - return parsePairingTicketIssuerOrigin(input.ticket); -} - -function toResponderProfile(input: { - config: CliConfig; - agentName: string; - localProxyUrl: string; -}): PeerProfile { - return resolveLocalPairProfile({ - config: input.config, - agentName: input.agentName, - proxyUrl: input.localProxyUrl, - }); -} - -function parseProxyUrl(candidate: string): string { - try { - const parsed = new URL(candidate); - if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { - throw new Error("invalid protocol"); - } - - return parsed.toString(); - } catch { - throw createCliError("CLI_PAIR_INVALID_PROXY_URL", "Proxy URL is invalid"); - } -} - -async function resolveProxyUrl(input: { - config: CliConfig; - fetchImpl: typeof fetch; -}): Promise { - const fromEnv = parseNonEmptyString(process.env.CLAWDENTITY_PROXY_URL); - if (fromEnv.length > 
0) { - return parseProxyUrl(fromEnv); - } - - const metadata = await fetchRegistryMetadata(input.config.registryUrl, { - fetchImpl: input.fetchImpl, - }); - const metadataProxyUrl = parseProxyUrl(metadata.proxyUrl); - - const configuredProxyUrl = parseNonEmptyString(input.config.proxyUrl); - if (configuredProxyUrl.length === 0) { - return metadataProxyUrl; - } - - const normalizedConfiguredProxyUrl = parseProxyUrl(configuredProxyUrl); - if (normalizedConfiguredProxyUrl === metadataProxyUrl) { - return metadataProxyUrl; - } - - throw createCliError( - "CLI_PAIR_PROXY_URL_MISMATCH", - `Configured proxy URL does not match registry metadata. config=${normalizedConfiguredProxyUrl} metadata=${metadataProxyUrl}. Rerun onboarding invite redeem to refresh config.`, - ); -} - -function toProxyRequestUrl(proxyUrl: string, path: string): string { - const normalizedBase = proxyUrl.endsWith("/") ? proxyUrl : `${proxyUrl}/`; - return new URL(path.slice(1), normalizedBase).toString(); -} - -function toPathWithQuery(url: string): string { - const parsed = new URL(url); - return `${parsed.pathname}${parsed.search}`; -} - -function extractErrorCode(payload: unknown): string | undefined { - if (!isRecord(payload)) { - return undefined; - } - - const envelope = payload as RegistryErrorEnvelope; - if (!envelope.error || typeof envelope.error.code !== "string") { - return undefined; - } - - const code = envelope.error.code.trim(); - return code.length > 0 ? code : undefined; -} - -function extractErrorMessage(payload: unknown): string | undefined { - if (!isRecord(payload)) { - return undefined; - } - - const envelope = payload as RegistryErrorEnvelope; - if (!envelope.error || typeof envelope.error.message !== "string") { - return undefined; - } - - const message = envelope.error.message.trim(); - return message.length > 0 ? 
message : undefined; -} - -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - -async function executePairRequest(input: { - fetchImpl: typeof fetch; - init: RequestInit; - url: string; -}): Promise { - try { - return await input.fetchImpl(input.url, input.init); - } catch { - throw createCliError( - "CLI_PAIR_REQUEST_FAILED", - "Unable to connect to proxy URL. Check network access and proxyUrl.", - ); - } -} - -function mapStartPairError(status: number, payload: unknown): string { - const code = extractErrorCode(payload); - const message = extractErrorMessage(payload); - - if (code === "PROXY_PAIR_OWNERSHIP_FORBIDDEN" || status === 403) { - return message - ? `Initiator agent ownership check failed (403): ${message}` - : "Initiator agent ownership check failed (403)."; - } - - if (status === 400) { - return message - ? `Pair start request is invalid (400): ${message}` - : "Pair start request is invalid (400)."; - } - - if (status >= 500) { - return `Proxy pairing service is unavailable (${status}).`; - } - - if (message) { - return `Pair start failed (${status}): ${message}`; - } - - return `Pair start failed (${status})`; -} - -function mapConfirmPairError(status: number, payload: unknown): string { - const code = extractErrorCode(payload); - const message = extractErrorMessage(payload); - - if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { - return "Pairing ticket is invalid or expired"; - } - - if (code === "PROXY_PAIR_TICKET_EXPIRED" || status === 410) { - return "Pairing ticket has expired"; - } - - if (code === "PROXY_PAIR_TICKET_INVALID_ISSUER") { - return message - ? `Pair confirm failed: ticket issuer does not match this proxy (${message}). Use the same proxy URL where the ticket was issued.` - : "Pair confirm failed: ticket issuer does not match this proxy. 
Use the same proxy URL where the ticket was issued."; - } - - if ( - code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || - code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" - ) { - return message - ? `Pair confirm request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` - : "Pair confirm request is invalid (400): pairing ticket is malformed. Re-copy the full ticket/QR without truncation."; - } - - if (status === 400) { - return message - ? `Pair confirm request is invalid (400): ${message}` - : "Pair confirm request is invalid (400)."; - } - - if (status >= 500) { - return `Proxy pairing service is unavailable (${status}).`; - } - - if (message) { - return `Pair confirm failed (${status}): ${message}`; - } - - return `Pair confirm failed (${status})`; -} - -function mapStatusPairError(status: number, payload: unknown): string { - const code = extractErrorCode(payload); - const message = extractErrorMessage(payload); - - if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { - return "Pairing ticket not found"; - } - - if (code === "PROXY_PAIR_TICKET_EXPIRED" || status === 410) { - return "Pairing ticket has expired"; - } - - if (code === "PROXY_PAIR_STATUS_FORBIDDEN" || status === 403) { - return message - ? `Pair status request is forbidden (403): ${message}` - : "Pair status request is forbidden (403)."; - } - - if (code === "PROXY_PAIR_TICKET_INVALID_ISSUER") { - return message - ? `Pair status failed: ticket issuer does not match this proxy (${message}). Use the same proxy URL where the ticket was issued.` - : "Pair status failed: ticket issuer does not match this proxy. Use the same proxy URL where the ticket was issued."; - } - - if ( - code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || - code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" - ) { - return message - ? `Pair status request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` - : "Pair status request is invalid (400): pairing ticket is malformed. 
Re-copy the full ticket/QR without truncation."; - } - - if (status === 400) { - return message - ? `Pair status request is invalid (400): ${message}` - : "Pair status request is invalid (400)."; - } - - if (status >= 500) { - return `Proxy pairing service is unavailable (${status}).`; - } - - if (message) { - return `Pair status failed (${status}): ${message}`; - } - - return `Pair status failed (${status})`; -} - -function parsePairStartResponse( - payload: unknown, -): Omit { - if (!isRecord(payload)) { - throw createCliError( - "CLI_PAIR_START_INVALID_RESPONSE", - "Pair start response is invalid", - ); - } - - const ticket = parsePairingTicket(payload.ticket); - const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); - const expiresAt = parseNonEmptyString(payload.expiresAt); - let initiatorProfile: PeerProfile; - - if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { - throw createCliError( - "CLI_PAIR_START_INVALID_RESPONSE", - "Pair start response is invalid", - ); - } - try { - initiatorProfile = parsePeerProfile(payload.initiatorProfile); - } catch { - throw createCliError( - "CLI_PAIR_START_INVALID_RESPONSE", - "Pair start response is invalid", - ); - } - - return { - ticket, - initiatorAgentDid, - initiatorProfile, - expiresAt, - }; -} - -function parsePairConfirmResponse( - payload: unknown, -): Omit { - if (!isRecord(payload)) { - throw createCliError( - "CLI_PAIR_CONFIRM_INVALID_RESPONSE", - "Pair confirm response is invalid", - ); - } - - const paired = payload.paired === true; - const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); - const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); - let initiatorProfile: PeerProfile; - let responderProfile: PeerProfile; - - if ( - !paired || - initiatorAgentDid.length === 0 || - responderAgentDid.length === 0 - ) { - throw createCliError( - "CLI_PAIR_CONFIRM_INVALID_RESPONSE", - "Pair confirm response is invalid", - ); - } - try { - 
initiatorProfile = parsePeerProfile(payload.initiatorProfile); - responderProfile = parsePeerProfile(payload.responderProfile); - } catch { - throw createCliError( - "CLI_PAIR_CONFIRM_INVALID_RESPONSE", - "Pair confirm response is invalid", - ); - } - - return { - paired, - initiatorAgentDid, - responderAgentDid, - initiatorProfile, - responderProfile, - }; -} - -function parsePairStatusResponse( - payload: unknown, -): Omit { - if (!isRecord(payload)) { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - const statusRaw = parseNonEmptyString(payload.status); - if (statusRaw !== "pending" && statusRaw !== "confirmed") { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); - const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); - const expiresAt = parseNonEmptyString(payload.expiresAt); - const confirmedAt = parseNonEmptyString(payload.confirmedAt); - let initiatorProfile: PeerProfile; - - if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - if (statusRaw === "confirmed" && responderAgentDid.length === 0) { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - try { - initiatorProfile = parsePeerProfile(payload.initiatorProfile); - } catch { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - let responderProfile: PeerProfile | undefined; - if (payload.responderProfile !== undefined) { - try { - responderProfile = parsePeerProfile(payload.responderProfile); - } catch { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - } - if (statusRaw === "confirmed" && 
responderProfile === undefined) { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - return { - status: statusRaw, - initiatorAgentDid, - initiatorProfile, - responderAgentDid: - responderAgentDid.length > 0 ? responderAgentDid : undefined, - responderProfile, - expiresAt, - confirmedAt: confirmedAt.length > 0 ? confirmedAt : undefined, - }; -} - -async function readAgentProofMaterial( - agentName: string, - dependencies: PairRequestOptions, -): Promise { - const readFileImpl = dependencies.readFileImpl ?? readFile; - const getConfigDirImpl = dependencies.getConfigDirImpl ?? getConfigDir; - const normalizedAgentName = assertValidAgentName(agentName); - - const agentDir = join( - getConfigDirImpl(), - AGENTS_DIR_NAME, - normalizedAgentName, - ); - const aitPath = join(agentDir, AIT_FILE_NAME); - const secretKeyPath = join(agentDir, SECRET_KEY_FILE_NAME); - - let ait: string; - try { - ait = (await readFileImpl(aitPath, "utf-8")).trim(); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - `Agent "${normalizedAgentName}" is missing ${AIT_FILE_NAME}. Run agent create first.`, - ); - } - - throw error; - } - - if (ait.length === 0) { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - `Agent "${normalizedAgentName}" has an empty ${AIT_FILE_NAME}`, - ); - } - - let encodedSecretKey: string; - try { - encodedSecretKey = (await readFileImpl(secretKeyPath, "utf-8")).trim(); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - `Agent "${normalizedAgentName}" is missing ${SECRET_KEY_FILE_NAME}. 
Run agent create first.`, - ); - } - - throw error; - } - - if (encodedSecretKey.length === 0) { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - `Agent "${normalizedAgentName}" has an empty ${SECRET_KEY_FILE_NAME}`, - ); - } - - let secretKey: Uint8Array; - try { - secretKey = decodeBase64url(encodedSecretKey); - } catch { - throw createCliError( - "CLI_PAIR_AGENT_NOT_FOUND", - `Agent "${normalizedAgentName}" has invalid ${SECRET_KEY_FILE_NAME}`, - ); - } - - return { - ait, - secretKey, - }; -} - -async function buildSignedHeaders(input: { - bodyBytes?: Uint8Array; - method: string; - requestUrl: string; - secretKey: Uint8Array; - timestampSeconds: number; - nonce: string; -}): Promise> { - const signed = await signHttpRequest({ - method: input.method, - pathWithQuery: toPathWithQuery(input.requestUrl), - timestamp: String(input.timestampSeconds), - nonce: input.nonce, - body: input.bodyBytes, - secretKey: input.secretKey, - }); - - return signed.headers; -} - -async function encodeTicketQrPng(ticket: string): Promise { - const buffer = await QRCode.toBuffer(ticket, { - type: "png", - width: 512, - margin: 2, - errorCorrectionLevel: "M", - }); - return new Uint8Array(buffer); -} - -function decodeTicketFromPng(imageBytes: Uint8Array): string { - let decodedPng: PNG; - try { - decodedPng = PNG.sync.read(Buffer.from(imageBytes)); - } catch { - throw createCliError( - "CLI_PAIR_CONFIRM_QR_FILE_INVALID", - "QR image file is invalid or unsupported", - ); - } - - const imageData = new Uint8ClampedArray( - decodedPng.data.buffer, - decodedPng.data.byteOffset, - decodedPng.data.byteLength, - ); - - const decoded = jsQR(imageData, decodedPng.width, decodedPng.height); - if (!decoded || parseNonEmptyString(decoded.data).length === 0) { - throw createCliError( - "CLI_PAIR_CONFIRM_QR_NOT_FOUND", - "No pairing QR code was found in the image", - ); - } - - return parsePairingTicket(decoded.data); -} - -async function persistPairingQr(input: { - agentName: string; - 
qrOutput: string | undefined; - ticket: string; - dependencies: PairRequestOptions; - nowSeconds: number; -}): Promise { - const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; - const readdirImpl = input.dependencies.readdirImpl ?? readdir; - const unlinkImpl = input.dependencies.unlinkImpl ?? unlink; - const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; - const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; - const qrEncodeImpl = input.dependencies.qrEncodeImpl ?? encodeTicketQrPng; - - const baseDir = join(getConfigDirImpl(), PAIRING_QR_DIR_NAME); - const outputPath = parseNonEmptyString(input.qrOutput) - ? resolve(input.qrOutput ?? "") - : join( - baseDir, - `${assertValidAgentName(input.agentName)}-pair-${input.nowSeconds}.png`, - ); - - const existingFiles = await readdirImpl(baseDir).catch((error) => { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return [] as string[]; - } - - throw error; - }); - for (const fileName of existingFiles) { - if (typeof fileName !== "string") { - continue; - } - - const match = PAIRING_QR_FILENAME_PATTERN.exec(fileName); - if (!match) { - continue; - } - - const issuedAtSeconds = Number.parseInt(match[1] ?? 
"", 10); - if (!Number.isInteger(issuedAtSeconds)) { - continue; - } - - if (issuedAtSeconds + PAIRING_QR_MAX_AGE_SECONDS > input.nowSeconds) { - continue; - } - - const stalePath = join(baseDir, fileName); - await unlinkImpl(stalePath).catch((error) => { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return; - } - - throw error; - }); - } - - await mkdirImpl(dirname(outputPath), { recursive: true }); - const imageBytes = await qrEncodeImpl(input.ticket); - await writeFileImpl(outputPath, imageBytes); - - return outputPath; -} - -function resolveConfirmTicketSource(options: PairConfirmOptions): { - ticket: string; - source: "ticket" | "qr-file"; - qrFilePath?: string; -} { - const inlineTicket = parseNonEmptyString(options.ticket); - const qrFile = parseNonEmptyString(options.qrFile); - - if (inlineTicket.length > 0 && qrFile.length > 0) { - throw createCliError( - "CLI_PAIR_CONFIRM_INPUT_CONFLICT", - "Provide either --ticket or --qr-file, not both", - ); - } - - if (inlineTicket.length > 0) { - return { - ticket: parsePairingTicket(inlineTicket), - source: "ticket", - }; - } - - if (qrFile.length > 0) { - return { - ticket: "", - source: "qr-file", - qrFilePath: resolve(qrFile), - }; - } - - throw createCliError( - "CLI_PAIR_CONFIRM_TICKET_REQUIRED", - "Pairing ticket is required. Pass --ticket or --qr-file .", - ); -} - -async function persistPairedPeer(input: { - ticket: string; - peerDid: string; - peerProfile: PeerProfile; - peerProxyOrigin?: string; - dependencies: PairRequestOptions; -}): Promise { - const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; - const readFileImpl = input.dependencies.readFileImpl ?? readFile; - const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; - const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; - const chmodImpl = input.dependencies.chmodImpl ?? 
chmod; - - const peerProxyUrl = resolvePeerProxyUrl({ - ticket: input.ticket, - peerProfile: input.peerProfile, - peerProxyOrigin: input.peerProxyOrigin, - }); - const peersConfig = await loadPeersConfig({ - getConfigDirImpl, - readFileImpl, - }); - const alias = resolvePeerAlias({ - peers: peersConfig.peers, - peerDid: input.peerDid, - }); - peersConfig.peers[alias] = { - did: input.peerDid, - proxyUrl: peerProxyUrl, - agentName: input.peerProfile.agentName, - humanName: input.peerProfile.humanName, - }; - await savePeersConfig({ - config: peersConfig, - getConfigDirImpl, - mkdirImpl, - writeFileImpl, - chmodImpl, - }); - await syncOpenclawRelayPeersSnapshot({ - config: peersConfig, - getConfigDirImpl, - readFileImpl, - mkdirImpl, - writeFileImpl, - chmodImpl, - }); - - return alias; -} - -export async function startPairing( - agentName: string, - options: PairStartOptions, - dependencies: PairRequestOptions = {}, -): Promise { - const fetchImpl = dependencies.fetchImpl ?? fetch; - const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; - const nonceFactoryImpl = - dependencies.nonceFactoryImpl ?? 
- (() => randomBytes(NONCE_SIZE).toString("base64url")); - - const ttlSeconds = parseTtlSeconds(options.ttlSeconds); - const config = await resolveConfigImpl(); - const proxyUrl = await resolveProxyUrl({ - config, - fetchImpl, - }); - const normalizedAgentName = assertValidAgentName(agentName); - const initiatorProfile = resolveLocalPairProfile({ - config, - agentName: normalizedAgentName, - proxyUrl, - }); - - const { ait, secretKey } = await readAgentProofMaterial( - normalizedAgentName, - dependencies, - ); - - const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_START_PATH); - const requestBody = JSON.stringify({ - ttlSeconds, - initiatorProfile, - }); - const bodyBytes = new TextEncoder().encode(requestBody); - - const timestampSeconds = nowSecondsImpl(); - const nonce = nonceFactoryImpl(); - const signedHeaders = await buildSignedHeaders({ - method: "POST", - requestUrl, - bodyBytes, - secretKey, - timestampSeconds, - nonce, - }); - - const response = await executePairRequest({ - fetchImpl, - url: requestUrl, - init: { - method: "POST", - headers: { - authorization: `Claw ${ait}`, - "content-type": "application/json", - ...signedHeaders, - }, - body: requestBody, - }, - }); - - const responseBody = await parseJsonResponse(response); - - if (!response.ok) { - throw createCliError( - "CLI_PAIR_START_FAILED", - mapStartPairError(response.status, responseBody), - ); - } - - const parsed = parsePairStartResponse(responseBody); - const result: PairStartResult = { - ...parsed, - proxyUrl, - }; - - if (options.qr === true) { - result.qrPath = await persistPairingQr({ - agentName, - qrOutput: options.qrOutput, - ticket: parsed.ticket, - dependencies, - nowSeconds: timestampSeconds, - }); - } - - return result; -} - -export async function confirmPairing( - agentName: string, - options: PairConfirmOptions, - dependencies: PairRequestOptions = {}, -): Promise { - const fetchImpl = dependencies.fetchImpl ?? 
fetch; - const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; - const nonceFactoryImpl = - dependencies.nonceFactoryImpl ?? - (() => randomBytes(NONCE_SIZE).toString("base64url")); - const readFileImpl = dependencies.readFileImpl ?? readFile; - const qrDecodeImpl = dependencies.qrDecodeImpl ?? decodeTicketFromPng; - const config = await resolveConfigImpl(); - const normalizedAgentName = assertValidAgentName(agentName); - const localProxyUrl = await resolveProxyUrl({ - config, - fetchImpl, - }); - const responderProfile = toResponderProfile({ - config, - agentName: normalizedAgentName, - localProxyUrl, - }); - - const ticketSource = resolveConfirmTicketSource(options); - - let ticket = ticketSource.ticket; - if (ticketSource.source === "qr-file") { - if (!ticketSource.qrFilePath) { - throw createCliError( - "CLI_PAIR_CONFIRM_QR_FILE_REQUIRED", - "QR file path is required", - ); - } - - let imageBytes: Uint8Array; - try { - imageBytes = await readFileImpl(ticketSource.qrFilePath); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw createCliError( - "CLI_PAIR_CONFIRM_QR_FILE_NOT_FOUND", - `QR file not found: ${ticketSource.qrFilePath}`, - ); - } - - throw error; - } - - ticket = parsePairingTicket(qrDecodeImpl(new Uint8Array(imageBytes))); - } - ticket = parsePairingTicket(ticket); - const proxyUrl = toIssuerProxyUrl(ticket); - - const { ait, secretKey } = await readAgentProofMaterial( - normalizedAgentName, - dependencies, - ); - - const requestUrl = toIssuerProxyRequestUrl(ticket, PAIR_CONFIRM_PATH); - const requestBody = JSON.stringify({ - ticket, - responderProfile, - }); - const bodyBytes = new TextEncoder().encode(requestBody); - - const timestampSeconds = nowSecondsImpl(); - const nonce = nonceFactoryImpl(); - const signedHeaders = await buildSignedHeaders({ - method: "POST", - requestUrl, - bodyBytes, - 
secretKey, - timestampSeconds, - nonce, - }); - - const response = await executePairRequest({ - fetchImpl, - url: requestUrl, - init: { - method: "POST", - headers: { - authorization: `Claw ${ait}`, - "content-type": "application/json", - ...signedHeaders, - }, - body: requestBody, - }, - }); - - const responseBody = await parseJsonResponse(response); - - if (!response.ok) { - throw createCliError( - "CLI_PAIR_CONFIRM_FAILED", - mapConfirmPairError(response.status, responseBody), - ); - } - - const parsed = parsePairConfirmResponse(responseBody); - const peerProxyOrigin = toPeerProxyOriginFromConfirm({ - ticket, - initiatorProfile: parsed.initiatorProfile, - }); - const peerAlias = await persistPairedPeer({ - ticket, - peerDid: parsed.initiatorAgentDid, - peerProfile: parsed.initiatorProfile, - peerProxyOrigin, - dependencies, - }); - - if (ticketSource.source === "qr-file" && ticketSource.qrFilePath) { - const unlinkImpl = dependencies.unlinkImpl ?? unlink; - await unlinkImpl(ticketSource.qrFilePath).catch((error) => { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return; - } - - logger.warn("cli.pair.confirm.qr_cleanup_failed", { - path: ticketSource.qrFilePath, - reason: - error instanceof Error && error.message.length > 0 - ? error.message - : "unknown", - }); - }); - } - - return { - ...parsed, - proxyUrl, - peerAlias, - }; -} - -async function getPairingStatusOnce( - agentName: string, - options: { ticket: string }, - dependencies: PairRequestOptions = {}, -): Promise { - const fetchImpl = dependencies.fetchImpl ?? fetch; - const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; - const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; - const nonceFactoryImpl = - dependencies.nonceFactoryImpl ?? 
- (() => randomBytes(NONCE_SIZE).toString("base64url")); - const config = await resolveConfigImpl(); - const proxyUrl = await resolveProxyUrl({ - config, - fetchImpl, - }); - - const ticket = parsePairingTicket(options.ticket); - assertTicketIssuerMatchesProxy({ - ticket, - proxyUrl, - context: "status", - }); - const { ait, secretKey } = await readAgentProofMaterial( - agentName, - dependencies, - ); - const callerAgentDid = parseAitAgentDid(ait); - - const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_STATUS_PATH); - const requestBody = JSON.stringify({ ticket }); - const bodyBytes = new TextEncoder().encode(requestBody); - const timestampSeconds = nowSecondsImpl(); - const nonce = nonceFactoryImpl(); - const signedHeaders = await buildSignedHeaders({ - method: "POST", - requestUrl, - bodyBytes, - secretKey, - timestampSeconds, - nonce, - }); - - const response = await executePairRequest({ - fetchImpl, - url: requestUrl, - init: { - method: "POST", - headers: { - authorization: `Claw ${ait}`, - "content-type": "application/json", - ...signedHeaders, - }, - body: requestBody, - }, - }); - const responseBody = await parseJsonResponse(response); - if (!response.ok) { - throw createCliError( - "CLI_PAIR_STATUS_FAILED", - mapStatusPairError(response.status, responseBody), - ); - } - - const parsed = parsePairStatusResponse(responseBody); - let peerAlias: string | undefined; - if (parsed.status === "confirmed") { - const responderAgentDid = parsed.responderAgentDid; - if (!responderAgentDid) { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - const peerDid = - callerAgentDid === parsed.initiatorAgentDid - ? responderAgentDid - : callerAgentDid === responderAgentDid - ? parsed.initiatorAgentDid - : undefined; - const peerProfile = - callerAgentDid === parsed.initiatorAgentDid - ? parsed.responderProfile - : callerAgentDid === responderAgentDid - ? 
parsed.initiatorProfile - : undefined; - if (!peerDid) { - throw createCliError( - "CLI_PAIR_STATUS_FORBIDDEN", - "Local agent is not a participant in the pairing ticket", - ); - } - if (!peerProfile) { - throw createCliError( - "CLI_PAIR_STATUS_INVALID_RESPONSE", - "Pair status response is invalid", - ); - } - - peerAlias = await persistPairedPeer({ - ticket, - peerDid, - peerProfile, - peerProxyOrigin: toPeerProxyOriginFromStatus({ - callerAgentDid, - initiatorAgentDid: parsed.initiatorAgentDid, - responderAgentDid, - initiatorProfile: parsed.initiatorProfile, - responderProfile: parsed.responderProfile, - }), - dependencies, - }); - } - - return { - ...parsed, - proxyUrl, - peerAlias, - }; -} - -async function waitForPairingStatus(input: { - agentName: string; - ticket: string; - waitSeconds: number; - pollIntervalSeconds: number; - dependencies: PairRequestOptions; -}): Promise { - const nowSecondsImpl = input.dependencies.nowSecondsImpl ?? nowUnixSeconds; - const sleepImpl = - input.dependencies.sleepImpl ?? 
- (async (ms: number) => { - await new Promise((resolve) => { - setTimeout(resolve, ms); - }); - }); - - const deadlineSeconds = nowSecondsImpl() + input.waitSeconds; - while (true) { - const status = await getPairingStatusOnce( - input.agentName, - { ticket: input.ticket }, - input.dependencies, - ); - - if (status.status === "confirmed") { - return status; - } - - const nowSeconds = nowSecondsImpl(); - if (nowSeconds >= deadlineSeconds) { - throw createCliError( - "CLI_PAIR_STATUS_WAIT_TIMEOUT", - `Pairing is still pending after ${input.waitSeconds} seconds`, - ); - } - - const remainingSeconds = Math.max(0, deadlineSeconds - nowSeconds); - const sleepSeconds = Math.min(input.pollIntervalSeconds, remainingSeconds); - await sleepImpl(sleepSeconds * 1000); - } -} - -export async function getPairingStatus( - agentName: string, - options: PairStatusOptions, - dependencies: PairRequestOptions = {}, -): Promise { - const ticketRaw = parseNonEmptyString(options.ticket); - if (ticketRaw.length === 0) { - throw createCliError( - "CLI_PAIR_STATUS_TICKET_REQUIRED", - "Pair status requires --ticket ", - ); - } - const ticket = parsePairingTicket(ticketRaw); - - if (options.wait !== true) { - return getPairingStatusOnce(agentName, { ticket }, dependencies); - } - - const waitSeconds = parsePositiveIntegerOption({ - value: options.waitSeconds, - optionName: "waitSeconds", - defaultValue: DEFAULT_STATUS_WAIT_SECONDS, - }); - const pollIntervalSeconds = parsePositiveIntegerOption({ - value: options.pollIntervalSeconds, - optionName: "pollIntervalSeconds", - defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, - }); - - return waitForPairingStatus({ - agentName, - ticket, - waitSeconds, - pollIntervalSeconds, - dependencies, - }); -} - -export const createPairCommand = ( - dependencies: PairCommandDependencies = {}, -): Command => { - const pairCommand = new Command("pair").description( - "Manage proxy trust pairing between agents", - ); - - pairCommand - .command("start ") - 
.description("Start pairing and issue one-time pairing ticket") - .option("--ttl-seconds ", "Pairing ticket expiry in seconds") - .option("--qr", "Generate a local QR file for sharing") - .option("--qr-output ", "Write QR PNG to a specific file path") - .option( - "--wait", - "Wait for responder confirmation and auto-save peer on initiator", - ) - .option( - "--wait-seconds ", - "Max seconds to poll for confirmation (default: 300)", - ) - .option( - "--poll-interval-seconds ", - "Polling interval in seconds while waiting (default: 3)", - ) - .action( - withErrorHandling( - "pair start", - async (agentName: string, options: PairStartOptions) => { - const result = await startPairing(agentName, options, dependencies); - - logger.info("cli.pair_started", { - initiatorAgentDid: result.initiatorAgentDid, - proxyUrl: result.proxyUrl, - expiresAt: result.expiresAt, - qrPath: result.qrPath, - }); - - writeStdoutLine("Pairing ticket created"); - writeStdoutLine(`Ticket: ${result.ticket}`); - writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); - writeStdoutLine( - `Initiator Agent Name: ${result.initiatorProfile.agentName}`, - ); - writeStdoutLine( - `Initiator Human Name: ${result.initiatorProfile.humanName}`, - ); - writeStdoutLine(`Expires At: ${result.expiresAt}`); - if (result.qrPath) { - writeStdoutLine(`QR File: ${result.qrPath}`); - } - - if (options.wait === true) { - const waitSeconds = parsePositiveIntegerOption({ - value: options.waitSeconds, - optionName: "waitSeconds", - defaultValue: DEFAULT_STATUS_WAIT_SECONDS, - }); - const pollIntervalSeconds = parsePositiveIntegerOption({ - value: options.pollIntervalSeconds, - optionName: "pollIntervalSeconds", - defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, - }); - - writeStdoutLine( - `Waiting for confirmation (timeout=${waitSeconds}s, interval=${pollIntervalSeconds}s) ...`, - ); - - const status = await waitForPairingStatus({ - agentName, - ticket: result.ticket, - waitSeconds, - 
pollIntervalSeconds, - dependencies, - }); - - logger.info("cli.pair_status_confirmed_after_start", { - initiatorAgentDid: status.initiatorAgentDid, - responderAgentDid: status.responderAgentDid, - peerAlias: status.peerAlias, - }); - - writeStdoutLine("Pairing confirmed"); - writeStdoutLine(`Status: ${status.status}`); - if (status.initiatorAgentDid) { - writeStdoutLine( - `Initiator Agent DID: ${status.initiatorAgentDid}`, - ); - } - if (status.responderAgentDid) { - writeStdoutLine( - `Responder Agent DID: ${status.responderAgentDid}`, - ); - } - if (status.responderProfile) { - writeStdoutLine( - `Responder Agent Name: ${status.responderProfile.agentName}`, - ); - writeStdoutLine( - `Responder Human Name: ${status.responderProfile.humanName}`, - ); - } - if (status.peerAlias) { - writeStdoutLine(`Peer alias saved: ${status.peerAlias}`); - } - } - }, - ), - ); - - pairCommand - .command("confirm ") - .description("Confirm pairing using one-time pairing ticket") - .option("--ticket ", "One-time pairing ticket (clwpair1_...)") - .option("--qr-file ", "Path to pairing QR PNG file") - .action( - withErrorHandling( - "pair confirm", - async (agentName: string, options: PairConfirmOptions) => { - const result = await confirmPairing(agentName, options, dependencies); - - logger.info("cli.pair_confirmed", { - initiatorAgentDid: result.initiatorAgentDid, - responderAgentDid: result.responderAgentDid, - proxyUrl: result.proxyUrl, - peerAlias: result.peerAlias, - }); - - writeStdoutLine("Pairing confirmed"); - writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); - writeStdoutLine( - `Initiator Agent Name: ${result.initiatorProfile.agentName}`, - ); - writeStdoutLine( - `Initiator Human Name: ${result.initiatorProfile.humanName}`, - ); - writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); - writeStdoutLine( - `Responder Agent Name: ${result.responderProfile.agentName}`, - ); - writeStdoutLine( - `Responder Human Name: 
${result.responderProfile.humanName}`, - ); - writeStdoutLine(`Paired: ${result.paired ? "true" : "false"}`); - if (result.peerAlias) { - writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); - } - }, - ), - ); - - pairCommand - .command("status ") - .description("Check pairing ticket status and sync local peer on confirm") - .option("--ticket ", "One-time pairing ticket (clwpair1_...)") - .option("--wait", "Poll until ticket is confirmed or timeout is reached") - .option( - "--wait-seconds ", - "Max seconds to poll for confirmation (default: 300)", - ) - .option( - "--poll-interval-seconds ", - "Polling interval in seconds while waiting (default: 3)", - ) - .action( - withErrorHandling( - "pair status", - async (agentName: string, options: PairStatusOptions) => { - const result = await getPairingStatus( - agentName, - options, - dependencies, - ); - - logger.info("cli.pair_status", { - initiatorAgentDid: result.initiatorAgentDid, - responderAgentDid: result.responderAgentDid, - status: result.status, - proxyUrl: result.proxyUrl, - peerAlias: result.peerAlias, - }); - - writeStdoutLine(`Status: ${result.status}`); - writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); - writeStdoutLine( - `Initiator Agent Name: ${result.initiatorProfile.agentName}`, - ); - writeStdoutLine( - `Initiator Human Name: ${result.initiatorProfile.humanName}`, - ); - if (result.responderAgentDid) { - writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); - } - if (result.responderProfile) { - writeStdoutLine( - `Responder Agent Name: ${result.responderProfile.agentName}`, - ); - writeStdoutLine( - `Responder Human Name: ${result.responderProfile.humanName}`, - ); - } - writeStdoutLine(`Expires At: ${result.expiresAt}`); - if (result.confirmedAt) { - writeStdoutLine(`Confirmed At: ${result.confirmedAt}`); - } - if (result.peerAlias) { - writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); - } - }, - ), - ); - - return pairCommand; -}; +export { 
createPairCommand } from "./pair/command.js"; +export { + confirmPairing, + getPairingStatus, + startPairing, +} from "./pair/service.js"; +export type { + PairCommandDependencies, + PairConfirmOptions, + PairConfirmResult, + PairRequestOptions, + PairStartOptions, + PairStartResult, + PairStatusOptions, + PairStatusResult, + PeerEntry, + PeerProfile, + PeersConfig, +} from "./pair/types.js"; diff --git a/apps/cli/src/commands/pair/AGENTS.md b/apps/cli/src/commands/pair/AGENTS.md new file mode 100644 index 0000000..1c6d791 --- /dev/null +++ b/apps/cli/src/commands/pair/AGENTS.md @@ -0,0 +1,29 @@ +# AGENTS.md (pair command modules) + +## Purpose +- Keep pairing command code modular, testable, and behavior-stable. +- Preserve CLI output/error behavior and existing pair test expectations. + +## Module Boundaries +- `types.ts`: shared pair option/result/dependency contracts only. +- `common.ts`: pair constants, logger, validation/parsing helpers, and shared pure utilities. +- `proxy.ts`: proxy URL resolution, signed request helpers, API response parsing, and proxy error mapping. +- `persistence.ts`: peer map persistence and OpenClaw relay peer-snapshot sync. +- `qr.ts`: QR encode/decode, stale QR cleanup, and ticket-source resolution. +- `service.ts`: start/confirm/status orchestration. +- `command.ts`: Commander wiring and stdout formatting only. +- `../pair.ts`: thin public facade and stable exports. + +## Guardrails +- Keep each source file under 800 LOC. +- Avoid circular imports between `common.ts`, `proxy.ts`, `persistence.ts`, `qr.ts`, and `service.ts`. +- Keep pair error codes/messages stable; tests rely on deterministic behavior. +- Keep command stdout wording/order stable unless tests and scope require a change. +- Prefer helper reuse over duplicating ticket/profile/proxy parsing logic. + +## Change Workflow +- Add/update pair tests in `apps/cli/src/commands/pair.test.ts` with behavior changes. 
+- Run validation before handoff: + - `pnpm -C apps/cli typecheck` + - `pnpm -C apps/cli test -- pair` + - `pnpm lint` diff --git a/apps/cli/src/commands/pair/command.ts b/apps/cli/src/commands/pair/command.ts new file mode 100644 index 0000000..5c7da42 --- /dev/null +++ b/apps/cli/src/commands/pair/command.ts @@ -0,0 +1,236 @@ +import { Command } from "commander"; +import { writeStdoutLine } from "../../io.js"; +import { withErrorHandling } from "../helpers.js"; +import { + DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + DEFAULT_STATUS_WAIT_SECONDS, + logger, + parsePositiveIntegerOption, +} from "./common.js"; +import { + confirmPairing, + getPairingStatus, + startPairing, + waitForPairingStatus, +} from "./service.js"; +import type { + PairCommandDependencies, + PairConfirmOptions, + PairStartOptions, + PairStatusOptions, +} from "./types.js"; + +export const createPairCommand = ( + dependencies: PairCommandDependencies = {}, +): Command => { + const pairCommand = new Command("pair").description( + "Manage proxy trust pairing between agents", + ); + + pairCommand + .command("start ") + .description("Start pairing and issue one-time pairing ticket") + .option("--ttl-seconds ", "Pairing ticket expiry in seconds") + .option("--qr", "Generate a local QR file for sharing") + .option("--qr-output ", "Write QR PNG to a specific file path") + .option( + "--wait", + "Wait for responder confirmation and auto-save peer on initiator", + ) + .option( + "--wait-seconds ", + "Max seconds to poll for confirmation (default: 300)", + ) + .option( + "--poll-interval-seconds ", + "Polling interval in seconds while waiting (default: 3)", + ) + .action( + withErrorHandling( + "pair start", + async (agentName: string, options: PairStartOptions) => { + const result = await startPairing(agentName, options, dependencies); + + logger.info("cli.pair_started", { + initiatorAgentDid: result.initiatorAgentDid, + proxyUrl: result.proxyUrl, + expiresAt: result.expiresAt, + qrPath: result.qrPath, + }); 
+ + writeStdoutLine("Pairing ticket created"); + writeStdoutLine(`Ticket: ${result.ticket}`); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); + writeStdoutLine(`Expires At: ${result.expiresAt}`); + if (result.qrPath) { + writeStdoutLine(`QR File: ${result.qrPath}`); + } + + if (options.wait === true) { + const waitSeconds = parsePositiveIntegerOption({ + value: options.waitSeconds, + optionName: "waitSeconds", + defaultValue: DEFAULT_STATUS_WAIT_SECONDS, + }); + const pollIntervalSeconds = parsePositiveIntegerOption({ + value: options.pollIntervalSeconds, + optionName: "pollIntervalSeconds", + defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + }); + + writeStdoutLine( + `Waiting for confirmation (timeout=${waitSeconds}s, interval=${pollIntervalSeconds}s) ...`, + ); + + const status = await waitForPairingStatus({ + agentName, + ticket: result.ticket, + waitSeconds, + pollIntervalSeconds, + dependencies, + }); + + logger.info("cli.pair_status_confirmed_after_start", { + initiatorAgentDid: status.initiatorAgentDid, + responderAgentDid: status.responderAgentDid, + peerAlias: status.peerAlias, + }); + + writeStdoutLine("Pairing confirmed"); + writeStdoutLine(`Status: ${status.status}`); + if (status.initiatorAgentDid) { + writeStdoutLine( + `Initiator Agent DID: ${status.initiatorAgentDid}`, + ); + } + if (status.responderAgentDid) { + writeStdoutLine( + `Responder Agent DID: ${status.responderAgentDid}`, + ); + } + if (status.responderProfile) { + writeStdoutLine( + `Responder Agent Name: ${status.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${status.responderProfile.humanName}`, + ); + } + if (status.peerAlias) { + writeStdoutLine(`Peer alias saved: ${status.peerAlias}`); + } + } + }, + ), + ); + + pairCommand + .command("confirm ") + 
.description("Confirm pairing using one-time pairing ticket") + .option("--ticket ", "One-time pairing ticket (clwpair1_...)") + .option("--qr-file ", "Path to pairing QR PNG file") + .action( + withErrorHandling( + "pair confirm", + async (agentName: string, options: PairConfirmOptions) => { + const result = await confirmPairing(agentName, options, dependencies); + + logger.info("cli.pair_confirmed", { + initiatorAgentDid: result.initiatorAgentDid, + responderAgentDid: result.responderAgentDid, + proxyUrl: result.proxyUrl, + peerAlias: result.peerAlias, + }); + + writeStdoutLine("Pairing confirmed"); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); + writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); + writeStdoutLine( + `Responder Agent Name: ${result.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${result.responderProfile.humanName}`, + ); + writeStdoutLine(`Paired: ${result.paired ? 
"true" : "false"}`); + if (result.peerAlias) { + writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); + } + }, + ), + ); + + pairCommand + .command("status ") + .description("Check pairing ticket status and sync local peer on confirm") + .option("--ticket ", "One-time pairing ticket (clwpair1_...)") + .option("--wait", "Poll until ticket is confirmed or timeout is reached") + .option( + "--wait-seconds ", + "Max seconds to poll for confirmation (default: 300)", + ) + .option( + "--poll-interval-seconds ", + "Polling interval in seconds while waiting (default: 3)", + ) + .action( + withErrorHandling( + "pair status", + async (agentName: string, options: PairStatusOptions) => { + const result = await getPairingStatus( + agentName, + options, + dependencies, + ); + + logger.info("cli.pair_status", { + initiatorAgentDid: result.initiatorAgentDid, + responderAgentDid: result.responderAgentDid, + status: result.status, + proxyUrl: result.proxyUrl, + peerAlias: result.peerAlias, + }); + + writeStdoutLine(`Status: ${result.status}`); + writeStdoutLine(`Initiator Agent DID: ${result.initiatorAgentDid}`); + writeStdoutLine( + `Initiator Agent Name: ${result.initiatorProfile.agentName}`, + ); + writeStdoutLine( + `Initiator Human Name: ${result.initiatorProfile.humanName}`, + ); + if (result.responderAgentDid) { + writeStdoutLine(`Responder Agent DID: ${result.responderAgentDid}`); + } + if (result.responderProfile) { + writeStdoutLine( + `Responder Agent Name: ${result.responderProfile.agentName}`, + ); + writeStdoutLine( + `Responder Human Name: ${result.responderProfile.humanName}`, + ); + } + writeStdoutLine(`Expires At: ${result.expiresAt}`); + if (result.confirmedAt) { + writeStdoutLine(`Confirmed At: ${result.confirmedAt}`); + } + if (result.peerAlias) { + writeStdoutLine(`Peer alias saved: ${result.peerAlias}`); + } + }, + ), + ); + + return pairCommand; +}; diff --git a/apps/cli/src/commands/pair/common.ts b/apps/cli/src/commands/pair/common.ts new file mode 
100644 index 0000000..efd2b42 --- /dev/null +++ b/apps/cli/src/commands/pair/common.ts @@ -0,0 +1,502 @@ +import { decodeBase64url, parseDid } from "@clawdentity/protocol"; +import { AppError, createLogger, nowUtcMs } from "@clawdentity/sdk"; +import type { CliConfig } from "../../config/manager.js"; +import type { PeerEntry, PeerProfile } from "./types.js"; + +export const logger = createLogger({ service: "cli", module: "pair" }); + +export const AGENTS_DIR_NAME = "agents"; +export const AIT_FILE_NAME = "ait.jwt"; +export const SECRET_KEY_FILE_NAME = "secret.key"; +export const PAIRING_QR_DIR_NAME = "pairing"; +export const PEERS_FILE_NAME = "peers.json"; +export const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; + +export const PAIR_START_PATH = "/pair/start"; +export const PAIR_CONFIRM_PATH = "/pair/confirm"; +export const PAIR_STATUS_PATH = "/pair/status"; +export const NONCE_SIZE = 24; + +export const PAIRING_TICKET_PREFIX = "clwpair1_"; +export const PAIRING_QR_MAX_AGE_SECONDS = 900; +export const PAIRING_QR_FILENAME_PATTERN = /-pair-(\d+)\.png$/; +export const FILE_MODE = 0o600; +export const PEER_ALIAS_PATTERN = /^[a-zA-Z0-9._-]+$/; +export const DEFAULT_STATUS_WAIT_SECONDS = 300; +export const DEFAULT_STATUS_POLL_INTERVAL_SECONDS = 3; +const MAX_PROFILE_NAME_LENGTH = 64; + +const textDecoder = new TextDecoder(); + +export const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +export const nowUnixSeconds = (): number => Math.floor(nowUtcMs() / 1000); + +export function createCliError(code: string, message: string): AppError { + return new AppError({ + code, + message, + status: 400, + }); +} + +export function parseNonEmptyString(value: unknown): string { + if (typeof value !== "string") { + return ""; + } + + return value.trim(); +} + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); 
+ if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +export function parseProfileName( + value: unknown, + label: "agentName" | "humanName", +): string { + const candidate = parseNonEmptyString(value); + if (candidate.length === 0) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + `${label} is required for pairing`, + ); + } + + if (candidate.length > MAX_PROFILE_NAME_LENGTH) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + `${label} must be at most ${MAX_PROFILE_NAME_LENGTH} characters`, + ); + } + + if (hasControlChars(candidate)) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + `${label} contains control characters`, + ); + } + + return candidate; +} + +export function parseProxyUrl(candidate: string): string { + try { + const parsed = new URL(candidate); + if (parsed.protocol !== "https:" && parsed.protocol !== "http:") { + throw new Error("invalid protocol"); + } + + return parsed.toString(); + } catch { + throw createCliError("CLI_PAIR_INVALID_PROXY_URL", "Proxy URL is invalid"); + } +} + +export function parsePeerProfile(payload: unknown): PeerProfile { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + "Pair profile must be an object", + ); + } + + const profile: PeerProfile = { + agentName: parseProfileName(payload.agentName, "agentName"), + humanName: parseProfileName(payload.humanName, "humanName"), + }; + + const proxyOrigin = parseNonEmptyString(payload.proxyOrigin); + if (proxyOrigin.length > 0) { + let parsedProxyOrigin: string; + try { + parsedProxyOrigin = new URL(parseProxyUrl(proxyOrigin)).origin; + } catch { + throw createCliError( + "CLI_PAIR_PROFILE_INVALID", + "proxyOrigin is invalid for pairing", + ); + } + profile.proxyOrigin = parsedProxyOrigin; + } + + return profile; +} + +export function parsePairingTicket(value: unknown): string { + let ticket = parseNonEmptyString(value); + while (ticket.startsWith("`")) { + ticket = ticket.slice(1); + } + while 
(ticket.endsWith("`")) { + ticket = ticket.slice(0, -1); + } + ticket = ticket.trim().replace(/\s+/gu, ""); + + if (!ticket.startsWith(PAIRING_TICKET_PREFIX)) { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + const encodedPayload = ticket.slice(PAIRING_TICKET_PREFIX.length); + if (encodedPayload.length === 0) { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + try { + const payloadRaw = textDecoder.decode(decodeBase64url(encodedPayload)); + const payload = JSON.parse(payloadRaw); + if (!isRecord(payload)) { + throw new Error("invalid payload"); + } + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + return ticket; +} + +export function parsePairingTicketIssuerOrigin(ticket: string): string { + const normalizedTicket = parsePairingTicket(ticket); + const encodedPayload = normalizedTicket.slice(PAIRING_TICKET_PREFIX.length); + const payloadRaw = textDecoder.decode(decodeBase64url(encodedPayload)); + + let payload: unknown; + try { + payload = JSON.parse(payloadRaw); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + if (!isRecord(payload) || typeof payload.iss !== "string") { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + let issuerUrl: URL; + try { + issuerUrl = new URL(payload.iss); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + if (issuerUrl.protocol !== "https:" && issuerUrl.protocol !== "http:") { + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_INVALID", + "Pairing ticket is invalid", + ); + } + + return issuerUrl.origin; +} + +export function assertTicketIssuerMatchesProxy(input: { + ticket: string; + proxyUrl: string; + context: "confirm" | "status"; +}): void { + const issuerOrigin = 
parsePairingTicketIssuerOrigin(input.ticket); + + let proxyOrigin: string; + try { + proxyOrigin = new URL(input.proxyUrl).origin; + } catch { + throw createCliError( + "CLI_PAIR_PROXY_URL_INVALID", + "Configured proxyUrl is invalid. Run `clawdentity config set proxyUrl ` and retry.", + ); + } + + if (issuerOrigin === proxyOrigin) { + return; + } + + const command = input.context === "confirm" ? "pair confirm" : "pair status"; + throw createCliError( + "CLI_PAIR_TICKET_ISSUER_MISMATCH", + `Pairing ticket was issued by ${issuerOrigin}, but current proxy URL is ${proxyOrigin}. Run \`clawdentity config set proxyUrl ${issuerOrigin}\` and retry \`${command}\`.`, + ); +} + +export function parseAitAgentDid(ait: string): string { + const parts = ait.split("."); + if (parts.length < 2) { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + let payloadRaw: string; + try { + payloadRaw = textDecoder.decode(decodeBase64url(parts[1] ?? "")); + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + let payload: unknown; + try { + payload = JSON.parse(payloadRaw); + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + if (!isRecord(payload) || typeof payload.sub !== "string") { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. Recreate the agent before pairing.", + ); + } + + const candidate = payload.sub.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + throw new Error("invalid kind"); + } + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + "Agent AIT is invalid. 
Recreate the agent before pairing.", + ); + } + + return candidate; +} + +export function parsePeerAlias(value: string): string { + if (value.length === 0 || value.length > 128) { + throw createCliError( + "CLI_PAIR_PEER_ALIAS_INVALID", + "Generated peer alias is invalid", + ); + } + + if (!PEER_ALIAS_PATTERN.test(value)) { + throw createCliError( + "CLI_PAIR_PEER_ALIAS_INVALID", + "Generated peer alias is invalid", + ); + } + + return value; +} + +export function derivePeerAliasBase(peerDid: string): string { + try { + const parsed = parseDid(peerDid); + if (parsed.kind === "agent") { + return parsePeerAlias(`peer-${parsed.ulid.slice(-8).toLowerCase()}`); + } + } catch { + // Fall through to generic alias. + } + + return "peer"; +} + +export function parseTtlSeconds(value: string | undefined): number | undefined { + const raw = parseNonEmptyString(value); + if (raw.length === 0) { + return undefined; + } + + const parsed = Number.parseInt(raw, 10); + if (!Number.isInteger(parsed) || parsed < 1) { + throw createCliError( + "CLI_PAIR_START_INVALID_TTL", + "ttlSeconds must be a positive integer", + ); + } + + return parsed; +} + +export function parsePositiveIntegerOption(input: { + value: string | undefined; + optionName: string; + defaultValue: number; +}): number { + const raw = parseNonEmptyString(input.value); + if (raw.length === 0) { + return input.defaultValue; + } + + const parsed = Number.parseInt(raw, 10); + if (!Number.isInteger(parsed) || parsed < 1) { + throw createCliError( + "CLI_PAIR_STATUS_WAIT_INVALID", + `${input.optionName} must be a positive integer`, + ); + } + + return parsed; +} + +export function resolveLocalPairProfile(input: { + config: CliConfig; + agentName: string; + proxyUrl?: string; +}): PeerProfile { + const humanName = parseNonEmptyString(input.config.humanName); + if (humanName.length === 0) { + throw createCliError( + "CLI_PAIR_HUMAN_NAME_MISSING", + "Human name is missing. 
Run `clawdentity invite redeem --display-name ` or `clawdentity config set humanName `.", + ); + } + + const profile: PeerProfile = { + agentName: parseProfileName(input.agentName, "agentName"), + humanName: parseProfileName(humanName, "humanName"), + }; + + const proxyUrl = parseNonEmptyString(input.proxyUrl); + if (proxyUrl.length > 0) { + profile.proxyOrigin = new URL(parseProxyUrl(proxyUrl)).origin; + } + + return profile; +} + +function normalizeProxyOrigin(candidate: string): string { + return new URL(parseProxyUrl(candidate)).origin; +} + +export function resolvePeerProxyUrl(input: { + ticket: string; + peerProfile: PeerProfile; + peerProxyOrigin?: string; +}): string { + const configuredPeerOrigin = parseNonEmptyString(input.peerProxyOrigin); + const profilePeerOrigin = parseNonEmptyString(input.peerProfile.proxyOrigin); + const fallbackPeerOrigin = parsePairingTicketIssuerOrigin(input.ticket); + const peerOrigin = + configuredPeerOrigin.length > 0 + ? configuredPeerOrigin + : profilePeerOrigin.length > 0 + ? 
profilePeerOrigin + : fallbackPeerOrigin; + + return new URL( + "/hooks/agent", + `${normalizeProxyOrigin(peerOrigin)}/`, + ).toString(); +} + +export function toPeerProxyOriginFromStatus(input: { + callerAgentDid: string; + initiatorAgentDid: string; + responderAgentDid: string; + initiatorProfile: PeerProfile; + responderProfile?: PeerProfile; +}): string | undefined { + if (input.callerAgentDid === input.initiatorAgentDid) { + return input.responderProfile?.proxyOrigin; + } + + if (input.callerAgentDid === input.responderAgentDid) { + return input.initiatorProfile.proxyOrigin; + } + + return undefined; +} + +export function toPeerProxyOriginFromConfirm(input: { + ticket: string; + initiatorProfile: PeerProfile; +}): string { + const initiatorOrigin = parseNonEmptyString( + input.initiatorProfile.proxyOrigin, + ); + if (initiatorOrigin.length > 0) { + return initiatorOrigin; + } + + return parsePairingTicketIssuerOrigin(input.ticket); +} + +export function toResponderProfile(input: { + config: CliConfig; + agentName: string; + localProxyUrl: string; +}): PeerProfile { + return resolveLocalPairProfile({ + config: input.config, + agentName: input.agentName, + proxyUrl: input.localProxyUrl, + }); +} + +export function toPathWithQuery(url: string): string { + const parsed = new URL(url); + return `${parsed.pathname}${parsed.search}`; +} + +export function parsePeerEntry(value: unknown): PeerEntry { + if (!isRecord(value)) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer entry must be an object", + ); + } + + const did = parseNonEmptyString(value.did); + const proxyUrl = parseNonEmptyString(value.proxyUrl); + if (did.length === 0 || proxyUrl.length === 0) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer entry is invalid", + ); + } + + const agentNameRaw = parseNonEmptyString(value.agentName); + const humanNameRaw = parseNonEmptyString(value.humanName); + + const entry: PeerEntry = { + did, + proxyUrl, + }; + + if 
(agentNameRaw.length > 0) { + entry.agentName = parseProfileName(agentNameRaw, "agentName"); + } + + if (humanNameRaw.length > 0) { + entry.humanName = parseProfileName(humanNameRaw, "humanName"); + } + + return entry; +} diff --git a/apps/cli/src/commands/pair/persistence.ts b/apps/cli/src/commands/pair/persistence.ts new file mode 100644 index 0000000..9b396df --- /dev/null +++ b/apps/cli/src/commands/pair/persistence.ts @@ -0,0 +1,290 @@ +import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import { dirname, join, resolve } from "node:path"; +import { getConfigDir } from "../../config/manager.js"; +import { + createCliError, + derivePeerAliasBase, + FILE_MODE, + logger, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + PEERS_FILE_NAME, + parseNonEmptyString, + parsePeerAlias, + parsePeerEntry, + resolvePeerProxyUrl, +} from "./common.js"; +import type { + PairRequestOptions, + PeerEntry, + PeerProfile, + PeersConfig, +} from "./types.js"; + +function resolvePeersConfigPath(getConfigDirImpl: typeof getConfigDir): string { + return join(getConfigDirImpl(), PEERS_FILE_NAME); +} + +async function loadPeersConfig(input: { + getConfigDirImpl: typeof getConfigDir; + readFileImpl: typeof readFile; +}): Promise { + const peersPath = resolvePeersConfigPath(input.getConfigDirImpl); + let raw: string; + + try { + raw = await input.readFileImpl(peersPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return { peers: {} }; + } + + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer config is not valid JSON", + ); + } + + if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer config must be a JSON object", + ); + } + + const parsedRecord = parsed as Record; + if (parsedRecord.peers === undefined) { + return 
{ peers: {} }; + } + + if ( + typeof parsedRecord.peers !== "object" || + parsedRecord.peers === null || + Array.isArray(parsedRecord.peers) + ) { + throw createCliError( + "CLI_PAIR_PEERS_CONFIG_INVALID", + "Peer config peers field must be an object", + ); + } + + const peers: Record = {}; + for (const [alias, value] of Object.entries(parsedRecord.peers)) { + peers[parsePeerAlias(alias)] = parsePeerEntry(value); + } + + return { peers }; +} + +async function savePeersConfig(input: { + config: PeersConfig; + getConfigDirImpl: typeof getConfigDir; + mkdirImpl: typeof mkdir; + writeFileImpl: typeof writeFile; + chmodImpl: typeof chmod; +}): Promise { + const peersPath = resolvePeersConfigPath(input.getConfigDirImpl); + await input.mkdirImpl(dirname(peersPath), { recursive: true }); + await input.writeFileImpl( + peersPath, + `${JSON.stringify(input.config, null, 2)}\n`, + "utf8", + ); + await input.chmodImpl(peersPath, FILE_MODE); +} + +function resolveRelayRuntimeConfigPath( + getConfigDirImpl: typeof getConfigDir, +): string { + return join(getConfigDirImpl(), OPENCLAW_RELAY_RUNTIME_FILE_NAME); +} + +async function loadRelayTransformPeersPath(input: { + getConfigDirImpl: typeof getConfigDir; + readFileImpl: typeof readFile; +}): Promise { + const relayRuntimeConfigPath = resolveRelayRuntimeConfigPath( + input.getConfigDirImpl, + ); + let raw: string; + + try { + raw = await input.readFileImpl(relayRuntimeConfigPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return undefined; + } + + logger.warn("cli.pair.relay_runtime_read_failed", { + relayRuntimeConfigPath, + reason: + error instanceof Error && error.message.length > 0 + ? 
error.message + : "unknown", + }); + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + logger.warn("cli.pair.relay_runtime_invalid_json", { + relayRuntimeConfigPath, + }); + return undefined; + } + + if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { + return undefined; + } + + const relayTransformPeersPath = parseNonEmptyString( + (parsed as Record).relayTransformPeersPath, + ); + if (relayTransformPeersPath.length === 0) { + return undefined; + } + + return resolve(relayTransformPeersPath); +} + +async function syncOpenclawRelayPeersSnapshot(input: { + config: PeersConfig; + getConfigDirImpl: typeof getConfigDir; + readFileImpl: typeof readFile; + mkdirImpl: typeof mkdir; + writeFileImpl: typeof writeFile; + chmodImpl: typeof chmod; +}): Promise { + const relayTransformPeersPath = await loadRelayTransformPeersPath({ + getConfigDirImpl: input.getConfigDirImpl, + readFileImpl: input.readFileImpl, + }); + if (relayTransformPeersPath === undefined) { + return; + } + + try { + await input.readFileImpl(relayTransformPeersPath, "utf8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + logger.warn("cli.pair.relay_peers_snapshot_probe_failed", { + relayTransformPeersPath, + reason: + error instanceof Error && error.message.length > 0 + ? error.message + : "unknown", + }); + return; + } + + try { + await input.mkdirImpl(dirname(relayTransformPeersPath), { + recursive: true, + }); + await input.writeFileImpl( + relayTransformPeersPath, + `${JSON.stringify(input.config, null, 2)}\n`, + "utf8", + ); + await input.chmodImpl(relayTransformPeersPath, FILE_MODE); + } catch (error) { + logger.warn("cli.pair.relay_peers_snapshot_write_failed", { + relayTransformPeersPath, + reason: + error instanceof Error && error.message.length > 0 + ? 
error.message + : "unknown", + }); + } +} + +function resolvePeerAlias(input: { + peers: Record; + peerDid: string; +}): string { + for (const [alias, entry] of Object.entries(input.peers)) { + if (entry.did === input.peerDid) { + return alias; + } + } + + const baseAlias = derivePeerAliasBase(input.peerDid); + if (input.peers[baseAlias] === undefined) { + return baseAlias; + } + + let index = 2; + while (input.peers[`${baseAlias}-${index}`] !== undefined) { + index += 1; + } + + return `${baseAlias}-${index}`; +} + +export async function persistPairedPeer(input: { + ticket: string; + peerDid: string; + peerProfile: PeerProfile; + peerProxyOrigin?: string; + dependencies: PairRequestOptions; +}): Promise { + const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; + const readFileImpl = input.dependencies.readFileImpl ?? readFile; + const mkdirImpl = input.dependencies.mkdirImpl ?? mkdir; + const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; + const chmodImpl = input.dependencies.chmodImpl ?? 
chmod; + + const peerProxyUrl = resolvePeerProxyUrl({ + ticket: input.ticket, + peerProfile: input.peerProfile, + peerProxyOrigin: input.peerProxyOrigin, + }); + const peersConfig = await loadPeersConfig({ + getConfigDirImpl, + readFileImpl, + }); + const alias = resolvePeerAlias({ + peers: peersConfig.peers, + peerDid: input.peerDid, + }); + + peersConfig.peers[alias] = { + did: input.peerDid, + proxyUrl: peerProxyUrl, + agentName: input.peerProfile.agentName, + humanName: input.peerProfile.humanName, + }; + + await savePeersConfig({ + config: peersConfig, + getConfigDirImpl, + mkdirImpl, + writeFileImpl, + chmodImpl, + }); + + await syncOpenclawRelayPeersSnapshot({ + config: peersConfig, + getConfigDirImpl, + readFileImpl, + mkdirImpl, + writeFileImpl, + chmodImpl, + }); + + return alias; +} diff --git a/apps/cli/src/commands/pair/proxy.ts b/apps/cli/src/commands/pair/proxy.ts new file mode 100644 index 0000000..26a9f5a --- /dev/null +++ b/apps/cli/src/commands/pair/proxy.ts @@ -0,0 +1,512 @@ +import { readFile } from "node:fs/promises"; +import { join } from "node:path"; +import { decodeBase64url } from "@clawdentity/protocol"; +import { signHttpRequest } from "@clawdentity/sdk"; +import { + type CliConfig, + getConfigDir, + resolveConfig, +} from "../../config/manager.js"; +import { fetchRegistryMetadata } from "../../config/registry-metadata.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { + AGENTS_DIR_NAME, + AIT_FILE_NAME, + createCliError, + isRecord, + parseNonEmptyString, + parsePairingTicket, + parsePairingTicketIssuerOrigin, + parsePeerProfile, + parseProxyUrl, + SECRET_KEY_FILE_NAME, + toPathWithQuery, +} from "./common.js"; +import type { + LocalAgentProofMaterial, + PairConfirmResult, + PairRequestOptions, + PairStartResult, + PairStatusResult, + RegistryErrorEnvelope, +} from "./types.js"; + +export async function resolveProxyUrl(input: { + config: CliConfig; + fetchImpl: typeof fetch; +}): Promise { + const fromEnv = 
parseNonEmptyString(process.env.CLAWDENTITY_PROXY_URL); + if (fromEnv.length > 0) { + return parseProxyUrl(fromEnv); + } + + const metadata = await fetchRegistryMetadata(input.config.registryUrl, { + fetchImpl: input.fetchImpl, + }); + const metadataProxyUrl = parseProxyUrl(metadata.proxyUrl); + + const configuredProxyUrl = parseNonEmptyString(input.config.proxyUrl); + if (configuredProxyUrl.length === 0) { + return metadataProxyUrl; + } + + const normalizedConfiguredProxyUrl = parseProxyUrl(configuredProxyUrl); + if (normalizedConfiguredProxyUrl === metadataProxyUrl) { + return metadataProxyUrl; + } + + throw createCliError( + "CLI_PAIR_PROXY_URL_MISMATCH", + `Configured proxy URL does not match registry metadata. config=${normalizedConfiguredProxyUrl} metadata=${metadataProxyUrl}. Rerun onboarding invite redeem to refresh config.`, + ); +} + +export function toProxyRequestUrl(proxyUrl: string, path: string): string { + const normalizedBase = proxyUrl.endsWith("/") ? proxyUrl : `${proxyUrl}/`; + return new URL(path.slice(1), normalizedBase).toString(); +} + +export function toIssuerProxyUrl(ticket: string): string { + return parseProxyUrl(parsePairingTicketIssuerOrigin(ticket)); +} + +export function toIssuerProxyRequestUrl(ticket: string, path: string): string { + return toProxyRequestUrl(toIssuerProxyUrl(ticket), path); +} + +function extractErrorCode(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.code !== "string") { + return undefined; + } + + const code = envelope.error.code.trim(); + return code.length > 0 ? 
code : undefined; +} + +function extractErrorMessage(payload: unknown): string | undefined { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const message = envelope.error.message.trim(); + return message.length > 0 ? message : undefined; +} + +export async function parseJsonResponse(response: Response): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} + +export async function executePairRequest(input: { + fetchImpl: typeof fetch; + init: RequestInit; + url: string; +}): Promise { + try { + return await input.fetchImpl(input.url, input.init); + } catch { + throw createCliError( + "CLI_PAIR_REQUEST_FAILED", + "Unable to connect to proxy URL. Check network access and proxyUrl.", + ); + } +} + +export function mapStartPairError(status: number, payload: unknown): string { + const code = extractErrorCode(payload); + const message = extractErrorMessage(payload); + + if (code === "PROXY_PAIR_OWNERSHIP_FORBIDDEN" || status === 403) { + return message + ? `Initiator agent ownership check failed (403): ${message}` + : "Initiator agent ownership check failed (403)."; + } + + if (status === 400) { + return message + ? 
`Pair start request is invalid (400): ${message}` + : "Pair start request is invalid (400)."; + } + + if (status >= 500) { + return `Proxy pairing service is unavailable (${status}).`; + } + + if (message) { + return `Pair start failed (${status}): ${message}`; + } + + return `Pair start failed (${status})`; +} + +export function mapConfirmPairError(status: number, payload: unknown): string { + const code = extractErrorCode(payload); + const message = extractErrorMessage(payload); + + if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { + return "Pairing ticket is invalid or expired"; + } + + if (code === "PROXY_PAIR_TICKET_EXPIRED" || status === 410) { + return "Pairing ticket has expired"; + } + + if (code === "PROXY_PAIR_TICKET_INVALID_ISSUER") { + return message + ? `Pair confirm failed: ticket issuer does not match this proxy (${message}). Use the same proxy URL where the ticket was issued.` + : "Pair confirm failed: ticket issuer does not match this proxy. Use the same proxy URL where the ticket was issued."; + } + + if ( + code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || + code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" + ) { + return message + ? `Pair confirm request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` + : "Pair confirm request is invalid (400): pairing ticket is malformed. Re-copy the full ticket/QR without truncation."; + } + + if (status === 400) { + return message + ? 
`Pair confirm request is invalid (400): ${message}` + : "Pair confirm request is invalid (400)."; + } + + if (status >= 500) { + return `Proxy pairing service is unavailable (${status}).`; + } + + if (message) { + return `Pair confirm failed (${status}): ${message}`; + } + + return `Pair confirm failed (${status})`; +} + +export function mapStatusPairError(status: number, payload: unknown): string { + const code = extractErrorCode(payload); + const message = extractErrorMessage(payload); + + if (code === "PROXY_PAIR_TICKET_NOT_FOUND" || status === 404) { + return "Pairing ticket not found"; + } + + if (code === "PROXY_PAIR_TICKET_EXPIRED" || status === 410) { + return "Pairing ticket has expired"; + } + + if (code === "PROXY_PAIR_STATUS_FORBIDDEN" || status === 403) { + return message + ? `Pair status request is forbidden (403): ${message}` + : "Pair status request is forbidden (403)."; + } + + if (code === "PROXY_PAIR_TICKET_INVALID_ISSUER") { + return message + ? `Pair status failed: ticket issuer does not match this proxy (${message}). Use the same proxy URL where the ticket was issued.` + : "Pair status failed: ticket issuer does not match this proxy. Use the same proxy URL where the ticket was issued."; + } + + if ( + code === "PROXY_PAIR_TICKET_INVALID_FORMAT" || + code === "PROXY_PAIR_TICKET_UNSUPPORTED_VERSION" + ) { + return message + ? `Pair status request is invalid (400): ${message}. Re-copy the full ticket/QR without truncation.` + : "Pair status request is invalid (400): pairing ticket is malformed. Re-copy the full ticket/QR without truncation."; + } + + if (status === 400) { + return message + ? 
`Pair status request is invalid (400): ${message}` + : "Pair status request is invalid (400)."; + } + + if (status >= 500) { + return `Proxy pairing service is unavailable (${status}).`; + } + + if (message) { + return `Pair status failed (${status}): ${message}`; + } + + return `Pair status failed (${status})`; +} + +export function parsePairStartResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_START_INVALID_RESPONSE", + "Pair start response is invalid", + ); + } + + const ticket = parsePairingTicket(payload.ticket); + const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); + const expiresAt = parseNonEmptyString(payload.expiresAt); + + let initiatorProfile: PairStartResult["initiatorProfile"]; + if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { + throw createCliError( + "CLI_PAIR_START_INVALID_RESPONSE", + "Pair start response is invalid", + ); + } + + try { + initiatorProfile = parsePeerProfile(payload.initiatorProfile); + } catch { + throw createCliError( + "CLI_PAIR_START_INVALID_RESPONSE", + "Pair start response is invalid", + ); + } + + return { + ticket, + initiatorAgentDid, + initiatorProfile, + expiresAt, + }; +} + +export function parsePairConfirmResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_CONFIRM_INVALID_RESPONSE", + "Pair confirm response is invalid", + ); + } + + const paired = payload.paired === true; + const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); + const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); + + if ( + !paired || + initiatorAgentDid.length === 0 || + responderAgentDid.length === 0 + ) { + throw createCliError( + "CLI_PAIR_CONFIRM_INVALID_RESPONSE", + "Pair confirm response is invalid", + ); + } + + let initiatorProfile: PairConfirmResult["initiatorProfile"]; + let responderProfile: PairConfirmResult["responderProfile"]; + try { + 
initiatorProfile = parsePeerProfile(payload.initiatorProfile); + responderProfile = parsePeerProfile(payload.responderProfile); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_INVALID_RESPONSE", + "Pair confirm response is invalid", + ); + } + + return { + paired, + initiatorAgentDid, + responderAgentDid, + initiatorProfile, + responderProfile, + }; +} + +export function parsePairStatusResponse( + payload: unknown, +): Omit { + if (!isRecord(payload)) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + const statusRaw = parseNonEmptyString(payload.status); + if (statusRaw !== "pending" && statusRaw !== "confirmed") { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + const initiatorAgentDid = parseNonEmptyString(payload.initiatorAgentDid); + const responderAgentDid = parseNonEmptyString(payload.responderAgentDid); + const expiresAt = parseNonEmptyString(payload.expiresAt); + const confirmedAt = parseNonEmptyString(payload.confirmedAt); + + if (initiatorAgentDid.length === 0 || expiresAt.length === 0) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + if (statusRaw === "confirmed" && responderAgentDid.length === 0) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + let initiatorProfile: PairStatusResult["initiatorProfile"]; + try { + initiatorProfile = parsePeerProfile(payload.initiatorProfile); + } catch { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + let responderProfile: PairStatusResult["responderProfile"]; + if (payload.responderProfile !== undefined) { + try { + responderProfile = parsePeerProfile(payload.responderProfile); + } catch { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + 
} + + if (statusRaw === "confirmed" && responderProfile === undefined) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + return { + status: statusRaw, + initiatorAgentDid, + initiatorProfile, + responderAgentDid: + responderAgentDid.length > 0 ? responderAgentDid : undefined, + responderProfile, + expiresAt, + confirmedAt: confirmedAt.length > 0 ? confirmedAt : undefined, + }; +} + +export async function readAgentProofMaterial( + agentName: string, + dependencies: PairRequestOptions, +): Promise { + const readFileImpl = dependencies.readFileImpl ?? readFile; + const getConfigDirImpl = dependencies.getConfigDirImpl ?? getConfigDir; + const normalizedAgentName = assertValidAgentName(agentName); + + const agentDir = join( + getConfigDirImpl(), + AGENTS_DIR_NAME, + normalizedAgentName, + ); + const aitPath = join(agentDir, AIT_FILE_NAME); + const secretKeyPath = join(agentDir, SECRET_KEY_FILE_NAME); + + let ait: string; + try { + ait = (await readFileImpl(aitPath, "utf-8")).trim(); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" is missing ${AIT_FILE_NAME}. Run agent create first.`, + ); + } + + throw error; + } + + if (ait.length === 0) { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" has an empty ${AIT_FILE_NAME}`, + ); + } + + let encodedSecretKey: string; + try { + encodedSecretKey = (await readFileImpl(secretKeyPath, "utf-8")).trim(); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" is missing ${SECRET_KEY_FILE_NAME}. 
Run agent create first.`, + ); + } + + throw error; + } + + if (encodedSecretKey.length === 0) { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" has an empty ${SECRET_KEY_FILE_NAME}`, + ); + } + + let secretKey: Uint8Array; + try { + secretKey = decodeBase64url(encodedSecretKey); + } catch { + throw createCliError( + "CLI_PAIR_AGENT_NOT_FOUND", + `Agent "${normalizedAgentName}" has invalid ${SECRET_KEY_FILE_NAME}`, + ); + } + + return { + ait, + secretKey, + }; +} + +export async function buildSignedHeaders(input: { + bodyBytes?: Uint8Array; + method: string; + requestUrl: string; + secretKey: Uint8Array; + timestampSeconds: number; + nonce: string; +}): Promise> { + const signed = await signHttpRequest({ + method: input.method, + pathWithQuery: toPathWithQuery(input.requestUrl), + timestamp: String(input.timestampSeconds), + nonce: input.nonce, + body: input.bodyBytes, + secretKey: input.secretKey, + }); + + return signed.headers; +} + +export function resolveConfigWithFallback( + dependencies: PairRequestOptions, +): () => Promise { + return dependencies.resolveConfigImpl ?? 
resolveConfig; +} diff --git a/apps/cli/src/commands/pair/qr.ts b/apps/cli/src/commands/pair/qr.ts new file mode 100644 index 0000000..de2e6da --- /dev/null +++ b/apps/cli/src/commands/pair/qr.ts @@ -0,0 +1,158 @@ +import { mkdir, readdir, unlink, writeFile } from "node:fs/promises"; +import { dirname, join, resolve } from "node:path"; +import jsQR from "jsqr"; +import { PNG } from "pngjs"; +import QRCode from "qrcode"; +import { getConfigDir } from "../../config/manager.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { + createCliError, + PAIRING_QR_DIR_NAME, + PAIRING_QR_FILENAME_PATTERN, + PAIRING_QR_MAX_AGE_SECONDS, + parseNonEmptyString, + parsePairingTicket, +} from "./common.js"; +import type { PairConfirmOptions, PairRequestOptions } from "./types.js"; + +export async function encodeTicketQrPng(ticket: string): Promise { + const buffer = await QRCode.toBuffer(ticket, { + type: "png", + width: 512, + margin: 2, + errorCorrectionLevel: "M", + }); + return new Uint8Array(buffer); +} + +export function decodeTicketFromPng(imageBytes: Uint8Array): string { + let decodedPng: PNG; + try { + decodedPng = PNG.sync.read(Buffer.from(imageBytes)); + } catch { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_FILE_INVALID", + "QR image file is invalid or unsupported", + ); + } + + const imageData = new Uint8ClampedArray( + decodedPng.data.buffer, + decodedPng.data.byteOffset, + decodedPng.data.byteLength, + ); + + const decoded = jsQR(imageData, decodedPng.width, decodedPng.height); + if (!decoded || parseNonEmptyString(decoded.data).length === 0) { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_NOT_FOUND", + "No pairing QR code was found in the image", + ); + } + + return parsePairingTicket(decoded.data); +} + +export async function persistPairingQr(input: { + agentName: string; + qrOutput: string | undefined; + ticket: string; + dependencies: PairRequestOptions; + nowSeconds: number; +}): Promise { + const mkdirImpl = input.dependencies.mkdirImpl 
?? mkdir; + const readdirImpl = input.dependencies.readdirImpl ?? readdir; + const unlinkImpl = input.dependencies.unlinkImpl ?? unlink; + const writeFileImpl = input.dependencies.writeFileImpl ?? writeFile; + const getConfigDirImpl = input.dependencies.getConfigDirImpl ?? getConfigDir; + const qrEncodeImpl = input.dependencies.qrEncodeImpl ?? encodeTicketQrPng; + + const baseDir = join(getConfigDirImpl(), PAIRING_QR_DIR_NAME); + const outputPath = parseNonEmptyString(input.qrOutput) + ? resolve(input.qrOutput ?? "") + : join( + baseDir, + `${assertValidAgentName(input.agentName)}-pair-${input.nowSeconds}.png`, + ); + + const existingFiles = await readdirImpl(baseDir).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return [] as string[]; + } + + throw error; + }); + + for (const fileName of existingFiles) { + if (typeof fileName !== "string") { + continue; + } + + const match = PAIRING_QR_FILENAME_PATTERN.exec(fileName); + if (!match) { + continue; + } + + const issuedAtSeconds = Number.parseInt(match[1] ?? 
"", 10); + if (!Number.isInteger(issuedAtSeconds)) { + continue; + } + + if (issuedAtSeconds + PAIRING_QR_MAX_AGE_SECONDS > input.nowSeconds) { + continue; + } + + const stalePath = join(baseDir, fileName); + await unlinkImpl(stalePath).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + throw error; + }); + } + + await mkdirImpl(dirname(outputPath), { recursive: true }); + const imageBytes = await qrEncodeImpl(input.ticket); + await writeFileImpl(outputPath, imageBytes); + + return outputPath; +} + +export function resolveConfirmTicketSource(options: PairConfirmOptions): { + ticket: string; + source: "ticket" | "qr-file"; + qrFilePath?: string; +} { + const inlineTicket = parseNonEmptyString(options.ticket); + const qrFile = parseNonEmptyString(options.qrFile); + + if (inlineTicket.length > 0 && qrFile.length > 0) { + throw createCliError( + "CLI_PAIR_CONFIRM_INPUT_CONFLICT", + "Provide either --ticket or --qr-file, not both", + ); + } + + if (inlineTicket.length > 0) { + return { + ticket: parsePairingTicket(inlineTicket), + source: "ticket", + }; + } + + if (qrFile.length > 0) { + return { + ticket: "", + source: "qr-file", + qrFilePath: resolve(qrFile), + }; + } + + throw createCliError( + "CLI_PAIR_CONFIRM_TICKET_REQUIRED", + "Pairing ticket is required. 
Pass --ticket or --qr-file .", + ); +} diff --git a/apps/cli/src/commands/pair/service.ts b/apps/cli/src/commands/pair/service.ts new file mode 100644 index 0000000..1282d6b --- /dev/null +++ b/apps/cli/src/commands/pair/service.ts @@ -0,0 +1,493 @@ +import { randomBytes } from "node:crypto"; +import { readFile, unlink } from "node:fs/promises"; +import { resolveConfig } from "../../config/manager.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { + assertTicketIssuerMatchesProxy, + createCliError, + DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + DEFAULT_STATUS_WAIT_SECONDS, + logger, + NONCE_SIZE, + nowUnixSeconds, + PAIR_CONFIRM_PATH, + PAIR_START_PATH, + PAIR_STATUS_PATH, + parseAitAgentDid, + parseNonEmptyString, + parsePairingTicket, + parsePositiveIntegerOption, + parseTtlSeconds, + toPeerProxyOriginFromConfirm, + toPeerProxyOriginFromStatus, + toResponderProfile, +} from "./common.js"; +import { persistPairedPeer } from "./persistence.js"; +import { + buildSignedHeaders, + executePairRequest, + mapConfirmPairError, + mapStartPairError, + mapStatusPairError, + parseJsonResponse, + parsePairConfirmResponse, + parsePairStartResponse, + parsePairStatusResponse, + readAgentProofMaterial, + resolveProxyUrl, + toIssuerProxyRequestUrl, + toIssuerProxyUrl, + toProxyRequestUrl, +} from "./proxy.js"; +import { + decodeTicketFromPng, + persistPairingQr, + resolveConfirmTicketSource, +} from "./qr.js"; +import type { + PairConfirmOptions, + PairConfirmResult, + PairRequestOptions, + PairStartOptions, + PairStartResult, + PairStatusOptions, + PairStatusResult, +} from "./types.js"; + +export async function startPairing( + agentName: string, + options: PairStartOptions, + dependencies: PairRequestOptions = {}, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; + const nowSecondsImpl = dependencies.nowSecondsImpl ?? 
nowUnixSeconds; + const nonceFactoryImpl = + dependencies.nonceFactoryImpl ?? + (() => randomBytes(NONCE_SIZE).toString("base64url")); + + const ttlSeconds = parseTtlSeconds(options.ttlSeconds); + const config = await resolveConfigImpl(); + const proxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); + const normalizedAgentName = assertValidAgentName(agentName); + const initiatorProfile = toResponderProfile({ + config, + agentName: normalizedAgentName, + localProxyUrl: proxyUrl, + }); + + const { ait, secretKey } = await readAgentProofMaterial( + normalizedAgentName, + dependencies, + ); + + const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_START_PATH); + const requestBody = JSON.stringify({ + ttlSeconds, + initiatorProfile, + }); + const bodyBytes = new TextEncoder().encode(requestBody); + + const timestampSeconds = nowSecondsImpl(); + const nonce = nonceFactoryImpl(); + const signedHeaders = await buildSignedHeaders({ + method: "POST", + requestUrl, + bodyBytes, + secretKey, + timestampSeconds, + nonce, + }); + + const response = await executePairRequest({ + fetchImpl, + url: requestUrl, + init: { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signedHeaders, + }, + body: requestBody, + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_PAIR_START_FAILED", + mapStartPairError(response.status, responseBody), + ); + } + + const parsed = parsePairStartResponse(responseBody); + const result: PairStartResult = { + ...parsed, + proxyUrl, + }; + + if (options.qr === true) { + result.qrPath = await persistPairingQr({ + agentName, + qrOutput: options.qrOutput, + ticket: parsed.ticket, + dependencies, + nowSeconds: timestampSeconds, + }); + } + + return result; +} + +export async function confirmPairing( + agentName: string, + options: PairConfirmOptions, + dependencies: PairRequestOptions = {}, +): Promise { + const fetchImpl = 
dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; + const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; + const nonceFactoryImpl = + dependencies.nonceFactoryImpl ?? + (() => randomBytes(NONCE_SIZE).toString("base64url")); + const readFileImpl = dependencies.readFileImpl ?? readFile; + const qrDecodeImpl = dependencies.qrDecodeImpl ?? decodeTicketFromPng; + + const config = await resolveConfigImpl(); + const normalizedAgentName = assertValidAgentName(agentName); + const localProxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); + const responderProfile = toResponderProfile({ + config, + agentName: normalizedAgentName, + localProxyUrl, + }); + + const ticketSource = resolveConfirmTicketSource(options); + let ticket = ticketSource.ticket; + + if (ticketSource.source === "qr-file") { + if (!ticketSource.qrFilePath) { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_FILE_REQUIRED", + "QR file path is required", + ); + } + + let imageBytes: Uint8Array; + try { + imageBytes = await readFileImpl(ticketSource.qrFilePath); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw createCliError( + "CLI_PAIR_CONFIRM_QR_FILE_NOT_FOUND", + `QR file not found: ${ticketSource.qrFilePath}`, + ); + } + + throw error; + } + + ticket = parsePairingTicket(qrDecodeImpl(new Uint8Array(imageBytes))); + } + + ticket = parsePairingTicket(ticket); + const proxyUrl = toIssuerProxyUrl(ticket); + + const { ait, secretKey } = await readAgentProofMaterial( + normalizedAgentName, + dependencies, + ); + + const requestUrl = toIssuerProxyRequestUrl(ticket, PAIR_CONFIRM_PATH); + const requestBody = JSON.stringify({ + ticket, + responderProfile, + }); + const bodyBytes = new TextEncoder().encode(requestBody); + + const timestampSeconds = nowSecondsImpl(); + const nonce = nonceFactoryImpl(); + const signedHeaders = await buildSignedHeaders({ + method: 
"POST", + requestUrl, + bodyBytes, + secretKey, + timestampSeconds, + nonce, + }); + + const response = await executePairRequest({ + fetchImpl, + url: requestUrl, + init: { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signedHeaders, + }, + body: requestBody, + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_PAIR_CONFIRM_FAILED", + mapConfirmPairError(response.status, responseBody), + ); + } + + const parsed = parsePairConfirmResponse(responseBody); + const peerProxyOrigin = toPeerProxyOriginFromConfirm({ + ticket, + initiatorProfile: parsed.initiatorProfile, + }); + const peerAlias = await persistPairedPeer({ + ticket, + peerDid: parsed.initiatorAgentDid, + peerProfile: parsed.initiatorProfile, + peerProxyOrigin, + dependencies, + }); + + if (ticketSource.source === "qr-file" && ticketSource.qrFilePath) { + const unlinkImpl = dependencies.unlinkImpl ?? unlink; + await unlinkImpl(ticketSource.qrFilePath).catch((error) => { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + logger.warn("cli.pair.confirm.qr_cleanup_failed", { + path: ticketSource.qrFilePath, + reason: + error instanceof Error && error.message.length > 0 + ? error.message + : "unknown", + }); + }); + } + + return { + ...parsed, + proxyUrl, + peerAlias, + }; +} + +async function getPairingStatusOnce( + agentName: string, + options: { ticket: string }, + dependencies: PairRequestOptions = {}, +): Promise { + const fetchImpl = dependencies.fetchImpl ?? fetch; + const resolveConfigImpl = dependencies.resolveConfigImpl ?? resolveConfig; + const nowSecondsImpl = dependencies.nowSecondsImpl ?? nowUnixSeconds; + const nonceFactoryImpl = + dependencies.nonceFactoryImpl ?? 
+ (() => randomBytes(NONCE_SIZE).toString("base64url")); + + const config = await resolveConfigImpl(); + const proxyUrl = await resolveProxyUrl({ + config, + fetchImpl, + }); + + const ticket = parsePairingTicket(options.ticket); + assertTicketIssuerMatchesProxy({ + ticket, + proxyUrl, + context: "status", + }); + + const { ait, secretKey } = await readAgentProofMaterial( + agentName, + dependencies, + ); + const callerAgentDid = parseAitAgentDid(ait); + + const requestUrl = toProxyRequestUrl(proxyUrl, PAIR_STATUS_PATH); + const requestBody = JSON.stringify({ ticket }); + const bodyBytes = new TextEncoder().encode(requestBody); + const timestampSeconds = nowSecondsImpl(); + const nonce = nonceFactoryImpl(); + const signedHeaders = await buildSignedHeaders({ + method: "POST", + requestUrl, + bodyBytes, + secretKey, + timestampSeconds, + nonce, + }); + + const response = await executePairRequest({ + fetchImpl, + url: requestUrl, + init: { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": "application/json", + ...signedHeaders, + }, + body: requestBody, + }, + }); + + const responseBody = await parseJsonResponse(response); + if (!response.ok) { + throw createCliError( + "CLI_PAIR_STATUS_FAILED", + mapStatusPairError(response.status, responseBody), + ); + } + + const parsed = parsePairStatusResponse(responseBody); + let peerAlias: string | undefined; + + if (parsed.status === "confirmed") { + const responderAgentDid = parsed.responderAgentDid; + if (!responderAgentDid) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + const peerDid = + callerAgentDid === parsed.initiatorAgentDid + ? responderAgentDid + : callerAgentDid === responderAgentDid + ? parsed.initiatorAgentDid + : undefined; + const peerProfile = + callerAgentDid === parsed.initiatorAgentDid + ? parsed.responderProfile + : callerAgentDid === responderAgentDid + ? 
parsed.initiatorProfile + : undefined; + + if (!peerDid) { + throw createCliError( + "CLI_PAIR_STATUS_FORBIDDEN", + "Local agent is not a participant in the pairing ticket", + ); + } + + if (!peerProfile) { + throw createCliError( + "CLI_PAIR_STATUS_INVALID_RESPONSE", + "Pair status response is invalid", + ); + } + + peerAlias = await persistPairedPeer({ + ticket, + peerDid, + peerProfile, + peerProxyOrigin: toPeerProxyOriginFromStatus({ + callerAgentDid, + initiatorAgentDid: parsed.initiatorAgentDid, + responderAgentDid, + initiatorProfile: parsed.initiatorProfile, + responderProfile: parsed.responderProfile, + }), + dependencies, + }); + } + + return { + ...parsed, + proxyUrl, + peerAlias, + }; +} + +export async function waitForPairingStatus(input: { + agentName: string; + ticket: string; + waitSeconds: number; + pollIntervalSeconds: number; + dependencies: PairRequestOptions; +}): Promise { + const nowSecondsImpl = input.dependencies.nowSecondsImpl ?? nowUnixSeconds; + const sleepImpl = + input.dependencies.sleepImpl ?? 
+ (async (ms: number) => { + await new Promise((resolve) => { + setTimeout(resolve, ms); + }); + }); + + const deadlineSeconds = nowSecondsImpl() + input.waitSeconds; + while (true) { + const status = await getPairingStatusOnce( + input.agentName, + { ticket: input.ticket }, + input.dependencies, + ); + + if (status.status === "confirmed") { + return status; + } + + const nowSeconds = nowSecondsImpl(); + if (nowSeconds >= deadlineSeconds) { + throw createCliError( + "CLI_PAIR_STATUS_WAIT_TIMEOUT", + `Pairing is still pending after ${input.waitSeconds} seconds`, + ); + } + + const remainingSeconds = Math.max(0, deadlineSeconds - nowSeconds); + const sleepSeconds = Math.min(input.pollIntervalSeconds, remainingSeconds); + await sleepImpl(sleepSeconds * 1000); + } +} + +export async function getPairingStatus( + agentName: string, + options: PairStatusOptions, + dependencies: PairRequestOptions = {}, +): Promise { + const ticketRaw = parseNonEmptyString(options.ticket); + if (ticketRaw.length === 0) { + throw createCliError( + "CLI_PAIR_STATUS_TICKET_REQUIRED", + "Pair status requires --ticket ", + ); + } + + const ticket = parsePairingTicket(ticketRaw); + if (options.wait !== true) { + return getPairingStatusOnce(agentName, { ticket }, dependencies); + } + + const waitSeconds = parsePositiveIntegerOption({ + value: options.waitSeconds, + optionName: "waitSeconds", + defaultValue: DEFAULT_STATUS_WAIT_SECONDS, + }); + const pollIntervalSeconds = parsePositiveIntegerOption({ + value: options.pollIntervalSeconds, + optionName: "pollIntervalSeconds", + defaultValue: DEFAULT_STATUS_POLL_INTERVAL_SECONDS, + }); + + return waitForPairingStatus({ + agentName, + ticket, + waitSeconds, + pollIntervalSeconds, + dependencies, + }); +} diff --git a/apps/cli/src/commands/pair/types.ts b/apps/cli/src/commands/pair/types.ts new file mode 100644 index 0000000..2c32566 --- /dev/null +++ b/apps/cli/src/commands/pair/types.ts @@ -0,0 +1,101 @@ +import type { CliConfig, getConfigDir } from 
"../../config/manager.js"; + +export type PairStartOptions = { + ttlSeconds?: string; + qr?: boolean; + qrOutput?: string; + wait?: boolean; + waitSeconds?: string; + pollIntervalSeconds?: string; +}; + +export type PairConfirmOptions = { + qrFile?: string; + ticket?: string; +}; + +export type PairStatusOptions = { + ticket?: string; + wait?: boolean; + waitSeconds?: string; + pollIntervalSeconds?: string; +}; + +export type PairRequestOptions = { + fetchImpl?: typeof fetch; + getConfigDirImpl?: typeof getConfigDir; + nowSecondsImpl?: () => number; + nonceFactoryImpl?: () => string; + readFileImpl?: typeof import("node:fs/promises").readFile; + writeFileImpl?: typeof import("node:fs/promises").writeFile; + chmodImpl?: typeof import("node:fs/promises").chmod; + mkdirImpl?: typeof import("node:fs/promises").mkdir; + readdirImpl?: typeof import("node:fs/promises").readdir; + unlinkImpl?: typeof import("node:fs/promises").unlink; + sleepImpl?: (ms: number) => Promise; + resolveConfigImpl?: () => Promise; + qrEncodeImpl?: (ticket: string) => Promise; + qrDecodeImpl?: (imageBytes: Uint8Array) => string; +}; + +export type PairCommandDependencies = PairRequestOptions; + +export type PeerEntry = { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; +}; + +export type PeersConfig = { + peers: Record; +}; + +export type PeerProfile = { + agentName: string; + humanName: string; + proxyOrigin?: string; +}; + +export type PairStartResult = { + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + ticket: string; + expiresAt: string; + proxyUrl: string; + qrPath?: string; +}; + +export type PairConfirmResult = { + paired: boolean; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid: string; + responderProfile: PeerProfile; + proxyUrl: string; + peerAlias?: string; +}; + +export type PairStatusResult = { + status: "pending" | "confirmed"; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + 
responderAgentDid?: string; + responderProfile?: PeerProfile; + expiresAt: string; + confirmedAt?: string; + proxyUrl: string; + peerAlias?: string; +}; + +export type LocalAgentProofMaterial = { + ait: string; + secretKey: Uint8Array; +}; + +export type RegistryErrorEnvelope = { + error?: { + code?: string; + message?: string; + }; +}; From 4b960ff00fd1d15f9d0c17e23fe4d47a292e9f4a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 16:28:57 +0530 Subject: [PATCH 127/190] refactor(cli): split connector command into modules --- apps/cli/src/commands/connector.ts | 1276 +---------------- apps/cli/src/commands/connector/AGENTS.md | 29 + apps/cli/src/commands/connector/command.ts | 184 +++ apps/cli/src/commands/connector/config.ts | 178 +++ .../cli/src/commands/connector/credentials.ts | 95 ++ apps/cli/src/commands/connector/runtime.ts | 61 + apps/cli/src/commands/connector/service.ts | 534 +++++++ apps/cli/src/commands/connector/types.ts | 137 ++ apps/cli/src/commands/connector/validation.ts | 196 +++ 9 files changed, 1425 insertions(+), 1265 deletions(-) create mode 100644 apps/cli/src/commands/connector/AGENTS.md create mode 100644 apps/cli/src/commands/connector/command.ts create mode 100644 apps/cli/src/commands/connector/config.ts create mode 100644 apps/cli/src/commands/connector/credentials.ts create mode 100644 apps/cli/src/commands/connector/runtime.ts create mode 100644 apps/cli/src/commands/connector/service.ts create mode 100644 apps/cli/src/commands/connector/types.ts create mode 100644 apps/cli/src/commands/connector/validation.ts diff --git a/apps/cli/src/commands/connector.ts b/apps/cli/src/commands/connector.ts index 2c34a29..67c1db0 100644 --- a/apps/cli/src/commands/connector.ts +++ b/apps/cli/src/commands/connector.ts @@ -1,1265 +1,11 @@ -import { execFile as execFileCallback } from "node:child_process"; -import { mkdir, readFile, rm, writeFile } from "node:fs/promises"; -import { homedir } from "node:os"; -import { dirname, join } from 
"node:path"; -import { fileURLToPath } from "node:url"; -import { promisify } from "node:util"; -import { startConnectorRuntime as bundledStartConnectorRuntime } from "@clawdentity/connector"; -import { AppError, createLogger } from "@clawdentity/sdk"; -import { Command } from "commander"; -import { getConfigDir, resolveConfig } from "../config/manager.js"; -import { fetchRegistryMetadata } from "../config/registry-metadata.js"; -import { writeStdoutLine } from "../io.js"; -import { assertValidAgentName } from "./agent-name.js"; -import { withErrorHandling } from "./helpers.js"; - -const logger = createLogger({ service: "cli", module: "connector" }); -const execFile = promisify(execFileCallback); - -const AGENTS_DIR_NAME = "agents"; -const IDENTITY_FILE_NAME = "identity.json"; -const AIT_FILE_NAME = "ait.jwt"; -const SECRET_KEY_FILE_NAME = "secret.key"; -const REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; -const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; -const OPENCLAW_CONNECTORS_FILE_NAME = "openclaw-connectors.json"; -const SERVICE_LOG_DIR_NAME = "logs"; - -const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; -const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; - -type ConnectorCredentials = { - accessToken?: string; - accessExpiresAt?: string; - agentDid: string; - ait: string; - refreshToken: string; - refreshExpiresAt?: string; - secretKey: string; - tokenType?: "Bearer"; -}; - -type ConnectorStartInput = { - agentName: string; - configDir: string; - credentials: ConnectorCredentials; - openclawBaseUrl?: string; - openclawHookPath?: string; - openclawHookToken?: string; - outboundBaseUrl: string; - outboundPath: string; - proxyWebsocketUrl?: string; - registryUrl: string; -}; - -type ConnectorRuntime = { - closed?: Promise; - outboundUrl?: string; - proxyWebsocketUrl?: string; - waitForStop?: () => Promise; - waitUntilStopped?: () => Promise; - websocketUrl?: string; -}; - -type ConnectorModule = { - startConnectorRuntime?: ( - 
input: ConnectorStartInput, - ) => Promise; -}; - -type ReadFileText = (path: string, encoding: "utf8") => Promise; -type ResolveConfigLike = () => Promise<{ - registryUrl: string; - proxyUrl?: string; -}>; -type ExecFileLike = ( - file: string, - args?: readonly string[], -) => Promise<{ stderr: string; stdout: string }>; -type MkdirLike = ( - path: string, - options?: { recursive?: boolean }, -) => Promise; -type WriteFileLike = ( - filePath: string, - data: string, - encoding: "utf8", -) => Promise; -type RemoveFileLike = ( - filePath: string, - options?: { force?: boolean }, -) => Promise; -type ResolveHomeDirLike = () => string; -type ResolveNodeExecPathLike = () => string; -type ResolveCurrentPlatformLike = () => NodeJS.Platform; -type ResolveCurrentModulePathLike = () => string; -type ResolveCurrentUidLike = () => number; - -type ConnectorCommandDependencies = { - execFileImpl?: ExecFileLike; - fetchImpl?: typeof fetch; - getConfigDirImpl?: typeof getConfigDir; - getHomeDirImpl?: ResolveHomeDirLike; - loadConnectorModule?: () => Promise; - mkdirImpl?: MkdirLike; - readFileImpl?: ReadFileText; - removeFileImpl?: RemoveFileLike; - resolveCurrentModulePathImpl?: ResolveCurrentModulePathLike; - resolveCurrentPlatformImpl?: ResolveCurrentPlatformLike; - resolveCurrentUidImpl?: ResolveCurrentUidLike; - resolveConfigImpl?: ResolveConfigLike; - resolveNodeExecPathImpl?: ResolveNodeExecPathLike; - writeFileImpl?: WriteFileLike; -}; - -type ConnectorStartCommandOptions = { - openclawBaseUrl?: string; - openclawHookPath?: string; - openclawHookToken?: string; - proxyWsUrl?: string; -}; - -type ConnectorServicePlatform = "launchd" | "systemd"; - -type ConnectorServiceInstallCommandOptions = ConnectorStartCommandOptions & { - platform?: "auto" | ConnectorServicePlatform; -}; - -type ConnectorServiceUninstallCommandOptions = { - platform?: "auto" | ConnectorServicePlatform; -}; - -export type ConnectorStartResult = { - outboundUrl: string; - proxyWebsocketUrl?: string; - 
runtime?: ConnectorRuntime | undefined; -}; - -type OpenclawRelayRuntimeConfig = { - openclawHookToken?: string; -}; - -export type ConnectorServiceInstallResult = { - serviceFilePath: string; - serviceName: string; - platform: ConnectorServicePlatform; -}; - -export type ConnectorServiceUninstallResult = { - serviceFilePath: string; - serviceName: string; - platform: ConnectorServicePlatform; -}; - -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - -function getErrorCode(error: unknown): string | undefined { - if (!isRecord(error)) { - return undefined; - } - - return typeof error.code === "string" ? error.code : undefined; -} - -function createCliError( - code: string, - message: string, - details?: Record, -): AppError { - return new AppError({ - code, - message, - status: 400, - details, - }); -} - -function parseNonEmptyString(value: unknown, label: string): string { - if (typeof value !== "string") { - throw createCliError( - "CLI_CONNECTOR_INVALID_INPUT", - "Connector input is invalid", - { - label, - }, - ); - } - - const trimmed = value.trim(); - if (trimmed.length === 0) { - throw createCliError( - "CLI_CONNECTOR_INVALID_INPUT", - "Connector input is invalid", - { - label, - }, - ); - } - - return trimmed; -} - -function parseAgentDid(value: unknown): string { - const did = parseNonEmptyString(value, "agent did"); - if (!did.startsWith("did:claw:agent:")) { - throw createCliError( - "CLI_CONNECTOR_INVALID_AGENT_IDENTITY", - "Agent identity is invalid for connector startup", - ); - } - - return did; -} - -function parseConnectorBaseUrl(value: string): string { - let parsed: URL; - try { - parsed = new URL(value); - } catch { - throw createCliError( - "CLI_CONNECTOR_INVALID_BASE_URL", - "Connector base URL is invalid", - ); - } - - if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { - throw createCliError( - "CLI_CONNECTOR_INVALID_BASE_URL", - "Connector base URL is invalid", - ); 
- } - - if ( - parsed.pathname === "/" && - parsed.search.length === 0 && - parsed.hash.length === 0 - ) { - return parsed.origin; - } - - return parsed.toString(); -} - -function parseProxyWebsocketUrl(value: string): string { - let parsed: URL; - try { - parsed = new URL(value); - } catch { - throw createCliError( - "CLI_CONNECTOR_INVALID_PROXY_URL", - "Proxy websocket URL is invalid", - ); - } - - if ( - parsed.protocol !== "ws:" && - parsed.protocol !== "wss:" && - parsed.protocol !== "http:" && - parsed.protocol !== "https:" - ) { - throw createCliError( - "CLI_CONNECTOR_INVALID_PROXY_URL", - "Proxy websocket URL is invalid", - ); - } - - return parsed.toString(); -} - -function resolveProxyWebsocketUrlFromEnv(): string | undefined { - const explicitProxyWsUrl = process.env.CLAWDENTITY_PROXY_WS_URL; - if ( - typeof explicitProxyWsUrl === "string" && - explicitProxyWsUrl.trim().length > 0 - ) { - return parseProxyWebsocketUrl(explicitProxyWsUrl.trim()); - } - - const proxyUrl = process.env.CLAWDENTITY_PROXY_URL; - if (typeof proxyUrl === "string" && proxyUrl.trim().length > 0) { - return parseProxyWebsocketUrl(proxyUrl.trim()); - } - - return undefined; -} - -async function resolveProxyWebsocketUrl(input: { - explicitProxyWsUrl?: string; - configProxyUrl?: string; - registryUrl: string; - fetchImpl?: typeof fetch; -}): Promise { - if ( - typeof input.explicitProxyWsUrl === "string" && - input.explicitProxyWsUrl.trim().length > 0 - ) { - return parseProxyWebsocketUrl(input.explicitProxyWsUrl.trim()); - } - - const fromEnv = resolveProxyWebsocketUrlFromEnv(); - if (fromEnv !== undefined) { - return fromEnv; - } - - if ( - typeof input.configProxyUrl === "string" && - input.configProxyUrl.trim().length > 0 - ) { - return parseProxyWebsocketUrl(input.configProxyUrl.trim()); - } - - const fetchImpl = input.fetchImpl ?? 
globalThis.fetch; - if (typeof fetchImpl === "function") { - try { - const metadata = await fetchRegistryMetadata(input.registryUrl, { - fetchImpl, - }); - return parseProxyWebsocketUrl(metadata.proxyUrl); - } catch { - // Fall through to deterministic operator guidance below. - } - } - - throw createCliError( - "CLI_CONNECTOR_PROXY_URL_REQUIRED", - "Proxy URL is required for connector startup. Run `clawdentity invite redeem ` or set CLAWDENTITY_PROXY_URL / CLAWDENTITY_PROXY_WS_URL.", - ); -} - -function normalizeOutboundPath(pathValue: string): string { - const trimmed = pathValue.trim(); - if (trimmed.length === 0) { - throw createCliError( - "CLI_CONNECTOR_INVALID_OUTBOUND_PATH", - "Connector outbound path is invalid", - ); - } - - return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; -} - -function resolveConnectorBaseUrlFromEnv(): string | undefined { - const value = process.env.CLAWDENTITY_CONNECTOR_BASE_URL; - if (typeof value !== "string" || value.trim().length === 0) { - return undefined; - } - - return parseConnectorBaseUrl(value.trim()); -} - -async function readConnectorAssignedBaseUrl( - configDir: string, - agentName: string, - readFileImpl: ReadFileText, -): Promise { - const assignmentsPath = join(configDir, OPENCLAW_CONNECTORS_FILE_NAME); - let raw: string; - try { - raw = await readFileImpl(assignmentsPath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return undefined; - } - throw error; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - throw createCliError( - "CLI_CONNECTOR_INVALID_ASSIGNMENTS", - "Connector assignments config is invalid JSON", - { assignmentsPath }, - ); - } - - if (!isRecord(parsed) || !isRecord(parsed.agents)) { - return undefined; - } - - const entry = parsed.agents[agentName]; - if (!isRecord(entry) || typeof entry.connectorBaseUrl !== "string") { - return undefined; - } - - return parseConnectorBaseUrl(entry.connectorBaseUrl); -} - -function 
resolveConnectorOutboundPath(): string { - const value = process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH; - if (typeof value !== "string" || value.trim().length === 0) { - return DEFAULT_CONNECTOR_OUTBOUND_PATH; - } - - return normalizeOutboundPath(value); -} - -function resolveOutboundUrl(baseUrl: string, path: string): string { - return new URL(path, baseUrl).toString(); -} - -async function readRequiredTrimmedFile( - filePath: string, - label: string, - readFileImpl: ReadFileText, -): Promise { - let raw: string; - try { - raw = await readFileImpl(filePath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - throw createCliError( - "CLI_CONNECTOR_MISSING_AGENT_MATERIAL", - "Local agent credentials are missing for connector startup", - { label }, - ); - } - - throw error; - } - - const trimmed = raw.trim(); - if (trimmed.length === 0) { - throw createCliError( - "CLI_CONNECTOR_MISSING_AGENT_MATERIAL", - "Local agent credentials are missing for connector startup", - { label }, - ); - } - - return trimmed; -} - -async function readRelayRuntimeConfig( - configDir: string, - readFileImpl: ReadFileText, -): Promise { - const filePath = join(configDir, OPENCLAW_RELAY_RUNTIME_FILE_NAME); - let raw: string; - try { - raw = await readFileImpl(filePath, "utf8"); - } catch (error) { - if (getErrorCode(error) === "ENOENT") { - return undefined; - } - throw error; - } - - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - return undefined; - } - if (!isRecord(parsed)) { - return undefined; - } - - const openclawHookToken = - typeof parsed.openclawHookToken === "string" && - parsed.openclawHookToken.trim().length > 0 - ? 
parsed.openclawHookToken.trim() - : undefined; - if (!openclawHookToken) { - return undefined; - } - - return { - openclawHookToken, - }; -} - -function parseJsonRecord( - value: string, - code: string, - message: string, -): Record { - let parsed: unknown; - try { - parsed = JSON.parse(value); - } catch { - throw createCliError(code, message); - } - - if (!isRecord(parsed)) { - throw createCliError(code, message); - } - - return parsed; -} - -function parseRegistryAuth( - rawRegistryAuth: string, -): Pick< - ConnectorCredentials, - | "accessToken" - | "accessExpiresAt" - | "refreshToken" - | "refreshExpiresAt" - | "tokenType" -> { - const parsed = parseJsonRecord( - rawRegistryAuth, - "CLI_CONNECTOR_INVALID_REGISTRY_AUTH", - "Agent registry auth is invalid for connector startup", - ); - - const refreshToken = parseNonEmptyString(parsed.refreshToken, "refreshToken"); - const accessToken = - typeof parsed.accessToken === "string" && - parsed.accessToken.trim().length > 0 - ? parsed.accessToken.trim() - : undefined; - const accessExpiresAt = - typeof parsed.accessExpiresAt === "string" && - parsed.accessExpiresAt.trim().length > 0 - ? parsed.accessExpiresAt.trim() - : undefined; - const refreshExpiresAt = - typeof parsed.refreshExpiresAt === "string" && - parsed.refreshExpiresAt.trim().length > 0 - ? parsed.refreshExpiresAt.trim() - : undefined; - const tokenType = parsed.tokenType === "Bearer" ? 
"Bearer" : undefined; - - return { - accessToken, - accessExpiresAt, - refreshToken, - refreshExpiresAt, - tokenType, - }; -} - -function parseAgentIdentity(rawIdentity: string): { did: string } { - const parsed = parseJsonRecord( - rawIdentity, - "CLI_CONNECTOR_INVALID_AGENT_IDENTITY", - "Agent identity is invalid for connector startup", - ); - - return { - did: parseAgentDid(parsed.did), - }; -} - -async function loadDefaultConnectorModule(): Promise { - return { - startConnectorRuntime: bundledStartConnectorRuntime, - }; -} - -function resolveWaitPromise( - runtime: ConnectorRuntime | undefined, -): Promise | undefined { - if (!runtime || !isRecord(runtime)) { - return undefined; - } - - if (typeof runtime.waitUntilStopped === "function") { - return runtime.waitUntilStopped(); - } - - if (typeof runtime.waitForStop === "function") { - return runtime.waitForStop(); - } - - if (runtime.closed instanceof Promise) { - return runtime.closed.then(() => undefined); - } - - return undefined; -} - -function sanitizeServiceSegment(value: string): string { - return value.replaceAll(/[^a-zA-Z0-9_.-]+/g, "-"); -} - -function parseConnectorServicePlatformOption( - value: unknown, -): "auto" | ConnectorServicePlatform { - if (value === undefined) { - return "auto"; - } - - if (value === "auto" || value === "launchd" || value === "systemd") { - return value; - } - - throw createCliError( - "CLI_CONNECTOR_SERVICE_PLATFORM_INVALID", - "Connector service platform must be one of: auto, launchd, systemd", - ); -} - -function resolveConnectorServicePlatform( - inputPlatform: "auto" | ConnectorServicePlatform | undefined, - currentPlatform: NodeJS.Platform, -): ConnectorServicePlatform { - if (inputPlatform && inputPlatform !== "auto") { - return inputPlatform; - } - - if (currentPlatform === "darwin") { - return "launchd"; - } - - if (currentPlatform === "linux") { - return "systemd"; - } - - throw createCliError( - "CLI_CONNECTOR_SERVICE_PLATFORM_UNSUPPORTED", - "Connector service 
install is supported only on macOS (launchd) and Linux (systemd)", - { - platform: currentPlatform, - }, - ); -} - -function buildConnectorStartArgs( - agentName: string, - commandOptions: ConnectorStartCommandOptions, -): string[] { - const args = ["connector", "start", agentName]; - - if (commandOptions.proxyWsUrl) { - args.push("--proxy-ws-url", commandOptions.proxyWsUrl); - } - - if (commandOptions.openclawBaseUrl) { - args.push("--openclaw-base-url", commandOptions.openclawBaseUrl); - } - - if (commandOptions.openclawHookPath) { - args.push("--openclaw-hook-path", commandOptions.openclawHookPath); - } - - if (commandOptions.openclawHookToken) { - args.push("--openclaw-hook-token", commandOptions.openclawHookToken); - } - - return args; -} - -function resolveCliEntryPath( - resolveCurrentModulePathImpl: ResolveCurrentModulePathLike | undefined, -): string { - const modulePath = - resolveCurrentModulePathImpl?.() ?? fileURLToPath(import.meta.url); - return join(dirname(modulePath), "..", "bin.js"); -} - -function escapeXml(value: string): string { - return value - .replaceAll("&", "&") - .replaceAll("<", "<") - .replaceAll(">", ">") - .replaceAll('"', """) - .replaceAll("'", "'"); -} - -function quoteSystemdArgument(value: string): string { - return `"${value.replaceAll("\\", "\\\\").replaceAll('"', '\\"')}"`; -} - -function createSystemdServiceFileContent(input: { - command: string[]; - description: string; - errorLogPath: string; - outputLogPath: string; - workingDirectory: string; -}): string { - const execStart = input.command.map(quoteSystemdArgument).join(" "); - - return [ - "[Unit]", - `Description=${input.description}`, - "After=network-online.target", - "Wants=network-online.target", - "", - "[Service]", - "Type=simple", - `ExecStart=${execStart}`, - "Restart=always", - "RestartSec=2", - `WorkingDirectory=${quoteSystemdArgument(input.workingDirectory)}`, - `StandardOutput=append:${input.outputLogPath}`, - `StandardError=append:${input.errorLogPath}`, - 
"", - "[Install]", - "WantedBy=default.target", - "", - ].join("\n"); -} - -function createLaunchdPlistContent(input: { - command: string[]; - label: string; - errorLogPath: string; - outputLogPath: string; - workingDirectory: string; -}): string { - const commandItems = input.command - .map((arg) => ` ${escapeXml(arg)}`) - .join("\n"); - - return [ - '', - '', - '', - "", - " Label", - ` ${escapeXml(input.label)}`, - " ProgramArguments", - " ", - commandItems, - " ", - " RunAtLoad", - " ", - " KeepAlive", - " ", - " WorkingDirectory", - ` ${escapeXml(input.workingDirectory)}`, - " StandardOutPath", - ` ${escapeXml(input.outputLogPath)}`, - " StandardErrorPath", - ` ${escapeXml(input.errorLogPath)}`, - "", - "", - "", - ].join("\n"); -} - -function resolveServiceDependencies( - dependencies: ConnectorCommandDependencies, -) { - const execFileImpl: ExecFileLike = - dependencies.execFileImpl ?? - (async (file, args = []) => { - const result = await execFile(file, [...args]); - return { - stdout: result.stdout ?? "", - stderr: result.stderr ?? "", - }; - }); - - return { - execFileImpl, - getConfigDirImpl: dependencies.getConfigDirImpl ?? getConfigDir, - getHomeDirImpl: dependencies.getHomeDirImpl ?? homedir, - mkdirImpl: dependencies.mkdirImpl ?? mkdir, - removeFileImpl: dependencies.removeFileImpl ?? rm, - resolveCurrentModulePathImpl: dependencies.resolveCurrentModulePathImpl, - resolveCurrentPlatformImpl: - dependencies.resolveCurrentPlatformImpl ?? (() => process.platform), - resolveCurrentUidImpl: - dependencies.resolveCurrentUidImpl ?? - (() => { - if (typeof process.getuid !== "function") { - throw createCliError( - "CLI_CONNECTOR_SERVICE_UID_UNAVAILABLE", - "Current user id is unavailable in this runtime", - ); - } - return process.getuid(); - }), - resolveNodeExecPathImpl: - dependencies.resolveNodeExecPathImpl ?? (() => process.execPath), - writeFileImpl: dependencies.writeFileImpl ?? 
writeFile, - }; -} - -export async function installConnectorServiceForAgent( - agentName: string, - commandOptions: ConnectorServiceInstallCommandOptions = {}, - dependencies: ConnectorCommandDependencies = {}, -): Promise { - const serviceDependencies = resolveServiceDependencies(dependencies); - const servicePlatform = parseConnectorServicePlatformOption( - commandOptions.platform, - ); - const platform = resolveConnectorServicePlatform( - servicePlatform, - serviceDependencies.resolveCurrentPlatformImpl(), - ); - const configDir = serviceDependencies.getConfigDirImpl(); - const homeDir = serviceDependencies.getHomeDirImpl(); - const logsDir = join(configDir, SERVICE_LOG_DIR_NAME); - const serviceName = sanitizeServiceSegment( - `clawdentity-connector-${agentName}`, - ); - const startArgs = buildConnectorStartArgs(agentName, commandOptions); - const command = [ - serviceDependencies.resolveNodeExecPathImpl(), - resolveCliEntryPath(serviceDependencies.resolveCurrentModulePathImpl), - ...startArgs, - ]; - const outputLogPath = join(logsDir, `${serviceName}.out.log`); - const errorLogPath = join(logsDir, `${serviceName}.err.log`); - - await serviceDependencies.mkdirImpl(logsDir, { recursive: true }); - - if (platform === "systemd") { - const serviceDir = join(homeDir, ".config", "systemd", "user"); - const serviceFilePath = join(serviceDir, `${serviceName}.service`); - - await serviceDependencies.mkdirImpl(serviceDir, { recursive: true }); - await serviceDependencies.writeFileImpl( - serviceFilePath, - createSystemdServiceFileContent({ - command, - description: `Clawdentity connector (${agentName})`, - outputLogPath, - errorLogPath, - workingDirectory: homeDir, - }), - "utf8", - ); - - try { - await serviceDependencies.execFileImpl("systemctl", [ - "--user", - "daemon-reload", - ]); - await serviceDependencies.execFileImpl("systemctl", [ - "--user", - "enable", - "--now", - `${serviceName}.service`, - ]); - } catch (error) { - throw createCliError( - 
"CLI_CONNECTOR_SERVICE_INSTALL_FAILED", - "Failed to install systemd connector service", - { - reason: error instanceof Error ? error.message : "unknown", - }, - ); - } - - return { - platform, - serviceName, - serviceFilePath, - }; - } - - const launchAgentsDir = join(homeDir, "Library", "LaunchAgents"); - const serviceNameWithDomain = `com.clawdentity.${serviceName}`; - const serviceFilePath = join( - launchAgentsDir, - `${serviceNameWithDomain}.plist`, - ); - - await serviceDependencies.mkdirImpl(launchAgentsDir, { recursive: true }); - await serviceDependencies.writeFileImpl( - serviceFilePath, - createLaunchdPlistContent({ - command, - label: serviceNameWithDomain, - outputLogPath, - errorLogPath, - workingDirectory: homeDir, - }), - "utf8", - ); - - try { - await serviceDependencies.execFileImpl("launchctl", [ - "unload", - "-w", - serviceFilePath, - ]); - } catch { - // Ignore unload failures for first install or already-unloaded service. - } - - try { - await serviceDependencies.execFileImpl("launchctl", [ - "load", - "-w", - serviceFilePath, - ]); - } catch (error) { - throw createCliError( - "CLI_CONNECTOR_SERVICE_INSTALL_FAILED", - "Failed to install launchd connector service", - { - reason: error instanceof Error ? 
error.message : "unknown", - }, - ); - } - - return { - platform, - serviceName: serviceNameWithDomain, - serviceFilePath, - }; -} - -export async function uninstallConnectorServiceForAgent( - agentName: string, - commandOptions: ConnectorServiceUninstallCommandOptions = {}, - dependencies: ConnectorCommandDependencies = {}, -): Promise { - const serviceDependencies = resolveServiceDependencies(dependencies); - const servicePlatform = parseConnectorServicePlatformOption( - commandOptions.platform, - ); - const platform = resolveConnectorServicePlatform( - servicePlatform, - serviceDependencies.resolveCurrentPlatformImpl(), - ); - const homeDir = serviceDependencies.getHomeDirImpl(); - const serviceName = sanitizeServiceSegment( - `clawdentity-connector-${agentName}`, - ); - - if (platform === "systemd") { - const serviceFilePath = join( - homeDir, - ".config", - "systemd", - "user", - `${serviceName}.service`, - ); - - try { - await serviceDependencies.execFileImpl("systemctl", [ - "--user", - "disable", - "--now", - `${serviceName}.service`, - ]); - } catch { - // Continue uninstall to keep command idempotent. - } - - await serviceDependencies.removeFileImpl(serviceFilePath, { force: true }); - - try { - await serviceDependencies.execFileImpl("systemctl", [ - "--user", - "daemon-reload", - ]); - } catch { - // Continue uninstall; unit file is already removed. - } - - return { - platform, - serviceName, - serviceFilePath, - }; - } - - const serviceNameWithDomain = `com.clawdentity.${serviceName}`; - const serviceFilePath = join( - homeDir, - "Library", - "LaunchAgents", - `${serviceNameWithDomain}.plist`, - ); - - try { - await serviceDependencies.execFileImpl("launchctl", [ - "unload", - "-w", - serviceFilePath, - ]); - } catch { - // Continue uninstall to keep command idempotent. 
- } - - await serviceDependencies.removeFileImpl(serviceFilePath, { force: true }); - - return { - platform, - serviceName: serviceNameWithDomain, - serviceFilePath, - }; -} - -export async function startConnectorForAgent( - agentName: string, - commandOptions: ConnectorStartCommandOptions = {}, - dependencies: ConnectorCommandDependencies = {}, -): Promise { - const resolveConfigImpl: ResolveConfigLike = - dependencies.resolveConfigImpl ?? resolveConfig; - const getConfigDirImpl = dependencies.getConfigDirImpl ?? getConfigDir; - const readFileImpl: ReadFileText = - dependencies.readFileImpl ?? ((path, encoding) => readFile(path, encoding)); - const fetchImpl = dependencies.fetchImpl ?? globalThis.fetch; - const loadConnectorModule = - dependencies.loadConnectorModule ?? loadDefaultConnectorModule; - const configDir = getConfigDirImpl(); - const agentDirectory = join(configDir, AGENTS_DIR_NAME, agentName); - - const [ - rawAit, - rawSecretKey, - rawIdentity, - rawRegistryAuth, - assignedConnectorBaseUrl, - relayRuntimeConfig, - config, - connectorModule, - ] = await Promise.all([ - readRequiredTrimmedFile( - join(agentDirectory, AIT_FILE_NAME), - AIT_FILE_NAME, - readFileImpl, - ), - readRequiredTrimmedFile( - join(agentDirectory, SECRET_KEY_FILE_NAME), - SECRET_KEY_FILE_NAME, - readFileImpl, - ), - readRequiredTrimmedFile( - join(agentDirectory, IDENTITY_FILE_NAME), - IDENTITY_FILE_NAME, - readFileImpl, - ), - readRequiredTrimmedFile( - join(agentDirectory, REGISTRY_AUTH_FILE_NAME), - REGISTRY_AUTH_FILE_NAME, - readFileImpl, - ), - readConnectorAssignedBaseUrl(configDir, agentName, readFileImpl), - readRelayRuntimeConfig(configDir, readFileImpl), - resolveConfigImpl(), - loadConnectorModule(), - ]); - - if (typeof connectorModule.startConnectorRuntime !== "function") { - throw createCliError( - "CLI_CONNECTOR_INVALID_PACKAGE_API", - "Connector package does not expose startConnectorRuntime", - ); - } - - const identity = parseAgentIdentity(rawIdentity); - const 
registryAuth = parseRegistryAuth(rawRegistryAuth); - const resolvedProxyWebsocketUrl = await resolveProxyWebsocketUrl({ - explicitProxyWsUrl: commandOptions.proxyWsUrl, - configProxyUrl: config.proxyUrl, - registryUrl: config.registryUrl, - fetchImpl, - }); - const openclawHookToken = - commandOptions.openclawHookToken ?? relayRuntimeConfig?.openclawHookToken; - const outboundBaseUrl = - resolveConnectorBaseUrlFromEnv() ?? - assignedConnectorBaseUrl ?? - DEFAULT_CONNECTOR_BASE_URL; - const outboundPath = resolveConnectorOutboundPath(); - const runtime = await connectorModule.startConnectorRuntime({ - agentName, - configDir, - registryUrl: config.registryUrl, - outboundBaseUrl, - outboundPath, - proxyWebsocketUrl: resolvedProxyWebsocketUrl, - openclawBaseUrl: commandOptions.openclawBaseUrl, - openclawHookPath: commandOptions.openclawHookPath, - openclawHookToken, - credentials: { - agentDid: identity.did, - ait: rawAit, - secretKey: rawSecretKey, - refreshToken: registryAuth.refreshToken, - accessToken: registryAuth.accessToken, - accessExpiresAt: registryAuth.accessExpiresAt, - refreshExpiresAt: registryAuth.refreshExpiresAt, - tokenType: registryAuth.tokenType, - }, - }); - const outboundUrl = - runtime && isRecord(runtime) && typeof runtime.outboundUrl === "string" - ? runtime.outboundUrl - : resolveOutboundUrl(outboundBaseUrl, outboundPath); - const proxyWebsocketUrl = - runtime && isRecord(runtime) - ? typeof runtime.websocketUrl === "string" - ? runtime.websocketUrl - : typeof runtime.proxyWebsocketUrl === "string" - ? 
runtime.proxyWebsocketUrl - : resolvedProxyWebsocketUrl - : undefined; - - return { - outboundUrl, - proxyWebsocketUrl, - runtime, - }; -} - -export function createConnectorCommand( - dependencies: ConnectorCommandDependencies = {}, -): Command { - const connector = new Command("connector") - .description("Run local connector runtime for OpenClaw relay handoff") - .addCommand( - new Command("start") - .description("Start connector runtime for a local agent") - .argument("", "Local agent name") - .option( - "--proxy-ws-url ", - "Proxy websocket URL (or CLAWDENTITY_PROXY_WS_URL)", - ) - .option( - "--openclaw-base-url ", - "OpenClaw base URL (default OPENCLAW_BASE_URL or http://127.0.0.1:18789)", - ) - .option( - "--openclaw-hook-path ", - "OpenClaw hooks path (default OPENCLAW_HOOK_PATH or /hooks/agent)", - ) - .option( - "--openclaw-hook-token ", - "OpenClaw hooks token (default OPENCLAW_HOOK_TOKEN)", - ) - .action( - withErrorHandling( - "connector start", - async ( - agentNameInput: string, - commandOptions: ConnectorStartCommandOptions, - ) => { - const agentName = assertValidAgentName(agentNameInput); - - writeStdoutLine( - `Starting connector runtime for agent "${agentName}"...`, - ); - - const started = await startConnectorForAgent( - agentName, - { - proxyWsUrl: commandOptions.proxyWsUrl, - openclawBaseUrl: commandOptions.openclawBaseUrl, - openclawHookPath: commandOptions.openclawHookPath, - openclawHookToken: commandOptions.openclawHookToken, - }, - dependencies, - ); - - writeStdoutLine( - `Connector outbound endpoint: ${started.outboundUrl}`, - ); - if (started.proxyWebsocketUrl) { - writeStdoutLine( - `Connector proxy websocket: ${started.proxyWebsocketUrl}`, - ); - } - writeStdoutLine("Connector runtime is active."); - - const waitPromise = resolveWaitPromise(started.runtime); - if (waitPromise) { - await waitPromise; - } - }, - ), - ), - ) - .addCommand( - new Command("service") - .description("Install or remove connector autostart service") - 
.addCommand( - new Command("install") - .description("Install and start connector service at login/restart") - .argument("", "Local agent name") - .option( - "--platform ", - "Service platform: auto | launchd | systemd", - ) - .option( - "--proxy-ws-url ", - "Proxy websocket URL (or CLAWDENTITY_PROXY_WS_URL)", - ) - .option( - "--openclaw-base-url ", - "OpenClaw base URL override for connector runtime", - ) - .option( - "--openclaw-hook-path ", - "OpenClaw hooks path override for connector runtime", - ) - .option( - "--openclaw-hook-token ", - "OpenClaw hooks token override for connector runtime", - ) - .action( - withErrorHandling( - "connector service install", - async ( - agentNameInput: string, - commandOptions: ConnectorServiceInstallCommandOptions, - ) => { - const agentName = assertValidAgentName(agentNameInput); - const installed = await installConnectorServiceForAgent( - agentName, - { - platform: commandOptions.platform, - proxyWsUrl: commandOptions.proxyWsUrl, - openclawBaseUrl: commandOptions.openclawBaseUrl, - openclawHookPath: commandOptions.openclawHookPath, - openclawHookToken: commandOptions.openclawHookToken, - }, - dependencies, - ); - - writeStdoutLine( - `Connector service installed (${installed.platform}): ${installed.serviceName}`, - ); - writeStdoutLine(`Service file: ${installed.serviceFilePath}`); - }, - ), - ), - ) - .addCommand( - new Command("uninstall") - .description("Uninstall connector autostart service") - .argument("", "Local agent name") - .option( - "--platform ", - "Service platform: auto | launchd | systemd", - ) - .action( - withErrorHandling( - "connector service uninstall", - async ( - agentNameInput: string, - commandOptions: ConnectorServiceUninstallCommandOptions, - ) => { - const agentName = assertValidAgentName(agentNameInput); - const uninstalled = await uninstallConnectorServiceForAgent( - agentName, - { - platform: commandOptions.platform, - }, - dependencies, - ); - - writeStdoutLine( - `Connector service 
uninstalled (${uninstalled.platform}): ${uninstalled.serviceName}`, - ); - writeStdoutLine( - `Service file removed: ${uninstalled.serviceFilePath}`, - ); - }, - ), - ), - ), - ); - - logger.debug("cli.connector.command_registered", { - command: "connector", - }); - - return connector; -} +export { createConnectorCommand } from "./connector/command.js"; +export { + installConnectorServiceForAgent, + startConnectorForAgent, + uninstallConnectorServiceForAgent, +} from "./connector/service.js"; +export type { + ConnectorServiceInstallResult, + ConnectorServiceUninstallResult, + ConnectorStartResult, +} from "./connector/types.js"; diff --git a/apps/cli/src/commands/connector/AGENTS.md b/apps/cli/src/commands/connector/AGENTS.md new file mode 100644 index 0000000..b0b7bff --- /dev/null +++ b/apps/cli/src/commands/connector/AGENTS.md @@ -0,0 +1,29 @@ +# AGENTS.md (connector command modules) + +## Purpose +- Keep connector command code modular, testable, and behavior-stable. +- Preserve CLI output/error behavior and existing connector test expectations. + +## Module Boundaries +- `types.ts`: shared connector constants, options, dependency contracts, and result types only. +- `validation.ts`: connector AppError creation, input parsing, URL validation, and platform-option parsing. +- `config.ts`: environment/config resolution, connector assignment lookup, relay runtime config lookup, and outbound/proxy URL resolution. +- `credentials.ts`: required local credential-file reads and identity/registry-auth parsing. +- `runtime.ts`: connector package loading and runtime wait/result extraction helpers. +- `service.ts`: start/install/uninstall orchestration and service file generation. +- `command.ts`: Commander wiring and stdout formatting only. +- `../connector.ts`: thin public facade and stable exports. + +## Guardrails +- Keep each source file under 800 LOC. +- Avoid circular imports across connector modules. 
+- Keep connector error codes/messages stable; tests and operator workflows rely on deterministic behavior. +- Keep command stdout wording/order stable unless tests and scope explicitly require changes. +- Reuse existing helpers rather than duplicating identity/auth/config parsing logic. +- Keep service name/path generation deterministic from `agentName` so install/uninstall is predictable. + +## Change Workflow +- Add or update tests in `apps/cli/src/commands/connector.test.ts` for behavior changes. +- Run validation before handoff: + - `pnpm -C apps/cli typecheck` + - `pnpm -C apps/cli test -- connector` diff --git a/apps/cli/src/commands/connector/command.ts b/apps/cli/src/commands/connector/command.ts new file mode 100644 index 0000000..b3b9ce2 --- /dev/null +++ b/apps/cli/src/commands/connector/command.ts @@ -0,0 +1,184 @@ +import { createLogger } from "@clawdentity/sdk"; +import { Command } from "commander"; +import { writeStdoutLine } from "../../io.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { withErrorHandling } from "../helpers.js"; +import { resolveWaitPromise } from "./runtime.js"; +import { + installConnectorServiceForAgent, + startConnectorForAgent, + uninstallConnectorServiceForAgent, +} from "./service.js"; +import type { + ConnectorCommandDependencies, + ConnectorServiceInstallCommandOptions, + ConnectorServiceUninstallCommandOptions, + ConnectorStartCommandOptions, +} from "./types.js"; + +const logger = createLogger({ service: "cli", module: "connector" }); + +export function createConnectorCommand( + dependencies: ConnectorCommandDependencies = {}, +): Command { + const connector = new Command("connector") + .description("Run local connector runtime for OpenClaw relay handoff") + .addCommand( + new Command("start") + .description("Start connector runtime for a local agent") + .argument("", "Local agent name") + .option( + "--proxy-ws-url ", + "Proxy websocket URL (or CLAWDENTITY_PROXY_WS_URL)", + ) + .option( + 
"--openclaw-base-url ", + "OpenClaw base URL (default OPENCLAW_BASE_URL or http://127.0.0.1:18789)", + ) + .option( + "--openclaw-hook-path ", + "OpenClaw hooks path (default OPENCLAW_HOOK_PATH or /hooks/agent)", + ) + .option( + "--openclaw-hook-token ", + "OpenClaw hooks token (default OPENCLAW_HOOK_TOKEN)", + ) + .action( + withErrorHandling( + "connector start", + async ( + agentNameInput: string, + commandOptions: ConnectorStartCommandOptions, + ) => { + const agentName = assertValidAgentName(agentNameInput); + + writeStdoutLine( + `Starting connector runtime for agent "${agentName}"...`, + ); + + const started = await startConnectorForAgent( + agentName, + { + proxyWsUrl: commandOptions.proxyWsUrl, + openclawBaseUrl: commandOptions.openclawBaseUrl, + openclawHookPath: commandOptions.openclawHookPath, + openclawHookToken: commandOptions.openclawHookToken, + }, + dependencies, + ); + + writeStdoutLine( + `Connector outbound endpoint: ${started.outboundUrl}`, + ); + if (started.proxyWebsocketUrl) { + writeStdoutLine( + `Connector proxy websocket: ${started.proxyWebsocketUrl}`, + ); + } + writeStdoutLine("Connector runtime is active."); + + const waitPromise = resolveWaitPromise(started.runtime); + if (waitPromise) { + await waitPromise; + } + }, + ), + ), + ) + .addCommand( + new Command("service") + .description("Install or remove connector autostart service") + .addCommand( + new Command("install") + .description("Install and start connector service at login/restart") + .argument("", "Local agent name") + .option( + "--platform ", + "Service platform: auto | launchd | systemd", + ) + .option( + "--proxy-ws-url ", + "Proxy websocket URL (or CLAWDENTITY_PROXY_WS_URL)", + ) + .option( + "--openclaw-base-url ", + "OpenClaw base URL override for connector runtime", + ) + .option( + "--openclaw-hook-path ", + "OpenClaw hooks path override for connector runtime", + ) + .option( + "--openclaw-hook-token ", + "OpenClaw hooks token override for connector runtime", + ) + 
.action( + withErrorHandling( + "connector service install", + async ( + agentNameInput: string, + commandOptions: ConnectorServiceInstallCommandOptions, + ) => { + const agentName = assertValidAgentName(agentNameInput); + const installed = await installConnectorServiceForAgent( + agentName, + { + platform: commandOptions.platform, + proxyWsUrl: commandOptions.proxyWsUrl, + openclawBaseUrl: commandOptions.openclawBaseUrl, + openclawHookPath: commandOptions.openclawHookPath, + openclawHookToken: commandOptions.openclawHookToken, + }, + dependencies, + ); + + writeStdoutLine( + `Connector service installed (${installed.platform}): ${installed.serviceName}`, + ); + writeStdoutLine(`Service file: ${installed.serviceFilePath}`); + }, + ), + ), + ) + .addCommand( + new Command("uninstall") + .description("Uninstall connector autostart service") + .argument("", "Local agent name") + .option( + "--platform ", + "Service platform: auto | launchd | systemd", + ) + .action( + withErrorHandling( + "connector service uninstall", + async ( + agentNameInput: string, + commandOptions: ConnectorServiceUninstallCommandOptions, + ) => { + const agentName = assertValidAgentName(agentNameInput); + const uninstalled = await uninstallConnectorServiceForAgent( + agentName, + { + platform: commandOptions.platform, + }, + dependencies, + ); + + writeStdoutLine( + `Connector service uninstalled (${uninstalled.platform}): ${uninstalled.serviceName}`, + ); + writeStdoutLine( + `Service file removed: ${uninstalled.serviceFilePath}`, + ); + }, + ), + ), + ), + ); + + logger.debug("cli.connector.command_registered", { + command: "connector", + }); + + return connector; +} diff --git a/apps/cli/src/commands/connector/config.ts b/apps/cli/src/commands/connector/config.ts new file mode 100644 index 0000000..2f66657 --- /dev/null +++ b/apps/cli/src/commands/connector/config.ts @@ -0,0 +1,178 @@ +import { join } from "node:path"; +import { fetchRegistryMetadata } from 
"../../config/registry-metadata.js"; +import { + DEFAULT_CONNECTOR_OUTBOUND_PATH, + OPENCLAW_CONNECTORS_FILE_NAME, + OPENCLAW_RELAY_RUNTIME_FILE_NAME, + type OpenclawRelayRuntimeConfig, + type ReadFileText, +} from "./types.js"; +import { + createCliError, + getErrorCode, + isRecord, + normalizeOutboundPath, + parseConnectorBaseUrl, + parseProxyWebsocketUrl, +} from "./validation.js"; + +export function resolveProxyWebsocketUrlFromEnv(): string | undefined { + const explicitProxyWsUrl = process.env.CLAWDENTITY_PROXY_WS_URL; + if ( + typeof explicitProxyWsUrl === "string" && + explicitProxyWsUrl.trim().length > 0 + ) { + return parseProxyWebsocketUrl(explicitProxyWsUrl.trim()); + } + + const proxyUrl = process.env.CLAWDENTITY_PROXY_URL; + if (typeof proxyUrl === "string" && proxyUrl.trim().length > 0) { + return parseProxyWebsocketUrl(proxyUrl.trim()); + } + + return undefined; +} + +export async function resolveProxyWebsocketUrl(input: { + explicitProxyWsUrl?: string; + configProxyUrl?: string; + registryUrl: string; + fetchImpl?: typeof fetch; +}): Promise { + if ( + typeof input.explicitProxyWsUrl === "string" && + input.explicitProxyWsUrl.trim().length > 0 + ) { + return parseProxyWebsocketUrl(input.explicitProxyWsUrl.trim()); + } + + const fromEnv = resolveProxyWebsocketUrlFromEnv(); + if (fromEnv !== undefined) { + return fromEnv; + } + + if ( + typeof input.configProxyUrl === "string" && + input.configProxyUrl.trim().length > 0 + ) { + return parseProxyWebsocketUrl(input.configProxyUrl.trim()); + } + + const fetchImpl = input.fetchImpl ?? globalThis.fetch; + if (typeof fetchImpl === "function") { + try { + const metadata = await fetchRegistryMetadata(input.registryUrl, { + fetchImpl, + }); + return parseProxyWebsocketUrl(metadata.proxyUrl); + } catch { + // Fall through to deterministic operator guidance below. + } + } + + throw createCliError( + "CLI_CONNECTOR_PROXY_URL_REQUIRED", + "Proxy URL is required for connector startup. 
Run `clawdentity invite redeem ` or set CLAWDENTITY_PROXY_URL / CLAWDENTITY_PROXY_WS_URL.", + ); +} + +export function resolveConnectorBaseUrlFromEnv(): string | undefined { + const value = process.env.CLAWDENTITY_CONNECTOR_BASE_URL; + if (typeof value !== "string" || value.trim().length === 0) { + return undefined; + } + + return parseConnectorBaseUrl(value.trim()); +} + +export async function readConnectorAssignedBaseUrl( + configDir: string, + agentName: string, + readFileImpl: ReadFileText, +): Promise { + const assignmentsPath = join(configDir, OPENCLAW_CONNECTORS_FILE_NAME); + let raw: string; + try { + raw = await readFileImpl(assignmentsPath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + return undefined; + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + throw createCliError( + "CLI_CONNECTOR_INVALID_ASSIGNMENTS", + "Connector assignments config is invalid JSON", + { assignmentsPath }, + ); + } + + if (!isRecord(parsed) || !isRecord(parsed.agents)) { + return undefined; + } + + const entry = parsed.agents[agentName]; + if (!isRecord(entry) || typeof entry.connectorBaseUrl !== "string") { + return undefined; + } + + return parseConnectorBaseUrl(entry.connectorBaseUrl); +} + +export function resolveConnectorOutboundPath(): string { + const value = process.env.CLAWDENTITY_CONNECTOR_OUTBOUND_PATH; + if (typeof value !== "string" || value.trim().length === 0) { + return DEFAULT_CONNECTOR_OUTBOUND_PATH; + } + + return normalizeOutboundPath(value); +} + +export function resolveOutboundUrl(baseUrl: string, path: string): string { + return new URL(path, baseUrl).toString(); +} + +export async function readRelayRuntimeConfig( + configDir: string, + readFileImpl: ReadFileText, +): Promise { + const filePath = join(configDir, OPENCLAW_RELAY_RUNTIME_FILE_NAME); + let raw: string; + try { + raw = await readFileImpl(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + 
return undefined; + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + return undefined; + } + + if (!isRecord(parsed)) { + return undefined; + } + + const openclawHookToken = + typeof parsed.openclawHookToken === "string" && + parsed.openclawHookToken.trim().length > 0 + ? parsed.openclawHookToken.trim() + : undefined; + if (!openclawHookToken) { + return undefined; + } + + return { + openclawHookToken, + }; +} diff --git a/apps/cli/src/commands/connector/credentials.ts b/apps/cli/src/commands/connector/credentials.ts new file mode 100644 index 0000000..5c1067a --- /dev/null +++ b/apps/cli/src/commands/connector/credentials.ts @@ -0,0 +1,95 @@ +import type { ConnectorCredentials, ReadFileText } from "./types.js"; +import { + createCliError, + getErrorCode, + parseAgentDid, + parseJsonRecord, + parseNonEmptyString, +} from "./validation.js"; + +export async function readRequiredTrimmedFile( + filePath: string, + label: string, + readFileImpl: ReadFileText, +): Promise { + let raw: string; + try { + raw = await readFileImpl(filePath, "utf8"); + } catch (error) { + if (getErrorCode(error) === "ENOENT") { + throw createCliError( + "CLI_CONNECTOR_MISSING_AGENT_MATERIAL", + "Local agent credentials are missing for connector startup", + { label }, + ); + } + + throw error; + } + + const trimmed = raw.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_CONNECTOR_MISSING_AGENT_MATERIAL", + "Local agent credentials are missing for connector startup", + { label }, + ); + } + + return trimmed; +} + +export function parseRegistryAuth( + rawRegistryAuth: string, +): Pick< + ConnectorCredentials, + | "accessToken" + | "accessExpiresAt" + | "refreshToken" + | "refreshExpiresAt" + | "tokenType" +> { + const parsed = parseJsonRecord( + rawRegistryAuth, + "CLI_CONNECTOR_INVALID_REGISTRY_AUTH", + "Agent registry auth is invalid for connector startup", + ); + + const refreshToken = parseNonEmptyString(parsed.refreshToken, 
"refreshToken"); + const accessToken = + typeof parsed.accessToken === "string" && + parsed.accessToken.trim().length > 0 + ? parsed.accessToken.trim() + : undefined; + const accessExpiresAt = + typeof parsed.accessExpiresAt === "string" && + parsed.accessExpiresAt.trim().length > 0 + ? parsed.accessExpiresAt.trim() + : undefined; + const refreshExpiresAt = + typeof parsed.refreshExpiresAt === "string" && + parsed.refreshExpiresAt.trim().length > 0 + ? parsed.refreshExpiresAt.trim() + : undefined; + const tokenType = parsed.tokenType === "Bearer" ? "Bearer" : undefined; + + return { + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + tokenType, + }; +} + +export function parseAgentIdentity(rawIdentity: string): { did: string } { + const parsed = parseJsonRecord( + rawIdentity, + "CLI_CONNECTOR_INVALID_AGENT_IDENTITY", + "Agent identity is invalid for connector startup", + ); + + return { + did: parseAgentDid(parsed.did), + }; +} diff --git a/apps/cli/src/commands/connector/runtime.ts b/apps/cli/src/commands/connector/runtime.ts new file mode 100644 index 0000000..8d64d74 --- /dev/null +++ b/apps/cli/src/commands/connector/runtime.ts @@ -0,0 +1,61 @@ +import { startConnectorRuntime as bundledStartConnectorRuntime } from "@clawdentity/connector"; +import type { ConnectorModule, ConnectorRuntime } from "./types.js"; +import { isRecord } from "./validation.js"; + +export async function loadDefaultConnectorModule(): Promise { + return { + startConnectorRuntime: bundledStartConnectorRuntime, + }; +} + +export function resolveWaitPromise( + runtime: ConnectorRuntime | undefined, +): Promise | undefined { + if (!runtime || !isRecord(runtime)) { + return undefined; + } + + if (typeof runtime.waitUntilStopped === "function") { + return runtime.waitUntilStopped(); + } + + if (typeof runtime.waitForStop === "function") { + return runtime.waitForStop(); + } + + if (runtime.closed instanceof Promise) { + return runtime.closed.then(() => undefined); + } + + 
return undefined; +} + +export function resolveRuntimeOutboundUrl( + runtime: ConnectorRuntime | undefined, + fallbackOutboundUrl: string, +): string { + if (runtime && isRecord(runtime) && typeof runtime.outboundUrl === "string") { + return runtime.outboundUrl; + } + + return fallbackOutboundUrl; +} + +export function resolveRuntimeProxyWebsocketUrl( + runtime: ConnectorRuntime | undefined, + fallbackProxyWebsocketUrl: string, +): string | undefined { + if (!runtime || !isRecord(runtime)) { + return undefined; + } + + if (typeof runtime.websocketUrl === "string") { + return runtime.websocketUrl; + } + + if (typeof runtime.proxyWebsocketUrl === "string") { + return runtime.proxyWebsocketUrl; + } + + return fallbackProxyWebsocketUrl; +} diff --git a/apps/cli/src/commands/connector/service.ts b/apps/cli/src/commands/connector/service.ts new file mode 100644 index 0000000..9c596b9 --- /dev/null +++ b/apps/cli/src/commands/connector/service.ts @@ -0,0 +1,534 @@ +import { execFile as execFileCallback } from "node:child_process"; +import { mkdir, readFile, rm, writeFile } from "node:fs/promises"; +import { homedir } from "node:os"; +import { basename, dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; +import { promisify } from "node:util"; +import { getConfigDir, resolveConfig } from "../../config/manager.js"; +import { + readConnectorAssignedBaseUrl, + readRelayRuntimeConfig, + resolveConnectorBaseUrlFromEnv, + resolveConnectorOutboundPath, + resolveOutboundUrl, + resolveProxyWebsocketUrl, +} from "./config.js"; +import { + parseAgentIdentity, + parseRegistryAuth, + readRequiredTrimmedFile, +} from "./credentials.js"; +import { + loadDefaultConnectorModule, + resolveRuntimeOutboundUrl, + resolveRuntimeProxyWebsocketUrl, +} from "./runtime.js"; +import { + AGENTS_DIR_NAME, + AIT_FILE_NAME, + type ConnectorCommandDependencies, + type ConnectorServiceInstallCommandOptions, + type ConnectorServiceInstallResult, + type 
ConnectorServiceUninstallCommandOptions, + type ConnectorServiceUninstallResult, + type ConnectorStartCommandOptions, + type ConnectorStartResult, + DEFAULT_CONNECTOR_BASE_URL, + type ExecFileLike, + IDENTITY_FILE_NAME, + REGISTRY_AUTH_FILE_NAME, + type ReadFileText, + type ResolveConfigLike, + type ResolveCurrentModulePathLike, + SECRET_KEY_FILE_NAME, + SERVICE_LOG_DIR_NAME, +} from "./types.js"; +import { + createCliError, + parseConnectorServicePlatformOption, + resolveConnectorServicePlatform, + sanitizeServiceSegment, +} from "./validation.js"; + +const execFile = promisify(execFileCallback); + +function buildConnectorStartArgs( + agentName: string, + commandOptions: ConnectorStartCommandOptions, +): string[] { + const args = ["connector", "start", agentName]; + + if (commandOptions.proxyWsUrl) { + args.push("--proxy-ws-url", commandOptions.proxyWsUrl); + } + + if (commandOptions.openclawBaseUrl) { + args.push("--openclaw-base-url", commandOptions.openclawBaseUrl); + } + + if (commandOptions.openclawHookPath) { + args.push("--openclaw-hook-path", commandOptions.openclawHookPath); + } + + if (commandOptions.openclawHookToken) { + args.push("--openclaw-hook-token", commandOptions.openclawHookToken); + } + + return args; +} + +function resolveCliEntryPath( + resolveCurrentModulePathImpl: ResolveCurrentModulePathLike | undefined, +): string { + const modulePath = + resolveCurrentModulePathImpl?.() ?? 
fileURLToPath(import.meta.url); + const moduleDirectory = dirname(modulePath); + + if (basename(moduleDirectory) === "connector") { + return join(moduleDirectory, "..", "..", "bin.js"); + } + + return join(moduleDirectory, "..", "bin.js"); +} + +function escapeXml(value: string): string { + return value + .replaceAll("&", "&") + .replaceAll("<", "<") + .replaceAll(">", ">") + .replaceAll('"', """) + .replaceAll("'", "'"); +} + +function quoteSystemdArgument(value: string): string { + return `"${value.replaceAll("\\", "\\\\").replaceAll('"', '\\"')}"`; +} + +function createSystemdServiceFileContent(input: { + command: string[]; + description: string; + errorLogPath: string; + outputLogPath: string; + workingDirectory: string; +}): string { + const execStart = input.command.map(quoteSystemdArgument).join(" "); + + return [ + "[Unit]", + `Description=${input.description}`, + "After=network-online.target", + "Wants=network-online.target", + "", + "[Service]", + "Type=simple", + `ExecStart=${execStart}`, + "Restart=always", + "RestartSec=2", + `WorkingDirectory=${quoteSystemdArgument(input.workingDirectory)}`, + `StandardOutput=append:${input.outputLogPath}`, + `StandardError=append:${input.errorLogPath}`, + "", + "[Install]", + "WantedBy=default.target", + "", + ].join("\n"); +} + +function createLaunchdPlistContent(input: { + command: string[]; + label: string; + errorLogPath: string; + outputLogPath: string; + workingDirectory: string; +}): string { + const commandItems = input.command + .map((arg) => ` ${escapeXml(arg)}`) + .join("\n"); + + return [ + '', + '', + '', + "", + " Label", + ` ${escapeXml(input.label)}`, + " ProgramArguments", + " ", + commandItems, + " ", + " RunAtLoad", + " ", + " KeepAlive", + " ", + " WorkingDirectory", + ` ${escapeXml(input.workingDirectory)}`, + " StandardOutPath", + ` ${escapeXml(input.outputLogPath)}`, + " StandardErrorPath", + ` ${escapeXml(input.errorLogPath)}`, + "", + "", + "", + ].join("\n"); +} + +function 
resolveServiceDependencies( + dependencies: ConnectorCommandDependencies, +) { + const execFileImpl: ExecFileLike = + dependencies.execFileImpl ?? + (async (file, args = []) => { + const result = await execFile(file, [...args]); + return { + stdout: result.stdout ?? "", + stderr: result.stderr ?? "", + }; + }); + + return { + execFileImpl, + getConfigDirImpl: dependencies.getConfigDirImpl ?? getConfigDir, + getHomeDirImpl: dependencies.getHomeDirImpl ?? homedir, + mkdirImpl: dependencies.mkdirImpl ?? mkdir, + removeFileImpl: dependencies.removeFileImpl ?? rm, + resolveCurrentModulePathImpl: dependencies.resolveCurrentModulePathImpl, + resolveCurrentPlatformImpl: + dependencies.resolveCurrentPlatformImpl ?? (() => process.platform), + resolveCurrentUidImpl: + dependencies.resolveCurrentUidImpl ?? + (() => { + if (typeof process.getuid !== "function") { + throw createCliError( + "CLI_CONNECTOR_SERVICE_UID_UNAVAILABLE", + "Current user id is unavailable in this runtime", + ); + } + return process.getuid(); + }), + resolveNodeExecPathImpl: + dependencies.resolveNodeExecPathImpl ?? (() => process.execPath), + writeFileImpl: dependencies.writeFileImpl ?? 
writeFile, + }; +} + +export async function installConnectorServiceForAgent( + agentName: string, + commandOptions: ConnectorServiceInstallCommandOptions = {}, + dependencies: ConnectorCommandDependencies = {}, +): Promise { + const serviceDependencies = resolveServiceDependencies(dependencies); + const servicePlatform = parseConnectorServicePlatformOption( + commandOptions.platform, + ); + const platform = resolveConnectorServicePlatform( + servicePlatform, + serviceDependencies.resolveCurrentPlatformImpl(), + ); + const configDir = serviceDependencies.getConfigDirImpl(); + const homeDir = serviceDependencies.getHomeDirImpl(); + const logsDir = join(configDir, SERVICE_LOG_DIR_NAME); + const serviceName = sanitizeServiceSegment( + `clawdentity-connector-${agentName}`, + ); + const startArgs = buildConnectorStartArgs(agentName, commandOptions); + const command = [ + serviceDependencies.resolveNodeExecPathImpl(), + resolveCliEntryPath(serviceDependencies.resolveCurrentModulePathImpl), + ...startArgs, + ]; + const outputLogPath = join(logsDir, `${serviceName}.out.log`); + const errorLogPath = join(logsDir, `${serviceName}.err.log`); + + await serviceDependencies.mkdirImpl(logsDir, { recursive: true }); + + if (platform === "systemd") { + const serviceDir = join(homeDir, ".config", "systemd", "user"); + const serviceFilePath = join(serviceDir, `${serviceName}.service`); + + await serviceDependencies.mkdirImpl(serviceDir, { recursive: true }); + await serviceDependencies.writeFileImpl( + serviceFilePath, + createSystemdServiceFileContent({ + command, + description: `Clawdentity connector (${agentName})`, + outputLogPath, + errorLogPath, + workingDirectory: homeDir, + }), + "utf8", + ); + + try { + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "daemon-reload", + ]); + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "enable", + "--now", + `${serviceName}.service`, + ]); + } catch (error) { + throw createCliError( + 
"CLI_CONNECTOR_SERVICE_INSTALL_FAILED", + "Failed to install systemd connector service", + { + reason: error instanceof Error ? error.message : "unknown", + }, + ); + } + + return { + platform, + serviceName, + serviceFilePath, + }; + } + + const launchAgentsDir = join(homeDir, "Library", "LaunchAgents"); + const serviceNameWithDomain = `com.clawdentity.${serviceName}`; + const serviceFilePath = join( + launchAgentsDir, + `${serviceNameWithDomain}.plist`, + ); + + await serviceDependencies.mkdirImpl(launchAgentsDir, { recursive: true }); + await serviceDependencies.writeFileImpl( + serviceFilePath, + createLaunchdPlistContent({ + command, + label: serviceNameWithDomain, + outputLogPath, + errorLogPath, + workingDirectory: homeDir, + }), + "utf8", + ); + + try { + await serviceDependencies.execFileImpl("launchctl", [ + "unload", + "-w", + serviceFilePath, + ]); + } catch { + // Ignore unload failures for first install or already-unloaded service. + } + + try { + await serviceDependencies.execFileImpl("launchctl", [ + "load", + "-w", + serviceFilePath, + ]); + } catch (error) { + throw createCliError( + "CLI_CONNECTOR_SERVICE_INSTALL_FAILED", + "Failed to install launchd connector service", + { + reason: error instanceof Error ? 
error.message : "unknown", + }, + ); + } + + return { + platform, + serviceName: serviceNameWithDomain, + serviceFilePath, + }; +} + +export async function uninstallConnectorServiceForAgent( + agentName: string, + commandOptions: ConnectorServiceUninstallCommandOptions = {}, + dependencies: ConnectorCommandDependencies = {}, +): Promise { + const serviceDependencies = resolveServiceDependencies(dependencies); + const servicePlatform = parseConnectorServicePlatformOption( + commandOptions.platform, + ); + const platform = resolveConnectorServicePlatform( + servicePlatform, + serviceDependencies.resolveCurrentPlatformImpl(), + ); + const homeDir = serviceDependencies.getHomeDirImpl(); + const serviceName = sanitizeServiceSegment( + `clawdentity-connector-${agentName}`, + ); + + if (platform === "systemd") { + const serviceFilePath = join( + homeDir, + ".config", + "systemd", + "user", + `${serviceName}.service`, + ); + + try { + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "disable", + "--now", + `${serviceName}.service`, + ]); + } catch { + // Continue uninstall to keep command idempotent. + } + + await serviceDependencies.removeFileImpl(serviceFilePath, { force: true }); + + try { + await serviceDependencies.execFileImpl("systemctl", [ + "--user", + "daemon-reload", + ]); + } catch { + // Continue uninstall; unit file is already removed. + } + + return { + platform, + serviceName, + serviceFilePath, + }; + } + + const serviceNameWithDomain = `com.clawdentity.${serviceName}`; + const serviceFilePath = join( + homeDir, + "Library", + "LaunchAgents", + `${serviceNameWithDomain}.plist`, + ); + + try { + await serviceDependencies.execFileImpl("launchctl", [ + "unload", + "-w", + serviceFilePath, + ]); + } catch { + // Continue uninstall to keep command idempotent. 
+ } + + await serviceDependencies.removeFileImpl(serviceFilePath, { force: true }); + + return { + platform, + serviceName: serviceNameWithDomain, + serviceFilePath, + }; +} + +export async function startConnectorForAgent( + agentName: string, + commandOptions: ConnectorStartCommandOptions = {}, + dependencies: ConnectorCommandDependencies = {}, +): Promise { + const resolveConfigImpl: ResolveConfigLike = + dependencies.resolveConfigImpl ?? resolveConfig; + const getConfigDirImpl = dependencies.getConfigDirImpl ?? getConfigDir; + const readFileImpl: ReadFileText = + dependencies.readFileImpl ?? ((path, encoding) => readFile(path, encoding)); + const fetchImpl = dependencies.fetchImpl ?? globalThis.fetch; + const loadConnectorModule = + dependencies.loadConnectorModule ?? loadDefaultConnectorModule; + const configDir = getConfigDirImpl(); + const agentDirectory = join(configDir, AGENTS_DIR_NAME, agentName); + + const [ + rawAit, + rawSecretKey, + rawIdentity, + rawRegistryAuth, + assignedConnectorBaseUrl, + relayRuntimeConfig, + config, + connectorModule, + ] = await Promise.all([ + readRequiredTrimmedFile( + join(agentDirectory, AIT_FILE_NAME), + AIT_FILE_NAME, + readFileImpl, + ), + readRequiredTrimmedFile( + join(agentDirectory, SECRET_KEY_FILE_NAME), + SECRET_KEY_FILE_NAME, + readFileImpl, + ), + readRequiredTrimmedFile( + join(agentDirectory, IDENTITY_FILE_NAME), + IDENTITY_FILE_NAME, + readFileImpl, + ), + readRequiredTrimmedFile( + join(agentDirectory, REGISTRY_AUTH_FILE_NAME), + REGISTRY_AUTH_FILE_NAME, + readFileImpl, + ), + readConnectorAssignedBaseUrl(configDir, agentName, readFileImpl), + readRelayRuntimeConfig(configDir, readFileImpl), + resolveConfigImpl(), + loadConnectorModule(), + ]); + + if (typeof connectorModule.startConnectorRuntime !== "function") { + throw createCliError( + "CLI_CONNECTOR_INVALID_PACKAGE_API", + "Connector package does not expose startConnectorRuntime", + ); + } + + const identity = parseAgentIdentity(rawIdentity); + const 
registryAuth = parseRegistryAuth(rawRegistryAuth); + const resolvedProxyWebsocketUrl = await resolveProxyWebsocketUrl({ + explicitProxyWsUrl: commandOptions.proxyWsUrl, + configProxyUrl: config.proxyUrl, + registryUrl: config.registryUrl, + fetchImpl, + }); + const openclawHookToken = + commandOptions.openclawHookToken ?? relayRuntimeConfig?.openclawHookToken; + const outboundBaseUrl = + resolveConnectorBaseUrlFromEnv() ?? + assignedConnectorBaseUrl ?? + DEFAULT_CONNECTOR_BASE_URL; + const outboundPath = resolveConnectorOutboundPath(); + const runtime = await connectorModule.startConnectorRuntime({ + agentName, + configDir, + registryUrl: config.registryUrl, + outboundBaseUrl, + outboundPath, + proxyWebsocketUrl: resolvedProxyWebsocketUrl, + openclawBaseUrl: commandOptions.openclawBaseUrl, + openclawHookPath: commandOptions.openclawHookPath, + openclawHookToken, + credentials: { + agentDid: identity.did, + ait: rawAit, + secretKey: rawSecretKey, + refreshToken: registryAuth.refreshToken, + accessToken: registryAuth.accessToken, + accessExpiresAt: registryAuth.accessExpiresAt, + refreshExpiresAt: registryAuth.refreshExpiresAt, + tokenType: registryAuth.tokenType, + }, + }); + const outboundUrl = resolveRuntimeOutboundUrl( + runtime, + resolveOutboundUrl(outboundBaseUrl, outboundPath), + ); + const proxyWebsocketUrl = resolveRuntimeProxyWebsocketUrl( + runtime, + resolvedProxyWebsocketUrl, + ); + + return { + outboundUrl, + proxyWebsocketUrl, + runtime, + }; +} diff --git a/apps/cli/src/commands/connector/types.ts b/apps/cli/src/commands/connector/types.ts new file mode 100644 index 0000000..0a1ee22 --- /dev/null +++ b/apps/cli/src/commands/connector/types.ts @@ -0,0 +1,137 @@ +import type { getConfigDir, resolveConfig } from "../../config/manager.js"; + +export const AGENTS_DIR_NAME = "agents"; +export const IDENTITY_FILE_NAME = "identity.json"; +export const AIT_FILE_NAME = "ait.jwt"; +export const SECRET_KEY_FILE_NAME = "secret.key"; +export const 
REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; +export const OPENCLAW_RELAY_RUNTIME_FILE_NAME = "openclaw-relay.json"; +export const OPENCLAW_CONNECTORS_FILE_NAME = "openclaw-connectors.json"; +export const SERVICE_LOG_DIR_NAME = "logs"; + +export const DEFAULT_CONNECTOR_BASE_URL = "http://127.0.0.1:19400"; +export const DEFAULT_CONNECTOR_OUTBOUND_PATH = "/v1/outbound"; + +export type ConnectorCredentials = { + accessToken?: string; + accessExpiresAt?: string; + agentDid: string; + ait: string; + refreshToken: string; + refreshExpiresAt?: string; + secretKey: string; + tokenType?: "Bearer"; +}; + +export type ConnectorStartInput = { + agentName: string; + configDir: string; + credentials: ConnectorCredentials; + openclawBaseUrl?: string; + openclawHookPath?: string; + openclawHookToken?: string; + outboundBaseUrl: string; + outboundPath: string; + proxyWebsocketUrl?: string; + registryUrl: string; +}; + +export type ConnectorRuntime = { + closed?: Promise; + outboundUrl?: string; + proxyWebsocketUrl?: string; + waitForStop?: () => Promise; + waitUntilStopped?: () => Promise; + websocketUrl?: string; +}; + +export type ConnectorModule = { + startConnectorRuntime?: ( + input: ConnectorStartInput, + ) => Promise; +}; + +export type ReadFileText = (path: string, encoding: "utf8") => Promise; +export type ResolveConfigLike = () => Promise<{ + registryUrl: string; + proxyUrl?: string; +}>; +export type ExecFileLike = ( + file: string, + args?: readonly string[], +) => Promise<{ stderr: string; stdout: string }>; +export type MkdirLike = ( + path: string, + options?: { recursive?: boolean }, +) => Promise; +export type WriteFileLike = ( + filePath: string, + data: string, + encoding: "utf8", +) => Promise; +export type RemoveFileLike = ( + filePath: string, + options?: { force?: boolean }, +) => Promise; +export type ResolveHomeDirLike = () => string; +export type ResolveNodeExecPathLike = () => string; +export type ResolveCurrentPlatformLike = () => NodeJS.Platform; 
+export type ResolveCurrentModulePathLike = () => string; +export type ResolveCurrentUidLike = () => number; + +export type ConnectorCommandDependencies = { + execFileImpl?: ExecFileLike; + fetchImpl?: typeof fetch; + getConfigDirImpl?: typeof getConfigDir; + getHomeDirImpl?: ResolveHomeDirLike; + loadConnectorModule?: () => Promise; + mkdirImpl?: MkdirLike; + readFileImpl?: ReadFileText; + removeFileImpl?: RemoveFileLike; + resolveCurrentModulePathImpl?: ResolveCurrentModulePathLike; + resolveCurrentPlatformImpl?: ResolveCurrentPlatformLike; + resolveCurrentUidImpl?: ResolveCurrentUidLike; + resolveConfigImpl?: typeof resolveConfig; + resolveNodeExecPathImpl?: ResolveNodeExecPathLike; + writeFileImpl?: WriteFileLike; +}; + +export type ConnectorStartCommandOptions = { + openclawBaseUrl?: string; + openclawHookPath?: string; + openclawHookToken?: string; + proxyWsUrl?: string; +}; + +export type ConnectorServicePlatform = "launchd" | "systemd"; + +export type ConnectorServiceInstallCommandOptions = + ConnectorStartCommandOptions & { + platform?: "auto" | ConnectorServicePlatform; + }; + +export type ConnectorServiceUninstallCommandOptions = { + platform?: "auto" | ConnectorServicePlatform; +}; + +export type ConnectorStartResult = { + outboundUrl: string; + proxyWebsocketUrl?: string; + runtime?: ConnectorRuntime | undefined; +}; + +export type OpenclawRelayRuntimeConfig = { + openclawHookToken?: string; +}; + +export type ConnectorServiceInstallResult = { + serviceFilePath: string; + serviceName: string; + platform: ConnectorServicePlatform; +}; + +export type ConnectorServiceUninstallResult = { + serviceFilePath: string; + serviceName: string; + platform: ConnectorServicePlatform; +}; diff --git a/apps/cli/src/commands/connector/validation.ts b/apps/cli/src/commands/connector/validation.ts new file mode 100644 index 0000000..66c4d75 --- /dev/null +++ b/apps/cli/src/commands/connector/validation.ts @@ -0,0 +1,196 @@ +import { AppError } from "@clawdentity/sdk"; 
+import type { ConnectorServicePlatform } from "./types.js"; + +export function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function getErrorCode(error: unknown): string | undefined { + if (!isRecord(error)) { + return undefined; + } + + return typeof error.code === "string" ? error.code : undefined; +} + +export function createCliError( + code: string, + message: string, + details?: Record, +): AppError { + return new AppError({ + code, + message, + status: 400, + details, + }); +} + +export function parseNonEmptyString(value: unknown, label: string): string { + if (typeof value !== "string") { + throw createCliError( + "CLI_CONNECTOR_INVALID_INPUT", + "Connector input is invalid", + { + label, + }, + ); + } + + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_CONNECTOR_INVALID_INPUT", + "Connector input is invalid", + { + label, + }, + ); + } + + return trimmed; +} + +export function parseAgentDid(value: unknown): string { + const did = parseNonEmptyString(value, "agent did"); + if (!did.startsWith("did:claw:agent:")) { + throw createCliError( + "CLI_CONNECTOR_INVALID_AGENT_IDENTITY", + "Agent identity is invalid for connector startup", + ); + } + + return did; +} + +export function parseConnectorBaseUrl(value: string): string { + let parsed: URL; + try { + parsed = new URL(value); + } catch { + throw createCliError( + "CLI_CONNECTOR_INVALID_BASE_URL", + "Connector base URL is invalid", + ); + } + + if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { + throw createCliError( + "CLI_CONNECTOR_INVALID_BASE_URL", + "Connector base URL is invalid", + ); + } + + if ( + parsed.pathname === "/" && + parsed.search.length === 0 && + parsed.hash.length === 0 + ) { + return parsed.origin; + } + + return parsed.toString(); +} + +export function parseProxyWebsocketUrl(value: string): string { + let parsed: URL; + try { + parsed = new URL(value); + } catch 
{ + throw createCliError( + "CLI_CONNECTOR_INVALID_PROXY_URL", + "Proxy websocket URL is invalid", + ); + } + + if ( + parsed.protocol !== "ws:" && + parsed.protocol !== "wss:" && + parsed.protocol !== "http:" && + parsed.protocol !== "https:" + ) { + throw createCliError( + "CLI_CONNECTOR_INVALID_PROXY_URL", + "Proxy websocket URL is invalid", + ); + } + + return parsed.toString(); +} + +export function normalizeOutboundPath(pathValue: string): string { + const trimmed = pathValue.trim(); + if (trimmed.length === 0) { + throw createCliError( + "CLI_CONNECTOR_INVALID_OUTBOUND_PATH", + "Connector outbound path is invalid", + ); + } + + return trimmed.startsWith("/") ? trimmed : `/${trimmed}`; +} + +export function parseJsonRecord( + value: string, + code: string, + message: string, +): Record { + let parsed: unknown; + try { + parsed = JSON.parse(value); + } catch { + throw createCliError(code, message); + } + + if (!isRecord(parsed)) { + throw createCliError(code, message); + } + + return parsed; +} + +export function sanitizeServiceSegment(value: string): string { + return value.replaceAll(/[^a-zA-Z0-9_.-]+/g, "-"); +} + +export function parseConnectorServicePlatformOption( + value: unknown, +): "auto" | ConnectorServicePlatform { + if (value === undefined) { + return "auto"; + } + + if (value === "auto" || value === "launchd" || value === "systemd") { + return value; + } + + throw createCliError( + "CLI_CONNECTOR_SERVICE_PLATFORM_INVALID", + "Connector service platform must be one of: auto, launchd, systemd", + ); +} + +export function resolveConnectorServicePlatform( + inputPlatform: "auto" | ConnectorServicePlatform | undefined, + currentPlatform: NodeJS.Platform, +): ConnectorServicePlatform { + if (inputPlatform && inputPlatform !== "auto") { + return inputPlatform; + } + + if (currentPlatform === "darwin") { + return "launchd"; + } + + if (currentPlatform === "linux") { + return "systemd"; + } + + throw createCliError( + 
"CLI_CONNECTOR_SERVICE_PLATFORM_UNSUPPORTED", + "Connector service install is supported only on macOS (launchd) and Linux (systemd)", + { + platform: currentPlatform, + }, + ); +} From e3a619ed42a61c1624dfcb104f24c4c59685cdc6 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 16:36:15 +0530 Subject: [PATCH 128/190] refactor(cli): split agent command into modules --- apps/cli/src/commands/agent.ts | 985 +--------------------- apps/cli/src/commands/agent/AGENTS.md | 30 + apps/cli/src/commands/agent/auth.ts | 40 + apps/cli/src/commands/agent/command.ts | 205 +++++ apps/cli/src/commands/agent/fs.ts | 299 +++++++ apps/cli/src/commands/agent/paths.ts | 38 + apps/cli/src/commands/agent/registry.ts | 244 ++++++ apps/cli/src/commands/agent/types.ts | 45 + apps/cli/src/commands/agent/validation.ts | 200 +++++ 9 files changed, 1102 insertions(+), 984 deletions(-) create mode 100644 apps/cli/src/commands/agent/AGENTS.md create mode 100644 apps/cli/src/commands/agent/auth.ts create mode 100644 apps/cli/src/commands/agent/command.ts create mode 100644 apps/cli/src/commands/agent/fs.ts create mode 100644 apps/cli/src/commands/agent/paths.ts create mode 100644 apps/cli/src/commands/agent/registry.ts create mode 100644 apps/cli/src/commands/agent/types.ts create mode 100644 apps/cli/src/commands/agent/validation.ts diff --git a/apps/cli/src/commands/agent.ts b/apps/cli/src/commands/agent.ts index dfa1ead..825ca85 100644 --- a/apps/cli/src/commands/agent.ts +++ b/apps/cli/src/commands/agent.ts @@ -1,984 +1 @@ -import { - access, - chmod, - mkdir, - readFile, - rename, - unlink, - writeFile, -} from "node:fs/promises"; -import { join } from "node:path"; -import { - AGENT_REGISTRATION_CHALLENGE_PATH, - canonicalizeAgentRegistrationProof, - decodeBase64url, - parseDid, -} from "@clawdentity/protocol"; -import { - createLogger, - type DecodedAit, - decodeAIT, - encodeEd25519KeypairBase64url, - encodeEd25519SignatureBase64url, - generateEd25519Keypair, - nowUtcMs, - 
refreshAgentAuthWithClawProof, - signEd25519, - toIso, -} from "@clawdentity/sdk"; -import { Command } from "commander"; -import { getConfigDir, resolveConfig } from "../config/manager.js"; -import { writeStdoutLine } from "../io.js"; -import { assertValidAgentName } from "./agent-name.js"; -import { withErrorHandling } from "./helpers.js"; - -const logger = createLogger({ service: "cli", module: "agent" }); - -const AGENTS_DIR_NAME = "agents"; -const AIT_FILE_NAME = "ait.jwt"; -const IDENTITY_FILE_NAME = "identity.json"; -const REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; -const FILE_MODE = 0o600; - -type AgentCreateOptions = { - framework?: string; - ttlDays?: string; -}; - -type AgentRegistrationResponse = { - agent: { - did: string; - name: string; - framework: string; - expiresAt: string; - }; - ait: string; - agentAuth: AgentAuthBundle; -}; - -type AgentRegistrationChallengeResponse = { - challengeId: string; - nonce: string; - ownerDid: string; - expiresAt: string; -}; - -type LocalAgentIdentity = { - did: string; - registryUrl?: string; -}; - -type AgentAuthBundle = { - tokenType: "Bearer"; - accessToken: string; - accessExpiresAt: string; - refreshToken: string; - refreshExpiresAt: string; -}; - -type LocalAgentRegistryAuth = { - refreshToken: string; -}; - -type RegistryErrorEnvelope = { - error?: { - message?: string; - }; -}; - -const isRecord = (value: unknown): value is Record => { - return typeof value === "object" && value !== null; -}; - -const parseNonEmptyString = (value: unknown): string => { - if (typeof value !== "string") { - return ""; - } - - return value.trim(); -}; - -const getAgentDirectory = (name: string): string => { - return join(getConfigDir(), AGENTS_DIR_NAME, name); -}; - -const getAgentAitPath = (name: string): string => { - return join(getAgentDirectory(name), AIT_FILE_NAME); -}; - -const getAgentIdentityPath = (name: string): string => { - return join(getAgentDirectory(name), IDENTITY_FILE_NAME); -}; - -const 
getAgentSecretKeyPath = (name: string): string => { - return join(getAgentDirectory(name), "secret.key"); -}; - -const getAgentRegistryAuthPath = (name: string): string => { - return join(getAgentDirectory(name), REGISTRY_AUTH_FILE_NAME); -}; - -const readAgentAitToken = async (agentName: string): Promise => { - const aitPath = getAgentAitPath(agentName); - - let rawToken: string; - try { - rawToken = await readFile(aitPath, "utf-8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw new Error(`Agent "${agentName}" not found (${aitPath})`); - } - - throw error; - } - - const token = rawToken.trim(); - if (token.length === 0) { - throw new Error(`Agent "${agentName}" has an empty ${AIT_FILE_NAME}`); - } - - return token; -}; - -const readAgentIdentity = async ( - agentName: string, -): Promise => { - const identityPath = getAgentIdentityPath(agentName); - - let rawIdentity: string; - try { - rawIdentity = await readFile(identityPath, "utf-8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw new Error(`Agent "${agentName}" not found (${identityPath})`); - } - - throw error; - } - - let parsed: unknown; - try { - parsed = JSON.parse(rawIdentity); - } catch { - throw new Error( - `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (must be valid JSON)`, - ); - } - - if (!isRecord(parsed) || typeof parsed.did !== "string") { - throw new Error( - `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (missing did)`, - ); - } - - const did = parsed.did.trim(); - if (did.length === 0) { - throw new Error( - `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (missing did)`, - ); - } - - const registryUrl = parseNonEmptyString(parsed.registryUrl); - return { - did, - registryUrl: registryUrl.length > 0 ? 
registryUrl : undefined, - }; -}; - -const readAgentSecretKey = async (agentName: string): Promise => { - const secretKeyPath = getAgentSecretKeyPath(agentName); - - let rawSecretKey: string; - try { - rawSecretKey = await readFile(secretKeyPath, "utf-8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw new Error(`Agent "${agentName}" not found (${secretKeyPath})`); - } - throw error; - } - - const encodedSecretKey = rawSecretKey.trim(); - if (encodedSecretKey.length === 0) { - throw new Error(`Agent "${agentName}" has an empty secret.key`); - } - - try { - return decodeBase64url(encodedSecretKey); - } catch { - throw new Error(`Agent "${agentName}" has invalid secret.key format`); - } -}; - -const readAgentRegistryAuth = async ( - agentName: string, -): Promise => { - const registryAuthPath = getAgentRegistryAuthPath(agentName); - - let rawRegistryAuth: string; - try { - rawRegistryAuth = await readFile(registryAuthPath, "utf-8"); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - throw new Error( - `Agent "${agentName}" has no ${REGISTRY_AUTH_FILE_NAME}. 
Recreate agent identity or re-run auth bootstrap.`, - ); - } - throw error; - } - - let parsed: unknown; - try { - parsed = JSON.parse(rawRegistryAuth); - } catch { - throw new Error( - `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME} (must be valid JSON)`, - ); - } - - if (!isRecord(parsed)) { - throw new Error( - `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME}`, - ); - } - - const refreshToken = parseNonEmptyString(parsed.refreshToken); - if (refreshToken.length === 0) { - throw new Error( - `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME} (missing refreshToken)`, - ); - } - - return { - refreshToken, - }; -}; - -const parseAgentIdFromDid = (agentName: string, did: string): string => { - try { - const parsedDid = parseDid(did); - if (parsedDid.kind !== "agent") { - throw new Error("DID is not an agent DID"); - } - - return parsedDid.ulid; - } catch { - throw new Error( - `Agent "${agentName}" has invalid did in ${IDENTITY_FILE_NAME}: ${did}`, - ); - } -}; - -const formatExpiresAt = (expires: number): string => { - return toIso(expires * 1000); -}; - -const resolveFramework = ( - framework: string | undefined, -): string | undefined => { - if (framework === undefined) { - return undefined; - } - - const normalizedFramework = framework.trim(); - if (normalizedFramework.length === 0) { - throw new Error("--framework must not be empty when provided"); - } - - return normalizedFramework; -}; - -const resolveTtlDays = (ttlDays: string | undefined): number | undefined => { - if (ttlDays === undefined) { - return undefined; - } - - const parsed = Number.parseInt(ttlDays, 10); - if (!Number.isInteger(parsed) || parsed <= 0) { - throw new Error("--ttl-days must be a positive integer"); - } - - return parsed; -}; - -const extractRegistryErrorMessage = (payload: unknown): string | undefined => { - if (!isRecord(payload)) { - return undefined; - } - - const envelope = payload as RegistryErrorEnvelope; - if (!envelope.error || typeof 
envelope.error.message !== "string") { - return undefined; - } - - const trimmed = envelope.error.message.trim(); - return trimmed.length > 0 ? trimmed : undefined; -}; - -const parseJsonResponse = async (response: Response): Promise => { - try { - return await response.json(); - } catch { - return undefined; - } -}; - -const toRegistryAgentsRequestUrl = ( - registryUrl: string, - agentId?: string, -): string => { - const normalizedBaseUrl = registryUrl.endsWith("/") - ? registryUrl - : `${registryUrl}/`; - - const path = agentId - ? `v1/agents/${encodeURIComponent(agentId)}` - : "v1/agents"; - - return new URL(path, normalizedBaseUrl).toString(); -}; - -const toRegistryAgentChallengeRequestUrl = (registryUrl: string): string => { - const normalizedBaseUrl = registryUrl.endsWith("/") - ? registryUrl - : `${registryUrl}/`; - - return new URL( - AGENT_REGISTRATION_CHALLENGE_PATH.slice(1), - normalizedBaseUrl, - ).toString(); -}; - -const toHttpErrorMessage = (status: number, responseBody: unknown): string => { - const registryMessage = extractRegistryErrorMessage(responseBody); - - if (status === 401) { - return registryMessage - ? `Registry authentication failed (401): ${registryMessage}` - : "Registry authentication failed (401). Check your API key."; - } - - if (status === 400) { - return registryMessage - ? `Registry rejected the request (400): ${registryMessage}` - : "Registry rejected the request (400). Check name/framework/ttl-days."; - } - - if (status >= 500) { - return `Registry server error (${status}). 
Try again later.`; - } - - if (registryMessage) { - return `Registry request failed (${status}): ${registryMessage}`; - } - - return `Registry request failed (${status})`; -}; - -const parseAgentAuthBundle = (value: unknown): AgentAuthBundle => { - if (!isRecord(value)) { - throw new Error("Registry returned an invalid response payload"); - } - - const tokenType = value.tokenType; - const accessToken = value.accessToken; - const accessExpiresAt = value.accessExpiresAt; - const refreshToken = value.refreshToken; - const refreshExpiresAt = value.refreshExpiresAt; - - if ( - tokenType !== "Bearer" || - typeof accessToken !== "string" || - typeof accessExpiresAt !== "string" || - typeof refreshToken !== "string" || - typeof refreshExpiresAt !== "string" - ) { - throw new Error("Registry returned an invalid response payload"); - } - - return { - tokenType, - accessToken, - accessExpiresAt, - refreshToken, - refreshExpiresAt, - }; -}; - -const parseAgentRegistrationResponse = ( - payload: unknown, -): AgentRegistrationResponse => { - if (!isRecord(payload)) { - throw new Error("Registry returned an invalid response payload"); - } - - const agentValue = payload.agent; - const aitValue = payload.ait; - const agentAuthValue = payload.agentAuth; - - if ( - !isRecord(agentValue) || - typeof aitValue !== "string" || - !isRecord(agentAuthValue) - ) { - throw new Error("Registry returned an invalid response payload"); - } - - const did = agentValue.did; - const name = agentValue.name; - const framework = agentValue.framework; - const expiresAt = agentValue.expiresAt; - - if ( - typeof did !== "string" || - typeof name !== "string" || - typeof framework !== "string" || - typeof expiresAt !== "string" - ) { - throw new Error("Registry returned an invalid response payload"); - } - - return { - agent: { - did, - name, - framework, - expiresAt, - }, - ait: aitValue, - agentAuth: parseAgentAuthBundle(agentAuthValue), - }; -}; - -const parseAgentRegistrationChallengeResponse = ( - 
payload: unknown, -): AgentRegistrationChallengeResponse => { - if (!isRecord(payload)) { - throw new Error("Registry returned an invalid response payload"); - } - - const challengeId = payload.challengeId; - const nonce = payload.nonce; - const ownerDid = payload.ownerDid; - const expiresAt = payload.expiresAt; - - if ( - typeof challengeId !== "string" || - typeof nonce !== "string" || - typeof ownerDid !== "string" || - typeof expiresAt !== "string" - ) { - throw new Error("Registry returned an invalid response payload"); - } - - return { - challengeId, - nonce, - ownerDid, - expiresAt, - }; -}; - -const ensureAgentDirectoryAvailable = async ( - agentName: string, - agentDirectory: string, -): Promise => { - try { - await access(agentDirectory); - throw new Error(`Agent "${agentName}" already exists at ${agentDirectory}`); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "ENOENT") { - return; - } - - throw error; - } -}; - -const writeSecureFile = async ( - path: string, - content: string, -): Promise => { - await writeFile(path, content, "utf-8"); - await chmod(path, FILE_MODE); -}; - -const writeSecureFileAtomically = async ( - path: string, - content: string, -): Promise => { - const tempPath = `${path}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; - - await writeFile(tempPath, content, "utf-8"); - await chmod(tempPath, FILE_MODE); - - try { - await rename(tempPath, path); - } catch (error) { - try { - await unlink(tempPath); - } catch { - // Best-effort cleanup only. 
- } - - throw error; - } -}; - -const ensureAgentDirectory = async ( - agentName: string, - agentDirectory: string, -): Promise => { - await mkdir(join(getConfigDir(), AGENTS_DIR_NAME), { recursive: true }); - - try { - await mkdir(agentDirectory); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code === "EEXIST") { - throw new Error( - `Agent "${agentName}" already exists at ${agentDirectory}`, - ); - } - - throw error; - } -}; - -const writeAgentIdentity = async (input: { - agentDirectory: string; - did: string; - name: string; - framework: string; - expiresAt: string; - registryUrl: string; - publicKey: string; - secretKey: string; - ait: string; - agentAuth: AgentAuthBundle; -}): Promise => { - await ensureAgentDirectory(input.name, input.agentDirectory); - - const identityJson = { - did: input.did, - name: input.name, - framework: input.framework, - expiresAt: input.expiresAt, - registryUrl: input.registryUrl, - }; - - await writeSecureFile( - join(input.agentDirectory, "secret.key"), - input.secretKey, - ); - await writeSecureFile( - join(input.agentDirectory, "public.key"), - input.publicKey, - ); - await writeSecureFile( - join(input.agentDirectory, "identity.json"), - `${JSON.stringify(identityJson, null, 2)}\n`, - ); - await writeSecureFile(join(input.agentDirectory, "ait.jwt"), input.ait); - await writeSecureFile( - join(input.agentDirectory, REGISTRY_AUTH_FILE_NAME), - `${JSON.stringify(input.agentAuth, null, 2)}\n`, - ); -}; - -const writeAgentRegistryAuth = async (input: { - agentName: string; - agentAuth: AgentAuthBundle; -}): Promise => { - await writeSecureFileAtomically( - getAgentRegistryAuthPath(input.agentName), - `${JSON.stringify(input.agentAuth, null, 2)}\n`, - ); -}; - -const requestAgentRegistrationChallenge = async (input: { - apiKey: string; - registryUrl: string; - publicKey: string; -}): Promise => { - let response: Response; - try { - response = await fetch( - 
toRegistryAgentChallengeRequestUrl(input.registryUrl), - { - method: "POST", - headers: { - authorization: `Bearer ${input.apiKey}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: input.publicKey, - }), - }, - ); - } catch { - throw new Error( - "Unable to connect to the registry. Check network access and registryUrl.", - ); - } - - const responseBody = await parseJsonResponse(response); - - if (!response.ok) { - throw new Error(toHttpErrorMessage(response.status, responseBody)); - } - - return parseAgentRegistrationChallengeResponse(responseBody); -}; - -const registerAgent = async (input: { - apiKey: string; - registryUrl: string; - name: string; - publicKey: string; - secretKey: Uint8Array; - framework?: string; - ttlDays?: number; -}): Promise => { - const challenge = await requestAgentRegistrationChallenge({ - apiKey: input.apiKey, - registryUrl: input.registryUrl, - publicKey: input.publicKey, - }); - - const canonicalProof = canonicalizeAgentRegistrationProof({ - challengeId: challenge.challengeId, - nonce: challenge.nonce, - ownerDid: challenge.ownerDid, - publicKey: input.publicKey, - name: input.name, - framework: input.framework, - ttlDays: input.ttlDays, - }); - const challengeSignature = encodeEd25519SignatureBase64url( - await signEd25519( - new TextEncoder().encode(canonicalProof), - input.secretKey, - ), - ); - - const requestBody: { - name: string; - publicKey: string; - challengeId: string; - challengeSignature: string; - framework?: string; - ttlDays?: number; - } = { - name: input.name, - publicKey: input.publicKey, - challengeId: challenge.challengeId, - challengeSignature, - }; - - if (input.framework) { - requestBody.framework = input.framework; - } - - if (input.ttlDays !== undefined) { - requestBody.ttlDays = input.ttlDays; - } - - let response: Response; - try { - response = await fetch(toRegistryAgentsRequestUrl(input.registryUrl), { - method: "POST", - headers: { - authorization: `Bearer 
${input.apiKey}`, - "content-type": "application/json", - }, - body: JSON.stringify(requestBody), - }); - } catch { - throw new Error( - "Unable to connect to the registry. Check network access and registryUrl.", - ); - } - - const responseBody = await parseJsonResponse(response); - - if (!response.ok) { - throw new Error(toHttpErrorMessage(response.status, responseBody)); - } - - return parseAgentRegistrationResponse(responseBody); -}; - -const toRevokeHttpErrorMessage = ( - status: number, - responseBody: unknown, -): string => { - const registryMessage = extractRegistryErrorMessage(responseBody); - - if (status === 401) { - return registryMessage - ? `Registry authentication failed (401): ${registryMessage}` - : "Registry authentication failed (401). Check your API key."; - } - - if (status === 404) { - return registryMessage - ? `Agent not found (404): ${registryMessage}` - : "Agent not found in the registry (404)."; - } - - if (status === 409) { - return registryMessage - ? `Agent cannot be revoked (409): ${registryMessage}` - : "Agent cannot be revoked (409)."; - } - - if (status >= 500) { - return `Registry server error (${status}). 
Try again later.`; - } - - if (registryMessage) { - return `Registry request failed (${status}): ${registryMessage}`; - } - - return `Registry request failed (${status})`; -}; - -const refreshAgentAuth = async (input: { - agentName: string; -}): Promise<{ - registryUrl: string; - agentAuth: AgentAuthBundle; -}> => { - const ait = await readAgentAitToken(input.agentName); - const identity = await readAgentIdentity(input.agentName); - const secretKey = await readAgentSecretKey(input.agentName); - const localAuth = await readAgentRegistryAuth(input.agentName); - - const registryUrl = identity.registryUrl?.trim(); - if (!registryUrl) { - throw new Error( - `Agent "${input.agentName}" identity is missing registryUrl in ${IDENTITY_FILE_NAME}`, - ); - } - - const agentAuth = await refreshAgentAuthWithClawProof({ - registryUrl, - ait, - secretKey, - refreshToken: localAuth.refreshToken, - }); - - return { - registryUrl, - agentAuth, - }; -}; - -const revokeAgent = async (input: { - apiKey: string; - registryUrl: string; - agentId: string; -}): Promise => { - let response: Response; - try { - response = await fetch( - toRegistryAgentsRequestUrl(input.registryUrl, input.agentId), - { - method: "DELETE", - headers: { - authorization: `Bearer ${input.apiKey}`, - }, - }, - ); - } catch { - throw new Error( - "Unable to connect to the registry. 
Check network access and registryUrl.", - ); - } - - const responseBody = await parseJsonResponse(response); - - if (!response.ok) { - throw new Error(toRevokeHttpErrorMessage(response.status, responseBody)); - } -}; - -const printAgentInspect = (decoded: DecodedAit): void => { - writeStdoutLine(`DID: ${decoded.claims.sub}`); - writeStdoutLine(`Owner: ${decoded.claims.ownerDid}`); - writeStdoutLine(`Expires: ${formatExpiresAt(decoded.claims.exp)}`); - writeStdoutLine(`Key ID: ${decoded.header.kid}`); - writeStdoutLine(`Public Key: ${decoded.claims.cnf.jwk.x}`); - writeStdoutLine(`Framework: ${decoded.claims.framework}`); -}; - -const printAgentInspectCommand = async (name: string): Promise => { - const normalizedName = assertValidAgentName(name); - const aitToken = await readAgentAitToken(normalizedName); - const decoded = decodeAIT(aitToken); - - printAgentInspect(decoded); -}; - -export const createAgentCommand = (): Command => { - const agentCommand = new Command("agent").description( - "Manage local agent identities", - ); - - agentCommand - .command("create ") - .description("Generate and register a new agent identity") - .option( - "--framework ", - "Agent framework label (registry defaults to openclaw)", - ) - .option( - "--ttl-days ", - "Agent token TTL in days (registry default when omitted)", - ) - .action( - withErrorHandling( - "agent create", - async (name: string, options: AgentCreateOptions) => { - const config = await resolveConfig(); - if (!config.apiKey) { - throw new Error( - "API key is not configured. 
Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", - ); - } - - const agentName = assertValidAgentName(name); - const framework = resolveFramework(options.framework); - const ttlDays = resolveTtlDays(options.ttlDays); - const agentDirectory = getAgentDirectory(agentName); - - await ensureAgentDirectoryAvailable(agentName, agentDirectory); - - const keypair = await generateEd25519Keypair(); - const encoded = encodeEd25519KeypairBase64url(keypair); - const registration = await registerAgent({ - apiKey: config.apiKey, - registryUrl: config.registryUrl, - name: agentName, - publicKey: encoded.publicKey, - secretKey: keypair.secretKey, - framework, - ttlDays, - }); - - await writeAgentIdentity({ - agentDirectory, - did: registration.agent.did, - name: registration.agent.name, - framework: registration.agent.framework, - expiresAt: registration.agent.expiresAt, - registryUrl: config.registryUrl, - publicKey: encoded.publicKey, - secretKey: encoded.secretKey, - ait: registration.ait, - agentAuth: registration.agentAuth, - }); - - logger.info("cli.agent_created", { - name: registration.agent.name, - did: registration.agent.did, - agentDirectory, - registryUrl: config.registryUrl, - expiresAt: registration.agent.expiresAt, - }); - - writeStdoutLine(`Agent DID: ${registration.agent.did}`); - writeStdoutLine(`Expires At: ${registration.agent.expiresAt}`); - }, - ), - ); - - agentCommand - .command("inspect ") - .description("Decode and show metadata from an agent's stored AIT") - .action( - withErrorHandling("agent inspect", async (name: string) => { - await printAgentInspectCommand(name); - }), - ); - - const authCommand = new Command("auth").description( - "Manage local agent registry auth credentials", - ); - - authCommand - .command("refresh ") - .description("Refresh agent registry auth credentials with Claw proof") - .action( - withErrorHandling("agent auth refresh", async (name: string) => { - const agentName = assertValidAgentName(name); - const result 
= await refreshAgentAuth({ - agentName, - }); - - await writeAgentRegistryAuth({ - agentName, - agentAuth: result.agentAuth, - }); - - logger.info("cli.agent_auth_refreshed", { - name: agentName, - registryUrl: result.registryUrl, - accessExpiresAt: result.agentAuth.accessExpiresAt, - refreshExpiresAt: result.agentAuth.refreshExpiresAt, - }); - - writeStdoutLine(`Agent auth refreshed: ${agentName}`); - writeStdoutLine( - `Access Expires At: ${result.agentAuth.accessExpiresAt}`, - ); - writeStdoutLine( - `Refresh Expires At: ${result.agentAuth.refreshExpiresAt}`, - ); - }), - ); - - agentCommand.addCommand(authCommand); - - agentCommand - .command("revoke ") - .description("Revoke a local agent identity via the registry") - .action( - withErrorHandling("agent revoke", async (name: string) => { - const config = await resolveConfig(); - if (!config.apiKey) { - throw new Error( - "API key is not configured. Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", - ); - } - - const agentName = assertValidAgentName(name); - const identity = await readAgentIdentity(agentName); - const agentId = parseAgentIdFromDid(agentName, identity.did); - - await revokeAgent({ - apiKey: config.apiKey, - registryUrl: config.registryUrl, - agentId, - }); - - logger.info("cli.agent_revoked", { - name: agentName, - did: identity.did, - agentId, - registryUrl: config.registryUrl, - }); - - writeStdoutLine(`Agent revoked: ${agentName} (${identity.did})`); - writeStdoutLine("CRL visibility depends on verifier refresh interval."); - }), - ); - - return agentCommand; -}; +export { createAgentCommand } from "./agent/command.js"; diff --git a/apps/cli/src/commands/agent/AGENTS.md b/apps/cli/src/commands/agent/AGENTS.md new file mode 100644 index 0000000..4d23d41 --- /dev/null +++ b/apps/cli/src/commands/agent/AGENTS.md @@ -0,0 +1,30 @@ +# AGENTS.md (agent command modules) + +## Purpose +- Keep agent command code modular, testable, and behavior-stable. 
+- Preserve CLI output/error behavior and existing `agent` test expectations. + +## Module Boundaries +- `types.ts`: shared agent command data contracts only. +- `paths.ts`: all agent file-name constants and local path resolution. +- `validation.ts`: input parsing/validation, DID parsing, response parsing, and shared formatting helpers. +- `fs.ts`: local identity/auth file reads and secure writes. +- `registry.ts`: registry HTTP URL builders, challenge/register/revoke requests, and HTTP error mapping. +- `auth.ts`: registry-auth refresh orchestration via shared SDK client. +- `command.ts`: Commander wiring and stdout formatting only. +- `../agent.ts`: thin public facade exporting `createAgentCommand`. + +## Guardrails +- Keep every source file under 800 LOC. +- Avoid circular imports across `paths.ts`, `validation.ts`, `fs.ts`, `registry.ts`, `auth.ts`, and `command.ts`. +- Keep user-facing error strings stable unless tests and issue scope require a change. +- Keep command stdout wording/order stable; tests depend on deterministic output. +- Reuse helpers instead of duplicating path parsing, JSON parsing, or registry error mapping. +- Keep agent private key material local-only; never log or send private key values. + +## Change Workflow +- Add/update tests in `apps/cli/src/commands/agent.test.ts` when behavior changes. 
+- Run validations before handoff: + - `pnpm -C apps/cli typecheck` + - `pnpm -C apps/cli test -- agent` + - `pnpm lint` diff --git a/apps/cli/src/commands/agent/auth.ts b/apps/cli/src/commands/agent/auth.ts new file mode 100644 index 0000000..58bbc73 --- /dev/null +++ b/apps/cli/src/commands/agent/auth.ts @@ -0,0 +1,40 @@ +import { refreshAgentAuthWithClawProof } from "@clawdentity/sdk"; +import { + readAgentAitToken, + readAgentIdentity, + readAgentRegistryAuth, + readAgentSecretKey, +} from "./fs.js"; +import { IDENTITY_FILE_NAME } from "./paths.js"; +import type { AgentAuthBundle } from "./types.js"; + +export const refreshAgentAuth = async (input: { + agentName: string; +}): Promise<{ + registryUrl: string; + agentAuth: AgentAuthBundle; +}> => { + const ait = await readAgentAitToken(input.agentName); + const identity = await readAgentIdentity(input.agentName); + const secretKey = await readAgentSecretKey(input.agentName); + const localAuth = await readAgentRegistryAuth(input.agentName); + + const registryUrl = identity.registryUrl?.trim(); + if (!registryUrl) { + throw new Error( + `Agent "${input.agentName}" identity is missing registryUrl in ${IDENTITY_FILE_NAME}`, + ); + } + + const agentAuth = await refreshAgentAuthWithClawProof({ + registryUrl, + ait, + secretKey, + refreshToken: localAuth.refreshToken, + }); + + return { + registryUrl, + agentAuth, + }; +}; diff --git a/apps/cli/src/commands/agent/command.ts b/apps/cli/src/commands/agent/command.ts new file mode 100644 index 0000000..a1bdbfe --- /dev/null +++ b/apps/cli/src/commands/agent/command.ts @@ -0,0 +1,205 @@ +import { + createLogger, + type DecodedAit, + decodeAIT, + encodeEd25519KeypairBase64url, + generateEd25519Keypair, +} from "@clawdentity/sdk"; +import { Command } from "commander"; +import { resolveConfig } from "../../config/manager.js"; +import { writeStdoutLine } from "../../io.js"; +import { assertValidAgentName } from "../agent-name.js"; +import { withErrorHandling } from 
"../helpers.js"; +import { refreshAgentAuth } from "./auth.js"; +import { + ensureAgentDirectoryAvailable, + readAgentAitToken, + readAgentIdentity, + writeAgentIdentity, + writeAgentRegistryAuth, +} from "./fs.js"; +import { getAgentDirectory } from "./paths.js"; +import { registerAgent, revokeAgent } from "./registry.js"; +import type { AgentCreateOptions } from "./types.js"; +import { + formatExpiresAt, + parseAgentIdFromDid, + resolveFramework, + resolveTtlDays, +} from "./validation.js"; + +const logger = createLogger({ service: "cli", module: "agent" }); + +const printAgentInspect = (decoded: DecodedAit): void => { + writeStdoutLine(`DID: ${decoded.claims.sub}`); + writeStdoutLine(`Owner: ${decoded.claims.ownerDid}`); + writeStdoutLine(`Expires: ${formatExpiresAt(decoded.claims.exp)}`); + writeStdoutLine(`Key ID: ${decoded.header.kid}`); + writeStdoutLine(`Public Key: ${decoded.claims.cnf.jwk.x}`); + writeStdoutLine(`Framework: ${decoded.claims.framework}`); +}; + +const printAgentInspectCommand = async (name: string): Promise => { + const normalizedName = assertValidAgentName(name); + const aitToken = await readAgentAitToken(normalizedName); + const decoded = decodeAIT(aitToken); + + printAgentInspect(decoded); +}; + +export const createAgentCommand = (): Command => { + const agentCommand = new Command("agent").description( + "Manage local agent identities", + ); + + agentCommand + .command("create ") + .description("Generate and register a new agent identity") + .option( + "--framework ", + "Agent framework label (registry defaults to openclaw)", + ) + .option( + "--ttl-days ", + "Agent token TTL in days (registry default when omitted)", + ) + .action( + withErrorHandling( + "agent create", + async (name: string, options: AgentCreateOptions) => { + const config = await resolveConfig(); + if (!config.apiKey) { + throw new Error( + "API key is not configured. 
Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", + ); + } + + const agentName = assertValidAgentName(name); + const framework = resolveFramework(options.framework); + const ttlDays = resolveTtlDays(options.ttlDays); + const agentDirectory = getAgentDirectory(agentName); + + await ensureAgentDirectoryAvailable(agentName, agentDirectory); + + const keypair = await generateEd25519Keypair(); + const encoded = encodeEd25519KeypairBase64url(keypair); + const registration = await registerAgent({ + apiKey: config.apiKey, + registryUrl: config.registryUrl, + name: agentName, + publicKey: encoded.publicKey, + secretKey: keypair.secretKey, + framework, + ttlDays, + }); + + await writeAgentIdentity({ + agentDirectory, + did: registration.agent.did, + name: registration.agent.name, + framework: registration.agent.framework, + expiresAt: registration.agent.expiresAt, + registryUrl: config.registryUrl, + publicKey: encoded.publicKey, + secretKey: encoded.secretKey, + ait: registration.ait, + agentAuth: registration.agentAuth, + }); + + logger.info("cli.agent_created", { + name: registration.agent.name, + did: registration.agent.did, + agentDirectory, + registryUrl: config.registryUrl, + expiresAt: registration.agent.expiresAt, + }); + + writeStdoutLine(`Agent DID: ${registration.agent.did}`); + writeStdoutLine(`Expires At: ${registration.agent.expiresAt}`); + }, + ), + ); + + agentCommand + .command("inspect ") + .description("Decode and show metadata from an agent's stored AIT") + .action( + withErrorHandling("agent inspect", async (name: string) => { + await printAgentInspectCommand(name); + }), + ); + + const authCommand = new Command("auth").description( + "Manage local agent registry auth credentials", + ); + + authCommand + .command("refresh ") + .description("Refresh agent registry auth credentials with Claw proof") + .action( + withErrorHandling("agent auth refresh", async (name: string) => { + const agentName = assertValidAgentName(name); + const result 
= await refreshAgentAuth({ + agentName, + }); + + await writeAgentRegistryAuth({ + agentName, + agentAuth: result.agentAuth, + }); + + logger.info("cli.agent_auth_refreshed", { + name: agentName, + registryUrl: result.registryUrl, + accessExpiresAt: result.agentAuth.accessExpiresAt, + refreshExpiresAt: result.agentAuth.refreshExpiresAt, + }); + + writeStdoutLine(`Agent auth refreshed: ${agentName}`); + writeStdoutLine( + `Access Expires At: ${result.agentAuth.accessExpiresAt}`, + ); + writeStdoutLine( + `Refresh Expires At: ${result.agentAuth.refreshExpiresAt}`, + ); + }), + ); + + agentCommand.addCommand(authCommand); + + agentCommand + .command("revoke ") + .description("Revoke a local agent identity via the registry") + .action( + withErrorHandling("agent revoke", async (name: string) => { + const config = await resolveConfig(); + if (!config.apiKey) { + throw new Error( + "API key is not configured. Run `clawdentity config set apiKey ` or set CLAWDENTITY_API_KEY.", + ); + } + + const agentName = assertValidAgentName(name); + const identity = await readAgentIdentity(agentName); + const agentId = parseAgentIdFromDid(agentName, identity.did); + + await revokeAgent({ + apiKey: config.apiKey, + registryUrl: config.registryUrl, + agentId, + }); + + logger.info("cli.agent_revoked", { + name: agentName, + did: identity.did, + agentId, + registryUrl: config.registryUrl, + }); + + writeStdoutLine(`Agent revoked: ${agentName} (${identity.did})`); + writeStdoutLine("CRL visibility depends on verifier refresh interval."); + }), + ); + + return agentCommand; +}; diff --git a/apps/cli/src/commands/agent/fs.ts b/apps/cli/src/commands/agent/fs.ts new file mode 100644 index 0000000..99ca1c0 --- /dev/null +++ b/apps/cli/src/commands/agent/fs.ts @@ -0,0 +1,299 @@ +import { + access, + chmod, + mkdir, + readFile, + rename, + unlink, + writeFile, +} from "node:fs/promises"; +import { join } from "node:path"; +import { decodeBase64url } from "@clawdentity/protocol"; +import { 
nowUtcMs } from "@clawdentity/sdk"; +import { + AIT_FILE_NAME, + FILE_MODE, + getAgentAitPath, + getAgentIdentityPath, + getAgentRegistryAuthPath, + getAgentSecretKeyPath, + getAgentsDirectory, + IDENTITY_FILE_NAME, + PUBLIC_KEY_FILE_NAME, + REGISTRY_AUTH_FILE_NAME, + SECRET_KEY_FILE_NAME, +} from "./paths.js"; +import type { + AgentAuthBundle, + LocalAgentIdentity, + LocalAgentRegistryAuth, +} from "./types.js"; +import { parseNonEmptyString } from "./validation.js"; + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +export const readAgentAitToken = async (agentName: string): Promise => { + const aitPath = getAgentAitPath(agentName); + + let rawToken: string; + try { + rawToken = await readFile(aitPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error(`Agent "${agentName}" not found (${aitPath})`); + } + + throw error; + } + + const token = rawToken.trim(); + if (token.length === 0) { + throw new Error(`Agent "${agentName}" has an empty ${AIT_FILE_NAME}`); + } + + return token; +}; + +export const readAgentIdentity = async ( + agentName: string, +): Promise => { + const identityPath = getAgentIdentityPath(agentName); + + let rawIdentity: string; + try { + rawIdentity = await readFile(identityPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error(`Agent "${agentName}" not found (${identityPath})`); + } + + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawIdentity); + } catch { + throw new Error( + `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (must be valid JSON)`, + ); + } + + if (!isRecord(parsed) || typeof parsed.did !== "string") { + throw new Error( + `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (missing did)`, + ); + } + + const did = parsed.did.trim(); + if (did.length 
=== 0) { + throw new Error( + `Agent "${agentName}" has invalid ${IDENTITY_FILE_NAME} (missing did)`, + ); + } + + const registryUrl = parseNonEmptyString(parsed.registryUrl); + return { + did, + registryUrl: registryUrl.length > 0 ? registryUrl : undefined, + }; +}; + +export const readAgentSecretKey = async ( + agentName: string, +): Promise => { + const secretKeyPath = getAgentSecretKeyPath(agentName); + + let rawSecretKey: string; + try { + rawSecretKey = await readFile(secretKeyPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error(`Agent "${agentName}" not found (${secretKeyPath})`); + } + throw error; + } + + const encodedSecretKey = rawSecretKey.trim(); + if (encodedSecretKey.length === 0) { + throw new Error( + `Agent "${agentName}" has an empty ${SECRET_KEY_FILE_NAME}`, + ); + } + + try { + return decodeBase64url(encodedSecretKey); + } catch { + throw new Error( + `Agent "${agentName}" has invalid ${SECRET_KEY_FILE_NAME} format`, + ); + } +}; + +export const readAgentRegistryAuth = async ( + agentName: string, +): Promise => { + const registryAuthPath = getAgentRegistryAuthPath(agentName); + + let rawRegistryAuth: string; + try { + rawRegistryAuth = await readFile(registryAuthPath, "utf-8"); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + throw new Error( + `Agent "${agentName}" has no ${REGISTRY_AUTH_FILE_NAME}. 
Recreate agent identity or re-run auth bootstrap.`, + ); + } + throw error; + } + + let parsed: unknown; + try { + parsed = JSON.parse(rawRegistryAuth); + } catch { + throw new Error( + `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME} (must be valid JSON)`, + ); + } + + if (!isRecord(parsed)) { + throw new Error( + `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME}`, + ); + } + + const refreshToken = parseNonEmptyString(parsed.refreshToken); + if (refreshToken.length === 0) { + throw new Error( + `Agent "${agentName}" has invalid ${REGISTRY_AUTH_FILE_NAME} (missing refreshToken)`, + ); + } + + return { + refreshToken, + }; +}; + +export const ensureAgentDirectoryAvailable = async ( + agentName: string, + agentDirectory: string, +): Promise => { + try { + await access(agentDirectory); + throw new Error(`Agent "${agentName}" already exists at ${agentDirectory}`); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "ENOENT") { + return; + } + + throw error; + } +}; + +const writeSecureFile = async ( + path: string, + content: string, +): Promise => { + await writeFile(path, content, "utf-8"); + await chmod(path, FILE_MODE); +}; + +const writeSecureFileAtomically = async ( + path: string, + content: string, +): Promise => { + const tempPath = `${path}.tmp-${nowUtcMs()}-${Math.random().toString(16).slice(2)}`; + + await writeFile(tempPath, content, "utf-8"); + await chmod(tempPath, FILE_MODE); + + try { + await rename(tempPath, path); + } catch (error) { + try { + await unlink(tempPath); + } catch { + // Best-effort cleanup only. 
+ } + + throw error; + } +}; + +const ensureAgentDirectory = async ( + agentName: string, + agentDirectory: string, +): Promise => { + await mkdir(getAgentsDirectory(), { recursive: true }); + + try { + await mkdir(agentDirectory); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code === "EEXIST") { + throw new Error( + `Agent "${agentName}" already exists at ${agentDirectory}`, + ); + } + + throw error; + } +}; + +export const writeAgentIdentity = async (input: { + agentDirectory: string; + did: string; + name: string; + framework: string; + expiresAt: string; + registryUrl: string; + publicKey: string; + secretKey: string; + ait: string; + agentAuth: AgentAuthBundle; +}): Promise => { + await ensureAgentDirectory(input.name, input.agentDirectory); + + const identityJson = { + did: input.did, + name: input.name, + framework: input.framework, + expiresAt: input.expiresAt, + registryUrl: input.registryUrl, + }; + + await writeSecureFile( + join(input.agentDirectory, SECRET_KEY_FILE_NAME), + input.secretKey, + ); + await writeSecureFile( + join(input.agentDirectory, PUBLIC_KEY_FILE_NAME), + input.publicKey, + ); + await writeSecureFile( + join(input.agentDirectory, IDENTITY_FILE_NAME), + `${JSON.stringify(identityJson, null, 2)}\n`, + ); + await writeSecureFile(join(input.agentDirectory, AIT_FILE_NAME), input.ait); + await writeSecureFile( + join(input.agentDirectory, REGISTRY_AUTH_FILE_NAME), + `${JSON.stringify(input.agentAuth, null, 2)}\n`, + ); +}; + +export const writeAgentRegistryAuth = async (input: { + agentName: string; + agentAuth: AgentAuthBundle; +}): Promise => { + await writeSecureFileAtomically( + getAgentRegistryAuthPath(input.agentName), + `${JSON.stringify(input.agentAuth, null, 2)}\n`, + ); +}; diff --git a/apps/cli/src/commands/agent/paths.ts b/apps/cli/src/commands/agent/paths.ts new file mode 100644 index 0000000..faedd7b --- /dev/null +++ b/apps/cli/src/commands/agent/paths.ts @@ -0,0 +1,38 @@ +import { 
join } from "node:path"; +import { getConfigDir } from "../../config/manager.js"; + +export const AGENTS_DIR_NAME = "agents"; +export const AIT_FILE_NAME = "ait.jwt"; +export const IDENTITY_FILE_NAME = "identity.json"; +export const PUBLIC_KEY_FILE_NAME = "public.key"; +export const SECRET_KEY_FILE_NAME = "secret.key"; +export const REGISTRY_AUTH_FILE_NAME = "registry-auth.json"; +export const FILE_MODE = 0o600; + +export const getAgentsDirectory = (): string => { + return join(getConfigDir(), AGENTS_DIR_NAME); +}; + +export const getAgentDirectory = (name: string): string => { + return join(getAgentsDirectory(), name); +}; + +export const getAgentAitPath = (name: string): string => { + return join(getAgentDirectory(name), AIT_FILE_NAME); +}; + +export const getAgentIdentityPath = (name: string): string => { + return join(getAgentDirectory(name), IDENTITY_FILE_NAME); +}; + +export const getAgentPublicKeyPath = (name: string): string => { + return join(getAgentDirectory(name), PUBLIC_KEY_FILE_NAME); +}; + +export const getAgentSecretKeyPath = (name: string): string => { + return join(getAgentDirectory(name), SECRET_KEY_FILE_NAME); +}; + +export const getAgentRegistryAuthPath = (name: string): string => { + return join(getAgentDirectory(name), REGISTRY_AUTH_FILE_NAME); +}; diff --git a/apps/cli/src/commands/agent/registry.ts b/apps/cli/src/commands/agent/registry.ts new file mode 100644 index 0000000..1b786f7 --- /dev/null +++ b/apps/cli/src/commands/agent/registry.ts @@ -0,0 +1,244 @@ +import { + AGENT_REGISTRATION_CHALLENGE_PATH, + canonicalizeAgentRegistrationProof, +} from "@clawdentity/protocol"; +import { encodeEd25519SignatureBase64url, signEd25519 } from "@clawdentity/sdk"; +import type { + AgentRegistrationChallengeResponse, + AgentRegistrationResponse, +} from "./types.js"; +import { + extractRegistryErrorMessage, + parseAgentRegistrationChallengeResponse, + parseAgentRegistrationResponse, + parseJsonResponse, +} from "./validation.js"; + +const 
toRegistryAgentsRequestUrl = ( + registryUrl: string, + agentId?: string, +): string => { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + + const path = agentId + ? `v1/agents/${encodeURIComponent(agentId)}` + : "v1/agents"; + + return new URL(path, normalizedBaseUrl).toString(); +}; + +const toRegistryAgentChallengeRequestUrl = (registryUrl: string): string => { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? registryUrl + : `${registryUrl}/`; + + return new URL( + AGENT_REGISTRATION_CHALLENGE_PATH.slice(1), + normalizedBaseUrl, + ).toString(); +}; + +const toHttpErrorMessage = (status: number, responseBody: unknown): string => { + const registryMessage = extractRegistryErrorMessage(responseBody); + + if (status === 401) { + return registryMessage + ? `Registry authentication failed (401): ${registryMessage}` + : "Registry authentication failed (401). Check your API key."; + } + + if (status === 400) { + return registryMessage + ? `Registry rejected the request (400): ${registryMessage}` + : "Registry rejected the request (400). Check name/framework/ttl-days."; + } + + if (status >= 500) { + return `Registry server error (${status}). Try again later.`; + } + + if (registryMessage) { + return `Registry request failed (${status}): ${registryMessage}`; + } + + return `Registry request failed (${status})`; +}; + +const toRevokeHttpErrorMessage = ( + status: number, + responseBody: unknown, +): string => { + const registryMessage = extractRegistryErrorMessage(responseBody); + + if (status === 401) { + return registryMessage + ? `Registry authentication failed (401): ${registryMessage}` + : "Registry authentication failed (401). Check your API key."; + } + + if (status === 404) { + return registryMessage + ? `Agent not found (404): ${registryMessage}` + : "Agent not found in the registry (404)."; + } + + if (status === 409) { + return registryMessage + ? 
`Agent cannot be revoked (409): ${registryMessage}` + : "Agent cannot be revoked (409)."; + } + + if (status >= 500) { + return `Registry server error (${status}). Try again later.`; + } + + if (registryMessage) { + return `Registry request failed (${status}): ${registryMessage}`; + } + + return `Registry request failed (${status})`; +}; + +const requestAgentRegistrationChallenge = async (input: { + apiKey: string; + registryUrl: string; + publicKey: string; +}): Promise => { + let response: Response; + try { + response = await fetch( + toRegistryAgentChallengeRequestUrl(input.registryUrl), + { + method: "POST", + headers: { + authorization: `Bearer ${input.apiKey}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: input.publicKey, + }), + }, + ); + } catch { + throw new Error( + "Unable to connect to the registry. Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw new Error(toHttpErrorMessage(response.status, responseBody)); + } + + return parseAgentRegistrationChallengeResponse(responseBody); +}; + +export const registerAgent = async (input: { + apiKey: string; + registryUrl: string; + name: string; + publicKey: string; + secretKey: Uint8Array; + framework?: string; + ttlDays?: number; +}): Promise => { + const challenge = await requestAgentRegistrationChallenge({ + apiKey: input.apiKey, + registryUrl: input.registryUrl, + publicKey: input.publicKey, + }); + + const canonicalProof = canonicalizeAgentRegistrationProof({ + challengeId: challenge.challengeId, + nonce: challenge.nonce, + ownerDid: challenge.ownerDid, + publicKey: input.publicKey, + name: input.name, + framework: input.framework, + ttlDays: input.ttlDays, + }); + const challengeSignature = encodeEd25519SignatureBase64url( + await signEd25519( + new TextEncoder().encode(canonicalProof), + input.secretKey, + ), + ); + + const requestBody: { + name: string; + publicKey: string; + 
challengeId: string; + challengeSignature: string; + framework?: string; + ttlDays?: number; + } = { + name: input.name, + publicKey: input.publicKey, + challengeId: challenge.challengeId, + challengeSignature, + }; + + if (input.framework) { + requestBody.framework = input.framework; + } + + if (input.ttlDays !== undefined) { + requestBody.ttlDays = input.ttlDays; + } + + let response: Response; + try { + response = await fetch(toRegistryAgentsRequestUrl(input.registryUrl), { + method: "POST", + headers: { + authorization: `Bearer ${input.apiKey}`, + "content-type": "application/json", + }, + body: JSON.stringify(requestBody), + }); + } catch { + throw new Error( + "Unable to connect to the registry. Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw new Error(toHttpErrorMessage(response.status, responseBody)); + } + + return parseAgentRegistrationResponse(responseBody); +}; + +export const revokeAgent = async (input: { + apiKey: string; + registryUrl: string; + agentId: string; +}): Promise => { + let response: Response; + try { + response = await fetch( + toRegistryAgentsRequestUrl(input.registryUrl, input.agentId), + { + method: "DELETE", + headers: { + authorization: `Bearer ${input.apiKey}`, + }, + }, + ); + } catch { + throw new Error( + "Unable to connect to the registry. 
Check network access and registryUrl.", + ); + } + + const responseBody = await parseJsonResponse(response); + + if (!response.ok) { + throw new Error(toRevokeHttpErrorMessage(response.status, responseBody)); + } +}; diff --git a/apps/cli/src/commands/agent/types.ts b/apps/cli/src/commands/agent/types.ts new file mode 100644 index 0000000..78dd823 --- /dev/null +++ b/apps/cli/src/commands/agent/types.ts @@ -0,0 +1,45 @@ +export type AgentCreateOptions = { + framework?: string; + ttlDays?: string; +}; + +export type AgentAuthBundle = { + tokenType: "Bearer"; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; +}; + +export type AgentRegistrationResponse = { + agent: { + did: string; + name: string; + framework: string; + expiresAt: string; + }; + ait: string; + agentAuth: AgentAuthBundle; +}; + +export type AgentRegistrationChallengeResponse = { + challengeId: string; + nonce: string; + ownerDid: string; + expiresAt: string; +}; + +export type LocalAgentIdentity = { + did: string; + registryUrl?: string; +}; + +export type LocalAgentRegistryAuth = { + refreshToken: string; +}; + +export type RegistryErrorEnvelope = { + error?: { + message?: string; + }; +}; diff --git a/apps/cli/src/commands/agent/validation.ts b/apps/cli/src/commands/agent/validation.ts new file mode 100644 index 0000000..646aed9 --- /dev/null +++ b/apps/cli/src/commands/agent/validation.ts @@ -0,0 +1,200 @@ +import { parseDid } from "@clawdentity/protocol"; +import { toIso } from "@clawdentity/sdk"; +import { IDENTITY_FILE_NAME } from "./paths.js"; +import type { + AgentAuthBundle, + AgentRegistrationChallengeResponse, + AgentRegistrationResponse, + RegistryErrorEnvelope, +} from "./types.js"; + +const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; + +export const parseNonEmptyString = (value: unknown): string => { + if (typeof value !== "string") { + return ""; + } + + return 
value.trim(); +}; + +export const parseAgentIdFromDid = (agentName: string, did: string): string => { + try { + const parsedDid = parseDid(did); + if (parsedDid.kind !== "agent") { + throw new Error("DID is not an agent DID"); + } + + return parsedDid.ulid; + } catch { + throw new Error( + `Agent "${agentName}" has invalid did in ${IDENTITY_FILE_NAME}: ${did}`, + ); + } +}; + +export const formatExpiresAt = (expires: number): string => { + return toIso(expires * 1000); +}; + +export const resolveFramework = ( + framework: string | undefined, +): string | undefined => { + if (framework === undefined) { + return undefined; + } + + const normalizedFramework = framework.trim(); + if (normalizedFramework.length === 0) { + throw new Error("--framework must not be empty when provided"); + } + + return normalizedFramework; +}; + +export const resolveTtlDays = ( + ttlDays: string | undefined, +): number | undefined => { + if (ttlDays === undefined) { + return undefined; + } + + const parsed = Number.parseInt(ttlDays, 10); + if (!Number.isInteger(parsed) || parsed <= 0) { + throw new Error("--ttl-days must be a positive integer"); + } + + return parsed; +}; + +export const extractRegistryErrorMessage = ( + payload: unknown, +): string | undefined => { + if (!isRecord(payload)) { + return undefined; + } + + const envelope = payload as RegistryErrorEnvelope; + if (!envelope.error || typeof envelope.error.message !== "string") { + return undefined; + } + + const trimmed = envelope.error.message.trim(); + return trimmed.length > 0 ? 
trimmed : undefined; +}; + +export const parseJsonResponse = async ( + response: Response, +): Promise => { + try { + return await response.json(); + } catch { + return undefined; + } +}; + +const parseAgentAuthBundle = (value: unknown): AgentAuthBundle => { + if (!isRecord(value)) { + throw new Error("Registry returned an invalid response payload"); + } + + const tokenType = value.tokenType; + const accessToken = value.accessToken; + const accessExpiresAt = value.accessExpiresAt; + const refreshToken = value.refreshToken; + const refreshExpiresAt = value.refreshExpiresAt; + + if ( + tokenType !== "Bearer" || + typeof accessToken !== "string" || + typeof accessExpiresAt !== "string" || + typeof refreshToken !== "string" || + typeof refreshExpiresAt !== "string" + ) { + throw new Error("Registry returned an invalid response payload"); + } + + return { + tokenType, + accessToken, + accessExpiresAt, + refreshToken, + refreshExpiresAt, + }; +}; + +export const parseAgentRegistrationResponse = ( + payload: unknown, +): AgentRegistrationResponse => { + if (!isRecord(payload)) { + throw new Error("Registry returned an invalid response payload"); + } + + const agentValue = payload.agent; + const aitValue = payload.ait; + const agentAuthValue = payload.agentAuth; + + if ( + !isRecord(agentValue) || + typeof aitValue !== "string" || + !isRecord(agentAuthValue) + ) { + throw new Error("Registry returned an invalid response payload"); + } + + const did = agentValue.did; + const name = agentValue.name; + const framework = agentValue.framework; + const expiresAt = agentValue.expiresAt; + + if ( + typeof did !== "string" || + typeof name !== "string" || + typeof framework !== "string" || + typeof expiresAt !== "string" + ) { + throw new Error("Registry returned an invalid response payload"); + } + + return { + agent: { + did, + name, + framework, + expiresAt, + }, + ait: aitValue, + agentAuth: parseAgentAuthBundle(agentAuthValue), + }; +}; + +export const 
parseAgentRegistrationChallengeResponse = ( + payload: unknown, +): AgentRegistrationChallengeResponse => { + if (!isRecord(payload)) { + throw new Error("Registry returned an invalid response payload"); + } + + const challengeId = payload.challengeId; + const nonce = payload.nonce; + const ownerDid = payload.ownerDid; + const expiresAt = payload.expiresAt; + + if ( + typeof challengeId !== "string" || + typeof nonce !== "string" || + typeof ownerDid !== "string" || + typeof expiresAt !== "string" + ) { + throw new Error("Registry returned an invalid response payload"); + } + + return { + challengeId, + nonce, + ownerDid, + expiresAt, + }; +}; From db401d4e765e600b4c6ff8dbe34414efd615845f Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 16:43:43 +0530 Subject: [PATCH 129/190] refactor(proxy): split proxy trust state into modules --- apps/proxy/src/AGENTS.md | 4 +- apps/proxy/src/proxy-trust-state.ts | 821 +----------------- apps/proxy/src/proxy-trust-state/AGENTS.md | 30 + .../proxy/src/proxy-trust-state/controller.ts | 52 ++ apps/proxy/src/proxy-trust-state/handlers.ts | 363 ++++++++ apps/proxy/src/proxy-trust-state/index.ts | 1 + apps/proxy/src/proxy-trust-state/storage.ts | 299 +++++++ apps/proxy/src/proxy-trust-state/types.ts | 43 + apps/proxy/src/proxy-trust-state/utils.ts | 132 +++ 9 files changed, 923 insertions(+), 822 deletions(-) create mode 100644 apps/proxy/src/proxy-trust-state/AGENTS.md create mode 100644 apps/proxy/src/proxy-trust-state/controller.ts create mode 100644 apps/proxy/src/proxy-trust-state/handlers.ts create mode 100644 apps/proxy/src/proxy-trust-state/index.ts create mode 100644 apps/proxy/src/proxy-trust-state/storage.ts create mode 100644 apps/proxy/src/proxy-trust-state/types.ts create mode 100644 apps/proxy/src/proxy-trust-state/utils.ts diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index fa25e60..6a449da 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -31,7 +31,7 @@ ## 
Maintainability - Prefer schema-driven parsing with small pure helpers for coercion/overrides. - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. -- Keep trust/pairing state centralized in `proxy-trust-store.ts` and `proxy-trust-state.ts` (Durable Object backed). +- Keep trust/pairing state centralized in `proxy-trust-store.ts` and `proxy-trust-state/` (Durable Object backed; `proxy-trust-state.ts` remains the facade export). - Keep shared trust key/expiry helpers in `proxy-trust-keys.ts`; do not duplicate pair-key or expiry-normalization logic across store/state runtimes. - Keep pairing route logic isolated in `pairing-route.ts`; `server.ts` should compose it, not implement policy details. - Keep `ALLOW_ALL_VERIFIED` removed; fail fast when deprecated bypass flags are provided. @@ -89,7 +89,7 @@ - Reject blank/whitespace `requestId`, `senderAgentDid`, and `recipientAgentDid` in `relay-delivery-receipt-route.ts` so invalid receipt payloads fail as `400` client errors before DO RPC. - Receipt reads/writes must verify authenticated/trusted sender-recipient pairs and enforce recipient DID ownership at the route layer. - Keep `conversationId` and `replyTo` metadata flowing from `/hooks/agent` into relay queue/deliver frames for downstream ordering and callback semantics. -- Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state.ts` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. +- Keep Durable Object trust routes explicit in `proxy-trust-store.ts`/`proxy-trust-state/` and use route constants from one source (`TRUST_STORE_ROUTES`) to avoid drift. - Index pairing tickets by ticket `kid` in both in-memory and Durable Object stores; persist the original full ticket string alongside each entry and require exact ticket match on confirm. 
- Keep identity augmentation logic in small pure helpers (`sanitizeIdentityField`, `buildIdentityBlock`, payload mutation helper) inside `agent-hook-route.ts`; avoid spreading identity-format logic into `server.ts`. - When identity injection is enabled, sanitize identity fields (strip control chars, normalize whitespace, enforce max lengths) and mutate only string `message` fields. diff --git a/apps/proxy/src/proxy-trust-state.ts b/apps/proxy/src/proxy-trust-state.ts index 348aaac..1f95b79 100644 --- a/apps/proxy/src/proxy-trust-state.ts +++ b/apps/proxy/src/proxy-trust-state.ts @@ -1,820 +1 @@ -import { nowUtcMs } from "@clawdentity/sdk"; -import { - normalizePairingTicketText, - PairingTicketParseError, - parsePairingTicket, -} from "./pairing-ticket.js"; -import { normalizeExpiryToWholeSecond, toPairKey } from "./proxy-trust-keys.js"; -import { - type PairingTicketConfirmInput, - type PairingTicketInput, - type PairingTicketStatusInput, - type PeerProfile, - TRUST_STORE_ROUTES, -} from "./proxy-trust-store.js"; - -type StoredPairingTicket = { - ticket: string; - expiresAtMs: number; - initiatorAgentDid: string; - initiatorProfile: PeerProfile; - issuerProxyUrl: string; -}; - -type StoredConfirmedPairingTicket = { - ticket: string; - expiresAtMs: number; - initiatorAgentDid: string; - initiatorProfile: PeerProfile; - responderAgentDid: string; - responderProfile: PeerProfile; - issuerProxyUrl: string; - confirmedAtMs: number; -}; - -type PairingTicketMap = Record; -type ConfirmedPairingTicketMap = Record; -type AgentPeersIndex = Record; -type ExpirableTrustState = { - pairingTickets: PairingTicketMap; - confirmedPairingTickets: ConfirmedPairingTicketMap; -}; - -const PAIRS_STORAGE_KEY = "trust:pairs"; -const AGENT_PEERS_STORAGE_KEY = "trust:agent-peers"; -const PAIRING_TICKETS_STORAGE_KEY = "trust:pairing-tickets"; -const CONFIRMED_PAIRING_TICKETS_STORAGE_KEY = "trust:pairing-tickets-confirmed"; - -function isNonEmptyString(value: unknown): value is string { - 
return typeof value === "string" && value.trim().length > 0; -} - -function parsePeerProfile(value: unknown): PeerProfile | undefined { - if (typeof value !== "object" || value === null) { - return undefined; - } - - const entry = value as { - agentName?: unknown; - humanName?: unknown; - proxyOrigin?: unknown; - }; - if ( - !isNonEmptyString(entry.agentName) || - !isNonEmptyString(entry.humanName) - ) { - return undefined; - } - - const profile: PeerProfile = { - agentName: entry.agentName.trim(), - humanName: entry.humanName.trim(), - }; - if (entry.proxyOrigin !== undefined) { - if (!isNonEmptyString(entry.proxyOrigin)) { - return undefined; - } - - let parsedProxyOrigin: URL; - try { - parsedProxyOrigin = new URL(entry.proxyOrigin.trim()); - } catch { - return undefined; - } - if ( - parsedProxyOrigin.protocol !== "https:" && - parsedProxyOrigin.protocol !== "http:" - ) { - return undefined; - } - profile.proxyOrigin = parsedProxyOrigin.origin; - } - - return profile; -} - -function addPeer( - index: AgentPeersIndex, - leftAgentDid: string, - rightAgentDid: string, -): void { - const peers = new Set(index[leftAgentDid] ?? 
[]); - peers.add(rightAgentDid); - index[leftAgentDid] = [...peers].sort(); -} - -function toErrorResponse(input: { - code: string; - message: string; - status: number; -}): Response { - return Response.json( - { - error: { - code: input.code, - message: input.message, - }, - }, - { status: input.status }, - ); -} - -async function parseBody(request: Request): Promise { - try { - return await request.json(); - } catch { - return undefined; - } -} - -export class ProxyTrustState { - private readonly state: DurableObjectState; - - constructor(state: DurableObjectState) { - this.state = state; - } - - async fetch(request: Request): Promise { - const url = new URL(request.url); - - if (request.method !== "POST") { - return new Response("Not found", { status: 404 }); - } - - if (url.pathname === TRUST_STORE_ROUTES.createPairingTicket) { - return this.handleCreatePairingTicket(request); - } - - if (url.pathname === TRUST_STORE_ROUTES.confirmPairingTicket) { - return this.handleConfirmPairingTicket(request); - } - - if (url.pathname === TRUST_STORE_ROUTES.getPairingTicketStatus) { - return this.handleGetPairingTicketStatus(request); - } - - if (url.pathname === TRUST_STORE_ROUTES.upsertPair) { - return this.handleUpsertPair(request); - } - - if (url.pathname === TRUST_STORE_ROUTES.isPairAllowed) { - return this.handleIsPairAllowed(request); - } - - if (url.pathname === TRUST_STORE_ROUTES.isAgentKnown) { - return this.handleIsAgentKnown(request); - } - - return new Response("Not found", { status: 404 }); - } - - async alarm(): Promise { - const nowMs = nowUtcMs(); - const expirableState = await this.loadExpirableState(); - const mutated = this.removeExpiredEntries(expirableState, nowMs); - if (mutated) { - await this.saveExpirableState(expirableState, { - pairingTickets: true, - confirmedPairingTickets: true, - }); - } - await this.scheduleNextCodeCleanup( - expirableState.pairingTickets, - expirableState.confirmedPairingTickets, - ); - } - - private async 
handleCreatePairingTicket(request: Request): Promise { - const body = (await parseBody(request)) as - | Partial - | undefined; - const initiatorProfile = parsePeerProfile(body?.initiatorProfile); - if ( - !body || - !isNonEmptyString(body.initiatorAgentDid) || - !initiatorProfile || - !isNonEmptyString(body.issuerProxyUrl) || - !isNonEmptyString(body.ticket) || - typeof body.expiresAtMs !== "number" || - !Number.isInteger(body.expiresAtMs) || - body.expiresAtMs <= 0 - ) { - return toErrorResponse({ - code: "PROXY_PAIR_START_INVALID_BODY", - message: "Pairing ticket create input is invalid", - status: 400, - }); - } - - const nowMs = typeof body.nowMs === "number" ? body.nowMs : nowUtcMs(); - const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( - body.expiresAtMs, - ); - const ticket = normalizePairingTicketText(body.ticket); - let parsedTicket: ReturnType; - try { - parsedTicket = parsePairingTicket(ticket); - } catch (error) { - if (error instanceof PairingTicketParseError) { - return toErrorResponse({ - code: error.code, - message: error.message, - status: 400, - }); - } - - throw error; - } - - if (parsedTicket.iss !== body.issuerProxyUrl) { - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_INVALID_ISSUER", - message: "Pairing ticket issuer URL is invalid", - status: 400, - }); - } - - if (parsedTicket.exp * 1000 !== normalizedExpiresAtMs) { - return toErrorResponse({ - code: "PROXY_PAIR_START_INVALID_BODY", - message: "Pairing ticket expiry is invalid", - status: 400, - }); - } - - if (normalizedExpiresAtMs <= nowMs) { - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_EXPIRED", - message: "Pairing ticket has expired", - status: 410, - }); - } - - const expirableState = await this.loadExpirableState(); - expirableState.pairingTickets[parsedTicket.kid] = { - ticket, - initiatorAgentDid: body.initiatorAgentDid, - initiatorProfile, - issuerProxyUrl: parsedTicket.iss, - expiresAtMs: normalizedExpiresAtMs, - }; - delete 
expirableState.confirmedPairingTickets[parsedTicket.kid]; - - await this.saveExpirableStateAndSchedule(expirableState, { - pairingTickets: true, - confirmedPairingTickets: true, - }); - - return Response.json({ - ticket, - expiresAtMs: normalizedExpiresAtMs, - initiatorAgentDid: body.initiatorAgentDid, - initiatorProfile, - issuerProxyUrl: parsedTicket.iss, - }); - } - - private async handleConfirmPairingTicket( - request: Request, - ): Promise { - const body = (await parseBody(request)) as - | Partial - | undefined; - const responderProfile = parsePeerProfile(body?.responderProfile); - if ( - !body || - !isNonEmptyString(body.ticket) || - !isNonEmptyString(body.responderAgentDid) || - !responderProfile - ) { - return toErrorResponse({ - code: "PROXY_PAIR_CONFIRM_INVALID_BODY", - message: "Pairing ticket confirm input is invalid", - status: 400, - }); - } - - const ticket = normalizePairingTicketText(body.ticket); - let parsedTicket: ReturnType; - try { - parsedTicket = parsePairingTicket(ticket); - } catch (error) { - if (error instanceof PairingTicketParseError) { - return toErrorResponse({ - code: error.code, - message: error.message, - status: 400, - }); - } - - throw error; - } - - const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : nowUtcMs(); - const expirableState = await this.loadExpirableState(); - const stored = expirableState.pairingTickets[parsedTicket.kid]; - - if (!stored || stored.ticket !== ticket) { - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_NOT_FOUND", - message: "Pairing ticket not found", - status: 404, - }); - } - - if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { - delete expirableState.pairingTickets[parsedTicket.kid]; - delete expirableState.confirmedPairingTickets[parsedTicket.kid]; - await this.saveExpirableStateAndSchedule(expirableState, { - pairingTickets: true, - confirmedPairingTickets: true, - }); - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_EXPIRED", - message: "Pairing ticket has expired", - status: 410, - }); - } - - if (stored.issuerProxyUrl !== parsedTicket.iss) { - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_INVALID_ISSUER", - message: "Pairing ticket issuer URL is invalid", - status: 400, - }); - } - - const pairs = await this.loadPairs(); - pairs.add(toPairKey(stored.initiatorAgentDid, body.responderAgentDid)); - - const agentPeers = await this.loadAgentPeers(); - addPeer(agentPeers, stored.initiatorAgentDid, body.responderAgentDid); - addPeer(agentPeers, body.responderAgentDid, stored.initiatorAgentDid); - - await this.savePairs(pairs); - await this.saveAgentPeers(agentPeers); - - delete expirableState.pairingTickets[parsedTicket.kid]; - expirableState.confirmedPairingTickets[parsedTicket.kid] = { - ticket, - expiresAtMs: stored.expiresAtMs, - initiatorAgentDid: stored.initiatorAgentDid, - initiatorProfile: stored.initiatorProfile, - responderAgentDid: body.responderAgentDid, - responderProfile, - issuerProxyUrl: stored.issuerProxyUrl, - confirmedAtMs: normalizeExpiryToWholeSecond(nowMs), - }; - await this.saveExpirableStateAndSchedule(expirableState, { - pairingTickets: true, - confirmedPairingTickets: true, - }); - - return Response.json({ - initiatorAgentDid: stored.initiatorAgentDid, - 
initiatorProfile: stored.initiatorProfile, - responderAgentDid: body.responderAgentDid, - responderProfile, - issuerProxyUrl: stored.issuerProxyUrl, - }); - } - - private async handleGetPairingTicketStatus( - request: Request, - ): Promise { - const body = (await parseBody(request)) as - | Partial - | undefined; - if (!body || !isNonEmptyString(body.ticket)) { - return toErrorResponse({ - code: "PROXY_PAIR_STATUS_INVALID_BODY", - message: "Pairing ticket status input is invalid", - status: 400, - }); - } - - const nowMs = typeof body.nowMs === "number" ? body.nowMs : nowUtcMs(); - const ticket = normalizePairingTicketText(body.ticket); - let parsedTicket: ReturnType; - try { - parsedTicket = parsePairingTicket(ticket); - } catch (error) { - if (error instanceof PairingTicketParseError) { - return toErrorResponse({ - code: error.code, - message: error.message, - status: 400, - }); - } - - throw error; - } - - const expirableState = await this.loadExpirableState(); - - const pending = expirableState.pairingTickets[parsedTicket.kid]; - if (pending && pending.ticket === ticket) { - if (pending.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { - delete expirableState.pairingTickets[parsedTicket.kid]; - await this.saveExpirableStateAndSchedule(expirableState, { - pairingTickets: true, - }); - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_EXPIRED", - message: "Pairing ticket has expired", - status: 410, - }); - } - - return Response.json({ - status: "pending", - ticket: pending.ticket, - initiatorAgentDid: pending.initiatorAgentDid, - initiatorProfile: pending.initiatorProfile, - issuerProxyUrl: pending.issuerProxyUrl, - expiresAtMs: pending.expiresAtMs, - }); - } - - const confirmed = expirableState.confirmedPairingTickets[parsedTicket.kid]; - if (confirmed && confirmed.ticket === ticket) { - if (confirmed.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { - delete expirableState.confirmedPairingTickets[parsedTicket.kid]; - await 
this.saveExpirableStateAndSchedule(expirableState, { - confirmedPairingTickets: true, - }); - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_EXPIRED", - message: "Pairing ticket has expired", - status: 410, - }); - } - - return Response.json({ - status: "confirmed", - ticket: confirmed.ticket, - initiatorAgentDid: confirmed.initiatorAgentDid, - initiatorProfile: confirmed.initiatorProfile, - responderAgentDid: confirmed.responderAgentDid, - responderProfile: confirmed.responderProfile, - issuerProxyUrl: confirmed.issuerProxyUrl, - expiresAtMs: confirmed.expiresAtMs, - confirmedAtMs: confirmed.confirmedAtMs, - }); - } - - if (parsedTicket.exp * 1000 <= nowMs) { - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_EXPIRED", - message: "Pairing ticket has expired", - status: 410, - }); - } - - return toErrorResponse({ - code: "PROXY_PAIR_TICKET_NOT_FOUND", - message: "Pairing ticket not found", - status: 404, - }); - } - - private async handleUpsertPair(request: Request): Promise { - const body = (await parseBody(request)) as - | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } - | undefined; - if ( - !body || - !isNonEmptyString(body.initiatorAgentDid) || - !isNonEmptyString(body.responderAgentDid) - ) { - return toErrorResponse({ - code: "PROXY_PAIR_UPSERT_INVALID_BODY", - message: "Pair upsert input is invalid", - status: 400, - }); - } - - const pairs = await this.loadPairs(); - pairs.add(toPairKey(body.initiatorAgentDid, body.responderAgentDid)); - await this.savePairs(pairs); - - const agentPeers = await this.loadAgentPeers(); - addPeer(agentPeers, body.initiatorAgentDid, body.responderAgentDid); - addPeer(agentPeers, body.responderAgentDid, body.initiatorAgentDid); - await this.saveAgentPeers(agentPeers); - - return Response.json({ ok: true }); - } - - private async handleIsPairAllowed(request: Request): Promise { - const body = (await parseBody(request)) as - | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } - | undefined; - if ( 
- !body || - !isNonEmptyString(body.initiatorAgentDid) || - !isNonEmptyString(body.responderAgentDid) - ) { - return toErrorResponse({ - code: "PROXY_PAIR_CHECK_INVALID_BODY", - message: "Pair check input is invalid", - status: 400, - }); - } - - if (body.initiatorAgentDid === body.responderAgentDid) { - return Response.json({ allowed: true }); - } - - const pairs = await this.loadPairs(); - return Response.json({ - allowed: pairs.has( - toPairKey(body.initiatorAgentDid, body.responderAgentDid), - ), - }); - } - - private async handleIsAgentKnown(request: Request): Promise { - const body = (await parseBody(request)) as - | { agentDid?: unknown } - | undefined; - if (!body || !isNonEmptyString(body.agentDid)) { - return toErrorResponse({ - code: "PROXY_AGENT_KNOWN_INVALID_BODY", - message: "Agent known check input is invalid", - status: 400, - }); - } - - const agentPeers = await this.loadAgentPeers(); - if ((agentPeers[body.agentDid]?.length ?? 0) > 0) { - return Response.json({ known: true }); - } - - return Response.json({ known: false }); - } - - private async loadExpirableState(): Promise { - const [pairingTickets, confirmedPairingTickets] = await Promise.all([ - this.loadPairingTickets(), - this.loadConfirmedPairingTickets(), - ]); - - return { pairingTickets, confirmedPairingTickets }; - } - - private removeExpiredEntries( - state: ExpirableTrustState, - nowMs: number, - ): boolean { - let mutated = false; - - for (const [ticketKid, details] of Object.entries(state.pairingTickets)) { - if (details.expiresAtMs <= nowMs) { - delete state.pairingTickets[ticketKid]; - mutated = true; - } - } - - for (const [ticketKid, details] of Object.entries( - state.confirmedPairingTickets, - )) { - if (details.expiresAtMs <= nowMs) { - delete state.confirmedPairingTickets[ticketKid]; - mutated = true; - } - } - - return mutated; - } - - private async saveExpirableState( - state: ExpirableTrustState, - options: { - pairingTickets?: boolean; - confirmedPairingTickets?: 
boolean; - }, - ): Promise { - const saves: Promise[] = []; - if (options.pairingTickets) { - saves.push(this.savePairingTickets(state.pairingTickets)); - } - if (options.confirmedPairingTickets) { - saves.push( - this.saveConfirmedPairingTickets(state.confirmedPairingTickets), - ); - } - if (saves.length > 0) { - await Promise.all(saves); - } - } - - private async saveExpirableStateAndSchedule( - state: ExpirableTrustState, - options: { - pairingTickets?: boolean; - confirmedPairingTickets?: boolean; - }, - ): Promise { - await this.saveExpirableState(state, options); - await this.scheduleNextCodeCleanup( - state.pairingTickets, - state.confirmedPairingTickets, - ); - } - - private async loadPairs(): Promise> { - const raw = await this.state.storage.get(PAIRS_STORAGE_KEY); - if (!Array.isArray(raw)) { - return new Set(); - } - - const normalized = raw.filter((value) => typeof value === "string"); - return new Set(normalized); - } - - private async savePairs(pairs: Set): Promise { - await this.state.storage.put(PAIRS_STORAGE_KEY, [...pairs].sort()); - } - - private async loadAgentPeers(): Promise { - const raw = await this.state.storage.get( - AGENT_PEERS_STORAGE_KEY, - ); - if (typeof raw !== "object" || raw === null) { - return {}; - } - - const normalized: AgentPeersIndex = {}; - for (const [agentDid, peers] of Object.entries(raw)) { - if (!Array.isArray(peers)) { - continue; - } - - normalized[agentDid] = peers.filter((peer): peer is string => - isNonEmptyString(peer), - ); - } - - return normalized; - } - - private async saveAgentPeers(agentPeers: AgentPeersIndex): Promise { - await this.state.storage.put(AGENT_PEERS_STORAGE_KEY, agentPeers); - } - - private async loadPairingTickets(): Promise { - const raw = await this.state.storage.get( - PAIRING_TICKETS_STORAGE_KEY, - ); - - if (typeof raw !== "object" || raw === null) { - return {}; - } - - const normalized: PairingTicketMap = {}; - for (const [entryKey, value] of Object.entries(raw)) { - if (typeof value 
!== "object" || value === null) { - continue; - } - - const entry = value as { - ticket?: unknown; - expiresAtMs?: unknown; - initiatorAgentDid?: unknown; - initiatorProfile?: unknown; - issuerProxyUrl?: unknown; - }; - const initiatorProfile = parsePeerProfile(entry.initiatorProfile); - if ( - !isNonEmptyString(entry.initiatorAgentDid) || - !initiatorProfile || - !isNonEmptyString(entry.issuerProxyUrl) || - typeof entry.expiresAtMs !== "number" || - !Number.isInteger(entry.expiresAtMs) - ) { - continue; - } - - const ticketCandidate = isNonEmptyString(entry.ticket) - ? entry.ticket - : entryKey; - let parsedTicket: ReturnType; - try { - parsedTicket = parsePairingTicket(ticketCandidate); - } catch { - continue; - } - - normalized[parsedTicket.kid] = { - ticket: ticketCandidate, - expiresAtMs: entry.expiresAtMs, - initiatorAgentDid: entry.initiatorAgentDid, - initiatorProfile, - issuerProxyUrl: parsedTicket.iss, - }; - } - - return normalized; - } - - private async savePairingTickets( - pairingTickets: PairingTicketMap, - ): Promise { - await this.state.storage.put(PAIRING_TICKETS_STORAGE_KEY, pairingTickets); - } - - private async loadConfirmedPairingTickets(): Promise { - const raw = await this.state.storage.get( - CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, - ); - - if (typeof raw !== "object" || raw === null) { - return {}; - } - - const normalized: ConfirmedPairingTicketMap = {}; - for (const [entryKey, value] of Object.entries(raw)) { - if (typeof value !== "object" || value === null) { - continue; - } - - const entry = value as { - ticket?: unknown; - expiresAtMs?: unknown; - initiatorAgentDid?: unknown; - initiatorProfile?: unknown; - responderAgentDid?: unknown; - responderProfile?: unknown; - issuerProxyUrl?: unknown; - confirmedAtMs?: unknown; - }; - const initiatorProfile = parsePeerProfile(entry.initiatorProfile); - const responderProfile = parsePeerProfile(entry.responderProfile); - - if ( - !isNonEmptyString(entry.initiatorAgentDid) || - !initiatorProfile 
|| - !isNonEmptyString(entry.responderAgentDid) || - !responderProfile || - !isNonEmptyString(entry.issuerProxyUrl) || - typeof entry.expiresAtMs !== "number" || - !Number.isInteger(entry.expiresAtMs) || - typeof entry.confirmedAtMs !== "number" || - !Number.isInteger(entry.confirmedAtMs) - ) { - continue; - } - - const ticketCandidate = isNonEmptyString(entry.ticket) - ? entry.ticket - : entryKey; - let parsedTicket: ReturnType; - try { - parsedTicket = parsePairingTicket(ticketCandidate); - } catch { - continue; - } - - normalized[parsedTicket.kid] = { - ticket: ticketCandidate, - expiresAtMs: entry.expiresAtMs, - initiatorAgentDid: entry.initiatorAgentDid, - initiatorProfile, - responderAgentDid: entry.responderAgentDid, - responderProfile, - issuerProxyUrl: parsedTicket.iss, - confirmedAtMs: entry.confirmedAtMs, - }; - } - - return normalized; - } - - private async saveConfirmedPairingTickets( - pairingTickets: ConfirmedPairingTicketMap, - ): Promise { - await this.state.storage.put( - CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, - pairingTickets, - ); - } - - private async scheduleNextCodeCleanup( - pairingTickets: PairingTicketMap, - confirmedPairingTickets: ConfirmedPairingTicketMap, - ): Promise { - const expiryValues = [ - ...Object.values(pairingTickets), - ...Object.values(confirmedPairingTickets), - ].map((details) => details.expiresAtMs); - - if (expiryValues.length === 0) { - await this.state.storage.deleteAlarm(); - return; - } - - const earliestExpiry = Math.min(...expiryValues); - await this.state.storage.setAlarm(earliestExpiry); - } -} +export { ProxyTrustState } from "./proxy-trust-state/index.js"; diff --git a/apps/proxy/src/proxy-trust-state/AGENTS.md b/apps/proxy/src/proxy-trust-state/AGENTS.md new file mode 100644 index 0000000..70e3576 --- /dev/null +++ b/apps/proxy/src/proxy-trust-state/AGENTS.md @@ -0,0 +1,30 @@ +# AGENTS.md (apps/proxy/src/proxy-trust-state) + +## Purpose +- Keep Durable Object trust/pairing state logic modular while 
preserving the public runtime contract exported from `../proxy-trust-state.ts`. + +## Module Boundaries +- `index.ts` re-exports the runtime class surface for this module. +- `controller.ts` owns only request routing (`fetch`) and alarm orchestration (`alarm`). +- `handlers.ts` owns route behavior and response payloads for trust-store RPC paths. +- `storage.ts` owns Durable Object storage IO, normalization of persisted data, and alarm scheduling. +- `utils.ts` owns shared validation and parsing helpers (`parseBody`, `parsePeerProfile`, ticket parse/error mapping). +- `types.ts` owns persisted state shapes and storage keys. + +## Invariants +- Keep route dispatch tied to `TRUST_STORE_ROUTES`; do not hardcode duplicate paths. +- Keep ticket normalization/parsing strict and centralized via `parseNormalizedPairingTicket`. +- Keep pairing ticket expiry behavior unchanged: + - creation rejects expired tickets (`410`) + - confirm/status delete expired entries before returning `410` + - alarm cleanup removes expired pending/confirmed entries and re-schedules next alarm. +- Keep pair authorization symmetric using `toPairKey` + `addPeer` for both directions. +- Keep storage normalization defensive: ignore malformed persisted records instead of throwing. +- Keep external API stable: + - class name remains `ProxyTrustState` + - imports from `./proxy-trust-state.js` must continue working. + +## Maintainability +- Add new helper logic in `utils.ts` or `storage.ts` instead of duplicating parsing/validation in handlers. +- Keep handler methods focused on one endpoint each and avoid cross-endpoint side effects. +- If persistence schema changes, update `types.ts` and corresponding normalization logic in `storage.ts` in the same change. 
diff --git a/apps/proxy/src/proxy-trust-state/controller.ts b/apps/proxy/src/proxy-trust-state/controller.ts new file mode 100644 index 0000000..8d75721 --- /dev/null +++ b/apps/proxy/src/proxy-trust-state/controller.ts @@ -0,0 +1,52 @@ +import { nowUtcMs } from "@clawdentity/sdk"; +import { TRUST_STORE_ROUTES } from "../proxy-trust-store.js"; +import { ProxyTrustStateHandlers } from "./handlers.js"; +import { ProxyTrustStateStorage } from "./storage.js"; + +export class ProxyTrustState { + private readonly handlers: ProxyTrustStateHandlers; + private readonly storage: ProxyTrustStateStorage; + + constructor(state: DurableObjectState) { + this.storage = new ProxyTrustStateStorage(state); + this.handlers = new ProxyTrustStateHandlers(this.storage); + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + if (request.method !== "POST") { + return new Response("Not found", { status: 404 }); + } + + if (url.pathname === TRUST_STORE_ROUTES.createPairingTicket) { + return this.handlers.handleCreatePairingTicket(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.confirmPairingTicket) { + return this.handlers.handleConfirmPairingTicket(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.getPairingTicketStatus) { + return this.handlers.handleGetPairingTicketStatus(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.upsertPair) { + return this.handlers.handleUpsertPair(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.isPairAllowed) { + return this.handlers.handleIsPairAllowed(request); + } + + if (url.pathname === TRUST_STORE_ROUTES.isAgentKnown) { + return this.handlers.handleIsAgentKnown(request); + } + + return new Response("Not found", { status: 404 }); + } + + async alarm(): Promise { + await this.storage.runAlarmCleanup(nowUtcMs()); + } +} diff --git a/apps/proxy/src/proxy-trust-state/handlers.ts b/apps/proxy/src/proxy-trust-state/handlers.ts new file mode 100644 index 0000000..6c6eec7 --- /dev/null +++ 
b/apps/proxy/src/proxy-trust-state/handlers.ts @@ -0,0 +1,363 @@ +import { nowUtcMs } from "@clawdentity/sdk"; +import { + normalizeExpiryToWholeSecond, + toPairKey, +} from "../proxy-trust-keys.js"; +import type { + PairingTicketConfirmInput, + PairingTicketInput, + PairingTicketStatusInput, +} from "../proxy-trust-store.js"; +import type { ProxyTrustStateStorage } from "./storage.js"; +import { + addPeer, + isNonEmptyString, + parseBody, + parseNormalizedPairingTicket, + parsePeerProfile, + toErrorResponse, +} from "./utils.js"; + +export class ProxyTrustStateHandlers { + private readonly storage: ProxyTrustStateStorage; + + constructor(storage: ProxyTrustStateStorage) { + this.storage = storage; + } + + async handleCreatePairingTicket(request: Request): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + const initiatorProfile = parsePeerProfile(body?.initiatorProfile); + if ( + !body || + !isNonEmptyString(body.initiatorAgentDid) || + !initiatorProfile || + !isNonEmptyString(body.issuerProxyUrl) || + !isNonEmptyString(body.ticket) || + typeof body.expiresAtMs !== "number" || + !Number.isInteger(body.expiresAtMs) || + body.expiresAtMs <= 0 + ) { + return toErrorResponse({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket create input is invalid", + status: 400, + }); + } + + const nowMs = typeof body.nowMs === "number" ? 
body.nowMs : nowUtcMs(); + const normalizedExpiresAtMs = normalizeExpiryToWholeSecond( + body.expiresAtMs, + ); + const parsedTicketResult = parseNormalizedPairingTicket(body.ticket); + if (!parsedTicketResult.ok) { + return parsedTicketResult.response; + } + + const { parsedTicket, ticket } = parsedTicketResult; + if (parsedTicket.iss !== body.issuerProxyUrl) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_INVALID_ISSUER", + message: "Pairing ticket issuer URL is invalid", + status: 400, + }); + } + + if (parsedTicket.exp * 1000 !== normalizedExpiresAtMs) { + return toErrorResponse({ + code: "PROXY_PAIR_START_INVALID_BODY", + message: "Pairing ticket expiry is invalid", + status: 400, + }); + } + + if (normalizedExpiresAtMs <= nowMs) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + const expirableState = await this.storage.loadExpirableState(); + expirableState.pairingTickets[parsedTicket.kid] = { + ticket, + initiatorAgentDid: body.initiatorAgentDid, + initiatorProfile, + issuerProxyUrl: parsedTicket.iss, + expiresAtMs: normalizedExpiresAtMs, + }; + delete expirableState.confirmedPairingTickets[parsedTicket.kid]; + + await this.storage.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); + + return Response.json({ + ticket, + expiresAtMs: normalizedExpiresAtMs, + initiatorAgentDid: body.initiatorAgentDid, + initiatorProfile, + issuerProxyUrl: parsedTicket.iss, + }); + } + + async handleConfirmPairingTicket(request: Request): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + const responderProfile = parsePeerProfile(body?.responderProfile); + if ( + !body || + !isNonEmptyString(body.ticket) || + !isNonEmptyString(body.responderAgentDid) || + !responderProfile + ) { + return toErrorResponse({ + code: "PROXY_PAIR_CONFIRM_INVALID_BODY", + message: "Pairing ticket confirm input is 
invalid", + status: 400, + }); + } + + const parsedTicketResult = parseNormalizedPairingTicket(body.ticket); + if (!parsedTicketResult.ok) { + return parsedTicketResult.response; + } + + const { parsedTicket, ticket } = parsedTicketResult; + const nowMs = typeof body.nowMs === "number" ? body.nowMs : nowUtcMs(); + const expirableState = await this.storage.loadExpirableState(); + const stored = expirableState.pairingTickets[parsedTicket.kid]; + + if (!stored || stored.ticket !== ticket) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", + status: 404, + }); + } + + if (stored.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + delete expirableState.pairingTickets[parsedTicket.kid]; + delete expirableState.confirmedPairingTickets[parsedTicket.kid]; + await this.storage.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + if (stored.issuerProxyUrl !== parsedTicket.iss) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_INVALID_ISSUER", + message: "Pairing ticket issuer URL is invalid", + status: 400, + }); + } + + const pairs = await this.storage.loadPairs(); + pairs.add(toPairKey(stored.initiatorAgentDid, body.responderAgentDid)); + + const agentPeers = await this.storage.loadAgentPeers(); + addPeer(agentPeers, stored.initiatorAgentDid, body.responderAgentDid); + addPeer(agentPeers, body.responderAgentDid, stored.initiatorAgentDid); + + await this.storage.savePairs(pairs); + await this.storage.saveAgentPeers(agentPeers); + + delete expirableState.pairingTickets[parsedTicket.kid]; + expirableState.confirmedPairingTickets[parsedTicket.kid] = { + ticket, + expiresAtMs: stored.expiresAtMs, + initiatorAgentDid: stored.initiatorAgentDid, + initiatorProfile: stored.initiatorProfile, + responderAgentDid: 
body.responderAgentDid, + responderProfile, + issuerProxyUrl: stored.issuerProxyUrl, + confirmedAtMs: normalizeExpiryToWholeSecond(nowMs), + }; + await this.storage.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); + + return Response.json({ + initiatorAgentDid: stored.initiatorAgentDid, + initiatorProfile: stored.initiatorProfile, + responderAgentDid: body.responderAgentDid, + responderProfile, + issuerProxyUrl: stored.issuerProxyUrl, + }); + } + + async handleGetPairingTicketStatus(request: Request): Promise { + const body = (await parseBody(request)) as + | Partial + | undefined; + if (!body || !isNonEmptyString(body.ticket)) { + return toErrorResponse({ + code: "PROXY_PAIR_STATUS_INVALID_BODY", + message: "Pairing ticket status input is invalid", + status: 400, + }); + } + + const nowMs = typeof body.nowMs === "number" ? body.nowMs : nowUtcMs(); + const parsedTicketResult = parseNormalizedPairingTicket(body.ticket); + if (!parsedTicketResult.ok) { + return parsedTicketResult.response; + } + + const { parsedTicket, ticket } = parsedTicketResult; + const expirableState = await this.storage.loadExpirableState(); + + const pending = expirableState.pairingTickets[parsedTicket.kid]; + if (pending && pending.ticket === ticket) { + if (pending.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + delete expirableState.pairingTickets[parsedTicket.kid]; + await this.storage.saveExpirableStateAndSchedule(expirableState, { + pairingTickets: true, + }); + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return Response.json({ + status: "pending", + ticket: pending.ticket, + initiatorAgentDid: pending.initiatorAgentDid, + initiatorProfile: pending.initiatorProfile, + issuerProxyUrl: pending.issuerProxyUrl, + expiresAtMs: pending.expiresAtMs, + }); + } + + const confirmed = 
expirableState.confirmedPairingTickets[parsedTicket.kid]; + if (confirmed && confirmed.ticket === ticket) { + if (confirmed.expiresAtMs <= nowMs || parsedTicket.exp * 1000 <= nowMs) { + delete expirableState.confirmedPairingTickets[parsedTicket.kid]; + await this.storage.saveExpirableStateAndSchedule(expirableState, { + confirmedPairingTickets: true, + }); + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return Response.json({ + status: "confirmed", + ticket: confirmed.ticket, + initiatorAgentDid: confirmed.initiatorAgentDid, + initiatorProfile: confirmed.initiatorProfile, + responderAgentDid: confirmed.responderAgentDid, + responderProfile: confirmed.responderProfile, + issuerProxyUrl: confirmed.issuerProxyUrl, + expiresAtMs: confirmed.expiresAtMs, + confirmedAtMs: confirmed.confirmedAtMs, + }); + } + + if (parsedTicket.exp * 1000 <= nowMs) { + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_EXPIRED", + message: "Pairing ticket has expired", + status: 410, + }); + } + + return toErrorResponse({ + code: "PROXY_PAIR_TICKET_NOT_FOUND", + message: "Pairing ticket not found", + status: 404, + }); + } + + async handleUpsertPair(request: Request): Promise { + const body = (await parseBody(request)) as + | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } + | undefined; + if ( + !body || + !isNonEmptyString(body.initiatorAgentDid) || + !isNonEmptyString(body.responderAgentDid) + ) { + return toErrorResponse({ + code: "PROXY_PAIR_UPSERT_INVALID_BODY", + message: "Pair upsert input is invalid", + status: 400, + }); + } + + const pairs = await this.storage.loadPairs(); + pairs.add(toPairKey(body.initiatorAgentDid, body.responderAgentDid)); + await this.storage.savePairs(pairs); + + const agentPeers = await this.storage.loadAgentPeers(); + addPeer(agentPeers, body.initiatorAgentDid, body.responderAgentDid); + addPeer(agentPeers, body.responderAgentDid, body.initiatorAgentDid); 
+ await this.storage.saveAgentPeers(agentPeers); + + return Response.json({ ok: true }); + } + + async handleIsPairAllowed(request: Request): Promise { + const body = (await parseBody(request)) as + | { initiatorAgentDid?: unknown; responderAgentDid?: unknown } + | undefined; + if ( + !body || + !isNonEmptyString(body.initiatorAgentDid) || + !isNonEmptyString(body.responderAgentDid) + ) { + return toErrorResponse({ + code: "PROXY_PAIR_CHECK_INVALID_BODY", + message: "Pair check input is invalid", + status: 400, + }); + } + + if (body.initiatorAgentDid === body.responderAgentDid) { + return Response.json({ allowed: true }); + } + + const pairs = await this.storage.loadPairs(); + return Response.json({ + allowed: pairs.has( + toPairKey(body.initiatorAgentDid, body.responderAgentDid), + ), + }); + } + + async handleIsAgentKnown(request: Request): Promise { + const body = (await parseBody(request)) as + | { agentDid?: unknown } + | undefined; + if (!body || !isNonEmptyString(body.agentDid)) { + return toErrorResponse({ + code: "PROXY_AGENT_KNOWN_INVALID_BODY", + message: "Agent known check input is invalid", + status: 400, + }); + } + + const agentPeers = await this.storage.loadAgentPeers(); + if ((agentPeers[body.agentDid]?.length ?? 
0) > 0) { + return Response.json({ known: true }); + } + + return Response.json({ known: false }); + } +} diff --git a/apps/proxy/src/proxy-trust-state/index.ts b/apps/proxy/src/proxy-trust-state/index.ts new file mode 100644 index 0000000..f14b611 --- /dev/null +++ b/apps/proxy/src/proxy-trust-state/index.ts @@ -0,0 +1 @@ +export { ProxyTrustState } from "./controller.js"; diff --git a/apps/proxy/src/proxy-trust-state/storage.ts b/apps/proxy/src/proxy-trust-state/storage.ts new file mode 100644 index 0000000..78046e5 --- /dev/null +++ b/apps/proxy/src/proxy-trust-state/storage.ts @@ -0,0 +1,299 @@ +import { parsePairingTicket } from "../pairing-ticket.js"; +import type { + AgentPeersIndex, + ConfirmedPairingTicketMap, + ExpirableStateSaveOptions, + ExpirableTrustState, + PairingTicketMap, +} from "./types.js"; +import { + AGENT_PEERS_STORAGE_KEY, + CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, + PAIRING_TICKETS_STORAGE_KEY, + PAIRS_STORAGE_KEY, +} from "./types.js"; +import { isNonEmptyString, parsePeerProfile } from "./utils.js"; + +export class ProxyTrustStateStorage { + private readonly state: DurableObjectState; + + constructor(state: DurableObjectState) { + this.state = state; + } + + async runAlarmCleanup(nowMs: number): Promise { + const expirableState = await this.loadExpirableState(); + const mutated = this.removeExpiredEntries(expirableState, nowMs); + if (mutated) { + await this.saveExpirableState(expirableState, { + pairingTickets: true, + confirmedPairingTickets: true, + }); + } + + await this.scheduleNextCodeCleanup( + expirableState.pairingTickets, + expirableState.confirmedPairingTickets, + ); + } + + async loadExpirableState(): Promise { + const [pairingTickets, confirmedPairingTickets] = await Promise.all([ + this.loadPairingTickets(), + this.loadConfirmedPairingTickets(), + ]); + + return { pairingTickets, confirmedPairingTickets }; + } + + async saveExpirableStateAndSchedule( + state: ExpirableTrustState, + options: ExpirableStateSaveOptions, + ): 
Promise { + await this.saveExpirableState(state, options); + await this.scheduleNextCodeCleanup( + state.pairingTickets, + state.confirmedPairingTickets, + ); + } + + async loadPairs(): Promise> { + const raw = await this.state.storage.get(PAIRS_STORAGE_KEY); + if (!Array.isArray(raw)) { + return new Set(); + } + + const normalized = raw.filter((value) => typeof value === "string"); + return new Set(normalized); + } + + async savePairs(pairs: Set): Promise { + await this.state.storage.put(PAIRS_STORAGE_KEY, [...pairs].sort()); + } + + async loadAgentPeers(): Promise { + const raw = await this.state.storage.get( + AGENT_PEERS_STORAGE_KEY, + ); + if (typeof raw !== "object" || raw === null) { + return {}; + } + + const normalized: AgentPeersIndex = {}; + for (const [agentDid, peers] of Object.entries(raw)) { + if (!Array.isArray(peers)) { + continue; + } + + normalized[agentDid] = peers.filter((peer): peer is string => + isNonEmptyString(peer), + ); + } + + return normalized; + } + + async saveAgentPeers(agentPeers: AgentPeersIndex): Promise { + await this.state.storage.put(AGENT_PEERS_STORAGE_KEY, agentPeers); + } + + private removeExpiredEntries( + state: ExpirableTrustState, + nowMs: number, + ): boolean { + let mutated = false; + + for (const [ticketKid, details] of Object.entries(state.pairingTickets)) { + if (details.expiresAtMs <= nowMs) { + delete state.pairingTickets[ticketKid]; + mutated = true; + } + } + + for (const [ticketKid, details] of Object.entries( + state.confirmedPairingTickets, + )) { + if (details.expiresAtMs <= nowMs) { + delete state.confirmedPairingTickets[ticketKid]; + mutated = true; + } + } + + return mutated; + } + + private async saveExpirableState( + state: ExpirableTrustState, + options: ExpirableStateSaveOptions, + ): Promise { + const saves: Promise[] = []; + if (options.pairingTickets) { + saves.push(this.savePairingTickets(state.pairingTickets)); + } + + if (options.confirmedPairingTickets) { + saves.push( + 
this.saveConfirmedPairingTickets(state.confirmedPairingTickets), + ); + } + + if (saves.length > 0) { + await Promise.all(saves); + } + } + + private async loadPairingTickets(): Promise { + const raw = await this.state.storage.get( + PAIRING_TICKETS_STORAGE_KEY, + ); + + if (typeof raw !== "object" || raw === null) { + return {}; + } + + const normalized: PairingTicketMap = {}; + for (const [entryKey, value] of Object.entries(raw)) { + if (typeof value !== "object" || value === null) { + continue; + } + + const entry = value as { + ticket?: unknown; + expiresAtMs?: unknown; + initiatorAgentDid?: unknown; + initiatorProfile?: unknown; + issuerProxyUrl?: unknown; + }; + const initiatorProfile = parsePeerProfile(entry.initiatorProfile); + if ( + !isNonEmptyString(entry.initiatorAgentDid) || + !initiatorProfile || + !isNonEmptyString(entry.issuerProxyUrl) || + typeof entry.expiresAtMs !== "number" || + !Number.isInteger(entry.expiresAtMs) + ) { + continue; + } + + const ticketCandidate = isNonEmptyString(entry.ticket) + ? 
entry.ticket + : entryKey; + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(ticketCandidate); + } catch { + continue; + } + + normalized[parsedTicket.kid] = { + ticket: ticketCandidate, + expiresAtMs: entry.expiresAtMs, + initiatorAgentDid: entry.initiatorAgentDid, + initiatorProfile, + issuerProxyUrl: parsedTicket.iss, + }; + } + + return normalized; + } + + private async savePairingTickets( + pairingTickets: PairingTicketMap, + ): Promise { + await this.state.storage.put(PAIRING_TICKETS_STORAGE_KEY, pairingTickets); + } + + private async loadConfirmedPairingTickets(): Promise { + const raw = await this.state.storage.get( + CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, + ); + + if (typeof raw !== "object" || raw === null) { + return {}; + } + + const normalized: ConfirmedPairingTicketMap = {}; + for (const [entryKey, value] of Object.entries(raw)) { + if (typeof value !== "object" || value === null) { + continue; + } + + const entry = value as { + ticket?: unknown; + expiresAtMs?: unknown; + initiatorAgentDid?: unknown; + initiatorProfile?: unknown; + responderAgentDid?: unknown; + responderProfile?: unknown; + issuerProxyUrl?: unknown; + confirmedAtMs?: unknown; + }; + const initiatorProfile = parsePeerProfile(entry.initiatorProfile); + const responderProfile = parsePeerProfile(entry.responderProfile); + + if ( + !isNonEmptyString(entry.initiatorAgentDid) || + !initiatorProfile || + !isNonEmptyString(entry.responderAgentDid) || + !responderProfile || + !isNonEmptyString(entry.issuerProxyUrl) || + typeof entry.expiresAtMs !== "number" || + !Number.isInteger(entry.expiresAtMs) || + typeof entry.confirmedAtMs !== "number" || + !Number.isInteger(entry.confirmedAtMs) + ) { + continue; + } + + const ticketCandidate = isNonEmptyString(entry.ticket) + ? 
entry.ticket + : entryKey; + let parsedTicket: ReturnType; + try { + parsedTicket = parsePairingTicket(ticketCandidate); + } catch { + continue; + } + + normalized[parsedTicket.kid] = { + ticket: ticketCandidate, + expiresAtMs: entry.expiresAtMs, + initiatorAgentDid: entry.initiatorAgentDid, + initiatorProfile, + responderAgentDid: entry.responderAgentDid, + responderProfile, + issuerProxyUrl: parsedTicket.iss, + confirmedAtMs: entry.confirmedAtMs, + }; + } + + return normalized; + } + + private async saveConfirmedPairingTickets( + pairingTickets: ConfirmedPairingTicketMap, + ): Promise { + await this.state.storage.put( + CONFIRMED_PAIRING_TICKETS_STORAGE_KEY, + pairingTickets, + ); + } + + private async scheduleNextCodeCleanup( + pairingTickets: PairingTicketMap, + confirmedPairingTickets: ConfirmedPairingTicketMap, + ): Promise { + const expiryValues = [ + ...Object.values(pairingTickets), + ...Object.values(confirmedPairingTickets), + ].map((details) => details.expiresAtMs); + + if (expiryValues.length === 0) { + await this.state.storage.deleteAlarm(); + return; + } + + const earliestExpiry = Math.min(...expiryValues); + await this.state.storage.setAlarm(earliestExpiry); + } +} diff --git a/apps/proxy/src/proxy-trust-state/types.ts b/apps/proxy/src/proxy-trust-state/types.ts new file mode 100644 index 0000000..4b14f5b --- /dev/null +++ b/apps/proxy/src/proxy-trust-state/types.ts @@ -0,0 +1,43 @@ +import type { PeerProfile } from "../proxy-trust-store.js"; + +export type StoredPairingTicket = { + ticket: string; + expiresAtMs: number; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + issuerProxyUrl: string; +}; + +export type StoredConfirmedPairingTicket = { + ticket: string; + expiresAtMs: number; + initiatorAgentDid: string; + initiatorProfile: PeerProfile; + responderAgentDid: string; + responderProfile: PeerProfile; + issuerProxyUrl: string; + confirmedAtMs: number; +}; + +export type PairingTicketMap = Record; +export type 
ConfirmedPairingTicketMap = Record< + string, + StoredConfirmedPairingTicket +>; +export type AgentPeersIndex = Record; + +export type ExpirableTrustState = { + pairingTickets: PairingTicketMap; + confirmedPairingTickets: ConfirmedPairingTicketMap; +}; + +export type ExpirableStateSaveOptions = { + pairingTickets?: boolean; + confirmedPairingTickets?: boolean; +}; + +export const PAIRS_STORAGE_KEY = "trust:pairs"; +export const AGENT_PEERS_STORAGE_KEY = "trust:agent-peers"; +export const PAIRING_TICKETS_STORAGE_KEY = "trust:pairing-tickets"; +export const CONFIRMED_PAIRING_TICKETS_STORAGE_KEY = + "trust:pairing-tickets-confirmed"; diff --git a/apps/proxy/src/proxy-trust-state/utils.ts b/apps/proxy/src/proxy-trust-state/utils.ts new file mode 100644 index 0000000..5980664 --- /dev/null +++ b/apps/proxy/src/proxy-trust-state/utils.ts @@ -0,0 +1,132 @@ +import { + normalizePairingTicketText, + PairingTicketParseError, + parsePairingTicket, +} from "../pairing-ticket.js"; +import type { PeerProfile } from "../proxy-trust-store.js"; +import type { AgentPeersIndex } from "./types.js"; + +export function isNonEmptyString(value: unknown): value is string { + return typeof value === "string" && value.trim().length > 0; +} + +export function parsePeerProfile(value: unknown): PeerProfile | undefined { + if (typeof value !== "object" || value === null) { + return undefined; + } + + const entry = value as { + agentName?: unknown; + humanName?: unknown; + proxyOrigin?: unknown; + }; + + if ( + !isNonEmptyString(entry.agentName) || + !isNonEmptyString(entry.humanName) + ) { + return undefined; + } + + const profile: PeerProfile = { + agentName: entry.agentName.trim(), + humanName: entry.humanName.trim(), + }; + + if (entry.proxyOrigin !== undefined) { + if (!isNonEmptyString(entry.proxyOrigin)) { + return undefined; + } + + let parsedProxyOrigin: URL; + try { + parsedProxyOrigin = new URL(entry.proxyOrigin.trim()); + } catch { + return undefined; + } + + if ( + 
parsedProxyOrigin.protocol !== "https:" && + parsedProxyOrigin.protocol !== "http:" + ) { + return undefined; + } + + profile.proxyOrigin = parsedProxyOrigin.origin; + } + + return profile; +} + +export function addPeer( + index: AgentPeersIndex, + leftAgentDid: string, + rightAgentDid: string, +): void { + const peers = new Set(index[leftAgentDid] ?? []); + peers.add(rightAgentDid); + index[leftAgentDid] = [...peers].sort(); +} + +export function toErrorResponse(input: { + code: string; + message: string; + status: number; +}): Response { + return Response.json( + { + error: { + code: input.code, + message: input.message, + }, + }, + { status: input.status }, + ); +} + +export async function parseBody(request: Request): Promise { + try { + return await request.json(); + } catch { + return undefined; + } +} + +type ParsedPairingTicket = ReturnType; + +export type PairingTicketParseResult = + | { + ok: true; + ticket: string; + parsedTicket: ParsedPairingTicket; + } + | { + ok: false; + response: Response; + }; + +export function parseNormalizedPairingTicket( + rawTicket: string, +): PairingTicketParseResult { + const ticket = normalizePairingTicketText(rawTicket); + try { + return { + ok: true, + ticket, + parsedTicket: parsePairingTicket(ticket), + }; + } catch (error) { + if (error instanceof PairingTicketParseError) { + return { + ok: false, + response: toErrorResponse({ + code: error.code, + message: error.message, + status: 400, + }), + }; + } + + throw error; + } +} From aecd6116b6a84891e1944a556b698b4c9ba24412 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 16:56:46 +0530 Subject: [PATCH 130/190] refactor(registry): split server into modular route files --- apps/registry/src/server.ts | 2823 +---------------- apps/registry/src/server/AGENTS.md | 28 + apps/registry/src/server/constants.ts | 146 + .../src/server/create-registry-app.ts | 171 + .../registry/src/server/helpers/db-queries.ts | 239 ++ apps/registry/src/server/helpers/event-bus.ts | 
127 + apps/registry/src/server/helpers/parsers.ts | 408 +++ apps/registry/src/server/index.ts | 6 + apps/registry/src/server/routes/admin.ts | 168 + apps/registry/src/server/routes/agent-auth.ts | 421 +++ apps/registry/src/server/routes/agents.ts | 599 ++++ apps/registry/src/server/routes/health.ts | 134 + .../src/server/routes/internal-services.ts | 295 ++ apps/registry/src/server/routes/invites.ts | 275 ++ .../registry/src/server/routes/me-api-keys.ts | 149 + 15 files changed, 3167 insertions(+), 2822 deletions(-) create mode 100644 apps/registry/src/server/AGENTS.md create mode 100644 apps/registry/src/server/constants.ts create mode 100644 apps/registry/src/server/create-registry-app.ts create mode 100644 apps/registry/src/server/helpers/db-queries.ts create mode 100644 apps/registry/src/server/helpers/event-bus.ts create mode 100644 apps/registry/src/server/helpers/parsers.ts create mode 100644 apps/registry/src/server/index.ts create mode 100644 apps/registry/src/server/routes/admin.ts create mode 100644 apps/registry/src/server/routes/agent-auth.ts create mode 100644 apps/registry/src/server/routes/agents.ts create mode 100644 apps/registry/src/server/routes/health.ts create mode 100644 apps/registry/src/server/routes/internal-services.ts create mode 100644 apps/registry/src/server/routes/invites.ts create mode 100644 apps/registry/src/server/routes/me-api-keys.ts diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 0cc04de..fd3d658 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -1,2822 +1 @@ -import { - ADMIN_BOOTSTRAP_PATH, - ADMIN_INTERNAL_SERVICES_PATH, - AGENT_AUTH_REFRESH_PATH, - AGENT_AUTH_VALIDATE_PATH, - AGENT_REGISTRATION_CHALLENGE_PATH, - generateUlid, - INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, - INVITES_PATH, - INVITES_REDEEM_PATH, - ME_API_KEYS_PATH, - makeHumanDid, - parseDid, - parseUlid, - REGISTRY_METADATA_PATH, -} from "@clawdentity/protocol"; -import { - AppError, - createEventEnvelope, 
- createHonoErrorHandler, - createInMemoryEventBus, - createLogger, - createQueueEventBus, - createRequestContextMiddleware, - createRequestLoggingMiddleware, - type EventBus, - nowIso, - nowUtcMs, - parseRegistryConfig, - type QueuePublisher, - type RegistryConfig, - shouldExposeVerboseErrors, - signAIT, - signCRL, -} from "@clawdentity/sdk"; -import { and, desc, eq, isNull, lt } from "drizzle-orm"; -import { Hono } from "hono"; -import { parseAdminBootstrapPayload } from "./admin-bootstrap.js"; -import { - agentAuthRefreshConflictError, - agentAuthRefreshRejectedError, - issueAgentAuth, - parseAgentAuthRefreshPayload, - toAgentAuthResponse, -} from "./agent-auth-lifecycle.js"; -import { mapAgentListRow, parseAgentListQuery } from "./agent-list.js"; -import { parseAgentOwnershipPath } from "./agent-ownership.js"; -import { - buildAgentRegistrationChallenge, - buildAgentRegistrationFromParsed, - buildAgentReissue, - parseAgentRegistrationBody, - resolveRegistryIssuer, - verifyAgentRegistrationOwnershipProof, -} from "./agent-registration.js"; -import { - agentResolveNotFoundError, - mapResolvedAgentRow, - parseAgentResolvePath, -} from "./agent-resolve.js"; -import { - agentNotFoundError, - invalidAgentReissueStateError, - invalidAgentRevokeStateError, - parseAgentRevokePath, -} from "./agent-revocation.js"; -import { - apiKeyNotFoundError, - mapApiKeyMetadataRow, - parseApiKeyCreatePayload, - parseApiKeyRevokePath, -} from "./api-key-lifecycle.js"; -import { - deriveAccessTokenLookupPrefix, - deriveRefreshTokenLookupPrefix, - hashAgentToken, - parseAccessToken, -} from "./auth/agent-auth-token.js"; -import { verifyAgentClawRequest } from "./auth/agent-claw-auth.js"; -import { - type AuthenticatedHuman, - createApiKeyAuth, -} from "./auth/api-key-auth.js"; -import { - constantTimeEqual, - deriveApiKeyLookupPrefix, - generateApiKeyToken, - hashApiKeyToken, -} from "./auth/api-key-token.js"; -import { parseInternalServiceScopesPayload } from 
"./auth/internal-service-scopes.js"; -import { - type AuthenticatedService, - createServiceAuth, - deriveInternalServiceSecretPrefix, - generateInternalServiceSecret, - hashInternalServiceSecret, -} from "./auth/service-auth.js"; -import { createDb } from "./db/client.js"; -import { - agent_auth_events, - agent_auth_sessions, - agent_registration_challenges, - agents, - api_keys, - humans, - internal_services, - invites, - revocations, -} from "./db/schema.js"; -import { - generateInviteCode, - inviteCreateForbiddenError, - inviteRedeemAlreadyUsedError, - inviteRedeemCodeInvalidError, - inviteRedeemExpiredError, - parseInviteCreatePayload, - parseInviteRedeemPayload, -} from "./invite-lifecycle.js"; -import { - AGENT_AUTH_REFRESH_RATE_LIMIT_MAX_REQUESTS, - AGENT_AUTH_REFRESH_RATE_LIMIT_WINDOW_MS, - AGENT_AUTH_VALIDATE_RATE_LIMIT_MAX_REQUESTS, - AGENT_AUTH_VALIDATE_RATE_LIMIT_WINDOW_MS, - CRL_RATE_LIMIT_MAX_REQUESTS, - CRL_RATE_LIMIT_WINDOW_MS, - createInMemoryRateLimit, - RESOLVE_RATE_LIMIT_MAX_REQUESTS, - RESOLVE_RATE_LIMIT_WINDOW_MS, -} from "./rate-limit.js"; -import { resolveRegistrySigner } from "./registry-signer.js"; - -type Bindings = { - DB: D1Database; - ENVIRONMENT: string; - APP_VERSION?: string; - PROXY_URL?: string; - REGISTRY_ISSUER_URL?: string; - EVENT_BUS_BACKEND?: "memory" | "queue"; - EVENT_BUS_QUEUE?: QueuePublisher; - BOOTSTRAP_SECRET?: string; - REGISTRY_SIGNING_KEY?: string; - REGISTRY_SIGNING_KEYS?: string; -}; -const logger = createLogger({ service: "registry" }); -const REGISTRY_CACHE_MAX_AGE_SECONDS = 300; -const REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS = 60; -const REGISTRY_KEY_CACHE_CONTROL = `public, max-age=${REGISTRY_CACHE_MAX_AGE_SECONDS}, s-maxage=${REGISTRY_CACHE_MAX_AGE_SECONDS}, stale-while-revalidate=${REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS}`; -const REGISTRY_CRL_CACHE_CONTROL = `public, max-age=${REGISTRY_CACHE_MAX_AGE_SECONDS}, s-maxage=${REGISTRY_CACHE_MAX_AGE_SECONDS}, 
stale-while-revalidate=${REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS}`; -const CRL_EXPIRY_SAFETY_BUFFER_SECONDS = 30; -const CRL_TTL_SECONDS = - REGISTRY_CACHE_MAX_AGE_SECONDS + - REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS + - CRL_EXPIRY_SAFETY_BUFFER_SECONDS; -const PROXY_URL_BY_ENVIRONMENT: Record = - { - development: "https://dev.proxy.clawdentity.com", - production: "https://proxy.clawdentity.com", - test: "https://dev.proxy.clawdentity.com", - }; -// Deterministic bootstrap identity guarantees one-time admin creation under races. -const BOOTSTRAP_ADMIN_HUMAN_ID = "00000000000000000000000000"; -const REGISTRY_SERVICE_EVENT_VERSION = "v1"; - -const AGENT_AUTH_EVENT_NAME_BY_TYPE: Record< - "issued" | "refreshed" | "revoked" | "refresh_rejected", - string -> = { - issued: "agent.auth.issued", - refreshed: "agent.auth.refreshed", - revoked: "agent.auth.revoked", - refresh_rejected: "agent.auth.refresh_rejected", -}; - -type OwnedAgent = { - id: string; - did: string; - name: string; - framework: string | null; - public_key: string; - status: "active" | "revoked"; - expires_at: string | null; - current_jti: string | null; -}; - -type OwnedAgentRegistrationChallenge = { - id: string; - owner_id: string; - public_key: string; - nonce: string; - status: "pending" | "used"; - expires_at: string; - used_at: string | null; -}; - -type OwnedAgentAuthSession = { - id: string; - agent_id: string; - refresh_key_hash: string; - refresh_key_prefix: string; - refresh_issued_at: string; - refresh_expires_at: string; - refresh_last_used_at: string | null; - access_key_hash: string; - access_key_prefix: string; - access_issued_at: string; - access_expires_at: string; - access_last_used_at: string | null; - status: "active" | "revoked"; - revoked_at: string | null; - created_at: string; - updated_at: string; -}; - -type InviteRow = { - id: string; - code: string; - created_by: string; - redeemed_by: string | null; - expires_at: string | null; - created_at: string; -}; - -type 
CrlSnapshotRow = { - id: string; - jti: string; - reason: string | null; - revoked_at: string; - agent_did: string; -}; - -type RegistryRateLimitRuntimeOptions = { - nowMs?: () => number; - resolveMaxRequests?: number; - resolveWindowMs?: number; - crlMaxRequests?: number; - crlWindowMs?: number; - agentAuthRefreshMaxRequests?: number; - agentAuthRefreshWindowMs?: number; - agentAuthValidateMaxRequests?: number; - agentAuthValidateWindowMs?: number; -}; - -type CreateRegistryAppOptions = { - rateLimit?: RegistryRateLimitRuntimeOptions; - eventBus?: EventBus; -}; - -function crlBuildError(options: { - environment: RegistryConfig["ENVIRONMENT"]; - message: string; - details?: { - fieldErrors: Record; - formErrors: string[]; - }; -}): AppError { - const exposeDetails = shouldExposeVerboseErrors(options.environment); - return new AppError({ - code: "CRL_BUILD_FAILED", - message: exposeDetails - ? options.message - : "CRL snapshot could not be generated", - status: 500, - expose: exposeDetails, - details: exposeDetails ? 
options.details : undefined, - }); -} - -function parseRevokedAtSeconds(options: { - environment: RegistryConfig["ENVIRONMENT"]; - revocationId: string; - revokedAtIso: string; -}): number { - const epochMillis = Date.parse(options.revokedAtIso); - if (!Number.isFinite(epochMillis)) { - throw crlBuildError({ - environment: options.environment, - message: "CRL revocation timestamp is invalid", - details: { - fieldErrors: { - revokedAt: [ - `revocation ${options.revocationId} has invalid revoked_at timestamp`, - ], - }, - formErrors: [], - }, - }); - } - - return Math.floor(epochMillis / 1000); -} - -function buildCrlClaims(input: { - rows: CrlSnapshotRow[]; - environment: RegistryConfig["ENVIRONMENT"]; - issuer: string; - nowSeconds: number; -}) { - return { - iss: input.issuer, - jti: generateUlid(nowUtcMs()), - iat: input.nowSeconds, - exp: input.nowSeconds + CRL_TTL_SECONDS, - revocations: input.rows.map((row) => { - const base = { - jti: row.jti, - agentDid: row.agent_did, - revokedAt: parseRevokedAtSeconds({ - environment: input.environment, - revocationId: row.id, - revokedAtIso: row.revoked_at, - }), - }; - - if (typeof row.reason === "string" && row.reason.length > 0) { - return { - ...base, - reason: row.reason, - }; - } - - return base; - }), - }; -} - -async function findOwnedAgent(input: { - db: ReturnType; - ownerId: string; - agentId: string; -}): Promise { - const rows = await input.db - .select({ - id: agents.id, - did: agents.did, - name: agents.name, - framework: agents.framework, - public_key: agents.public_key, - status: agents.status, - expires_at: agents.expires_at, - current_jti: agents.current_jti, - }) - .from(agents) - .where( - and(eq(agents.owner_id, input.ownerId), eq(agents.id, input.agentId)), - ) - .limit(1); - - return rows[0]; -} - -async function findAgentAuthSessionByAgentId(input: { - db: ReturnType; - agentId: string; -}): Promise { - const rows = await input.db - .select({ - id: agent_auth_sessions.id, - agent_id: 
agent_auth_sessions.agent_id, - refresh_key_hash: agent_auth_sessions.refresh_key_hash, - refresh_key_prefix: agent_auth_sessions.refresh_key_prefix, - refresh_issued_at: agent_auth_sessions.refresh_issued_at, - refresh_expires_at: agent_auth_sessions.refresh_expires_at, - refresh_last_used_at: agent_auth_sessions.refresh_last_used_at, - access_key_hash: agent_auth_sessions.access_key_hash, - access_key_prefix: agent_auth_sessions.access_key_prefix, - access_issued_at: agent_auth_sessions.access_issued_at, - access_expires_at: agent_auth_sessions.access_expires_at, - access_last_used_at: agent_auth_sessions.access_last_used_at, - status: agent_auth_sessions.status, - revoked_at: agent_auth_sessions.revoked_at, - created_at: agent_auth_sessions.created_at, - updated_at: agent_auth_sessions.updated_at, - }) - .from(agent_auth_sessions) - .where(eq(agent_auth_sessions.agent_id, input.agentId)) - .limit(1); - - return rows[0]; -} - -async function findOwnedAgentByDid(input: { - db: ReturnType; - did: string; -}): Promise { - const rows = await input.db - .select({ - id: agents.id, - did: agents.did, - name: agents.name, - framework: agents.framework, - public_key: agents.public_key, - status: agents.status, - expires_at: agents.expires_at, - current_jti: agents.current_jti, - }) - .from(agents) - .where(eq(agents.did, input.did)) - .limit(1); - - return rows[0]; -} - -async function findOwnedAgentRegistrationChallenge(input: { - db: ReturnType; - ownerId: string; - challengeId: string; -}): Promise { - const rows = await input.db - .select({ - id: agent_registration_challenges.id, - owner_id: agent_registration_challenges.owner_id, - public_key: agent_registration_challenges.public_key, - nonce: agent_registration_challenges.nonce, - status: agent_registration_challenges.status, - expires_at: agent_registration_challenges.expires_at, - used_at: agent_registration_challenges.used_at, - }) - .from(agent_registration_challenges) - .where( - and( - 
eq(agent_registration_challenges.owner_id, input.ownerId), - eq(agent_registration_challenges.id, input.challengeId), - ), - ) - .limit(1); - - return rows[0]; -} - -async function findInviteByCode(input: { - db: ReturnType; - code: string; -}): Promise { - const rows = await input.db - .select({ - id: invites.id, - code: invites.code, - created_by: invites.created_by, - redeemed_by: invites.redeemed_by, - expires_at: invites.expires_at, - created_at: invites.created_at, - }) - .from(invites) - .where(eq(invites.code, input.code)) - .limit(1); - - return rows[0]; -} - -async function findInviteById(input: { - db: ReturnType; - id: string; -}): Promise { - const rows = await input.db - .select({ - id: invites.id, - code: invites.code, - created_by: invites.created_by, - redeemed_by: invites.redeemed_by, - expires_at: invites.expires_at, - created_at: invites.created_at, - }) - .from(invites) - .where(eq(invites.id, input.id)) - .limit(1); - - return rows[0]; -} - -function isInviteExpired(input: { - expiresAt: string | null; - nowMillis: number; -}) { - if (typeof input.expiresAt !== "string") { - return false; - } - - const expiresAtMillis = Date.parse(input.expiresAt); - if (!Number.isFinite(expiresAtMillis)) { - return true; - } - - return expiresAtMillis <= input.nowMillis; -} - -function isIsoExpired(expiresAtIso: string, nowMillis: number): boolean { - const parsed = Date.parse(expiresAtIso); - if (!Number.isFinite(parsed)) { - return true; - } - - return parsed <= nowMillis; -} - -function parseAgentAuthValidatePayload(payload: unknown): { - agentDid: string; - aitJti: string; -} { - if (!payload || typeof payload !== "object" || Array.isArray(payload)) { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_INVALID", - message: "Validation payload is invalid", - status: 400, - expose: true, - }); - } - - const value = payload as Record; - const agentDid = - typeof value.agentDid === "string" ? 
value.agentDid.trim() : ""; - const aitJti = typeof value.aitJti === "string" ? value.aitJti.trim() : ""; - - if (agentDid.length === 0 || aitJti.length === 0) { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_INVALID", - message: "Validation payload is invalid", - status: 400, - expose: true, - }); - } - - return { - agentDid, - aitJti, - }; -} - -function parseAgentAccessHeaderToken(token: string | undefined): string { - try { - return parseAccessToken(token); - } catch { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", - message: "Agent access token is invalid", - status: 401, - expose: true, - }); - } -} - -function parseInternalServiceName(value: unknown): string { - const normalized = typeof value === "string" ? value.trim() : ""; - if (!/^[a-z0-9][a-z0-9-_]{1,63}$/i.test(normalized)) { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID", - message: "Internal service payload is invalid", - status: 400, - expose: true, - }); - } - - return normalized; -} - -function parseInternalServiceCreatePayload(payload: unknown): { - name: string; - scopes: string[]; -} { - if (!payload || typeof payload !== "object" || Array.isArray(payload)) { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID", - message: "Internal service payload is invalid", - status: 400, - expose: true, - }); - } - - const value = payload as Record; - return { - name: parseInternalServiceName(value.name), - scopes: parseInternalServiceScopesPayload(value.scopes), - }; -} - -function parseInternalServicePathId(input: { - id: string; - environment: RegistryConfig["ENVIRONMENT"]; -}): string { - const candidate = input.id.trim(); - try { - return parseUlid(candidate).value; - } catch { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID_PATH", - message: - input.environment === "production" - ? 
"Request could not be processed" - : "Internal service path is invalid", - status: 400, - expose: input.environment !== "production", - details: - input.environment === "production" - ? undefined - : { - fieldErrors: { id: ["id must be a valid ULID"] }, - formErrors: [], - }, - }); - } -} - -function parseInternalServiceRotatePayload(payload: unknown): { - scopes?: string[]; -} { - if (payload === undefined || payload === null) { - return {}; - } - if (typeof payload !== "object" || Array.isArray(payload)) { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID", - message: "Internal service payload is invalid", - status: 400, - expose: true, - }); - } - - const value = payload as Record; - if (value.scopes === undefined) { - return {}; - } - - return { - scopes: parseInternalServiceScopesPayload(value.scopes), - }; -} - -function resolveEventBusBackend( - config: RegistryConfig, -): NonNullable { - if (config.EVENT_BUS_BACKEND === "memory") { - return "memory"; - } - - if (config.EVENT_BUS_BACKEND === "queue") { - return "queue"; - } - - return config.ENVIRONMENT === "development" || - config.ENVIRONMENT === "production" - ? 
"queue" - : "memory"; -} - -function resolveRegistryEventBus(input: { - config: RegistryConfig; - bindings: Bindings; - explicitBus?: EventBus; -}): EventBus { - if (input.explicitBus !== undefined) { - return input.explicitBus; - } - - const backend = resolveEventBusBackend(input.config); - if (backend === "memory") { - return createInMemoryEventBus(); - } - - const queue = input.bindings.EVENT_BUS_QUEUE; - if (queue === undefined) { - throw new AppError({ - code: "CONFIG_VALIDATION_FAILED", - message: "Registry configuration is invalid", - status: 500, - expose: true, - details: { - fieldErrors: { - EVENT_BUS_QUEUE: [ - "EVENT_BUS_QUEUE is required when EVENT_BUS_BACKEND is queue", - ], - }, - formErrors: [], - }, - }); - } - - return createQueueEventBus(queue); -} - -function parseHumanDid(value: unknown): string { - if (typeof value !== "string") { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - const candidate = value.trim(); - try { - const parsed = parseDid(candidate); - if (parsed.kind !== "human") { - throw new Error("invalid"); - } - } catch { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - if (candidate.length === 0) { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - return candidate; -} - -function parseAgentDid(value: unknown): string { - if (typeof value !== "string") { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - const candidate = value.trim(); - try { - const parsed = parseDid(candidate); - if (parsed.kind !== "agent") { - throw new Error("invalid"); - } - } catch { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is 
invalid", - status: 400, - expose: true, - }); - } - - if (candidate.length === 0) { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - return candidate; -} - -function parseInternalOwnershipCheckPayload(payload: unknown): { - ownerDid: string; - agentDid: string; -} { - if (!payload || typeof payload !== "object" || Array.isArray(payload)) { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - const value = payload as Record; - let ownerDid: string; - try { - ownerDid = parseHumanDid(value.ownerDid); - } catch { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - return { - ownerDid, - agentDid: parseAgentDid(value.agentDid), - }; -} - -async function insertAgentAuthEvent(input: { - db: ReturnType; - agentId: string; - sessionId: string; - eventType: "issued" | "refreshed" | "revoked" | "refresh_rejected"; - reason?: string; - metadata?: Record; - createdAt?: string; - eventBus?: EventBus; - initiatedByAccountId?: string | null; -}): Promise { - const createdAt = input.createdAt ?? nowIso(); - await input.db.insert(agent_auth_events).values({ - id: generateUlid(nowUtcMs()), - agent_id: input.agentId, - session_id: input.sessionId, - event_type: input.eventType, - reason: input.reason ?? null, - metadata_json: - input.metadata === undefined ? 
null : JSON.stringify(input.metadata), - created_at: createdAt, - }); - - if (input.eventBus === undefined) { - return; - } - - const eventData: Record = { - agentId: input.agentId, - sessionId: input.sessionId, - }; - if (input.reason !== undefined) { - eventData.reason = input.reason; - } - if (input.metadata !== undefined) { - eventData.metadata = input.metadata; - } - - try { - await input.eventBus.publish( - createEventEnvelope({ - type: AGENT_AUTH_EVENT_NAME_BY_TYPE[input.eventType], - version: REGISTRY_SERVICE_EVENT_VERSION, - initiatedByAccountId: input.initiatedByAccountId ?? null, - timestampUtc: createdAt, - data: eventData, - }), - ); - } catch (error) { - logger.warn("registry.event_bus.publish_failed", { - eventType: input.eventType, - errorName: error instanceof Error ? error.name : "unknown", - }); - } -} - -async function resolveInviteRedeemStateError(input: { - db: ReturnType; - inviteId: string; - nowMillis: number; -}): Promise { - const latestInvite = await findInviteById({ - db: input.db, - id: input.inviteId, - }); - - if (!latestInvite) { - return inviteRedeemCodeInvalidError(); - } - - if (latestInvite.redeemed_by !== null) { - return inviteRedeemAlreadyUsedError(); - } - - if ( - isInviteExpired({ - expiresAt: latestInvite.expires_at, - nowMillis: input.nowMillis, - }) - ) { - return inviteRedeemExpiredError(); - } - - return inviteRedeemCodeInvalidError(); -} - -function requireCurrentJti(input: { - currentJti: string | null; - onInvalid: (reason: string) => AppError; -}): string { - if (typeof input.currentJti !== "string" || input.currentJti.length === 0) { - throw input.onInvalid("agent.current_jti is required"); - } - - return input.currentJti; -} - -function isUnsupportedLocalTransactionError(error: unknown): boolean { - return ( - error instanceof Error && error.message.includes("Failed query: begin") - ); -} - -function getMutationRowCount(result: unknown): number | undefined { - if (!result || typeof result !== "object") { - 
return undefined; - } - - const directChanges = (result as { changes?: unknown }).changes; - if (typeof directChanges === "number") { - return directChanges; - } - - const rowsAffected = (result as { rowsAffected?: unknown }).rowsAffected; - if (typeof rowsAffected === "number") { - return rowsAffected; - } - - const metaChanges = (result as { meta?: { changes?: unknown } }).meta - ?.changes; - if (typeof metaChanges === "number") { - return metaChanges; - } - - return undefined; -} - -function requireBootstrapSecret(bootstrapSecret: string | undefined): string { - if (typeof bootstrapSecret === "string" && bootstrapSecret.length > 0) { - return bootstrapSecret; - } - - throw new AppError({ - code: "ADMIN_BOOTSTRAP_DISABLED", - message: "Admin bootstrap is disabled", - status: 503, - expose: true, - }); -} - -function parseBootstrapSecretHeader(headerValue: string | undefined): string { - if (typeof headerValue !== "string" || headerValue.trim().length === 0) { - throw new AppError({ - code: "ADMIN_BOOTSTRAP_UNAUTHORIZED", - message: "Bootstrap secret is required", - status: 401, - expose: true, - }); - } - - return headerValue.trim(); -} - -function assertBootstrapSecretAuthorized(input: { - provided: string; - expected: string; -}): void { - if (!constantTimeEqual(input.provided, input.expected)) { - throw new AppError({ - code: "ADMIN_BOOTSTRAP_UNAUTHORIZED", - message: "Bootstrap secret is invalid", - status: 401, - expose: true, - }); - } -} - -function adminBootstrapAlreadyCompletedError(): AppError { - return new AppError({ - code: "ADMIN_BOOTSTRAP_ALREADY_COMPLETED", - message: "Admin bootstrap has already completed", - status: 409, - expose: true, - }); -} - -function resolveProxyUrl(config: RegistryConfig): string { - return config.PROXY_URL ?? 
PROXY_URL_BY_ENVIRONMENT[config.ENVIRONMENT]; -} - -function createRegistryApp(options: CreateRegistryAppOptions = {}) { - let cachedConfig: RegistryConfig | undefined; - let cachedEventBus: EventBus | undefined; - let cachedEventBusKey: string | undefined; - - function getConfig(bindings: Bindings): RegistryConfig { - if (cachedConfig) { - return cachedConfig; - } - - cachedConfig = parseRegistryConfig(bindings, { - requireRuntimeKeys: true, - }); - return cachedConfig; - } - - function getEventBus(bindings: Bindings): EventBus { - if (options.eventBus !== undefined) { - return options.eventBus; - } - - const config = getConfig(bindings); - const resolvedBackend = resolveEventBusBackend(config); - const key = `${config.ENVIRONMENT}|${resolvedBackend}|${ - bindings.EVENT_BUS_QUEUE === undefined ? "no-queue" : "has-queue" - }`; - if (cachedEventBus && cachedEventBusKey === key) { - return cachedEventBus; - } - - const resolved = resolveRegistryEventBus({ - config, - bindings, - explicitBus: options.eventBus, - }); - cachedEventBus = resolved; - cachedEventBusKey = key; - return resolved; - } - - const app = new Hono<{ - Bindings: Bindings; - Variables: { - requestId: string; - human: AuthenticatedHuman; - service: AuthenticatedService; - }; - }>(); - const rateLimitOptions = options.rateLimit; - const resolveRateLimit = createInMemoryRateLimit({ - bucketKey: "resolve", - maxRequests: - rateLimitOptions?.resolveMaxRequests ?? RESOLVE_RATE_LIMIT_MAX_REQUESTS, - windowMs: rateLimitOptions?.resolveWindowMs ?? RESOLVE_RATE_LIMIT_WINDOW_MS, - nowMs: rateLimitOptions?.nowMs, - }); - const crlRateLimit = createInMemoryRateLimit({ - bucketKey: "crl", - maxRequests: - rateLimitOptions?.crlMaxRequests ?? CRL_RATE_LIMIT_MAX_REQUESTS, - windowMs: rateLimitOptions?.crlWindowMs ?? 
CRL_RATE_LIMIT_WINDOW_MS, - nowMs: rateLimitOptions?.nowMs, - }); - const agentAuthRefreshRateLimit = createInMemoryRateLimit({ - bucketKey: "agent_auth_refresh", - maxRequests: - rateLimitOptions?.agentAuthRefreshMaxRequests ?? - AGENT_AUTH_REFRESH_RATE_LIMIT_MAX_REQUESTS, - windowMs: - rateLimitOptions?.agentAuthRefreshWindowMs ?? - AGENT_AUTH_REFRESH_RATE_LIMIT_WINDOW_MS, - nowMs: rateLimitOptions?.nowMs, - }); - const agentAuthValidateRateLimit = createInMemoryRateLimit({ - bucketKey: "agent_auth_validate", - maxRequests: - rateLimitOptions?.agentAuthValidateMaxRequests ?? - AGENT_AUTH_VALIDATE_RATE_LIMIT_MAX_REQUESTS, - windowMs: - rateLimitOptions?.agentAuthValidateWindowMs ?? - AGENT_AUTH_VALIDATE_RATE_LIMIT_WINDOW_MS, - nowMs: rateLimitOptions?.nowMs, - }); - - app.use("*", createRequestContextMiddleware()); - app.use("*", createRequestLoggingMiddleware(logger)); - app.onError(createHonoErrorHandler(logger)); - - app.get("/health", (c) => { - const config = getConfig(c.env); - return c.json({ - status: "ok", - version: config.APP_VERSION ?? "0.0.0", - environment: config.ENVIRONMENT, - }); - }); - - app.get(REGISTRY_METADATA_PATH, (c) => { - const config = getConfig(c.env); - return c.json({ - status: "ok", - environment: config.ENVIRONMENT, - version: config.APP_VERSION ?? "0.0.0", - registryUrl: c.req.url ? 
new URL(c.req.url).origin : undefined, - proxyUrl: resolveProxyUrl(config), - }); - }); - - app.post(ADMIN_BOOTSTRAP_PATH, async (c) => { - const config = getConfig(c.env); - const expectedBootstrapSecret = requireBootstrapSecret( - config.BOOTSTRAP_SECRET, - ); - const providedBootstrapSecret = parseBootstrapSecretHeader( - c.req.header("x-bootstrap-secret"), - ); - assertBootstrapSecretAuthorized({ - provided: providedBootstrapSecret, - expected: expectedBootstrapSecret, - }); - - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "ADMIN_BOOTSTRAP_INVALID", - message: "Request body must be valid JSON", - status: 400, - expose: true, - }); - } - - const bootstrapPayload = parseAdminBootstrapPayload({ - payload, - environment: config.ENVIRONMENT, - }); - - const db = createDb(c.env.DB); - const activeAdminRows = await db - .select({ id: humans.id }) - .from(humans) - .where(eq(humans.role, "admin")) - .limit(1); - if (activeAdminRows.length > 0) { - throw adminBootstrapAlreadyCompletedError(); - } - - const humanId = BOOTSTRAP_ADMIN_HUMAN_ID; - const humanDid = makeHumanDid(humanId); - const apiKeyToken = generateApiKeyToken(); - const apiKeyHash = await hashApiKeyToken(apiKeyToken); - const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); - const apiKeyId = generateUlid(nowUtcMs() + 1); - const createdAt = nowIso(); - - const applyBootstrapMutation = async ( - executor: typeof db, - options: { rollbackOnApiKeyFailure: boolean }, - ): Promise => { - const insertAdminResult = await executor - .insert(humans) - .values({ - id: humanId, - did: humanDid, - display_name: bootstrapPayload.displayName, - role: "admin", - status: "active", - created_at: createdAt, - updated_at: createdAt, - }) - .onConflictDoNothing({ - target: humans.id, - }); - - const insertedRows = getMutationRowCount(insertAdminResult); - if (insertedRows === 0) { - throw adminBootstrapAlreadyCompletedError(); - } - - try { - await 
executor.insert(api_keys).values({ - id: apiKeyId, - human_id: humanId, - key_hash: apiKeyHash, - key_prefix: apiKeyPrefix, - name: bootstrapPayload.apiKeyName, - status: "active", - created_at: createdAt, - last_used_at: null, - }); - } catch (error) { - if (options.rollbackOnApiKeyFailure) { - try { - await executor.delete(humans).where(eq(humans.id, humanId)); - } catch (rollbackError) { - logger.error("registry.admin_bootstrap_rollback_failed", { - rollbackErrorName: - rollbackError instanceof Error ? rollbackError.name : "unknown", - }); - } - } - - throw error; - } - }; - - try { - await db.transaction(async (tx) => { - await applyBootstrapMutation(tx as unknown as typeof db, { - rollbackOnApiKeyFailure: false, - }); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyBootstrapMutation(db, { - rollbackOnApiKeyFailure: true, - }); - } - - return c.json( - { - human: { - id: humanId, - did: humanDid, - displayName: bootstrapPayload.displayName, - role: "admin", - status: "active", - }, - apiKey: { - id: apiKeyId, - name: bootstrapPayload.apiKeyName, - token: apiKeyToken, - }, - }, - 201, - ); - }); - - app.get("/.well-known/claw-keys.json", (c) => { - const config = getConfig(c.env); - return c.json( - { - keys: config.REGISTRY_SIGNING_KEYS ?? 
[], - }, - 200, - { - "Cache-Control": REGISTRY_KEY_CACHE_CONTROL, - }, - ); - }); - - app.get("/v1/crl", crlRateLimit, async (c) => { - const config = getConfig(c.env); - const db = createDb(c.env.DB); - - const rows = await db - .select({ - id: revocations.id, - jti: revocations.jti, - reason: revocations.reason, - revoked_at: revocations.revoked_at, - agent_did: agents.did, - }) - .from(revocations) - .innerJoin(agents, eq(revocations.agent_id, agents.id)) - .orderBy(desc(revocations.revoked_at), desc(revocations.id)); - - if (rows.length === 0) { - throw new AppError({ - code: "CRL_NOT_FOUND", - message: "CRL snapshot is not available", - status: 404, - expose: true, - }); - } - - const signer = await resolveRegistrySigner(config); - const nowSeconds = Math.floor(nowUtcMs() / 1000); - const claims = buildCrlClaims({ - rows, - environment: config.ENVIRONMENT, - issuer: resolveRegistryIssuer(config), - nowSeconds, - }); - const crl = await signCRL({ - claims, - signerKid: signer.signerKid, - signerKeypair: signer.signerKeypair, - }); - - return c.json({ crl }, 200, { - "Cache-Control": REGISTRY_CRL_CACHE_CONTROL, - }); - }); - - app.get("/v1/resolve/:id", resolveRateLimit, async (c) => { - const config = getConfig(c.env); - const id = parseAgentResolvePath({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - const db = createDb(c.env.DB); - - const rows = await db - .select({ - did: agents.did, - name: agents.name, - framework: agents.framework, - status: agents.status, - owner_did: humans.did, - }) - .from(agents) - .innerJoin(humans, eq(agents.owner_id, humans.id)) - .where(eq(agents.id, id)) - .limit(1); - - const row = rows[0]; - if (!row) { - throw agentResolveNotFoundError(); - } - - return c.json(mapResolvedAgentRow(row)); - }); - - app.post(ADMIN_INTERNAL_SERVICES_PATH, createApiKeyAuth(), async (c) => { - const human = c.get("human"); - if (human.role !== "admin") { - throw new AppError({ - code: "INTERNAL_SERVICE_CREATE_FORBIDDEN", - 
message: "Admin role is required", - status: 403, - expose: true, - }); - } - - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID", - message: "Internal service payload is invalid", - status: 400, - expose: true, - }); - } - - const parsed = parseInternalServiceCreatePayload(payload); - const db = createDb(c.env.DB); - const existingRows = await db - .select({ - id: internal_services.id, - }) - .from(internal_services) - .where(eq(internal_services.name, parsed.name)) - .limit(1); - if (existingRows[0]) { - throw new AppError({ - code: "INTERNAL_SERVICE_ALREADY_EXISTS", - message: "Internal service already exists", - status: 409, - expose: true, - }); - } - - const secret = generateInternalServiceSecret(); - const secretHash = await hashInternalServiceSecret(secret); - const secretPrefix = deriveInternalServiceSecretPrefix(secret); - const createdAt = nowIso(); - const serviceId = generateUlid(nowUtcMs()); - await db.insert(internal_services).values({ - id: serviceId, - name: parsed.name, - secret_hash: secretHash, - secret_prefix: secretPrefix, - scopes_json: JSON.stringify(parsed.scopes), - status: "active", - created_by: human.id, - rotated_at: null, - last_used_at: null, - created_at: createdAt, - updated_at: createdAt, - }); - - return c.json( - { - internalService: { - id: serviceId, - name: parsed.name, - scopes: parsed.scopes, - status: "active", - createdAt, - updatedAt: createdAt, - rotatedAt: null, - lastUsedAt: null, - secret, - }, - }, - 201, - ); - }); - - app.get(ADMIN_INTERNAL_SERVICES_PATH, createApiKeyAuth(), async (c) => { - const human = c.get("human"); - if (human.role !== "admin") { - throw new AppError({ - code: "INTERNAL_SERVICE_LIST_FORBIDDEN", - message: "Admin role is required", - status: 403, - expose: true, - }); - } - - const db = createDb(c.env.DB); - const rows = await db - .select({ - id: internal_services.id, - name: internal_services.name, - 
scopesJson: internal_services.scopes_json, - status: internal_services.status, - createdAt: internal_services.created_at, - updatedAt: internal_services.updated_at, - rotatedAt: internal_services.rotated_at, - lastUsedAt: internal_services.last_used_at, - }) - .from(internal_services) - .orderBy(desc(internal_services.created_at), desc(internal_services.id)); - - return c.json({ - internalServices: rows.map((row) => ({ - id: row.id, - name: row.name, - scopes: JSON.parse(row.scopesJson) as string[], - status: row.status, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - rotatedAt: row.rotatedAt, - lastUsedAt: row.lastUsedAt, - })), - }); - }); - - app.post( - `${ADMIN_INTERNAL_SERVICES_PATH}/:id/rotate`, - createApiKeyAuth(), - async (c) => { - const config = getConfig(c.env); - const human = c.get("human"); - if (human.role !== "admin") { - throw new AppError({ - code: "INTERNAL_SERVICE_ROTATE_FORBIDDEN", - message: "Admin role is required", - status: 403, - expose: true, - }); - } - - const serviceId = parseInternalServicePathId({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - - let payload: unknown = {}; - try { - const rawBody = await c.req.text(); - if (rawBody.trim().length > 0) { - payload = JSON.parse(rawBody); - } - } catch { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID", - message: "Internal service payload is invalid", - status: 400, - expose: true, - }); - } - - const parsedPayload = parseInternalServiceRotatePayload(payload); - const db = createDb(c.env.DB); - const rows = await db - .select({ - id: internal_services.id, - name: internal_services.name, - scopesJson: internal_services.scopes_json, - status: internal_services.status, - }) - .from(internal_services) - .where(eq(internal_services.id, serviceId)) - .limit(1); - const service = rows[0]; - if (!service) { - throw new AppError({ - code: "INTERNAL_SERVICE_NOT_FOUND", - message: "Internal service was not found", - status: 404, - expose: true, - }); - } - 
if (service.status !== "active") { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID_STATE", - message: "Internal service cannot be rotated", - status: 409, - expose: true, - }); - } - - const scopes = - parsedPayload.scopes ?? - ((JSON.parse(service.scopesJson) as unknown[]).filter( - (scope): scope is string => - typeof scope === "string" && scope.trim().length > 0, - ) as string[]); - if (scopes.length === 0) { - throw new AppError({ - code: "INTERNAL_SERVICE_INVALID", - message: "Internal service payload is invalid", - status: 400, - expose: true, - }); - } - - const secret = generateInternalServiceSecret(); - const secretHash = await hashInternalServiceSecret(secret); - const secretPrefix = deriveInternalServiceSecretPrefix(secret); - const rotatedAt = nowIso(); - await db - .update(internal_services) - .set({ - secret_hash: secretHash, - secret_prefix: secretPrefix, - scopes_json: JSON.stringify(scopes), - rotated_at: rotatedAt, - updated_at: rotatedAt, - }) - .where(eq(internal_services.id, service.id)); - - return c.json({ - internalService: { - id: service.id, - name: service.name, - scopes, - status: "active", - rotatedAt, - updatedAt: rotatedAt, - secret, - }, - }); - }, - ); - - app.get("/v1/me", createApiKeyAuth(), (c) => { - return c.json({ human: c.get("human") }); - }); - - app.post(INVITES_PATH, createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); - - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "INVITE_CREATE_INVALID", - message: exposeDetails - ? 
"Request body must be valid JSON" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - }); - } - - const human = c.get("human"); - if (human.role !== "admin") { - throw inviteCreateForbiddenError(); - } - - const parsedPayload = parseInviteCreatePayload({ - payload, - environment: config.ENVIRONMENT, - nowMs: nowUtcMs(), - }); - - const inviteId = generateUlid(nowUtcMs()); - const inviteCode = generateInviteCode(); - const createdAt = nowIso(); - const db = createDb(c.env.DB); - await db.insert(invites).values({ - id: inviteId, - code: inviteCode, - created_by: human.id, - redeemed_by: null, - agent_id: null, - expires_at: parsedPayload.expiresAt, - created_at: createdAt, - }); - - return c.json( - { - invite: { - id: inviteId, - code: inviteCode, - createdBy: human.id, - expiresAt: parsedPayload.expiresAt, - createdAt, - }, - }, - 201, - ); - }); - - app.post(INVITES_REDEEM_PATH, async (c) => { - const config = getConfig(c.env); - const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); - - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "INVITE_REDEEM_INVALID", - message: exposeDetails - ? 
"Request body must be valid JSON" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - }); - } - - const parsedPayload = parseInviteRedeemPayload({ - payload, - environment: config.ENVIRONMENT, - }); - - const db = createDb(c.env.DB); - const invite = await findInviteByCode({ - db, - code: parsedPayload.code, - }); - - if (!invite) { - throw inviteRedeemCodeInvalidError(); - } - - const nowMillis = nowUtcMs(); - if (invite.redeemed_by !== null) { - throw inviteRedeemAlreadyUsedError(); - } - - if ( - isInviteExpired({ - expiresAt: invite.expires_at, - nowMillis, - }) - ) { - throw inviteRedeemExpiredError(); - } - - const humanId = generateUlid(nowMillis); - const humanDid = makeHumanDid(humanId); - const apiKeyToken = generateApiKeyToken(); - const apiKeyHash = await hashApiKeyToken(apiKeyToken); - const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); - const apiKeyId = generateUlid(nowMillis + 1); - const createdAt = nowIso(); - - const applyRedeemMutation = async ( - executor: typeof db, - options: { rollbackOnFailure: boolean }, - ): Promise => { - await executor.insert(humans).values({ - id: humanId, - did: humanDid, - display_name: parsedPayload.displayName, - role: "user", - status: "active", - created_at: createdAt, - updated_at: createdAt, - }); - - let inviteRedeemed = false; - try { - const inviteUpdateResult = await executor - .update(invites) - .set({ - redeemed_by: humanId, - }) - .where(and(eq(invites.id, invite.id), isNull(invites.redeemed_by))); - - const updatedRows = getMutationRowCount(inviteUpdateResult); - if (updatedRows === 0) { - throw await resolveInviteRedeemStateError({ - db: executor, - inviteId: invite.id, - nowMillis, - }); - } - inviteRedeemed = true; - - await executor.insert(api_keys).values({ - id: apiKeyId, - human_id: humanId, - key_hash: apiKeyHash, - key_prefix: apiKeyPrefix, - name: parsedPayload.apiKeyName, - status: "active", - created_at: createdAt, - last_used_at: null, - }); - } catch 
(error) { - if (options.rollbackOnFailure) { - if (inviteRedeemed) { - try { - await executor - .update(invites) - .set({ - redeemed_by: null, - }) - .where( - and( - eq(invites.id, invite.id), - eq(invites.redeemed_by, humanId), - ), - ); - } catch (rollbackError) { - logger.error("registry.invite_redeem_rollback_failed", { - rollbackErrorName: - rollbackError instanceof Error - ? rollbackError.name - : "unknown", - stage: "invite_unlink", - }); - } - } - - try { - await executor.delete(humans).where(eq(humans.id, humanId)); - } catch (rollbackError) { - logger.error("registry.invite_redeem_rollback_failed", { - rollbackErrorName: - rollbackError instanceof Error ? rollbackError.name : "unknown", - stage: "human_delete", - }); - } - } - - throw error; - } - }; - - try { - await db.transaction(async (tx) => { - await applyRedeemMutation(tx as unknown as typeof db, { - rollbackOnFailure: false, - }); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyRedeemMutation(db, { - rollbackOnFailure: true, - }); - } - - return c.json( - { - human: { - id: humanId, - did: humanDid, - displayName: parsedPayload.displayName, - role: "user", - status: "active", - }, - apiKey: { - id: apiKeyId, - name: parsedPayload.apiKeyName, - token: apiKeyToken, - }, - proxyUrl: resolveProxyUrl(config), - }, - 201, - ); - }); - - app.post(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); - - let payload: unknown = {}; - try { - const rawBody = await c.req.text(); - if (rawBody.trim().length > 0) { - payload = JSON.parse(rawBody); - } - } catch { - throw new AppError({ - code: "API_KEY_CREATE_INVALID", - message: exposeDetails - ? 
"Request body must be valid JSON" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - }); - } - - const parsedPayload = parseApiKeyCreatePayload({ - payload, - environment: config.ENVIRONMENT, - }); - - const human = c.get("human"); - const apiKeyToken = generateApiKeyToken(); - const apiKeyHash = await hashApiKeyToken(apiKeyToken); - const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); - const apiKeyId = generateUlid(nowUtcMs() + 1); - const createdAt = nowIso(); - - const db = createDb(c.env.DB); - await db.insert(api_keys).values({ - id: apiKeyId, - human_id: human.id, - key_hash: apiKeyHash, - key_prefix: apiKeyPrefix, - name: parsedPayload.name, - status: "active", - created_at: createdAt, - last_used_at: null, - }); - - return c.json( - { - apiKey: { - id: apiKeyId, - name: parsedPayload.name, - status: "active", - createdAt, - lastUsedAt: null, - token: apiKeyToken, - }, - }, - 201, - ); - }); - - app.get(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { - const human = c.get("human"); - const db = createDb(c.env.DB); - - const rows = await db - .select({ - id: api_keys.id, - name: api_keys.name, - status: api_keys.status, - created_at: api_keys.created_at, - last_used_at: api_keys.last_used_at, - }) - .from(api_keys) - .where(eq(api_keys.human_id, human.id)) - .orderBy(desc(api_keys.created_at), desc(api_keys.id)); - - return c.json({ - apiKeys: rows.map(mapApiKeyMetadataRow), - }); - }); - - app.delete(`${ME_API_KEYS_PATH}/:id`, createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const apiKeyId = parseApiKeyRevokePath({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - const human = c.get("human"); - const db = createDb(c.env.DB); - - const rows = await db - .select({ - id: api_keys.id, - status: api_keys.status, - }) - .from(api_keys) - .where(and(eq(api_keys.id, apiKeyId), eq(api_keys.human_id, human.id))) - .limit(1); - - const existingKey = rows[0]; - if (!existingKey) { - 
throw apiKeyNotFoundError(); - } - - if (existingKey.status === "revoked") { - return c.body(null, 204); - } - - await db - .update(api_keys) - .set({ - status: "revoked", - }) - .where(and(eq(api_keys.id, apiKeyId), eq(api_keys.human_id, human.id))); - - return c.body(null, 204); - }); - - app.get("/v1/agents", createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const query = parseAgentListQuery({ - query: c.req.query(), - environment: config.ENVIRONMENT, - }); - const human = c.get("human"); - const db = createDb(c.env.DB); - - const filters = [eq(agents.owner_id, human.id)]; - if (query.status) { - filters.push(eq(agents.status, query.status)); - } - if (query.framework) { - filters.push(eq(agents.framework, query.framework)); - } - if (query.cursor) { - filters.push(lt(agents.id, query.cursor)); - } - - const rows = await db - .select({ - id: agents.id, - did: agents.did, - name: agents.name, - status: agents.status, - expires_at: agents.expires_at, - }) - .from(agents) - .where(and(...filters)) - .orderBy(desc(agents.id)) - .limit(query.limit + 1); - - const hasNextPage = rows.length > query.limit; - const pageRows = hasNextPage ? rows.slice(0, query.limit) : rows; - const nextCursor = hasNextPage - ? (pageRows[pageRows.length - 1]?.id ?? 
null) - : null; - - return c.json({ - agents: pageRows.map(mapAgentListRow), - pagination: { - limit: query.limit, - nextCursor, - }, - }); - }); - - app.get("/v1/agents/:id/ownership", createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const agentId = parseAgentOwnershipPath({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - const human = c.get("human"); - const db = createDb(c.env.DB); - - const rows = await db - .select({ - id: agents.id, - }) - .from(agents) - .where(and(eq(agents.owner_id, human.id), eq(agents.id, agentId))) - .limit(1); - - return c.json({ - ownsAgent: rows.length > 0, - }); - }); - - app.post( - INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, - createServiceAuth({ - requiredScopes: ["identity.read"], - }), - async (c) => { - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "AGENT_OWNERSHIP_INVALID", - message: "Ownership payload is invalid", - status: 400, - expose: true, - }); - } - - const parsed = parseInternalOwnershipCheckPayload(payload); - const db = createDb(c.env.DB); - - const rows = await db - .select({ - ownerDid: humans.did, - status: agents.status, - }) - .from(agents) - .innerJoin(humans, eq(agents.owner_id, humans.id)) - .where(eq(agents.did, parsed.agentDid)) - .limit(1); - - const row = rows[0]; - return c.json({ - ownsAgent: row !== undefined && row.ownerDid === parsed.ownerDid, - agentStatus: row?.status ?? null, - }); - }, - ); - - app.post(AGENT_REGISTRATION_CHALLENGE_PATH, createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); - - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "AGENT_REGISTRATION_CHALLENGE_INVALID", - message: exposeDetails - ? 
"Request body must be valid JSON" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - }); - } - - const human = c.get("human"); - const challenge = buildAgentRegistrationChallenge({ - payload, - ownerId: human.id, - ownerDid: human.did, - environment: config.ENVIRONMENT, - }); - - const db = createDb(c.env.DB); - await db.insert(agent_registration_challenges).values({ - id: challenge.challenge.id, - owner_id: challenge.challenge.ownerId, - public_key: challenge.challenge.publicKey, - nonce: challenge.challenge.nonce, - status: challenge.challenge.status, - expires_at: challenge.challenge.expiresAt, - used_at: challenge.challenge.usedAt, - created_at: challenge.challenge.createdAt, - updated_at: challenge.challenge.updatedAt, - }); - - return c.json(challenge.response, 201); - }); - - app.post("/v1/agents", createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); - - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "AGENT_REGISTRATION_INVALID", - message: exposeDetails - ? "Request body must be valid JSON" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - }); - } - - const human = c.get("human"); - const parsedBody = parseAgentRegistrationBody(payload, config.ENVIRONMENT); - const db = createDb(c.env.DB); - - const challenge = await findOwnedAgentRegistrationChallenge({ - db, - ownerId: human.id, - challengeId: parsedBody.challengeId, - }); - - if (!challenge) { - throw new AppError({ - code: "AGENT_REGISTRATION_CHALLENGE_NOT_FOUND", - message: exposeDetails - ? 
"Registration challenge was not found" - : "Request could not be processed", - status: 400, - expose: true, - }); - } - - await verifyAgentRegistrationOwnershipProof({ - parsedBody, - challenge: { - id: challenge.id, - ownerId: challenge.owner_id, - publicKey: challenge.public_key, - nonce: challenge.nonce, - status: challenge.status, - expiresAt: challenge.expires_at, - usedAt: challenge.used_at, - }, - ownerDid: human.did, - environment: config.ENVIRONMENT, - }); - const registration = buildAgentRegistrationFromParsed({ - parsedBody, - ownerDid: human.did, - issuer: resolveRegistryIssuer(config), - }); - const signer = await resolveRegistrySigner(config); - const ait = await signAIT({ - claims: registration.claims, - signerKid: signer.signerKid, - signerKeypair: signer.signerKeypair, - }); - - const initialAuth = await issueAgentAuth(); - const challengeUsedAt = nowIso(); - const applyRegistrationMutation = async ( - executor: typeof db, - options: { rollbackOnAgentInsertFailure: boolean }, - ): Promise => { - const challengeUpdateResult = await executor - .update(agent_registration_challenges) - .set({ - status: "used", - used_at: challengeUsedAt, - updated_at: challengeUsedAt, - }) - .where( - and( - eq(agent_registration_challenges.id, challenge.id), - eq(agent_registration_challenges.owner_id, human.id), - eq(agent_registration_challenges.status, "pending"), - ), - ); - - const updatedRows = getMutationRowCount(challengeUpdateResult); - if (updatedRows === 0) { - throw new AppError({ - code: "AGENT_REGISTRATION_CHALLENGE_REPLAYED", - message: exposeDetails - ? 
"Registration challenge has already been used" - : "Request could not be processed", - status: 400, - expose: true, - }); - } - - try { - await executor.insert(agents).values({ - id: registration.agent.id, - did: registration.agent.did, - owner_id: human.id, - name: registration.agent.name, - framework: registration.agent.framework, - public_key: registration.agent.publicKey, - current_jti: registration.agent.currentJti, - status: registration.agent.status, - expires_at: registration.agent.expiresAt, - created_at: registration.agent.createdAt, - updated_at: registration.agent.updatedAt, - }); - - await executor.insert(agent_auth_sessions).values({ - id: initialAuth.sessionId, - agent_id: registration.agent.id, - refresh_key_hash: initialAuth.refreshTokenHash, - refresh_key_prefix: initialAuth.refreshTokenPrefix, - refresh_issued_at: initialAuth.refreshIssuedAt, - refresh_expires_at: initialAuth.refreshExpiresAt, - refresh_last_used_at: null, - access_key_hash: initialAuth.accessTokenHash, - access_key_prefix: initialAuth.accessTokenPrefix, - access_issued_at: initialAuth.accessIssuedAt, - access_expires_at: initialAuth.accessExpiresAt, - access_last_used_at: null, - status: "active", - revoked_at: null, - created_at: initialAuth.createdAt, - updated_at: initialAuth.updatedAt, - }); - - await insertAgentAuthEvent({ - db: executor, - agentId: registration.agent.id, - sessionId: initialAuth.sessionId, - eventType: "issued", - createdAt: initialAuth.createdAt, - metadata: { - actor: "agent_registration", - }, - eventBus: getEventBus(c.env), - initiatedByAccountId: human.did, - }); - } catch (error) { - if (options.rollbackOnAgentInsertFailure) { - try { - await executor - .delete(agent_auth_sessions) - .where(eq(agent_auth_sessions.id, initialAuth.sessionId)); - } catch (rollbackError) { - logger.error("registry.agent_registration_rollback_failed", { - rollbackErrorName: - rollbackError instanceof Error ? 
rollbackError.name : "unknown", - stage: "auth_session_delete", - }); - } - - try { - await executor - .delete(agents) - .where(eq(agents.id, registration.agent.id)); - } catch (rollbackError) { - logger.error("registry.agent_registration_rollback_failed", { - rollbackErrorName: - rollbackError instanceof Error ? rollbackError.name : "unknown", - stage: "agent_delete", - }); - } - - await executor - .update(agent_registration_challenges) - .set({ - status: "pending", - used_at: null, - updated_at: nowIso(), - }) - .where( - and( - eq(agent_registration_challenges.id, challenge.id), - eq(agent_registration_challenges.owner_id, human.id), - eq(agent_registration_challenges.status, "used"), - ), - ); - } - - throw error; - } - }; - - try { - await db.transaction(async (tx) => { - await applyRegistrationMutation(tx as unknown as typeof db, { - rollbackOnAgentInsertFailure: false, - }); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyRegistrationMutation(db, { - rollbackOnAgentInsertFailure: true, - }); - } - - return c.json( - { - agent: registration.agent, - ait, - agentAuth: toAgentAuthResponse({ - accessToken: initialAuth.accessToken, - accessExpiresAt: initialAuth.accessExpiresAt, - refreshToken: initialAuth.refreshToken, - refreshExpiresAt: initialAuth.refreshExpiresAt, - }), - }, - 201, - ); - }); - - app.post(AGENT_AUTH_REFRESH_PATH, agentAuthRefreshRateLimit, async (c) => { - const config = getConfig(c.env); - const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); - const bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer()); - - let payload: unknown; - try { - const rawBody = new TextDecoder().decode(bodyBytes); - payload = rawBody.trim().length === 0 ? {} : JSON.parse(rawBody); - } catch { - throw new AppError({ - code: "AGENT_AUTH_REFRESH_INVALID", - message: exposeDetails - ? 
"Request body must be valid JSON" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - }); - } - - const parsedPayload = parseAgentAuthRefreshPayload({ - payload, - environment: config.ENVIRONMENT, - }); - const claims = await verifyAgentClawRequest({ - config, - request: c.req.raw, - bodyBytes, - }); - const nowMillis = nowUtcMs(); - const db = createDb(c.env.DB); - const existingAgent = await findOwnedAgentByDid({ - db, - did: claims.sub, - }); - - if (!existingAgent || existingAgent.status !== "active") { - throw agentAuthRefreshRejectedError({ - code: "AGENT_AUTH_REFRESH_INVALID", - message: "Agent auth refresh token is invalid", - }); - } - - if (existingAgent.current_jti !== claims.jti) { - throw agentAuthRefreshRejectedError({ - code: "AGENT_AUTH_REFRESH_INVALID", - message: "Agent auth refresh token is invalid", - }); - } - - const existingSession = await findAgentAuthSessionByAgentId({ - db, - agentId: existingAgent.id, - }); - - if (!existingSession) { - throw agentAuthRefreshRejectedError({ - code: "AGENT_AUTH_REFRESH_INVALID", - message: "Agent auth refresh token is invalid", - }); - } - - if (existingSession.status !== "active") { - throw agentAuthRefreshRejectedError({ - code: "AGENT_AUTH_REFRESH_REVOKED", - message: "Agent auth refresh token is revoked", - }); - } - - const refreshPrefix = deriveRefreshTokenLookupPrefix( - parsedPayload.refreshToken, - ); - const refreshHash = await hashAgentToken(parsedPayload.refreshToken); - const refreshTokenMatches = - existingSession.refresh_key_prefix === refreshPrefix && - constantTimeEqual(existingSession.refresh_key_hash, refreshHash); - - if (!refreshTokenMatches) { - await insertAgentAuthEvent({ - db, - agentId: existingAgent.id, - sessionId: existingSession.id, - eventType: "refresh_rejected", - reason: "invalid_refresh_token", - eventBus: getEventBus(c.env), - initiatedByAccountId: claims.ownerDid, - }); - throw agentAuthRefreshRejectedError({ - code: 
"AGENT_AUTH_REFRESH_INVALID", - message: "Agent auth refresh token is invalid", - }); - } - - if (isIsoExpired(existingSession.refresh_expires_at, nowMillis)) { - const revokedAt = nowIso(); - await db - .update(agent_auth_sessions) - .set({ - status: "revoked", - revoked_at: revokedAt, - updated_at: revokedAt, - }) - .where(eq(agent_auth_sessions.id, existingSession.id)); - await insertAgentAuthEvent({ - db, - agentId: existingAgent.id, - sessionId: existingSession.id, - eventType: "revoked", - reason: "refresh_token_expired", - createdAt: revokedAt, - eventBus: getEventBus(c.env), - initiatedByAccountId: claims.ownerDid, - }); - throw agentAuthRefreshRejectedError({ - code: "AGENT_AUTH_REFRESH_EXPIRED", - message: "Agent auth refresh token is expired", - }); - } - - const rotatedAuth = await issueAgentAuth({ - nowMs: nowMillis, - }); - const refreshedAt = nowIso(); - const applyRefreshMutation = async (executor: typeof db): Promise => { - const updateResult = await executor - .update(agent_auth_sessions) - .set({ - refresh_key_hash: rotatedAuth.refreshTokenHash, - refresh_key_prefix: rotatedAuth.refreshTokenPrefix, - refresh_issued_at: rotatedAuth.refreshIssuedAt, - refresh_expires_at: rotatedAuth.refreshExpiresAt, - refresh_last_used_at: refreshedAt, - access_key_hash: rotatedAuth.accessTokenHash, - access_key_prefix: rotatedAuth.accessTokenPrefix, - access_issued_at: rotatedAuth.accessIssuedAt, - access_expires_at: rotatedAuth.accessExpiresAt, - access_last_used_at: null, - status: "active", - revoked_at: null, - updated_at: refreshedAt, - }) - .where( - and( - eq(agent_auth_sessions.id, existingSession.id), - eq(agent_auth_sessions.status, "active"), - eq(agent_auth_sessions.refresh_key_hash, refreshHash), - ), - ); - - const updatedRows = getMutationRowCount(updateResult); - if (updatedRows === 0) { - throw agentAuthRefreshConflictError(); - } - - await insertAgentAuthEvent({ - db: executor, - agentId: existingAgent.id, - sessionId: existingSession.id, - 
eventType: "refreshed", - createdAt: refreshedAt, - eventBus: getEventBus(c.env), - initiatedByAccountId: claims.ownerDid, - }); - }; - - try { - await db.transaction(async (tx) => { - await applyRefreshMutation(tx as unknown as typeof db); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyRefreshMutation(db); - } - - return c.json({ - agentAuth: toAgentAuthResponse({ - accessToken: rotatedAuth.accessToken, - accessExpiresAt: rotatedAuth.accessExpiresAt, - refreshToken: rotatedAuth.refreshToken, - refreshExpiresAt: rotatedAuth.refreshExpiresAt, - }), - }); - }); - - app.post(AGENT_AUTH_VALIDATE_PATH, agentAuthValidateRateLimit, async (c) => { - let payload: unknown; - try { - payload = await c.req.json(); - } catch { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_INVALID", - message: "Validation payload is invalid", - status: 400, - expose: true, - }); - } - - const parsedPayload = parseAgentAuthValidatePayload(payload); - const accessToken = parseAgentAccessHeaderToken( - c.req.header("x-claw-agent-access"), - ); - - const db = createDb(c.env.DB); - const existingAgent = await findOwnedAgentByDid({ - db, - did: parsedPayload.agentDid, - }); - if ( - !existingAgent || - existingAgent.status !== "active" || - existingAgent.current_jti !== parsedPayload.aitJti - ) { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", - message: "Agent access token is invalid", - status: 401, - expose: true, - }); - } - - const existingSession = await findAgentAuthSessionByAgentId({ - db, - agentId: existingAgent.id, - }); - if (!existingSession || existingSession.status !== "active") { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", - message: "Agent access token is invalid", - status: 401, - expose: true, - }); - } - - const nowMillis = nowUtcMs(); - if (isIsoExpired(existingSession.access_expires_at, nowMillis)) { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_EXPIRED", - 
message: "Agent access token is expired", - status: 401, - expose: true, - }); - } - - const accessTokenPrefix = deriveAccessTokenLookupPrefix(accessToken); - const accessTokenHash = await hashAgentToken(accessToken); - const accessTokenMatches = - existingSession.access_key_prefix === accessTokenPrefix && - constantTimeEqual(existingSession.access_key_hash, accessTokenHash); - if (!accessTokenMatches) { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", - message: "Agent access token is invalid", - status: 401, - expose: true, - }); - } - - const accessLastUsedAt = nowIso(); - const updateResult = await db - .update(agent_auth_sessions) - .set({ - access_last_used_at: accessLastUsedAt, - updated_at: accessLastUsedAt, - }) - .where( - and( - eq(agent_auth_sessions.id, existingSession.id), - eq(agent_auth_sessions.status, "active"), - eq(agent_auth_sessions.access_key_hash, accessTokenHash), - ), - ); - - const updatedRows = getMutationRowCount(updateResult); - if (updatedRows === 0) { - throw new AppError({ - code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", - message: "Agent access token is invalid", - status: 401, - expose: true, - }); - } - - return c.body(null, 204); - }); - - app.delete("/v1/agents/:id/auth/revoke", createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const agentId = parseAgentRevokePath({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - const human = c.get("human"); - const db = createDb(c.env.DB); - const existingAgent = await findOwnedAgent({ - db, - ownerId: human.id, - agentId, - }); - - if (!existingAgent) { - throw agentNotFoundError(); - } - - const existingSession = await findAgentAuthSessionByAgentId({ - db, - agentId: existingAgent.id, - }); - if (!existingSession || existingSession.status === "revoked") { - return c.body(null, 204); - } - - const revokedAt = nowIso(); - const applyAuthRevokeMutation = async ( - executor: typeof db, - ): Promise => { - await executor - 
.update(agent_auth_sessions) - .set({ - status: "revoked", - revoked_at: revokedAt, - updated_at: revokedAt, - }) - .where( - and( - eq(agent_auth_sessions.id, existingSession.id), - eq(agent_auth_sessions.status, "active"), - ), - ); - - await insertAgentAuthEvent({ - db: executor, - agentId: existingAgent.id, - sessionId: existingSession.id, - eventType: "revoked", - reason: "owner_auth_revoke", - createdAt: revokedAt, - eventBus: getEventBus(c.env), - initiatedByAccountId: human.did, - }); - }; - - try { - await db.transaction(async (tx) => { - await applyAuthRevokeMutation(tx as unknown as typeof db); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyAuthRevokeMutation(db); - } - - return c.body(null, 204); - }); - - app.delete("/v1/agents/:id", createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const agentId = parseAgentRevokePath({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - const human = c.get("human"); - const db = createDb(c.env.DB); - - const existingAgent = await findOwnedAgent({ - db, - ownerId: human.id, - agentId, - }); - - if (!existingAgent) { - throw agentNotFoundError(); - } - - if (existingAgent.status === "revoked") { - return c.body(null, 204); - } - - const currentJti = requireCurrentJti({ - currentJti: existingAgent.current_jti, - onInvalid: (reason) => - invalidAgentRevokeStateError({ - environment: config.ENVIRONMENT, - reason: `${reason} for revocation`, - }), - }); - - const existingSession = await findAgentAuthSessionByAgentId({ - db, - agentId: existingAgent.id, - }); - const revokedAt = nowIso(); - const applyRevokeMutation = async (executor: typeof db): Promise => { - await executor - .update(agents) - .set({ - status: "revoked", - updated_at: revokedAt, - }) - .where(eq(agents.id, existingAgent.id)); - - await executor - .insert(revocations) - .values({ - id: generateUlid(nowUtcMs()), - jti: currentJti, - agent_id: 
existingAgent.id, - reason: null, - revoked_at: revokedAt, - }) - .onConflictDoNothing({ - target: revocations.jti, - }); - - if (existingSession && existingSession.status === "active") { - await executor - .update(agent_auth_sessions) - .set({ - status: "revoked", - revoked_at: revokedAt, - updated_at: revokedAt, - }) - .where( - and( - eq(agent_auth_sessions.id, existingSession.id), - eq(agent_auth_sessions.status, "active"), - ), - ); - - await insertAgentAuthEvent({ - db: executor, - agentId: existingAgent.id, - sessionId: existingSession.id, - eventType: "revoked", - reason: "agent_revoked", - createdAt: revokedAt, - eventBus: getEventBus(c.env), - initiatedByAccountId: human.did, - }); - } - }; - - try { - await db.transaction(async (tx) => { - await applyRevokeMutation(tx as unknown as typeof db); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyRevokeMutation(db); - } - - return c.body(null, 204); - }); - - app.post("/v1/agents/:id/reissue", createApiKeyAuth(), async (c) => { - const config = getConfig(c.env); - const agentId = parseAgentRevokePath({ - id: c.req.param("id"), - environment: config.ENVIRONMENT, - }); - const human = c.get("human"); - const db = createDb(c.env.DB); - - const existingAgent = await findOwnedAgent({ - db, - ownerId: human.id, - agentId, - }); - - if (!existingAgent) { - throw agentNotFoundError(); - } - - if (existingAgent.status === "revoked") { - throw invalidAgentReissueStateError({ - environment: config.ENVIRONMENT, - field: "status", - reason: "revoked agents cannot be reissued", - }); - } - - const currentJti = requireCurrentJti({ - currentJti: existingAgent.current_jti, - onInvalid: (reason) => - invalidAgentReissueStateError({ - environment: config.ENVIRONMENT, - reason: `${reason} for reissue`, - }), - }); - - const reissue = buildAgentReissue({ - id: existingAgent.id, - did: existingAgent.did, - ownerDid: human.did, - name: existingAgent.name, - framework: 
existingAgent.framework, - publicKey: existingAgent.public_key, - previousExpiresAt: existingAgent.expires_at, - issuer: resolveRegistryIssuer(config), - }); - const signer = await resolveRegistrySigner(config); - const ait = await signAIT({ - claims: reissue.claims, - signerKid: signer.signerKid, - signerKeypair: signer.signerKeypair, - }); - - const revokedAt = nowIso(); - const applyReissueMutation = async (executor: typeof db): Promise => { - const updateResult = await executor - .update(agents) - .set({ - status: "active", - current_jti: reissue.agent.currentJti, - expires_at: reissue.agent.expiresAt, - updated_at: reissue.agent.updatedAt, - }) - .where( - and( - eq(agents.id, existingAgent.id), - eq(agents.status, "active"), - eq(agents.current_jti, currentJti), - ), - ); - - const updatedRows = getMutationRowCount(updateResult); - if (updatedRows === 0) { - throw invalidAgentReissueStateError({ - environment: config.ENVIRONMENT, - field: "currentJti", - reason: "agent state changed during reissue; retry request", - }); - } - - await executor - .insert(revocations) - .values({ - id: generateUlid(nowUtcMs()), - jti: currentJti, - agent_id: existingAgent.id, - reason: "reissued", - revoked_at: revokedAt, - }) - .onConflictDoNothing({ - target: revocations.jti, - }); - }; - - try { - await db.transaction(async (tx) => { - await applyReissueMutation(tx as unknown as typeof db); - }); - } catch (error) { - if (!isUnsupportedLocalTransactionError(error)) { - throw error; - } - - await applyReissueMutation(db); - } - - return c.json({ agent: reissue.agent, ait }); - }); - - return app; -} - -const app = createRegistryApp(); - -export { createRegistryApp }; -export default app; +export { createRegistryApp, default } from "./server/index.js"; diff --git a/apps/registry/src/server/AGENTS.md b/apps/registry/src/server/AGENTS.md new file mode 100644 index 0000000..cc4c9f0 --- /dev/null +++ b/apps/registry/src/server/AGENTS.md @@ -0,0 +1,28 @@ +# AGENTS.md 
(apps/registry/src/server) + +## Purpose +- Keep registry runtime composition modular and behavior-stable. +- Preserve external contracts while splitting route and helper responsibilities. + +## Entry Contract +- `../server.ts` must remain the public facade export used by existing imports/tests. +- `index.ts` in this folder is the composition entrypoint and must export: + - default app instance (`createRegistryApp()`) + - named `createRegistryApp` factory. + +## Module Boundaries +- `create-registry-app.ts`: app wiring only (middleware, config cache, event bus cache, rate limits, route registration). +- `constants.ts`: shared types and immutable constants. +- `helpers/*.ts`: reusable pure helpers and data-access helpers. +- `routes/*.ts`: route registration only; keep per-route behavior and status codes unchanged. + +## Safety Rules +- Do not duplicate parser/query logic across route files; lift shared behavior to `helpers/`. +- Keep environment-aware error exposure unchanged (`shouldExposeVerboseErrors` paths). +- Preserve transaction-first flow and local rollback fallbacks where present. +- Keep route registration order stable unless a route conflict requires change. 
import {
  createLogger,
  type EventBus,
  type QueuePublisher,
  type RegistryConfig,
} from "@clawdentity/sdk";
import type { Hono } from "hono";
import type { AuthenticatedHuman } from "../auth/api-key-auth.js";
import type { AuthenticatedService } from "../auth/service-auth.js";

// Cloudflare Worker bindings the registry app reads at request time.
export type Bindings = {
  DB: D1Database;
  ENVIRONMENT: string;
  APP_VERSION?: string;
  PROXY_URL?: string;
  REGISTRY_ISSUER_URL?: string;
  // Explicit event-bus backend override; when omitted the backend is derived
  // from ENVIRONMENT (see helpers/event-bus.ts resolveEventBusBackend).
  EVENT_BUS_BACKEND?: "memory" | "queue";
  EVENT_BUS_QUEUE?: QueuePublisher;
  BOOTSTRAP_SECRET?: string;
  REGISTRY_SIGNING_KEY?: string;
  REGISTRY_SIGNING_KEYS?: string;
};

// Per-limiter overrides; used by tests to shrink windows and request budgets.
export type RegistryRateLimitRuntimeOptions = {
  nowMs?: () => number;
  resolveMaxRequests?: number;
  resolveWindowMs?: number;
  crlMaxRequests?: number;
  crlWindowMs?: number;
  agentAuthRefreshMaxRequests?: number;
  agentAuthRefreshWindowMs?: number;
  agentAuthValidateMaxRequests?: number;
  agentAuthValidateWindowMs?: number;
};

export type CreateRegistryAppOptions = {
  rateLimit?: RegistryRateLimitRuntimeOptions;
  // An explicit bus short-circuits backend resolution entirely (test hook).
  eventBus?: EventBus;
};

// Concrete Hono app type shared by every route-registration module.
export type RegistryApp = Hono<{
  Bindings: Bindings;
  Variables: {
    requestId: string;
    human: AuthenticatedHuman;
    service: AuthenticatedService;
  };
}>;

// Dependency bundle handed to each register*Routes function.
export type RegistryRouteDependencies = {
  app: RegistryApp;
  getConfig: (bindings: Bindings) => RegistryConfig;
  getEventBus: (bindings: Bindings) => EventBus;
};

// Row shapes returned by helpers/db-queries.ts.
// snake_case fields mirror the underlying D1 column names.
export type OwnedAgent = {
  id: string;
  did: string;
  name: string;
  framework: string | null;
  public_key: string;
  status: "active" | "revoked";
  expires_at: string | null;
  current_jti: string | null;
};

export type OwnedAgentRegistrationChallenge = {
  id: string;
  owner_id: string;
  public_key: string;
  nonce: string;
  status: "pending" | "used";
  expires_at: string;
  used_at: string | null;
};

export type OwnedAgentAuthSession = {
  id: string;
  agent_id: string;
  refresh_key_hash: string;
  refresh_key_prefix: string;
  refresh_issued_at: string;
  refresh_expires_at: string;
  refresh_last_used_at: string | null;
  access_key_hash: string;
  access_key_prefix: string;
  access_issued_at: string;
  access_expires_at: string;
  access_last_used_at: string | null;
  status: "active" | "revoked";
  revoked_at: string | null;
  created_at: string;
  updated_at: string;
};

export type InviteRow = {
  id: string;
  code: string;
  created_by: string;
  redeemed_by: string | null;
  expires_at: string | null;
  created_at: string;
};

export type CrlSnapshotRow = {
  id: string;
  jti: string;
  reason: string | null;
  revoked_at: string;
  agent_did: string;
};

export const logger = createLogger({ service: "registry" });

// Shared CDN cache policy for the registry key and CRL endpoints.
export const REGISTRY_CACHE_MAX_AGE_SECONDS = 300;
export const REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS = 60;
export const REGISTRY_KEY_CACHE_CONTROL = `public, max-age=${REGISTRY_CACHE_MAX_AGE_SECONDS}, s-maxage=${REGISTRY_CACHE_MAX_AGE_SECONDS}, stale-while-revalidate=${REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS}`;
export const REGISTRY_CRL_CACHE_CONTROL = `public, max-age=${REGISTRY_CACHE_MAX_AGE_SECONDS}, s-maxage=${REGISTRY_CACHE_MAX_AGE_SECONDS}, stale-while-revalidate=${REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS}`;

// A CRL snapshot must stay valid for the worst-case cache window (max-age +
// stale-while-revalidate) plus a small safety margin for clock skew.
const CRL_EXPIRY_SAFETY_BUFFER_SECONDS = 30;
export const CRL_TTL_SECONDS =
  REGISTRY_CACHE_MAX_AGE_SECONDS +
  REGISTRY_CACHE_STALE_WHILE_REVALIDATE_SECONDS +
  CRL_EXPIRY_SAFETY_BUFFER_SECONDS;

export const PROXY_URL_BY_ENVIRONMENT: Record<
  RegistryConfig["ENVIRONMENT"],
  string
> = {
  development: "https://dev.proxy.clawdentity.com",
  production: "https://proxy.clawdentity.com",
  // test deliberately points at the dev proxy host.
  test: "https://dev.proxy.clawdentity.com",
};

// Deterministic bootstrap identity guarantees one-time admin creation under races.
export const BOOTSTRAP_ADMIN_HUMAN_ID = "00000000000000000000000000";
export const REGISTRY_SERVICE_EVENT_VERSION = "v1";

// Maps auth-session lifecycle event types to the published event-bus names.
export const AGENT_AUTH_EVENT_NAME_BY_TYPE: Record<
  "issued" | "refreshed" | "revoked" | "refresh_rejected",
  string
> = {
  issued: "agent.auth.issued",
  refreshed: "agent.auth.refreshed",
  revoked: "agent.auth.revoked",
  refresh_rejected: "agent.auth.refresh_rejected",
};
/**
 * Composes the registry Hono app: request middleware, per-instance config and
 * event-bus caches, in-memory rate limiters, and route registration.
 *
 * @param options - Optional rate-limit overrides and an explicit event bus
 *   (both used by tests).
 * @returns The fully wired Hono app instance.
 */
export function createRegistryApp(options: CreateRegistryAppOptions = {}) {
  // Per-app-instance caches; a Worker isolate reuses one app across requests.
  let cachedConfig: RegistryConfig | undefined;
  let cachedEventBus: EventBus | undefined;
  let cachedEventBusKey: string | undefined;

  function getConfig(bindings: Bindings): RegistryConfig {
    if (cachedConfig) {
      return cachedConfig;
    }

    // NOTE(review): the first bindings seen win for the lifetime of this app
    // instance — assumes bindings are stable per isolate; confirm if an app
    // instance is ever shared across differing environments (e.g. in tests).
    cachedConfig = parseRegistryConfig(bindings, {
      requireRuntimeKeys: true,
    });
    return cachedConfig;
  }

  function getEventBus(bindings: Bindings): EventBus {
    if (options.eventBus !== undefined) {
      return options.eventBus;
    }

    const config = getConfig(bindings);
    const resolvedBackend = resolveEventBusBackend(config);
    // Cache key captures every input that affects bus construction, so a
    // queue binding appearing/disappearing invalidates the cached bus.
    const key = `${config.ENVIRONMENT}|${resolvedBackend}|${
      bindings.EVENT_BUS_QUEUE === undefined ? "no-queue" : "has-queue"
    }`;
    if (cachedEventBus && cachedEventBusKey === key) {
      return cachedEventBus;
    }

    const resolved = resolveRegistryEventBus({
      config,
      bindings,
      explicitBus: options.eventBus,
    });
    cachedEventBus = resolved;
    cachedEventBusKey = key;
    return resolved;
  }

  const app: RegistryApp = new Hono<{
    Bindings: Bindings;
    Variables: {
      requestId: string;
      human: AuthenticatedHuman;
      service: AuthenticatedService;
    };
  }>();

  // Each limiter falls back to the module defaults when no override is given.
  const rateLimitOptions = options.rateLimit;
  const resolveRateLimit = createInMemoryRateLimit({
    bucketKey: "resolve",
    maxRequests:
      rateLimitOptions?.resolveMaxRequests ?? RESOLVE_RATE_LIMIT_MAX_REQUESTS,
    windowMs: rateLimitOptions?.resolveWindowMs ?? RESOLVE_RATE_LIMIT_WINDOW_MS,
    nowMs: rateLimitOptions?.nowMs,
  });
  const crlRateLimit = createInMemoryRateLimit({
    bucketKey: "crl",
    maxRequests:
      rateLimitOptions?.crlMaxRequests ?? CRL_RATE_LIMIT_MAX_REQUESTS,
    windowMs: rateLimitOptions?.crlWindowMs ?? CRL_RATE_LIMIT_WINDOW_MS,
    nowMs: rateLimitOptions?.nowMs,
  });
  const agentAuthRefreshRateLimit = createInMemoryRateLimit({
    bucketKey: "agent_auth_refresh",
    maxRequests:
      rateLimitOptions?.agentAuthRefreshMaxRequests ??
      AGENT_AUTH_REFRESH_RATE_LIMIT_MAX_REQUESTS,
    windowMs:
      rateLimitOptions?.agentAuthRefreshWindowMs ??
      AGENT_AUTH_REFRESH_RATE_LIMIT_WINDOW_MS,
    nowMs: rateLimitOptions?.nowMs,
  });
  const agentAuthValidateRateLimit = createInMemoryRateLimit({
    bucketKey: "agent_auth_validate",
    maxRequests:
      rateLimitOptions?.agentAuthValidateMaxRequests ??
      AGENT_AUTH_VALIDATE_RATE_LIMIT_MAX_REQUESTS,
    windowMs:
      rateLimitOptions?.agentAuthValidateWindowMs ??
      AGENT_AUTH_VALIDATE_RATE_LIMIT_WINDOW_MS,
    nowMs: rateLimitOptions?.nowMs,
  });

  // Middleware order matters: request context must exist before logging,
  // and the error handler is installed before any route fires.
  app.use("*", createRequestContextMiddleware());
  app.use("*", createRequestLoggingMiddleware(logger));
  app.onError(createHonoErrorHandler(logger));

  // Route registration order is load-bearing (see AGENTS.md): keep stable
  // unless a route conflict requires a change.
  registerHealthRoutes({
    app,
    getConfig,
    getEventBus,
    resolveRateLimit,
    crlRateLimit,
  });
  registerAdminRoutes({
    app,
    getConfig,
    getEventBus,
  });
  registerInternalServiceRoutes({
    app,
    getConfig,
    getEventBus,
  });
  registerInviteRoutes({
    app,
    getConfig,
    getEventBus,
  });
  registerMeApiKeyRoutes({
    app,
    getConfig,
    getEventBus,
  });
  registerAgentRoutes({
    app,
    getConfig,
    getEventBus,
  });
  registerAgentAuthRoutes({
    app,
    getConfig,
    getEventBus,
    agentAuthRefreshRateLimit,
    agentAuthValidateRateLimit,
  });

  return app;
}
/**
 * Loads a single agent scoped to its owner.
 *
 * @returns The agent row, or undefined when no row matches both owner and id.
 */
export async function findOwnedAgent(input: {
  db: ReturnType<typeof createDb>;
  ownerId: string;
  agentId: string;
}): Promise<OwnedAgent | undefined> {
  const rows = await input.db
    .select({
      id: agents.id,
      did: agents.did,
      name: agents.name,
      framework: agents.framework,
      public_key: agents.public_key,
      status: agents.status,
      expires_at: agents.expires_at,
      current_jti: agents.current_jti,
    })
    .from(agents)
    .where(
      and(eq(agents.owner_id, input.ownerId), eq(agents.id, input.agentId)),
    )
    .limit(1);

  return rows[0];
}

/**
 * Loads the (at most one) auth session attached to an agent.
 *
 * @returns The session row regardless of status, or undefined when none exists.
 */
export async function findAgentAuthSessionByAgentId(input: {
  db: ReturnType<typeof createDb>;
  agentId: string;
}): Promise<OwnedAgentAuthSession | undefined> {
  const rows = await input.db
    .select({
      id: agent_auth_sessions.id,
      agent_id: agent_auth_sessions.agent_id,
      refresh_key_hash: agent_auth_sessions.refresh_key_hash,
      refresh_key_prefix: agent_auth_sessions.refresh_key_prefix,
      refresh_issued_at: agent_auth_sessions.refresh_issued_at,
      refresh_expires_at: agent_auth_sessions.refresh_expires_at,
      refresh_last_used_at: agent_auth_sessions.refresh_last_used_at,
      access_key_hash: agent_auth_sessions.access_key_hash,
      access_key_prefix: agent_auth_sessions.access_key_prefix,
      access_issued_at: agent_auth_sessions.access_issued_at,
      access_expires_at: agent_auth_sessions.access_expires_at,
      access_last_used_at: agent_auth_sessions.access_last_used_at,
      status: agent_auth_sessions.status,
      revoked_at: agent_auth_sessions.revoked_at,
      created_at: agent_auth_sessions.created_at,
      updated_at: agent_auth_sessions.updated_at,
    })
    .from(agent_auth_sessions)
    .where(eq(agent_auth_sessions.agent_id, input.agentId))
    .limit(1);

  return rows[0];
}

/**
 * Loads an agent by DID.
 *
 * NOTE(review): despite the name, this does NOT filter by owner — the where
 * clause matches only `agents.did`. Callers must enforce ownership/authz
 * themselves (the refresh/validate routes do so via signed claims); confirm
 * the name is intentional before reusing it in owner-scoped contexts.
 */
export async function findOwnedAgentByDid(input: {
  db: ReturnType<typeof createDb>;
  did: string;
}): Promise<OwnedAgent | undefined> {
  const rows = await input.db
    .select({
      id: agents.id,
      did: agents.did,
      name: agents.name,
      framework: agents.framework,
      public_key: agents.public_key,
      status: agents.status,
      expires_at: agents.expires_at,
      current_jti: agents.current_jti,
    })
    .from(agents)
    .where(eq(agents.did, input.did))
    .limit(1);

  return rows[0];
}

/**
 * Loads a registration challenge scoped to its owner.
 *
 * @returns The challenge row (any status), or undefined when not found.
 */
export async function findOwnedAgentRegistrationChallenge(input: {
  db: ReturnType<typeof createDb>;
  ownerId: string;
  challengeId: string;
}): Promise<OwnedAgentRegistrationChallenge | undefined> {
  const rows = await input.db
    .select({
      id: agent_registration_challenges.id,
      owner_id: agent_registration_challenges.owner_id,
      public_key: agent_registration_challenges.public_key,
      nonce: agent_registration_challenges.nonce,
      status: agent_registration_challenges.status,
      expires_at: agent_registration_challenges.expires_at,
      used_at: agent_registration_challenges.used_at,
    })
    .from(agent_registration_challenges)
    .where(
      and(
        eq(agent_registration_challenges.owner_id, input.ownerId),
        eq(agent_registration_challenges.id, input.challengeId),
      ),
    )
    .limit(1);

  return rows[0];
}

/** Loads an invite by its redemption code, or undefined when unknown. */
export async function findInviteByCode(input: {
  db: ReturnType<typeof createDb>;
  code: string;
}): Promise<InviteRow | undefined> {
  const rows = await input.db
    .select({
      id: invites.id,
      code: invites.code,
      created_by: invites.created_by,
      redeemed_by: invites.redeemed_by,
      expires_at: invites.expires_at,
      created_at: invites.created_at,
    })
    .from(invites)
    .where(eq(invites.code, input.code))
    .limit(1);

  return rows[0];
}

/** Loads an invite by primary key, or undefined when unknown. */
export async function findInviteById(input: {
  db: ReturnType<typeof createDb>;
  id: string;
}): Promise<InviteRow | undefined> {
  const rows = await input.db
    .select({
      id: invites.id,
      code: invites.code,
      created_by: invites.created_by,
      redeemed_by: invites.redeemed_by,
      expires_at: invites.expires_at,
      created_at: invites.created_at,
    })
    .from(invites)
    .where(eq(invites.id, input.id))
    .limit(1);

  return rows[0];
}
isInviteExpired(input: { + expiresAt: string | null; + nowMillis: number; +}) { + if (typeof input.expiresAt !== "string") { + return false; + } + + const expiresAtMillis = Date.parse(input.expiresAt); + if (!Number.isFinite(expiresAtMillis)) { + return true; + } + + return expiresAtMillis <= input.nowMillis; +} + +export async function resolveInviteRedeemStateError(input: { + db: ReturnType; + inviteId: string; + nowMillis: number; +}) { + const latestInvite = await findInviteById({ + db: input.db, + id: input.inviteId, + }); + + if (!latestInvite) { + return inviteRedeemCodeInvalidError(); + } + + if (latestInvite.redeemed_by !== null) { + return inviteRedeemAlreadyUsedError(); + } + + if ( + isInviteExpired({ + expiresAt: latestInvite.expires_at, + nowMillis: input.nowMillis, + }) + ) { + return inviteRedeemExpiredError(); + } + + return inviteRedeemCodeInvalidError(); +} + +export function isUnsupportedLocalTransactionError(error: unknown): boolean { + return ( + error instanceof Error && error.message.includes("Failed query: begin") + ); +} + +export function getMutationRowCount(result: unknown): number | undefined { + if (!result || typeof result !== "object") { + return undefined; + } + + const directChanges = (result as { changes?: unknown }).changes; + if (typeof directChanges === "number") { + return directChanges; + } + + const rowsAffected = (result as { rowsAffected?: unknown }).rowsAffected; + if (typeof rowsAffected === "number") { + return rowsAffected; + } + + const metaChanges = (result as { meta?: { changes?: unknown } }).meta + ?.changes; + if (typeof metaChanges === "number") { + return metaChanges; + } + + return undefined; +} diff --git a/apps/registry/src/server/helpers/event-bus.ts b/apps/registry/src/server/helpers/event-bus.ts new file mode 100644 index 0000000..750f343 --- /dev/null +++ b/apps/registry/src/server/helpers/event-bus.ts @@ -0,0 +1,127 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { + AppError, + 
createEventEnvelope, + createInMemoryEventBus, + createQueueEventBus, + type EventBus, + nowIso, + nowUtcMs, + type RegistryConfig, +} from "@clawdentity/sdk"; +import type { createDb } from "../../db/client.js"; +import { agent_auth_events } from "../../db/schema.js"; +import { + AGENT_AUTH_EVENT_NAME_BY_TYPE, + type Bindings, + logger, + REGISTRY_SERVICE_EVENT_VERSION, +} from "../constants.js"; + +export function resolveEventBusBackend( + config: RegistryConfig, +): NonNullable { + if (config.EVENT_BUS_BACKEND === "memory") { + return "memory"; + } + + if (config.EVENT_BUS_BACKEND === "queue") { + return "queue"; + } + + return config.ENVIRONMENT === "development" || + config.ENVIRONMENT === "production" + ? "queue" + : "memory"; +} + +export function resolveRegistryEventBus(input: { + config: RegistryConfig; + bindings: Bindings; + explicitBus?: EventBus; +}): EventBus { + if (input.explicitBus !== undefined) { + return input.explicitBus; + } + + const backend = resolveEventBusBackend(input.config); + if (backend === "memory") { + return createInMemoryEventBus(); + } + + const queue = input.bindings.EVENT_BUS_QUEUE; + if (queue === undefined) { + throw new AppError({ + code: "CONFIG_VALIDATION_FAILED", + message: "Registry configuration is invalid", + status: 500, + expose: true, + details: { + fieldErrors: { + EVENT_BUS_QUEUE: [ + "EVENT_BUS_QUEUE is required when EVENT_BUS_BACKEND is queue", + ], + }, + formErrors: [], + }, + }); + } + + return createQueueEventBus(queue); +} + +export async function insertAgentAuthEvent(input: { + db: ReturnType; + agentId: string; + sessionId: string; + eventType: "issued" | "refreshed" | "revoked" | "refresh_rejected"; + reason?: string; + metadata?: Record; + createdAt?: string; + eventBus?: EventBus; + initiatedByAccountId?: string | null; +}): Promise { + const createdAt = input.createdAt ?? 
nowIso(); + await input.db.insert(agent_auth_events).values({ + id: generateUlid(nowUtcMs()), + agent_id: input.agentId, + session_id: input.sessionId, + event_type: input.eventType, + reason: input.reason ?? null, + metadata_json: + input.metadata === undefined ? null : JSON.stringify(input.metadata), + created_at: createdAt, + }); + + if (input.eventBus === undefined) { + return; + } + + const eventData: Record = { + agentId: input.agentId, + sessionId: input.sessionId, + }; + if (input.reason !== undefined) { + eventData.reason = input.reason; + } + if (input.metadata !== undefined) { + eventData.metadata = input.metadata; + } + + try { + await input.eventBus.publish( + createEventEnvelope({ + type: AGENT_AUTH_EVENT_NAME_BY_TYPE[input.eventType], + version: REGISTRY_SERVICE_EVENT_VERSION, + initiatedByAccountId: input.initiatedByAccountId ?? null, + timestampUtc: createdAt, + data: eventData, + }), + ); + } catch (error) { + logger.warn("registry.event_bus.publish_failed", { + eventType: input.eventType, + errorName: error instanceof Error ? 
error.name : "unknown", + }); + } +} diff --git a/apps/registry/src/server/helpers/parsers.ts b/apps/registry/src/server/helpers/parsers.ts new file mode 100644 index 0000000..e55e93c --- /dev/null +++ b/apps/registry/src/server/helpers/parsers.ts @@ -0,0 +1,408 @@ +import { generateUlid, parseDid, parseUlid } from "@clawdentity/protocol"; +import { + AppError, + nowUtcMs, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; +import { parseAccessToken } from "../../auth/agent-auth-token.js"; +import { constantTimeEqual } from "../../auth/api-key-token.js"; +import { parseInternalServiceScopesPayload } from "../../auth/internal-service-scopes.js"; +import { + CRL_TTL_SECONDS, + type CrlSnapshotRow, + PROXY_URL_BY_ENVIRONMENT, +} from "../constants.js"; + +function crlBuildError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + message: string; + details?: { + fieldErrors: Record; + formErrors: string[]; + }; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "CRL_BUILD_FAILED", + message: exposeDetails + ? options.message + : "CRL snapshot could not be generated", + status: 500, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +function parseRevokedAtSeconds(options: { + environment: RegistryConfig["ENVIRONMENT"]; + revocationId: string; + revokedAtIso: string; +}): number { + const epochMillis = Date.parse(options.revokedAtIso); + if (!Number.isFinite(epochMillis)) { + throw crlBuildError({ + environment: options.environment, + message: "CRL revocation timestamp is invalid", + details: { + fieldErrors: { + revokedAt: [ + `revocation ${options.revocationId} has invalid revoked_at timestamp`, + ], + }, + formErrors: [], + }, + }); + } + + return Math.floor(epochMillis / 1000); +} + +export function buildCrlClaims(input: { + rows: CrlSnapshotRow[]; + environment: RegistryConfig["ENVIRONMENT"]; + issuer: string; + nowSeconds: number; +}) { + return { + iss: input.issuer, + jti: generateUlid(nowUtcMs()), + iat: input.nowSeconds, + exp: input.nowSeconds + CRL_TTL_SECONDS, + revocations: input.rows.map((row) => { + const base = { + jti: row.jti, + agentDid: row.agent_did, + revokedAt: parseRevokedAtSeconds({ + environment: input.environment, + revocationId: row.id, + revokedAtIso: row.revoked_at, + }), + }; + + if (typeof row.reason === "string" && row.reason.length > 0) { + return { + ...base, + reason: row.reason, + }; + } + + return base; + }), + }; +} + +export function parseAgentAuthValidatePayload(payload: unknown): { + agentDid: string; + aitJti: string; +} { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_INVALID", + message: "Validation payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + const agentDid = + typeof value.agentDid === "string" ? value.agentDid.trim() : ""; + const aitJti = typeof value.aitJti === "string" ? 
value.aitJti.trim() : ""; + + if (agentDid.length === 0 || aitJti.length === 0) { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_INVALID", + message: "Validation payload is invalid", + status: 400, + expose: true, + }); + } + + return { + agentDid, + aitJti, + }; +} + +export function parseAgentAccessHeaderToken(token: string | undefined): string { + try { + return parseAccessToken(token); + } catch { + throw new AppError({ + code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED", + message: "Agent access token is invalid", + status: 401, + expose: true, + }); + } +} + +function parseInternalServiceName(value: unknown): string { + const normalized = typeof value === "string" ? value.trim() : ""; + if (!/^[a-z0-9][a-z0-9-_]{1,63}$/i.test(normalized)) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + return normalized; +} + +export function parseInternalServiceCreatePayload(payload: unknown): { + name: string; + scopes: string[]; +} { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + return { + name: parseInternalServiceName(value.name), + scopes: parseInternalServiceScopesPayload(value.scopes), + }; +} + +export function parseInternalServicePathId(input: { + id: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): string { + const candidate = input.id.trim(); + try { + return parseUlid(candidate).value; + } catch { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID_PATH", + message: + input.environment === "production" + ? "Request could not be processed" + : "Internal service path is invalid", + status: 400, + expose: input.environment !== "production", + details: + input.environment === "production" + ? 
undefined + : { + fieldErrors: { id: ["id must be a valid ULID"] }, + formErrors: [], + }, + }); + } +} + +export function parseInternalServiceRotatePayload(payload: unknown): { + scopes?: string[]; +} { + if (payload === undefined || payload === null) { + return {}; + } + if (typeof payload !== "object" || Array.isArray(payload)) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + if (value.scopes === undefined) { + return {}; + } + + return { + scopes: parseInternalServiceScopesPayload(value.scopes), + }; +} + +function parseHumanDid(value: unknown): string { + if (typeof value !== "string") { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "human") { + throw new Error("invalid"); + } + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + if (candidate.length === 0) { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + return candidate; +} + +function parseAgentDid(value: unknown): string { + if (typeof value !== "string") { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + const candidate = value.trim(); + try { + const parsed = parseDid(candidate); + if (parsed.kind !== "agent") { + throw new Error("invalid"); + } + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + if (candidate.length === 0) { + throw new AppError({ + code: 
"AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + return candidate; +} + +export function parseInternalOwnershipCheckPayload(payload: unknown): { + ownerDid: string; + agentDid: string; +} { + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + const value = payload as Record; + let ownerDid: string; + try { + ownerDid = parseHumanDid(value.ownerDid); + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + return { + ownerDid, + agentDid: parseAgentDid(value.agentDid), + }; +} + +export function requireCurrentJti(input: { + currentJti: string | null; + onInvalid: (reason: string) => AppError; +}): string { + if (typeof input.currentJti !== "string" || input.currentJti.length === 0) { + throw input.onInvalid("agent.current_jti is required"); + } + + return input.currentJti; +} + +export function requireBootstrapSecret( + bootstrapSecret: string | undefined, +): string { + if (typeof bootstrapSecret === "string" && bootstrapSecret.length > 0) { + return bootstrapSecret; + } + + throw new AppError({ + code: "ADMIN_BOOTSTRAP_DISABLED", + message: "Admin bootstrap is disabled", + status: 503, + expose: true, + }); +} + +export function parseBootstrapSecretHeader( + headerValue: string | undefined, +): string { + if (typeof headerValue !== "string" || headerValue.trim().length === 0) { + throw new AppError({ + code: "ADMIN_BOOTSTRAP_UNAUTHORIZED", + message: "Bootstrap secret is required", + status: 401, + expose: true, + }); + } + + return headerValue.trim(); +} + +export function assertBootstrapSecretAuthorized(input: { + provided: string; + expected: string; +}): void { + if (!constantTimeEqual(input.provided, input.expected)) { + throw new 
AppError({ + code: "ADMIN_BOOTSTRAP_UNAUTHORIZED", + message: "Bootstrap secret is invalid", + status: 401, + expose: true, + }); + } +} + +export function adminBootstrapAlreadyCompletedError(): AppError { + return new AppError({ + code: "ADMIN_BOOTSTRAP_ALREADY_COMPLETED", + message: "Admin bootstrap has already completed", + status: 409, + expose: true, + }); +} + +export function resolveProxyUrl(config: RegistryConfig): string { + return config.PROXY_URL ?? PROXY_URL_BY_ENVIRONMENT[config.ENVIRONMENT]; +} + +export function isIsoExpired(expiresAtIso: string, nowMillis: number): boolean { + const parsed = Date.parse(expiresAtIso); + if (!Number.isFinite(parsed)) { + return true; + } + + return parsed <= nowMillis; +} diff --git a/apps/registry/src/server/index.ts b/apps/registry/src/server/index.ts new file mode 100644 index 0000000..07a5f1b --- /dev/null +++ b/apps/registry/src/server/index.ts @@ -0,0 +1,6 @@ +import { createRegistryApp } from "./create-registry-app.js"; + +const app = createRegistryApp(); + +export { createRegistryApp }; +export default app; diff --git a/apps/registry/src/server/routes/admin.ts b/apps/registry/src/server/routes/admin.ts new file mode 100644 index 0000000..9f4320c --- /dev/null +++ b/apps/registry/src/server/routes/admin.ts @@ -0,0 +1,168 @@ +import { + ADMIN_BOOTSTRAP_PATH, + generateUlid, + makeHumanDid, +} from "@clawdentity/protocol"; +import { AppError, nowIso, nowUtcMs } from "@clawdentity/sdk"; +import { eq } from "drizzle-orm"; +import { parseAdminBootstrapPayload } from "../../admin-bootstrap.js"; +import { + deriveApiKeyLookupPrefix, + generateApiKeyToken, + hashApiKeyToken, +} from "../../auth/api-key-token.js"; +import { createDb } from "../../db/client.js"; +import { api_keys, humans } from "../../db/schema.js"; +import { + BOOTSTRAP_ADMIN_HUMAN_ID, + logger, + type RegistryRouteDependencies, +} from "../constants.js"; +import { + getMutationRowCount, + isUnsupportedLocalTransactionError, +} from 
/**
 * Registers the one-time admin bootstrap route.
 *
 * POST ADMIN_BOOTSTRAP_PATH creates the initial admin human plus an API key,
 * guarded by a shared bootstrap secret header. The route is only usable while
 * no admin row exists; afterwards it returns 409.
 */
export function registerAdminRoutes(input: RegistryRouteDependencies): void {
  const { app, getConfig } = input;

  app.post(ADMIN_BOOTSTRAP_PATH, async (c) => {
    const config = getConfig(c.env);
    // 503 unless BOOTSTRAP_SECRET is configured for this environment.
    const expectedBootstrapSecret = requireBootstrapSecret(
      config.BOOTSTRAP_SECRET,
    );
    const providedBootstrapSecret = parseBootstrapSecretHeader(
      c.req.header("x-bootstrap-secret"),
    );
    // Constant-time comparison under the hood; throws 401 on mismatch.
    assertBootstrapSecretAuthorized({
      provided: providedBootstrapSecret,
      expected: expectedBootstrapSecret,
    });

    let payload: unknown;
    try {
      payload = await c.req.json();
    } catch {
      throw new AppError({
        code: "ADMIN_BOOTSTRAP_INVALID",
        message: "Request body must be valid JSON",
        status: 400,
        expose: true,
      });
    }

    const bootstrapPayload = parseAdminBootstrapPayload({
      payload,
      environment: config.ENVIRONMENT,
    });

    const db = createDb(c.env.DB);
    // Fast-path rejection: bootstrap may only run while no admin exists.
    const activeAdminRows = await db
      .select({ id: humans.id })
      .from(humans)
      .where(eq(humans.role, "admin"))
      .limit(1);
    if (activeAdminRows.length > 0) {
      throw adminBootstrapAlreadyCompletedError();
    }

    // Fixed human id so concurrent bootstrap attempts collide on the PK.
    const humanId = BOOTSTRAP_ADMIN_HUMAN_ID;
    const humanDid = makeHumanDid(humanId);
    const apiKeyToken = generateApiKeyToken();
    const apiKeyHash = await hashApiKeyToken(apiKeyToken);
    const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken);
    // +1 ms offset — presumably to keep this ULID distinct from others
    // minted in the same millisecond; TODO confirm.
    const apiKeyId = generateUlid(nowUtcMs() + 1);
    const createdAt = nowIso();

    // Shared mutation body, run either inside a real transaction or (when
    // the local D1 driver can't do transactions) directly with manual
    // compensation on failure.
    const applyBootstrapMutation = async (
      executor: typeof db,
      options: { rollbackOnApiKeyFailure: boolean },
    ): Promise<void> => {
      const insertAdminResult = await executor
        .insert(humans)
        .values({
          id: humanId,
          did: humanDid,
          display_name: bootstrapPayload.displayName,
          role: "admin",
          status: "active",
          created_at: createdAt,
          updated_at: createdAt,
        })
        .onConflictDoNothing({
          target: humans.id,
        });

      // Zero inserted rows means the fixed bootstrap id already exists:
      // another request won the race.
      const insertedRows = getMutationRowCount(insertAdminResult);
      if (insertedRows === 0) {
        throw adminBootstrapAlreadyCompletedError();
      }

      try {
        await executor.insert(api_keys).values({
          id: apiKeyId,
          human_id: humanId,
          key_hash: apiKeyHash,
          key_prefix: apiKeyPrefix,
          name: bootstrapPayload.apiKeyName,
          status: "active",
          created_at: createdAt,
          last_used_at: null,
        });
      } catch (error) {
        if (options.rollbackOnApiKeyFailure) {
          // Non-transactional path: undo the admin insert by hand so a
          // half-bootstrapped admin without a key is not left behind.
          try {
            await executor.delete(humans).where(eq(humans.id, humanId));
          } catch (rollbackError) {
            // Best-effort rollback: log and surface the original error.
            logger.error("registry.admin_bootstrap_rollback_failed", {
              rollbackErrorName:
                rollbackError instanceof Error ? rollbackError.name : "unknown",
            });
          }
        }

        throw error;
      }
    };

    try {
      await db.transaction(async (tx) => {
        await applyBootstrapMutation(tx as unknown as typeof db, {
          rollbackOnApiKeyFailure: false,
        });
      });
    } catch (error) {
      // Only fall back when the driver rejected the transaction itself;
      // any other failure propagates unchanged.
      if (!isUnsupportedLocalTransactionError(error)) {
        throw error;
      }

      await applyBootstrapMutation(db, {
        rollbackOnApiKeyFailure: true,
      });
    }

    // The raw API key token is returned exactly once, here.
    return c.json(
      {
        human: {
          id: humanId,
          did: humanDid,
          displayName: bootstrapPayload.displayName,
          role: "admin",
          status: "active",
        },
        apiKey: {
          id: apiKeyId,
          name: bootstrapPayload.apiKeyName,
          token: apiKeyToken,
        },
      },
      201,
    );
  });
}
/**
 * Registers the agent auth lifecycle routes:
 * - POST AGENT_AUTH_REFRESH_PATH: rotate an agent's refresh/access token pair.
 * - POST AGENT_AUTH_VALIDATE_PATH: validate an agent access token.
 * - DELETE /v1/agents/:id/auth/revoke: owner-initiated session revocation.
 *
 * Refresh and validate are rate-limited via the injected middleware handlers.
 */
export function registerAgentAuthRoutes(
  input: RegistryRouteDependencies & {
    agentAuthRefreshRateLimit: MiddlewareHandler;
    agentAuthValidateRateLimit: MiddlewareHandler;
  },
): void {
  const {
    app,
    getConfig,
    getEventBus,
    agentAuthRefreshRateLimit,
    agentAuthValidateRateLimit,
  } = input;

  app.post(AGENT_AUTH_REFRESH_PATH, agentAuthRefreshRateLimit, async (c) => {
    const config = getConfig(c.env);
    const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT);
    // Clone the request body so the same bytes parsed here can also be fed
    // into signature verification below.
    const bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer());

    let payload: unknown;
    try {
      const rawBody = new TextDecoder().decode(bodyBytes);
      // An empty body is treated as an empty payload, not a parse error.
      payload = rawBody.trim().length === 0 ? {} : JSON.parse(rawBody);
    } catch {
      throw new AppError({
        code: "AGENT_AUTH_REFRESH_INVALID",
        message: exposeDetails
          ? "Request body must be valid JSON"
          : "Request could not be processed",
        status: 400,
        expose: exposeDetails,
      });
    }

    const parsedPayload = parseAgentAuthRefreshPayload({
      payload,
      environment: config.ENVIRONMENT,
    });
    // Verifies the signed request against the exact body bytes received.
    const claims = await verifyAgentClawRequest({
      config,
      request: c.req.raw,
      bodyBytes,
    });
    const nowMillis = nowUtcMs();
    const db = createDb(c.env.DB);
    const existingAgent = await findOwnedAgentByDid({
      db,
      did: claims.sub,
    });

    // Inactive/unknown agents get the same generic "invalid" rejection so
    // callers cannot distinguish the cases.
    if (!existingAgent || existingAgent.status !== "active") {
      throw agentAuthRefreshRejectedError({
        code: "AGENT_AUTH_REFRESH_INVALID",
        message: "Agent auth refresh token is invalid",
      });
    }

    // The presented claims must reference the agent's current AIT jti.
    if (existingAgent.current_jti !== claims.jti) {
      throw agentAuthRefreshRejectedError({
        code: "AGENT_AUTH_REFRESH_INVALID",
        message: "Agent auth refresh token is invalid",
      });
    }

    const existingSession = await findAgentAuthSessionByAgentId({
      db,
      agentId: existingAgent.id,
    });

    if (!existingSession) {
      throw agentAuthRefreshRejectedError({
        code: "AGENT_AUTH_REFRESH_INVALID",
        message: "Agent auth refresh token is invalid",
      });
    }

    if (existingSession.status !== "active") {
      throw agentAuthRefreshRejectedError({
        code: "AGENT_AUTH_REFRESH_REVOKED",
        message: "Agent auth refresh token is revoked",
      });
    }

    // Prefix lookup plus constant-time hash comparison of the refresh token.
    const refreshPrefix = deriveRefreshTokenLookupPrefix(
      parsedPayload.refreshToken,
    );
    const refreshHash = await hashAgentToken(parsedPayload.refreshToken);
    const refreshTokenMatches =
      existingSession.refresh_key_prefix === refreshPrefix &&
      constantTimeEqual(existingSession.refresh_key_hash, refreshHash);

    if (!refreshTokenMatches) {
      // Record the rejection for auditing before failing the request.
      await insertAgentAuthEvent({
        db,
        agentId: existingAgent.id,
        sessionId: existingSession.id,
        eventType: "refresh_rejected",
        reason: "invalid_refresh_token",
        eventBus: getEventBus(c.env),
        initiatedByAccountId: claims.ownerDid,
      });
      throw agentAuthRefreshRejectedError({
        code: "AGENT_AUTH_REFRESH_INVALID",
        message: "Agent auth refresh token is invalid",
      });
    }

    // An expired refresh token revokes the whole session, not just the call.
    if (isIsoExpired(existingSession.refresh_expires_at, nowMillis)) {
      const revokedAt = nowIso();
      await db
        .update(agent_auth_sessions)
        .set({
          status: "revoked",
          revoked_at: revokedAt,
          updated_at: revokedAt,
        })
        .where(eq(agent_auth_sessions.id, existingSession.id));
      await insertAgentAuthEvent({
        db,
        agentId: existingAgent.id,
        sessionId: existingSession.id,
        eventType: "revoked",
        reason: "refresh_token_expired",
        createdAt: revokedAt,
        eventBus: getEventBus(c.env),
        initiatedByAccountId: claims.ownerDid,
      });
      throw agentAuthRefreshRejectedError({
        code: "AGENT_AUTH_REFRESH_EXPIRED",
        message: "Agent auth refresh token is expired",
      });
    }

    // Mint a fresh refresh+access pair; both tokens rotate together.
    const rotatedAuth = await issueAgentAuth({
      nowMs: nowMillis,
    });
    const refreshedAt = nowIso();
    const applyRefreshMutation = async (executor: typeof db): Promise<void> => {
      // The WHERE clause re-checks status and the old refresh hash so a
      // concurrent rotation updates zero rows instead of double-rotating.
      const updateResult = await executor
        .update(agent_auth_sessions)
        .set({
          refresh_key_hash: rotatedAuth.refreshTokenHash,
          refresh_key_prefix: rotatedAuth.refreshTokenPrefix,
          refresh_issued_at: rotatedAuth.refreshIssuedAt,
          refresh_expires_at: rotatedAuth.refreshExpiresAt,
          refresh_last_used_at: refreshedAt,
          access_key_hash: rotatedAuth.accessTokenHash,
          access_key_prefix: rotatedAuth.accessTokenPrefix,
          access_issued_at: rotatedAuth.accessIssuedAt,
          access_expires_at: rotatedAuth.accessExpiresAt,
          access_last_used_at: null,
          status: "active",
          revoked_at: null,
          updated_at: refreshedAt,
        })
        .where(
          and(
            eq(agent_auth_sessions.id, existingSession.id),
            eq(agent_auth_sessions.status, "active"),
            eq(agent_auth_sessions.refresh_key_hash, refreshHash),
          ),
        );

      const updatedRows = getMutationRowCount(updateResult);
      if (updatedRows === 0) {
        // Someone else rotated (or revoked) this session first.
        throw agentAuthRefreshConflictError();
      }

      await insertAgentAuthEvent({
        db: executor,
        agentId: existingAgent.id,
        sessionId: existingSession.id,
        eventType: "refreshed",
        createdAt: refreshedAt,
        eventBus: getEventBus(c.env),
        initiatedByAccountId: claims.ownerDid,
      });
    };

    try {
      await db.transaction(async (tx) => {
        await applyRefreshMutation(tx as unknown as typeof db);
      });
    } catch (error) {
      // Fall back to a direct (non-transactional) run only when the local
      // driver rejects transactions; other errors propagate.
      if (!isUnsupportedLocalTransactionError(error)) {
        throw error;
      }

      await applyRefreshMutation(db);
    }

    return c.json({
      agentAuth: toAgentAuthResponse({
        accessToken: rotatedAuth.accessToken,
        accessExpiresAt: rotatedAuth.accessExpiresAt,
        refreshToken: rotatedAuth.refreshToken,
        refreshExpiresAt: rotatedAuth.refreshExpiresAt,
      }),
    });
  });

  app.post(AGENT_AUTH_VALIDATE_PATH, agentAuthValidateRateLimit, async (c) => {
    let payload: unknown;
    try {
      payload = await c.req.json();
    } catch {
      throw new AppError({
        code: "AGENT_AUTH_VALIDATE_INVALID",
        message: "Validation payload is invalid",
        status: 400,
        expose: true,
      });
    }

    const parsedPayload = parseAgentAuthValidatePayload(payload);
    const accessToken = parseAgentAccessHeaderToken(
      c.req.header("x-claw-agent-access"),
    );

    const db = createDb(c.env.DB);
    const existingAgent = await findOwnedAgentByDid({
      db,
      did: parsedPayload.agentDid,
    });
    // Agent must exist, be active, and match the presented AIT jti;
    // all failure modes share one generic 401 response.
    if (
      !existingAgent ||
      existingAgent.status !== "active" ||
      existingAgent.current_jti !== parsedPayload.aitJti
    ) {
      throw new AppError({
        code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED",
        message: "Agent access token is invalid",
        status: 401,
        expose: true,
      });
    }

    const existingSession = await findAgentAuthSessionByAgentId({
      db,
      agentId: existingAgent.id,
    });
    if (!existingSession || existingSession.status !== "active") {
      throw new AppError({
        code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED",
        message: "Agent access token is invalid",
        status: 401,
        expose: true,
      });
    }

    const nowMillis = nowUtcMs();
    // Expiry is the one failure mode with its own code, so clients can
    // distinguish "refresh me" from "token is bad".
    if (isIsoExpired(existingSession.access_expires_at, nowMillis)) {
      throw new AppError({
        code: "AGENT_AUTH_VALIDATE_EXPIRED",
        message: "Agent access token is expired",
        status: 401,
        expose: true,
      });
    }

    // Prefix lookup plus constant-time hash comparison of the access token.
    const accessTokenPrefix = deriveAccessTokenLookupPrefix(accessToken);
    const accessTokenHash = await hashAgentToken(accessToken);
    const accessTokenMatches =
      existingSession.access_key_prefix === accessTokenPrefix &&
      constantTimeEqual(existingSession.access_key_hash, accessTokenHash);
    if (!accessTokenMatches) {
      throw new AppError({
        code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED",
        message: "Agent access token is invalid",
        status: 401,
        expose: true,
      });
    }

    // Touch last-used; the WHERE re-checks status and hash so a session
    // revoked or rotated mid-request fails validation instead of updating.
    const accessLastUsedAt = nowIso();
    const updateResult = await db
      .update(agent_auth_sessions)
      .set({
        access_last_used_at: accessLastUsedAt,
        updated_at: accessLastUsedAt,
      })
      .where(
        and(
          eq(agent_auth_sessions.id, existingSession.id),
          eq(agent_auth_sessions.status, "active"),
          eq(agent_auth_sessions.access_key_hash, accessTokenHash),
        ),
      );

    const updatedRows = getMutationRowCount(updateResult);
    if (updatedRows === 0) {
      throw new AppError({
        code: "AGENT_AUTH_VALIDATE_UNAUTHORIZED",
        message: "Agent access token is invalid",
        status: 401,
        expose: true,
      });
    }

    return c.body(null, 204);
  });

  app.delete("/v1/agents/:id/auth/revoke", createApiKeyAuth(), async (c) => {
    const config = getConfig(c.env);
    const agentId = parseAgentRevokePath({
      id: c.req.param("id"),
      environment: config.ENVIRONMENT,
    });
    // Authenticated human owner is provided by the API-key middleware.
    const human = c.get("human");
    const db = createDb(c.env.DB);
    const existingAgent = await findOwnedAgent({
      db,
      ownerId: human.id,
      agentId,
    });

    if (!existingAgent) {
      throw agentNotFoundError();
    }

    const existingSession = await findAgentAuthSessionByAgentId({
      db,
      agentId: existingAgent.id,
    });
    // Idempotent: no session or already-revoked session still returns 204.
    if (!existingSession || existingSession.status === "revoked") {
      return c.body(null, 204);
    }

    const revokedAt = nowIso();
    const applyAuthRevokeMutation = async (
      executor: typeof db,
    ): Promise<void> => {
      await executor
        .update(agent_auth_sessions)
        .set({
          status: "revoked",
          revoked_at: revokedAt,
          updated_at: revokedAt,
        })
        .where(
          and(
            eq(agent_auth_sessions.id, existingSession.id),
            eq(agent_auth_sessions.status, "active"),
          ),
        );

      await insertAgentAuthEvent({
        db: executor,
        agentId: existingAgent.id,
        sessionId: existingSession.id,
        eventType: "revoked",
        reason: "owner_auth_revoke",
        createdAt: revokedAt,
        eventBus: getEventBus(c.env),
        initiatedByAccountId: human.did,
      });
    };

    try {
      await db.transaction(async (tx) => {
        await applyAuthRevokeMutation(tx as unknown as typeof db);
      });
    } catch (error) {
      // Same local-transaction fallback pattern as the refresh route.
      if (!isUnsupportedLocalTransactionError(error)) {
        throw error;
      }

      await applyAuthRevokeMutation(db);
    }

    return c.body(null, 204);
  });
}
"../../db/client.js"; +import { + agent_auth_sessions, + agent_registration_challenges, + agents, + revocations, +} from "../../db/schema.js"; +import { resolveRegistrySigner } from "../../registry-signer.js"; +import { logger, type RegistryRouteDependencies } from "../constants.js"; +import { + findAgentAuthSessionByAgentId, + findOwnedAgent, + findOwnedAgentRegistrationChallenge, + getMutationRowCount, + isUnsupportedLocalTransactionError, +} from "../helpers/db-queries.js"; +import { insertAgentAuthEvent } from "../helpers/event-bus.js"; +import { requireCurrentJti } from "../helpers/parsers.js"; + +export function registerAgentRoutes(input: RegistryRouteDependencies): void { + const { app, getConfig, getEventBus } = input; + + app.get("/v1/agents", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const query = parseAgentListQuery({ + query: c.req.query(), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const filters = [eq(agents.owner_id, human.id)]; + if (query.status) { + filters.push(eq(agents.status, query.status)); + } + if (query.framework) { + filters.push(eq(agents.framework, query.framework)); + } + if (query.cursor) { + filters.push(lt(agents.id, query.cursor)); + } + + const rows = await db + .select({ + id: agents.id, + did: agents.did, + name: agents.name, + status: agents.status, + expires_at: agents.expires_at, + }) + .from(agents) + .where(and(...filters)) + .orderBy(desc(agents.id)) + .limit(query.limit + 1); + + const hasNextPage = rows.length > query.limit; + const pageRows = hasNextPage ? rows.slice(0, query.limit) : rows; + const nextCursor = hasNextPage + ? (pageRows[pageRows.length - 1]?.id ?? 
null) + : null; + + return c.json({ + agents: pageRows.map(mapAgentListRow), + pagination: { + limit: query.limit, + nextCursor, + }, + }); + }); + + app.get("/v1/agents/:id/ownership", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const agentId = parseAgentOwnershipPath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: agents.id, + }) + .from(agents) + .where(and(eq(agents.owner_id, human.id), eq(agents.id, agentId))) + .limit(1); + + return c.json({ + ownsAgent: rows.length > 0, + }); + }); + + app.post(AGENT_REGISTRATION_CHALLENGE_PATH, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_INVALID", + message: exposeDetails + ? 
"Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const human = c.get("human"); + const challenge = buildAgentRegistrationChallenge({ + payload, + ownerId: human.id, + ownerDid: human.did, + environment: config.ENVIRONMENT, + }); + + const db = createDb(c.env.DB); + await db.insert(agent_registration_challenges).values({ + id: challenge.challenge.id, + owner_id: challenge.challenge.ownerId, + public_key: challenge.challenge.publicKey, + nonce: challenge.challenge.nonce, + status: challenge.challenge.status, + expires_at: challenge.challenge.expiresAt, + used_at: challenge.challenge.usedAt, + created_at: challenge.challenge.createdAt, + updated_at: challenge.challenge.updatedAt, + }); + + return c.json(challenge.response, 201); + }); + + app.post("/v1/agents", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_REGISTRATION_INVALID", + message: exposeDetails + ? "Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const human = c.get("human"); + const parsedBody = parseAgentRegistrationBody(payload, config.ENVIRONMENT); + const db = createDb(c.env.DB); + + const challenge = await findOwnedAgentRegistrationChallenge({ + db, + ownerId: human.id, + challengeId: parsedBody.challengeId, + }); + + if (!challenge) { + throw new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_NOT_FOUND", + message: exposeDetails + ? 
"Registration challenge was not found" + : "Request could not be processed", + status: 400, + expose: true, + }); + } + + await verifyAgentRegistrationOwnershipProof({ + parsedBody, + challenge: { + id: challenge.id, + ownerId: challenge.owner_id, + publicKey: challenge.public_key, + nonce: challenge.nonce, + status: challenge.status, + expiresAt: challenge.expires_at, + usedAt: challenge.used_at, + }, + ownerDid: human.did, + environment: config.ENVIRONMENT, + }); + const registration = buildAgentRegistrationFromParsed({ + parsedBody, + ownerDid: human.did, + issuer: resolveRegistryIssuer(config), + }); + const signer = await resolveRegistrySigner(config); + const ait = await signAIT({ + claims: registration.claims, + signerKid: signer.signerKid, + signerKeypair: signer.signerKeypair, + }); + + const initialAuth = await issueAgentAuth(); + const challengeUsedAt = nowIso(); + const applyRegistrationMutation = async ( + executor: typeof db, + options: { rollbackOnAgentInsertFailure: boolean }, + ): Promise => { + const challengeUpdateResult = await executor + .update(agent_registration_challenges) + .set({ + status: "used", + used_at: challengeUsedAt, + updated_at: challengeUsedAt, + }) + .where( + and( + eq(agent_registration_challenges.id, challenge.id), + eq(agent_registration_challenges.owner_id, human.id), + eq(agent_registration_challenges.status, "pending"), + ), + ); + + const updatedRows = getMutationRowCount(challengeUpdateResult); + if (updatedRows === 0) { + throw new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_REPLAYED", + message: exposeDetails + ? 
"Registration challenge has already been used" + : "Request could not be processed", + status: 400, + expose: true, + }); + } + + try { + await executor.insert(agents).values({ + id: registration.agent.id, + did: registration.agent.did, + owner_id: human.id, + name: registration.agent.name, + framework: registration.agent.framework, + public_key: registration.agent.publicKey, + current_jti: registration.agent.currentJti, + status: registration.agent.status, + expires_at: registration.agent.expiresAt, + created_at: registration.agent.createdAt, + updated_at: registration.agent.updatedAt, + }); + + await executor.insert(agent_auth_sessions).values({ + id: initialAuth.sessionId, + agent_id: registration.agent.id, + refresh_key_hash: initialAuth.refreshTokenHash, + refresh_key_prefix: initialAuth.refreshTokenPrefix, + refresh_issued_at: initialAuth.refreshIssuedAt, + refresh_expires_at: initialAuth.refreshExpiresAt, + refresh_last_used_at: null, + access_key_hash: initialAuth.accessTokenHash, + access_key_prefix: initialAuth.accessTokenPrefix, + access_issued_at: initialAuth.accessIssuedAt, + access_expires_at: initialAuth.accessExpiresAt, + access_last_used_at: null, + status: "active", + revoked_at: null, + created_at: initialAuth.createdAt, + updated_at: initialAuth.updatedAt, + }); + + await insertAgentAuthEvent({ + db: executor, + agentId: registration.agent.id, + sessionId: initialAuth.sessionId, + eventType: "issued", + createdAt: initialAuth.createdAt, + metadata: { + actor: "agent_registration", + }, + eventBus: getEventBus(c.env), + initiatedByAccountId: human.did, + }); + } catch (error) { + if (options.rollbackOnAgentInsertFailure) { + try { + await executor + .delete(agent_auth_sessions) + .where(eq(agent_auth_sessions.id, initialAuth.sessionId)); + } catch (rollbackError) { + logger.error("registry.agent_registration_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? 
rollbackError.name : "unknown", + stage: "auth_session_delete", + }); + } + + try { + await executor + .delete(agents) + .where(eq(agents.id, registration.agent.id)); + } catch (rollbackError) { + logger.error("registry.agent_registration_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? rollbackError.name : "unknown", + stage: "agent_delete", + }); + } + + await executor + .update(agent_registration_challenges) + .set({ + status: "pending", + used_at: null, + updated_at: nowIso(), + }) + .where( + and( + eq(agent_registration_challenges.id, challenge.id), + eq(agent_registration_challenges.owner_id, human.id), + eq(agent_registration_challenges.status, "used"), + ), + ); + } + + throw error; + } + }; + + try { + await db.transaction(async (tx) => { + await applyRegistrationMutation(tx as unknown as typeof db, { + rollbackOnAgentInsertFailure: false, + }); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRegistrationMutation(db, { + rollbackOnAgentInsertFailure: true, + }); + } + + return c.json( + { + agent: registration.agent, + ait, + agentAuth: toAgentAuthResponse({ + accessToken: initialAuth.accessToken, + accessExpiresAt: initialAuth.accessExpiresAt, + refreshToken: initialAuth.refreshToken, + refreshExpiresAt: initialAuth.refreshExpiresAt, + }), + }, + 201, + ); + }); + + app.delete("/v1/agents/:id", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const agentId = parseAgentRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const existingAgent = await findOwnedAgent({ + db, + ownerId: human.id, + agentId, + }); + + if (!existingAgent) { + throw agentNotFoundError(); + } + + if (existingAgent.status === "revoked") { + return c.body(null, 204); + } + + const currentJti = requireCurrentJti({ + currentJti: existingAgent.current_jti, + onInvalid: (reason) => 
+ invalidAgentRevokeStateError({ + environment: config.ENVIRONMENT, + reason: `${reason} for revocation`, + }), + }); + + const existingSession = await findAgentAuthSessionByAgentId({ + db, + agentId: existingAgent.id, + }); + const revokedAt = nowIso(); + const applyRevokeMutation = async (executor: typeof db): Promise => { + await executor + .update(agents) + .set({ + status: "revoked", + updated_at: revokedAt, + }) + .where(eq(agents.id, existingAgent.id)); + + await executor + .insert(revocations) + .values({ + id: generateUlid(nowUtcMs()), + jti: currentJti, + agent_id: existingAgent.id, + reason: null, + revoked_at: revokedAt, + }) + .onConflictDoNothing({ + target: revocations.jti, + }); + + if (existingSession && existingSession.status === "active") { + await executor + .update(agent_auth_sessions) + .set({ + status: "revoked", + revoked_at: revokedAt, + updated_at: revokedAt, + }) + .where( + and( + eq(agent_auth_sessions.id, existingSession.id), + eq(agent_auth_sessions.status, "active"), + ), + ); + + await insertAgentAuthEvent({ + db: executor, + agentId: existingAgent.id, + sessionId: existingSession.id, + eventType: "revoked", + reason: "agent_revoked", + createdAt: revokedAt, + eventBus: getEventBus(c.env), + initiatedByAccountId: human.did, + }); + } + }; + + try { + await db.transaction(async (tx) => { + await applyRevokeMutation(tx as unknown as typeof db); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRevokeMutation(db); + } + + return c.body(null, 204); + }); + + app.post("/v1/agents/:id/reissue", createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const agentId = parseAgentRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const existingAgent = await findOwnedAgent({ + db, + ownerId: human.id, + agentId, + }); + + if (!existingAgent) { + throw agentNotFoundError(); 
+ } + + if (existingAgent.status === "revoked") { + throw invalidAgentReissueStateError({ + environment: config.ENVIRONMENT, + field: "status", + reason: "revoked agents cannot be reissued", + }); + } + + const currentJti = requireCurrentJti({ + currentJti: existingAgent.current_jti, + onInvalid: (reason) => + invalidAgentReissueStateError({ + environment: config.ENVIRONMENT, + reason: `${reason} for reissue`, + }), + }); + + const reissue = buildAgentReissue({ + id: existingAgent.id, + did: existingAgent.did, + ownerDid: human.did, + name: existingAgent.name, + framework: existingAgent.framework, + publicKey: existingAgent.public_key, + previousExpiresAt: existingAgent.expires_at, + issuer: resolveRegistryIssuer(config), + }); + const signer = await resolveRegistrySigner(config); + const ait = await signAIT({ + claims: reissue.claims, + signerKid: signer.signerKid, + signerKeypair: signer.signerKeypair, + }); + + const revokedAt = nowIso(); + const applyReissueMutation = async (executor: typeof db): Promise => { + const updateResult = await executor + .update(agents) + .set({ + status: "active", + current_jti: reissue.agent.currentJti, + expires_at: reissue.agent.expiresAt, + updated_at: reissue.agent.updatedAt, + }) + .where( + and( + eq(agents.id, existingAgent.id), + eq(agents.status, "active"), + eq(agents.current_jti, currentJti), + ), + ); + + const updatedRows = getMutationRowCount(updateResult); + if (updatedRows === 0) { + throw invalidAgentReissueStateError({ + environment: config.ENVIRONMENT, + field: "currentJti", + reason: "agent state changed during reissue; retry request", + }); + } + + await executor + .insert(revocations) + .values({ + id: generateUlid(nowUtcMs()), + jti: currentJti, + agent_id: existingAgent.id, + reason: "reissued", + revoked_at: revokedAt, + }) + .onConflictDoNothing({ + target: revocations.jti, + }); + }; + + try { + await db.transaction(async (tx) => { + await applyReissueMutation(tx as unknown as typeof db); + }); + } catch 
(error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyReissueMutation(db); + } + + return c.json({ agent: reissue.agent, ait }); + }); +} diff --git a/apps/registry/src/server/routes/health.ts b/apps/registry/src/server/routes/health.ts new file mode 100644 index 0000000..6775115 --- /dev/null +++ b/apps/registry/src/server/routes/health.ts @@ -0,0 +1,134 @@ +import { REGISTRY_METADATA_PATH } from "@clawdentity/protocol"; +import { AppError, nowUtcMs, signCRL } from "@clawdentity/sdk"; +import { desc, eq } from "drizzle-orm"; +import type { MiddlewareHandler } from "hono"; +import { resolveRegistryIssuer } from "../../agent-registration.js"; +import { + agentResolveNotFoundError, + mapResolvedAgentRow, + parseAgentResolvePath, +} from "../../agent-resolve.js"; +import { createDb } from "../../db/client.js"; +import { agents, humans, revocations } from "../../db/schema.js"; +import { resolveRegistrySigner } from "../../registry-signer.js"; +import { + REGISTRY_CRL_CACHE_CONTROL, + REGISTRY_KEY_CACHE_CONTROL, + type RegistryRouteDependencies, +} from "../constants.js"; +import { buildCrlClaims, resolveProxyUrl } from "../helpers/parsers.js"; + +export function registerHealthRoutes( + input: RegistryRouteDependencies & { + resolveRateLimit: MiddlewareHandler; + crlRateLimit: MiddlewareHandler; + }, +): void { + const { app, getConfig, crlRateLimit, resolveRateLimit } = input; + + app.get("/health", (c) => { + const config = getConfig(c.env); + return c.json({ + status: "ok", + version: config.APP_VERSION ?? "0.0.0", + environment: config.ENVIRONMENT, + }); + }); + + app.get(REGISTRY_METADATA_PATH, (c) => { + const config = getConfig(c.env); + return c.json({ + status: "ok", + environment: config.ENVIRONMENT, + version: config.APP_VERSION ?? "0.0.0", + registryUrl: c.req.url ? 
new URL(c.req.url).origin : undefined, + proxyUrl: resolveProxyUrl(config), + }); + }); + + app.get("/.well-known/claw-keys.json", (c) => { + const config = getConfig(c.env); + return c.json( + { + keys: config.REGISTRY_SIGNING_KEYS ?? [], + }, + 200, + { + "Cache-Control": REGISTRY_KEY_CACHE_CONTROL, + }, + ); + }); + + app.get("/v1/crl", crlRateLimit, async (c) => { + const config = getConfig(c.env); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: revocations.id, + jti: revocations.jti, + reason: revocations.reason, + revoked_at: revocations.revoked_at, + agent_did: agents.did, + }) + .from(revocations) + .innerJoin(agents, eq(revocations.agent_id, agents.id)) + .orderBy(desc(revocations.revoked_at), desc(revocations.id)); + + if (rows.length === 0) { + throw new AppError({ + code: "CRL_NOT_FOUND", + message: "CRL snapshot is not available", + status: 404, + expose: true, + }); + } + + const signer = await resolveRegistrySigner(config); + const nowSeconds = Math.floor(nowUtcMs() / 1000); + const claims = buildCrlClaims({ + rows, + environment: config.ENVIRONMENT, + issuer: resolveRegistryIssuer(config), + nowSeconds, + }); + const crl = await signCRL({ + claims, + signerKid: signer.signerKid, + signerKeypair: signer.signerKeypair, + }); + + return c.json({ crl }, 200, { + "Cache-Control": REGISTRY_CRL_CACHE_CONTROL, + }); + }); + + app.get("/v1/resolve/:id", resolveRateLimit, async (c) => { + const config = getConfig(c.env); + const id = parseAgentResolvePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + did: agents.did, + name: agents.name, + framework: agents.framework, + status: agents.status, + owner_did: humans.did, + }) + .from(agents) + .innerJoin(humans, eq(agents.owner_id, humans.id)) + .where(eq(agents.id, id)) + .limit(1); + + const row = rows[0]; + if (!row) { + throw agentResolveNotFoundError(); + } + + return 
c.json(mapResolvedAgentRow(row)); + }); +} diff --git a/apps/registry/src/server/routes/internal-services.ts b/apps/registry/src/server/routes/internal-services.ts new file mode 100644 index 0000000..081e968 --- /dev/null +++ b/apps/registry/src/server/routes/internal-services.ts @@ -0,0 +1,295 @@ +import { + ADMIN_INTERNAL_SERVICES_PATH, + generateUlid, + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, +} from "@clawdentity/protocol"; +import { AppError, nowIso, nowUtcMs } from "@clawdentity/sdk"; +import { desc, eq } from "drizzle-orm"; +import { createApiKeyAuth } from "../../auth/api-key-auth.js"; +import { + createServiceAuth, + deriveInternalServiceSecretPrefix, + generateInternalServiceSecret, + hashInternalServiceSecret, +} from "../../auth/service-auth.js"; +import { createDb } from "../../db/client.js"; +import { agents, humans, internal_services } from "../../db/schema.js"; +import type { RegistryRouteDependencies } from "../constants.js"; +import { + parseInternalOwnershipCheckPayload, + parseInternalServiceCreatePayload, + parseInternalServicePathId, + parseInternalServiceRotatePayload, +} from "../helpers/parsers.js"; + +export function registerInternalServiceRoutes( + input: RegistryRouteDependencies, +): void { + const { app, getConfig } = input; + + app.post(ADMIN_INTERNAL_SERVICES_PATH, createApiKeyAuth(), async (c) => { + const human = c.get("human"); + if (human.role !== "admin") { + throw new AppError({ + code: "INTERNAL_SERVICE_CREATE_FORBIDDEN", + message: "Admin role is required", + status: 403, + expose: true, + }); + } + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const parsed = parseInternalServiceCreatePayload(payload); + const db = createDb(c.env.DB); + const existingRows = await db + .select({ + id: internal_services.id, + }) + .from(internal_services) + 
.where(eq(internal_services.name, parsed.name)) + .limit(1); + if (existingRows[0]) { + throw new AppError({ + code: "INTERNAL_SERVICE_ALREADY_EXISTS", + message: "Internal service already exists", + status: 409, + expose: true, + }); + } + + const secret = generateInternalServiceSecret(); + const secretHash = await hashInternalServiceSecret(secret); + const secretPrefix = deriveInternalServiceSecretPrefix(secret); + const createdAt = nowIso(); + const serviceId = generateUlid(nowUtcMs()); + await db.insert(internal_services).values({ + id: serviceId, + name: parsed.name, + secret_hash: secretHash, + secret_prefix: secretPrefix, + scopes_json: JSON.stringify(parsed.scopes), + status: "active", + created_by: human.id, + rotated_at: null, + last_used_at: null, + created_at: createdAt, + updated_at: createdAt, + }); + + return c.json( + { + internalService: { + id: serviceId, + name: parsed.name, + scopes: parsed.scopes, + status: "active", + createdAt, + updatedAt: createdAt, + rotatedAt: null, + lastUsedAt: null, + secret, + }, + }, + 201, + ); + }); + + app.get(ADMIN_INTERNAL_SERVICES_PATH, createApiKeyAuth(), async (c) => { + const human = c.get("human"); + if (human.role !== "admin") { + throw new AppError({ + code: "INTERNAL_SERVICE_LIST_FORBIDDEN", + message: "Admin role is required", + status: 403, + expose: true, + }); + } + + const db = createDb(c.env.DB); + const rows = await db + .select({ + id: internal_services.id, + name: internal_services.name, + scopesJson: internal_services.scopes_json, + status: internal_services.status, + createdAt: internal_services.created_at, + updatedAt: internal_services.updated_at, + rotatedAt: internal_services.rotated_at, + lastUsedAt: internal_services.last_used_at, + }) + .from(internal_services) + .orderBy(desc(internal_services.created_at), desc(internal_services.id)); + + return c.json({ + internalServices: rows.map((row) => ({ + id: row.id, + name: row.name, + scopes: JSON.parse(row.scopesJson) as string[], + status: 
row.status, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + rotatedAt: row.rotatedAt, + lastUsedAt: row.lastUsedAt, + })), + }); + }); + + app.post( + `${ADMIN_INTERNAL_SERVICES_PATH}/:id/rotate`, + createApiKeyAuth(), + async (c) => { + const config = getConfig(c.env); + const human = c.get("human"); + if (human.role !== "admin") { + throw new AppError({ + code: "INTERNAL_SERVICE_ROTATE_FORBIDDEN", + message: "Admin role is required", + status: 403, + expose: true, + }); + } + + const serviceId = parseInternalServicePathId({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + + let payload: unknown = {}; + try { + const rawBody = await c.req.text(); + if (rawBody.trim().length > 0) { + payload = JSON.parse(rawBody); + } + } catch { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const parsedPayload = parseInternalServiceRotatePayload(payload); + const db = createDb(c.env.DB); + const rows = await db + .select({ + id: internal_services.id, + name: internal_services.name, + scopesJson: internal_services.scopes_json, + status: internal_services.status, + }) + .from(internal_services) + .where(eq(internal_services.id, serviceId)) + .limit(1); + const service = rows[0]; + if (!service) { + throw new AppError({ + code: "INTERNAL_SERVICE_NOT_FOUND", + message: "Internal service was not found", + status: 404, + expose: true, + }); + } + if (service.status !== "active") { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID_STATE", + message: "Internal service cannot be rotated", + status: 409, + expose: true, + }); + } + + const scopes = + parsedPayload.scopes ?? 
+ ((JSON.parse(service.scopesJson) as unknown[]).filter( + (scope): scope is string => + typeof scope === "string" && scope.trim().length > 0, + ) as string[]); + if (scopes.length === 0) { + throw new AppError({ + code: "INTERNAL_SERVICE_INVALID", + message: "Internal service payload is invalid", + status: 400, + expose: true, + }); + } + + const secret = generateInternalServiceSecret(); + const secretHash = await hashInternalServiceSecret(secret); + const secretPrefix = deriveInternalServiceSecretPrefix(secret); + const rotatedAt = nowIso(); + await db + .update(internal_services) + .set({ + secret_hash: secretHash, + secret_prefix: secretPrefix, + scopes_json: JSON.stringify(scopes), + rotated_at: rotatedAt, + updated_at: rotatedAt, + }) + .where(eq(internal_services.id, service.id)); + + return c.json({ + internalService: { + id: service.id, + name: service.name, + scopes, + status: "active", + rotatedAt, + updatedAt: rotatedAt, + secret, + }, + }); + }, + ); + + app.post( + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, + createServiceAuth({ + requiredScopes: ["identity.read"], + }), + async (c) => { + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "AGENT_OWNERSHIP_INVALID", + message: "Ownership payload is invalid", + status: 400, + expose: true, + }); + } + + const parsed = parseInternalOwnershipCheckPayload(payload); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + ownerDid: humans.did, + status: agents.status, + }) + .from(agents) + .innerJoin(humans, eq(agents.owner_id, humans.id)) + .where(eq(agents.did, parsed.agentDid)) + .limit(1); + + const row = rows[0]; + return c.json({ + ownsAgent: row !== undefined && row.ownerDid === parsed.ownerDid, + agentStatus: row?.status ?? 
null, + }); + }, + ); +} diff --git a/apps/registry/src/server/routes/invites.ts b/apps/registry/src/server/routes/invites.ts new file mode 100644 index 0000000..d0ba60d --- /dev/null +++ b/apps/registry/src/server/routes/invites.ts @@ -0,0 +1,275 @@ +import { + generateUlid, + INVITES_PATH, + INVITES_REDEEM_PATH, + makeHumanDid, +} from "@clawdentity/protocol"; +import { + AppError, + nowIso, + nowUtcMs, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; +import { and, eq, isNull } from "drizzle-orm"; +import { createApiKeyAuth } from "../../auth/api-key-auth.js"; +import { + deriveApiKeyLookupPrefix, + generateApiKeyToken, + hashApiKeyToken, +} from "../../auth/api-key-token.js"; +import { createDb } from "../../db/client.js"; +import { api_keys, humans, invites } from "../../db/schema.js"; +import { + generateInviteCode, + inviteCreateForbiddenError, + inviteRedeemAlreadyUsedError, + inviteRedeemCodeInvalidError, + inviteRedeemExpiredError, + parseInviteCreatePayload, + parseInviteRedeemPayload, +} from "../../invite-lifecycle.js"; +import { logger, type RegistryRouteDependencies } from "../constants.js"; +import { + findInviteByCode, + getMutationRowCount, + isInviteExpired, + isUnsupportedLocalTransactionError, + resolveInviteRedeemStateError, +} from "../helpers/db-queries.js"; +import { resolveProxyUrl } from "../helpers/parsers.js"; + +export function registerInviteRoutes(input: RegistryRouteDependencies): void { + const { app, getConfig } = input; + + app.post(INVITES_PATH, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "INVITE_CREATE_INVALID", + message: exposeDetails + ? 
"Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const human = c.get("human"); + if (human.role !== "admin") { + throw inviteCreateForbiddenError(); + } + + const parsedPayload = parseInviteCreatePayload({ + payload, + environment: config.ENVIRONMENT, + nowMs: nowUtcMs(), + }); + + const inviteId = generateUlid(nowUtcMs()); + const inviteCode = generateInviteCode(); + const createdAt = nowIso(); + const db = createDb(c.env.DB); + await db.insert(invites).values({ + id: inviteId, + code: inviteCode, + created_by: human.id, + redeemed_by: null, + agent_id: null, + expires_at: parsedPayload.expiresAt, + created_at: createdAt, + }); + + return c.json( + { + invite: { + id: inviteId, + code: inviteCode, + createdBy: human.id, + expiresAt: parsedPayload.expiresAt, + createdAt, + }, + }, + 201, + ); + }); + + app.post(INVITES_REDEEM_PATH, async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown; + try { + payload = await c.req.json(); + } catch { + throw new AppError({ + code: "INVITE_REDEEM_INVALID", + message: exposeDetails + ? 
"Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const parsedPayload = parseInviteRedeemPayload({ + payload, + environment: config.ENVIRONMENT, + }); + + const db = createDb(c.env.DB); + const invite = await findInviteByCode({ + db, + code: parsedPayload.code, + }); + + if (!invite) { + throw inviteRedeemCodeInvalidError(); + } + + const nowMillis = nowUtcMs(); + if (invite.redeemed_by !== null) { + throw inviteRedeemAlreadyUsedError(); + } + + if ( + isInviteExpired({ + expiresAt: invite.expires_at, + nowMillis, + }) + ) { + throw inviteRedeemExpiredError(); + } + + const humanId = generateUlid(nowMillis); + const humanDid = makeHumanDid(humanId); + const apiKeyToken = generateApiKeyToken(); + const apiKeyHash = await hashApiKeyToken(apiKeyToken); + const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); + const apiKeyId = generateUlid(nowMillis + 1); + const createdAt = nowIso(); + + const applyRedeemMutation = async ( + executor: typeof db, + options: { rollbackOnFailure: boolean }, + ): Promise => { + await executor.insert(humans).values({ + id: humanId, + did: humanDid, + display_name: parsedPayload.displayName, + role: "user", + status: "active", + created_at: createdAt, + updated_at: createdAt, + }); + + let inviteRedeemed = false; + try { + const inviteUpdateResult = await executor + .update(invites) + .set({ + redeemed_by: humanId, + }) + .where(and(eq(invites.id, invite.id), isNull(invites.redeemed_by))); + + const updatedRows = getMutationRowCount(inviteUpdateResult); + if (updatedRows === 0) { + throw await resolveInviteRedeemStateError({ + db: executor, + inviteId: invite.id, + nowMillis, + }); + } + inviteRedeemed = true; + + await executor.insert(api_keys).values({ + id: apiKeyId, + human_id: humanId, + key_hash: apiKeyHash, + key_prefix: apiKeyPrefix, + name: parsedPayload.apiKeyName, + status: "active", + created_at: createdAt, + last_used_at: null, + }); + } catch 
(error) { + if (options.rollbackOnFailure) { + if (inviteRedeemed) { + try { + await executor + .update(invites) + .set({ + redeemed_by: null, + }) + .where( + and( + eq(invites.id, invite.id), + eq(invites.redeemed_by, humanId), + ), + ); + } catch (rollbackError) { + logger.error("registry.invite_redeem_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error + ? rollbackError.name + : "unknown", + stage: "invite_unlink", + }); + } + } + + try { + await executor.delete(humans).where(eq(humans.id, humanId)); + } catch (rollbackError) { + logger.error("registry.invite_redeem_rollback_failed", { + rollbackErrorName: + rollbackError instanceof Error ? rollbackError.name : "unknown", + stage: "human_delete", + }); + } + } + + throw error; + } + }; + + try { + await db.transaction(async (tx) => { + await applyRedeemMutation(tx as unknown as typeof db, { + rollbackOnFailure: false, + }); + }); + } catch (error) { + if (!isUnsupportedLocalTransactionError(error)) { + throw error; + } + + await applyRedeemMutation(db, { + rollbackOnFailure: true, + }); + } + + return c.json( + { + human: { + id: humanId, + did: humanDid, + displayName: parsedPayload.displayName, + role: "user", + status: "active", + }, + apiKey: { + id: apiKeyId, + name: parsedPayload.apiKeyName, + token: apiKeyToken, + }, + proxyUrl: resolveProxyUrl(config), + }, + 201, + ); + }); +} diff --git a/apps/registry/src/server/routes/me-api-keys.ts b/apps/registry/src/server/routes/me-api-keys.ts new file mode 100644 index 0000000..7a9af22 --- /dev/null +++ b/apps/registry/src/server/routes/me-api-keys.ts @@ -0,0 +1,149 @@ +import { generateUlid, ME_API_KEYS_PATH } from "@clawdentity/protocol"; +import { + AppError, + nowIso, + nowUtcMs, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; +import { and, desc, eq } from "drizzle-orm"; +import { + apiKeyNotFoundError, + mapApiKeyMetadataRow, + parseApiKeyCreatePayload, + parseApiKeyRevokePath, +} from "../../api-key-lifecycle.js"; +import { 
createApiKeyAuth } from "../../auth/api-key-auth.js"; +import { + deriveApiKeyLookupPrefix, + generateApiKeyToken, + hashApiKeyToken, +} from "../../auth/api-key-token.js"; +import { createDb } from "../../db/client.js"; +import { api_keys } from "../../db/schema.js"; +import type { RegistryRouteDependencies } from "../constants.js"; + +export function registerMeApiKeyRoutes(input: RegistryRouteDependencies): void { + const { app, getConfig } = input; + + app.get("/v1/me", createApiKeyAuth(), (c) => { + return c.json({ human: c.get("human") }); + }); + + app.post(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const exposeDetails = shouldExposeVerboseErrors(config.ENVIRONMENT); + + let payload: unknown = {}; + try { + const rawBody = await c.req.text(); + if (rawBody.trim().length > 0) { + payload = JSON.parse(rawBody); + } + } catch { + throw new AppError({ + code: "API_KEY_CREATE_INVALID", + message: exposeDetails + ? "Request body must be valid JSON" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + }); + } + + const parsedPayload = parseApiKeyCreatePayload({ + payload, + environment: config.ENVIRONMENT, + }); + + const human = c.get("human"); + const apiKeyToken = generateApiKeyToken(); + const apiKeyHash = await hashApiKeyToken(apiKeyToken); + const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); + const apiKeyId = generateUlid(nowUtcMs() + 1); + const createdAt = nowIso(); + + const db = createDb(c.env.DB); + await db.insert(api_keys).values({ + id: apiKeyId, + human_id: human.id, + key_hash: apiKeyHash, + key_prefix: apiKeyPrefix, + name: parsedPayload.name, + status: "active", + created_at: createdAt, + last_used_at: null, + }); + + return c.json( + { + apiKey: { + id: apiKeyId, + name: parsedPayload.name, + status: "active", + createdAt, + lastUsedAt: null, + token: apiKeyToken, + }, + }, + 201, + ); + }); + + app.get(ME_API_KEYS_PATH, createApiKeyAuth(), async (c) => { + const 
human = c.get("human"); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: api_keys.id, + name: api_keys.name, + status: api_keys.status, + created_at: api_keys.created_at, + last_used_at: api_keys.last_used_at, + }) + .from(api_keys) + .where(eq(api_keys.human_id, human.id)) + .orderBy(desc(api_keys.created_at), desc(api_keys.id)); + + return c.json({ + apiKeys: rows.map(mapApiKeyMetadataRow), + }); + }); + + app.delete(`${ME_API_KEYS_PATH}/:id`, createApiKeyAuth(), async (c) => { + const config = getConfig(c.env); + const apiKeyId = parseApiKeyRevokePath({ + id: c.req.param("id"), + environment: config.ENVIRONMENT, + }); + const human = c.get("human"); + const db = createDb(c.env.DB); + + const rows = await db + .select({ + id: api_keys.id, + status: api_keys.status, + }) + .from(api_keys) + .where(and(eq(api_keys.id, apiKeyId), eq(api_keys.human_id, human.id))) + .limit(1); + + const existingKey = rows[0]; + if (!existingKey) { + throw apiKeyNotFoundError(); + } + + if (existingKey.status === "revoked") { + return c.body(null, 204); + } + + await db + .update(api_keys) + .set({ + status: "revoked", + }) + .where(and(eq(api_keys.id, apiKeyId), eq(api_keys.human_id, human.id))); + + return c.body(null, 204); + }); +} From 5dfc3ed4be933491a681cd45a94d9100385feff2 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 17:05:16 +0530 Subject: [PATCH 131/190] test(cli): split agent tests into focused files --- apps/cli/src/commands/agent.test.ts | 857 ------------------ apps/cli/src/commands/agent.test/AGENTS.md | 24 + .../commands/agent.test/auth-refresh.test.ts | 90 ++ .../src/commands/agent.test/create.test.ts | 218 +++++ apps/cli/src/commands/agent.test/helpers.ts | 337 +++++++ .../src/commands/agent.test/inspect.test.ts | 108 +++ .../src/commands/agent.test/revoke.test.ts | 180 ++++ 7 files changed, 957 insertions(+), 857 deletions(-) delete mode 100644 apps/cli/src/commands/agent.test.ts create mode 100644 
apps/cli/src/commands/agent.test/AGENTS.md create mode 100644 apps/cli/src/commands/agent.test/auth-refresh.test.ts create mode 100644 apps/cli/src/commands/agent.test/create.test.ts create mode 100644 apps/cli/src/commands/agent.test/helpers.ts create mode 100644 apps/cli/src/commands/agent.test/inspect.test.ts create mode 100644 apps/cli/src/commands/agent.test/revoke.test.ts diff --git a/apps/cli/src/commands/agent.test.ts b/apps/cli/src/commands/agent.test.ts deleted file mode 100644 index f36ae08..0000000 --- a/apps/cli/src/commands/agent.test.ts +++ /dev/null @@ -1,857 +0,0 @@ -import { - access, - chmod, - mkdir, - readFile, - rename, - unlink, - writeFile, -} from "node:fs/promises"; -import { Command } from "commander"; -import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; - -vi.mock("node:fs/promises", () => ({ - access: vi.fn(), - chmod: vi.fn(), - mkdir: vi.fn(), - readFile: vi.fn(), - rename: vi.fn(), - unlink: vi.fn(), - writeFile: vi.fn(), -})); - -vi.mock("../config/manager.js", () => ({ - getConfigDir: vi.fn(() => "/mock-home/.clawdentity"), - resolveConfig: vi.fn(), -})); - -vi.mock("@clawdentity/sdk", () => ({ - createLogger: vi.fn(() => ({ - child: vi.fn(), - debug: vi.fn(), - info: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - })), - decodeAIT: vi.fn(), - encodeEd25519SignatureBase64url: vi.fn(), - encodeEd25519KeypairBase64url: vi.fn(), - generateEd25519Keypair: vi.fn(), - nowUtcMs: vi.fn(() => 1_700_000_000_000), - refreshAgentAuthWithClawProof: vi.fn(), - signEd25519: vi.fn(), - toIso: vi.fn((value: Date | string | number) => - new Date(value).toISOString(), - ), -})); - -import { - type DecodedAit, - decodeAIT, - encodeEd25519KeypairBase64url, - encodeEd25519SignatureBase64url, - generateEd25519Keypair, - nowUtcMs, - refreshAgentAuthWithClawProof, - signEd25519, - toIso, -} from "@clawdentity/sdk"; -import { resolveConfig } from "../config/manager.js"; -import { createAgentCommand } from "./agent.js"; - -const 
mockedAccess = vi.mocked(access); -const mockedChmod = vi.mocked(chmod); -const mockedMkdir = vi.mocked(mkdir); -const mockedReadFile = vi.mocked(readFile); -const mockedRename = vi.mocked(rename); -const mockedUnlink = vi.mocked(unlink); -const mockedWriteFile = vi.mocked(writeFile); -const mockedResolveConfig = vi.mocked(resolveConfig); -const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); -const mockedNowUtcMs = vi.mocked(nowUtcMs); -const mockedRefreshAgentAuthWithClawProof = vi.mocked( - refreshAgentAuthWithClawProof, -); -const mockedSignEd25519 = vi.mocked(signEd25519); -const mockedEncodeEd25519SignatureBase64url = vi.mocked( - encodeEd25519SignatureBase64url, -); -const mockedEncodeEd25519KeypairBase64url = vi.mocked( - encodeEd25519KeypairBase64url, -); -const mockedDecodeAIT = vi.mocked(decodeAIT); -const mockedToIso = vi.mocked(toIso); - -const mockFetch = vi.fn(); - -const buildErrnoError = (code: string): NodeJS.ErrnoException => { - const error = new Error(code) as NodeJS.ErrnoException; - error.code = code; - return error; -}; - -const createJsonResponse = (status: number, body: unknown): Response => { - return { - ok: status >= 200 && status < 300, - status, - json: vi.fn(async () => body), - } as unknown as Response; -}; - -const runAgentCommand = async (args: string[]) => { - const stdout: string[] = []; - const stderr: string[] = []; - const previousExitCode = process.exitCode; - - const stdoutSpy = vi - .spyOn(process.stdout, "write") - .mockImplementation((chunk: unknown) => { - stdout.push(String(chunk)); - return true; - }); - const stderrSpy = vi - .spyOn(process.stderr, "write") - .mockImplementation((chunk: unknown) => { - stderr.push(String(chunk)); - return true; - }); - - process.exitCode = undefined; - - const command = createAgentCommand(); - command.configureOutput({ - writeOut: (message) => stdout.push(message), - writeErr: (message) => stderr.push(message), - outputError: (message) => stderr.push(message), - }); 
- - const root = new Command("clawdentity"); - root.addCommand(command); - - try { - await root.parseAsync(["node", "clawdentity", "agent", ...args]); - } finally { - stdoutSpy.mockRestore(); - stderrSpy.mockRestore(); - } - - const exitCode = process.exitCode; - process.exitCode = previousExitCode; - - return { - exitCode, - stderr: stderr.join(""), - stdout: stdout.join(""), - }; -}; - -describe("agent create command", () => { - beforeEach(() => { - vi.clearAllMocks(); - mockFetch.mockReset(); - vi.stubGlobal("fetch", mockFetch); - - mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://registry.clawdentity.com", - apiKey: "pat_123", - }); - - mockedAccess.mockRejectedValue(buildErrnoError("ENOENT")); - mockedMkdir.mockResolvedValue(undefined); - mockedWriteFile.mockResolvedValue(undefined); - mockedRename.mockResolvedValue(undefined); - mockedUnlink.mockResolvedValue(undefined); - mockedChmod.mockResolvedValue(undefined); - - mockedGenerateEd25519Keypair.mockResolvedValue({ - publicKey: Uint8Array.from({ length: 32 }, (_, index) => index + 1), - secretKey: Uint8Array.from({ length: 32 }, (_, index) => 64 - index), - }); - mockedNowUtcMs.mockReturnValue(1_700_000_000_000); - mockedToIso.mockImplementation((value: Date | string | number) => - new Date(value).toISOString(), - ); - - mockedEncodeEd25519KeypairBase64url.mockReturnValue({ - publicKey: "public-key-b64url", - secretKey: "secret-key-b64url", - }); - - mockedSignEd25519.mockResolvedValue(Uint8Array.from([1, 2, 3])); - mockedEncodeEd25519SignatureBase64url.mockReturnValue( - "challenge-signature-b64url", - ); - - mockFetch.mockImplementation(async (input) => { - const url = String(input); - if (url.endsWith("/v1/agents/challenge")) { - return createJsonResponse(201, { - challengeId: "01JCHALLENGEID1234567890ABC", - nonce: "challenge-nonce-b64url", - ownerDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - expiresAt: "2030-01-01T00:05:00.000Z", - }); - } - - return createJsonResponse(201, { - 
agent: { - did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - name: "agent-01", - framework: "openclaw", - expiresAt: "2030-01-01T00:00:00.000Z", - }, - ait: "ait.jwt.value", - agentAuth: { - tokenType: "Bearer", - accessToken: "clw_agt_access_token", - accessExpiresAt: "2030-01-01T00:15:00.000Z", - refreshToken: "clw_rft_refresh_token", - refreshExpiresAt: "2030-01-31T00:00:00.000Z", - }, - }); - }); - }); - - afterEach(() => { - process.exitCode = undefined; - vi.unstubAllGlobals(); - }); - - it("creates an agent identity and writes all files", async () => { - const result = await runAgentCommand(["create", "agent-01"]); - - expect(mockedGenerateEd25519Keypair).toHaveBeenCalled(); - expect(mockedSignEd25519).toHaveBeenCalledWith( - expect.any(Uint8Array), - expect.any(Uint8Array), - ); - expect(mockedEncodeEd25519SignatureBase64url).toHaveBeenCalledWith( - Uint8Array.from([1, 2, 3]), - ); - expect(mockFetch).toHaveBeenCalledWith( - "https://registry.clawdentity.com/v1/agents/challenge", - expect.objectContaining({ - method: "POST", - headers: expect.objectContaining({ - authorization: "Bearer pat_123", - "content-type": "application/json", - }), - }), - ); - expect(mockFetch).toHaveBeenCalledWith( - "https://registry.clawdentity.com/v1/agents", - expect.objectContaining({ - method: "POST", - headers: expect.objectContaining({ - authorization: "Bearer pat_123", - "content-type": "application/json", - }), - }), - ); - - expect(mockedWriteFile).toHaveBeenCalledTimes(5); - expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/secret.key", - "secret-key-b64url", - "utf-8", - ); - expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/public.key", - "public-key-b64url", - "utf-8", - ); - expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/identity.json", - expect.stringContaining( - '"did": "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"', - ), - "utf-8", - ); - 
expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/ait.jwt", - "ait.jwt.value", - "utf-8", - ); - expect(mockedWriteFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", - expect.stringContaining('"refreshToken": "clw_rft_refresh_token"'), - "utf-8", - ); - - expect(result.stdout).toContain( - "Agent DID: did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - ); - expect(result.stdout).toContain("Expires At: 2030-01-01T00:00:00.000Z"); - expect(result.exitCode).toBeUndefined(); - }); - - it("fails when API key is missing", async () => { - mockedResolveConfig.mockResolvedValueOnce({ - registryUrl: "https://registry.clawdentity.com", - }); - - const result = await runAgentCommand(["create", "agent-01"]); - - expect(result.stderr).toContain("API key is not configured"); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - }); - - it("handles registry 401 responses", async () => { - mockFetch.mockResolvedValueOnce( - createJsonResponse(401, { - error: { - message: "Invalid API key", - }, - }), - ); - - const result = await runAgentCommand(["create", "agent-01"]); - - expect(result.stderr).toContain("authentication failed"); - expect(result.exitCode).toBe(1); - }); - - it("handles registry 400 responses", async () => { - mockFetch.mockResolvedValueOnce( - createJsonResponse(400, { - error: { - message: "name contains invalid characters", - }, - }), - ); - - const result = await runAgentCommand(["create", "agent-01"]); - - expect(result.stderr).toContain("rejected the request"); - expect(result.exitCode).toBe(1); - }); - - it("handles registry connection errors", async () => { - mockFetch.mockRejectedValueOnce(new Error("socket hang up")); - - const result = await runAgentCommand(["create", "agent-01"]); - - expect(result.stderr).toContain("Unable to connect to the registry"); - expect(result.exitCode).toBe(1); - }); - - it("fails when agent directory already exists", async () 
=> { - mockedAccess.mockResolvedValueOnce(undefined); - - const result = await runAgentCommand(["create", "agent-01"]); - - expect(result.stderr).toContain("already exists"); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - }); - - it("sets 0600 permissions on every identity file", async () => { - await runAgentCommand(["create", "agent-01"]); - - expect(mockedChmod).toHaveBeenCalledTimes(5); - expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/secret.key", - 0o600, - ); - expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/public.key", - 0o600, - ); - expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/identity.json", - 0o600, - ); - expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/ait.jwt", - 0o600, - ); - expect(mockedChmod).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", - 0o600, - ); - }); - - it("sends optional framework and ttl-days values", async () => { - await runAgentCommand([ - "create", - "agent-01", - "--framework", - "langgraph", - "--ttl-days", - "45", - ]); - - const request = mockFetch.mock.calls[1] as [string, RequestInit]; - const requestBody = JSON.parse(String(request[1]?.body)) as { - framework?: string; - ttlDays?: number; - challengeId?: string; - challengeSignature?: string; - }; - - expect(requestBody.framework).toBe("langgraph"); - expect(requestBody.ttlDays).toBe(45); - expect(requestBody.challengeId).toBe("01JCHALLENGEID1234567890ABC"); - expect(requestBody.challengeSignature).toBe("challenge-signature-b64url"); - }); - - it("rejects dot-segment agent names before hitting the filesystem", async () => { - const result = await runAgentCommand(["create", "."]); - - expect(result.stderr).toContain('Agent name must not be "." 
or "..".'); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - expect(mockedMkdir).not.toHaveBeenCalled(); - expect(mockedWriteFile).not.toHaveBeenCalled(); - }); -}); - -describe("agent auth refresh command", () => { - beforeEach(() => { - vi.clearAllMocks(); - mockFetch.mockReset(); - vi.stubGlobal("fetch", mockFetch); - - mockedReadFile.mockImplementation(async (path) => { - const filePath = String(path); - if (filePath.endsWith("/ait.jwt")) { - return "ait.jwt.value"; - } - if (filePath.endsWith("/identity.json")) { - return JSON.stringify({ - did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - registryUrl: "https://registry.clawdentity.com", - }); - } - if (filePath.endsWith("/secret.key")) { - return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; - } - if (filePath.endsWith("/registry-auth.json")) { - return JSON.stringify({ - tokenType: "Bearer", - accessToken: "clw_agt_old_access", - accessExpiresAt: "2030-01-01T00:15:00.000Z", - refreshToken: "clw_rft_old_refresh", - refreshExpiresAt: "2030-01-31T00:00:00.000Z", - }); - } - - throw buildErrnoError("ENOENT"); - }); - - mockedRefreshAgentAuthWithClawProof.mockResolvedValue({ - tokenType: "Bearer", - accessToken: "clw_agt_new_access", - accessExpiresAt: "2030-01-02T00:15:00.000Z", - refreshToken: "clw_rft_new_refresh", - refreshExpiresAt: "2030-02-01T00:00:00.000Z", - }); - }); - - afterEach(() => { - process.exitCode = undefined; - vi.unstubAllGlobals(); - }); - - it("refreshes agent auth and rewrites registry-auth.json", async () => { - const result = await runAgentCommand(["auth", "refresh", "agent-01"]); - - expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( - expect.objectContaining({ - registryUrl: "https://registry.clawdentity.com", - ait: "ait.jwt.value", - refreshToken: "clw_rft_old_refresh", - }), - ); - const [tempPath, tempContents, tempEncoding] = mockedWriteFile.mock - .calls[0] as [string, string, BufferEncoding]; - expect(tempPath).toContain( - 
"/mock-home/.clawdentity/agents/agent-01/registry-auth.json.tmp-", - ); - expect(tempContents).toContain('"refreshToken": "clw_rft_new_refresh"'); - expect(tempEncoding).toBe("utf-8"); - expect(mockedRename).toHaveBeenCalledWith( - tempPath, - "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", - ); - expect(mockedWriteFile).not.toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/registry-auth.json", - expect.stringContaining('"refreshToken": "clw_rft_new_refresh"'), - "utf-8", - ); - expect(result.stdout).toContain("Agent auth refreshed: agent-01"); - expect(result.exitCode).toBeUndefined(); - }); - - it("fails when registry-auth.json is missing", async () => { - mockedReadFile.mockImplementation(async (path) => { - const filePath = String(path); - if (filePath.endsWith("/registry-auth.json")) { - throw buildErrnoError("ENOENT"); - } - if (filePath.endsWith("/ait.jwt")) { - return "ait.jwt.value"; - } - if (filePath.endsWith("/identity.json")) { - return JSON.stringify({ - did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - registryUrl: "https://registry.clawdentity.com", - }); - } - if (filePath.endsWith("/secret.key")) { - return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; - } - - throw buildErrnoError("ENOENT"); - }); - - const result = await runAgentCommand(["auth", "refresh", "agent-01"]); - - expect(result.stderr).toContain("registry-auth.json"); - expect(result.exitCode).toBe(1); - expect(mockedRefreshAgentAuthWithClawProof).not.toHaveBeenCalled(); - }); - - it("passes base-path registry urls through to shared refresh client", async () => { - mockedReadFile.mockImplementation(async (path) => { - const filePath = String(path); - if (filePath.endsWith("/ait.jwt")) { - return "ait.jwt.value"; - } - if (filePath.endsWith("/identity.json")) { - return JSON.stringify({ - did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - registryUrl: "https://registry.clawdentity.com/registry", - }); - } - if (filePath.endsWith("/secret.key")) { - 
return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; - } - if (filePath.endsWith("/registry-auth.json")) { - return JSON.stringify({ - tokenType: "Bearer", - accessToken: "clw_agt_old_access", - accessExpiresAt: "2030-01-01T00:15:00.000Z", - refreshToken: "clw_rft_old_refresh", - refreshExpiresAt: "2030-01-31T00:00:00.000Z", - }); - } - - throw buildErrnoError("ENOENT"); - }); - - await runAgentCommand(["auth", "refresh", "agent-01"]); - - expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( - expect.objectContaining({ - registryUrl: "https://registry.clawdentity.com/registry", - }), - ); - }); -}); - -describe("agent revoke command", () => { - const agentDid = "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"; - const agentId = "01HF7YAT00W6W7CM7N3W5FDXT4"; - - beforeEach(() => { - vi.clearAllMocks(); - mockFetch.mockReset(); - vi.stubGlobal("fetch", mockFetch); - - mockedResolveConfig.mockResolvedValue({ - registryUrl: "https://registry.clawdentity.com", - apiKey: "pat_123", - }); - - mockedReadFile.mockResolvedValue( - JSON.stringify({ - did: agentDid, - }), - ); - - mockFetch.mockResolvedValue( - createJsonResponse(204, { - ok: true, - }), - ); - }); - - afterEach(() => { - process.exitCode = undefined; - vi.unstubAllGlobals(); - }); - - it("revokes agent by local name and prints confirmation", async () => { - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(mockedReadFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/identity.json", - "utf-8", - ); - expect(mockFetch).toHaveBeenCalledWith( - `https://registry.clawdentity.com/v1/agents/${agentId}`, - expect.objectContaining({ - method: "DELETE", - headers: expect.objectContaining({ - authorization: "Bearer pat_123", - }), - }), - ); - - expect(result.stdout).toContain(`Agent revoked: agent-01 (${agentDid})`); - expect(result.stdout).toContain( - "CRL visibility depends on verifier refresh interval.", - ); - expect(result.exitCode).toBeUndefined(); - }); - - 
it("treats repeat revoke as success (idempotent 204)", async () => { - mockFetch.mockResolvedValueOnce( - createJsonResponse(204, { - ok: true, - }), - ); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stdout).toContain("Agent revoked: agent-01"); - expect(result.exitCode).toBeUndefined(); - }); - - it("fails when API key is missing", async () => { - mockedResolveConfig.mockResolvedValueOnce({ - registryUrl: "https://registry.clawdentity.com", - }); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("API key is not configured"); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - }); - - it("fails when local identity.json does not exist", async () => { - mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("not found"); - expect(result.stderr).toContain("identity.json"); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - }); - - it("fails when identity.json is invalid JSON", async () => { - mockedReadFile.mockResolvedValueOnce("{ did:"); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("identity.json"); - expect(result.stderr).toContain("valid JSON"); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - }); - - it("fails when identity did is invalid", async () => { - mockedReadFile.mockResolvedValueOnce( - JSON.stringify({ - did: "invalid-did", - }), - ); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("invalid did"); - expect(result.stderr).toContain("identity.json"); - expect(result.exitCode).toBe(1); - expect(mockFetch).not.toHaveBeenCalled(); - }); - - it("handles registry 401 responses", async () => { - mockFetch.mockResolvedValueOnce( - createJsonResponse(401, { - error: 
{ - message: "Invalid API key", - }, - }), - ); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("authentication failed"); - expect(result.exitCode).toBe(1); - }); - - it("handles registry 404 responses", async () => { - mockFetch.mockResolvedValueOnce( - createJsonResponse(404, { - error: { - message: "Agent not found", - }, - }), - ); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("Agent not found"); - expect(result.exitCode).toBe(1); - }); - - it("handles registry 409 responses", async () => { - mockFetch.mockResolvedValueOnce( - createJsonResponse(409, { - error: { - message: "Agent cannot be revoked", - }, - }), - ); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("cannot be revoked"); - expect(result.exitCode).toBe(1); - }); - - it("handles registry connection errors", async () => { - mockFetch.mockRejectedValueOnce(new Error("socket hang up")); - - const result = await runAgentCommand(["revoke", "agent-01"]); - - expect(result.stderr).toContain("Unable to connect to the registry"); - expect(result.exitCode).toBe(1); - }); - - it("rejects dot-segment agent names before resolving identity path", async () => { - const result = await runAgentCommand(["revoke", ".."]); - - expect(result.stderr).toContain('Agent name must not be "." 
or "..".'); - expect(result.exitCode).toBe(1); - expect(mockedReadFile).not.toHaveBeenCalled(); - expect(mockFetch).not.toHaveBeenCalled(); - }); -}); - -describe("agent inspect command", () => { - const decodedAit: DecodedAit = { - header: { - alg: "EdDSA", - typ: "AIT", - kid: "key-01", - }, - claims: { - iss: "https://registry.clawdentity.dev", - sub: "did:claw:agent:abc", - ownerDid: "did:claw:human:def", - name: "agent-01", - framework: "openclaw", - cnf: { - jwk: { - kty: "OKP", - crv: "Ed25519", - x: "pub-key", - }, - }, - iat: 1672531100, - nbf: 1672531100, - exp: 1672531200, - jti: "01HF7YAT00W6W7CM7N3W5FDXT4", - }, - }; - - beforeEach(() => { - vi.clearAllMocks(); - mockFetch.mockReset(); - mockedReadFile.mockResolvedValue("mock-ait-token"); - mockedDecodeAIT.mockReturnValue(decodedAit); - }); - - afterEach(() => { - process.exitCode = undefined; - }); - - it("displays all six decoded AIT fields", async () => { - const result = await runAgentCommand(["inspect", "agent-01"]); - - expect(result.stdout).toContain("DID: did:claw:agent:abc"); - expect(result.stdout).toContain("Owner: did:claw:human:def"); - expect(result.stdout).toContain("Expires: 2023-01-01T00:00:00.000Z"); - expect(result.stdout).toContain("Key ID: key-01"); - expect(result.stdout).toContain("Public Key: pub-key"); - expect(result.stdout).toContain("Framework: openclaw"); - expect(result.exitCode).toBeUndefined(); - }); - - it("reads AIT from the expected local file path", async () => { - await runAgentCommand(["inspect", "agent-01"]); - - expect(mockedReadFile).toHaveBeenCalledWith( - "/mock-home/.clawdentity/agents/agent-01/ait.jwt", - "utf-8", - ); - expect(mockedDecodeAIT).toHaveBeenCalledWith("mock-ait-token"); - }); - - it("fails when the AIT file is missing", async () => { - mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); - - const result = await runAgentCommand(["inspect", "agent-01"]); - - expect(result.stderr).toContain("not found"); - 
expect(result.stderr).toContain("ait.jwt"); - expect(result.exitCode).toBe(1); - }); - - it("rejects dot-segment agent names before resolving the AIT path", async () => { - const result = await runAgentCommand(["inspect", ".."]); - - expect(result.stderr).toContain('Agent name must not be "." or "..".'); - expect(result.exitCode).toBe(1); - expect(mockedReadFile).not.toHaveBeenCalled(); - }); - - it("fails when the AIT file is empty", async () => { - mockedReadFile.mockResolvedValueOnce(" \n"); - - const result = await runAgentCommand(["inspect", "agent-01"]); - - expect(result.stderr).toContain("empty"); - expect(result.stderr).toContain("ait.jwt"); - expect(result.exitCode).toBe(1); - }); - - it("fails when AIT decoding fails", async () => { - mockedDecodeAIT.mockImplementationOnce(() => { - throw new Error("Invalid AIT payload"); - }); - - const result = await runAgentCommand(["inspect", "agent-01"]); - - expect(result.stderr).toContain("Invalid AIT payload"); - expect(result.exitCode).toBe(1); - }); - - it("fails on invalid agent names", async () => { - const result = await runAgentCommand(["inspect", "agent/../../etc"]); - - expect(result.stderr).toContain("invalid characters"); - expect(mockedReadFile).not.toHaveBeenCalled(); - expect(result.exitCode).toBe(1); - }); - - it("formats exp as ISO-8601", async () => { - mockedDecodeAIT.mockReturnValueOnce({ - ...decodedAit, - claims: { - ...decodedAit.claims, - exp: 1893456000, - }, - }); - - const result = await runAgentCommand(["inspect", "agent-01"]); - - expect(result.stdout).toContain("Expires: 2030-01-01T00:00:00.000Z"); - expect(result.exitCode).toBeUndefined(); - }); -}); diff --git a/apps/cli/src/commands/agent.test/AGENTS.md b/apps/cli/src/commands/agent.test/AGENTS.md new file mode 100644 index 0000000..03fbcb0 --- /dev/null +++ b/apps/cli/src/commands/agent.test/AGENTS.md @@ -0,0 +1,24 @@ +# AGENTS.md (agent command tests) + +## Purpose +- Keep `agent` command tests modular, deterministic, and easy to 
extend. +- Preserve behavior parity with command output and error handling contracts. + +## File Boundaries +- `helpers.ts`: shared mocks, fixtures, command runner, and reusable setup helpers. +- `create.test.ts`: `agent create` success/failure and file-permission cases. +- `auth-refresh.test.ts`: `agent auth refresh` file loading, refresh calls, and atomic-write checks. +- `revoke.test.ts`: `agent revoke` local identity parsing and registry error handling. +- `inspect.test.ts`: `agent inspect` AIT parsing and validation/output coverage. + +## Test Guardrails +- Keep each test file under 800 LOC. +- Reuse `helpers.ts` for mock setup and path/fixture constants; do not duplicate bootstrap scaffolding. +- Keep tests hermetic: no real filesystem/network calls, no dependence on host env state. +- Preserve stable stdout/stderr assertions and `process.exitCode` assertions for CLI behavior. +- Reset global stubs in `afterEach` whenever `fetch` or other globals are stubbed. + +## Validation +- Run before handoff: + - `pnpm -C apps/cli typecheck` + - `pnpm -C apps/cli test -- agent` diff --git a/apps/cli/src/commands/agent.test/auth-refresh.test.ts b/apps/cli/src/commands/agent.test/auth-refresh.test.ts new file mode 100644 index 0000000..6230253 --- /dev/null +++ b/apps/cli/src/commands/agent.test/auth-refresh.test.ts @@ -0,0 +1,90 @@ +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { + agentPath, + cleanupAgentTestGlobals, + DEFAULT_AGENT_NAME, + mockedRefreshAgentAuthWithClawProof, + mockedRename, + mockedWriteFile, + resetAgentTestMocks, + runAgentCommand, + setupAuthRefreshDefaults, + setupAuthRefreshReadFiles, + stubAgentFetch, +} from "./helpers.js"; + +describe("agent auth refresh command", () => { + beforeEach(() => { + resetAgentTestMocks(); + stubAgentFetch(); + setupAuthRefreshDefaults(); + }); + + afterEach(() => { + cleanupAgentTestGlobals(); + }); + + it("refreshes agent auth and rewrites registry-auth.json", async () => { + const 
result = await runAgentCommand([ + "auth", + "refresh", + DEFAULT_AGENT_NAME, + ]); + + expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( + expect.objectContaining({ + registryUrl: "https://registry.clawdentity.com", + ait: "ait.jwt.value", + refreshToken: "clw_rft_old_refresh", + }), + ); + const [tempPath, tempContents, tempEncoding] = mockedWriteFile.mock + .calls[0] as [string, string, BufferEncoding]; + expect(tempPath).toContain( + `${agentPath(DEFAULT_AGENT_NAME, "registry-auth.json")}.tmp-`, + ); + expect(tempContents).toContain('"refreshToken": "clw_rft_new_refresh"'); + expect(tempEncoding).toBe("utf-8"); + expect(mockedRename).toHaveBeenCalledWith( + tempPath, + agentPath(DEFAULT_AGENT_NAME, "registry-auth.json"), + ); + expect(mockedWriteFile).not.toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "registry-auth.json"), + expect.stringContaining('"refreshToken": "clw_rft_new_refresh"'), + "utf-8", + ); + expect(result.stdout).toContain( + `Agent auth refreshed: ${DEFAULT_AGENT_NAME}`, + ); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when registry-auth.json is missing", async () => { + setupAuthRefreshReadFiles({ missingRegistryAuth: true }); + + const result = await runAgentCommand([ + "auth", + "refresh", + DEFAULT_AGENT_NAME, + ]); + + expect(result.stderr).toContain("registry-auth.json"); + expect(result.exitCode).toBe(1); + expect(mockedRefreshAgentAuthWithClawProof).not.toHaveBeenCalled(); + }); + + it("passes base-path registry urls through to shared refresh client", async () => { + setupAuthRefreshReadFiles({ + registryUrl: "https://registry.clawdentity.com/registry", + }); + + await runAgentCommand(["auth", "refresh", DEFAULT_AGENT_NAME]); + + expect(mockedRefreshAgentAuthWithClawProof).toHaveBeenCalledWith( + expect.objectContaining({ + registryUrl: "https://registry.clawdentity.com/registry", + }), + ); + }); +}); diff --git a/apps/cli/src/commands/agent.test/create.test.ts 
b/apps/cli/src/commands/agent.test/create.test.ts new file mode 100644 index 0000000..926cdc9 --- /dev/null +++ b/apps/cli/src/commands/agent.test/create.test.ts @@ -0,0 +1,218 @@ +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { + agentPath, + cleanupAgentTestGlobals, + createJsonResponse, + DEFAULT_AGENT_DID, + DEFAULT_AGENT_NAME, + mockedAccess, + mockedChmod, + mockedEncodeEd25519SignatureBase64url, + mockedGenerateEd25519Keypair, + mockedMkdir, + mockedResolveConfig, + mockedSignEd25519, + mockedWriteFile, + mockFetch, + resetAgentTestMocks, + runAgentCommand, + setupCreateCommandDefaults, + stubAgentFetch, +} from "./helpers.js"; + +describe("agent create command", () => { + beforeEach(() => { + resetAgentTestMocks(); + stubAgentFetch(); + setupCreateCommandDefaults(); + }); + + afterEach(() => { + cleanupAgentTestGlobals(); + }); + + it("creates an agent identity and writes all files", async () => { + const result = await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(mockedGenerateEd25519Keypair).toHaveBeenCalled(); + expect(mockedSignEd25519).toHaveBeenCalledWith( + expect.any(Uint8Array), + expect.any(Uint8Array), + ); + expect(mockedEncodeEd25519SignatureBase64url).toHaveBeenCalledWith( + Uint8Array.from([1, 2, 3]), + ); + expect(mockFetch).toHaveBeenCalledWith( + "https://registry.clawdentity.com/v1/agents/challenge", + expect.objectContaining({ + method: "POST", + headers: expect.objectContaining({ + authorization: "Bearer pat_123", + "content-type": "application/json", + }), + }), + ); + expect(mockFetch).toHaveBeenCalledWith( + "https://registry.clawdentity.com/v1/agents", + expect.objectContaining({ + method: "POST", + headers: expect.objectContaining({ + authorization: "Bearer pat_123", + "content-type": "application/json", + }), + }), + ); + + expect(mockedWriteFile).toHaveBeenCalledTimes(5); + expect(mockedWriteFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "secret.key"), + 
"secret-key-b64url", + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "public.key"), + "public-key-b64url", + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "identity.json"), + expect.stringContaining(`"did": "${DEFAULT_AGENT_DID}"`), + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "ait.jwt"), + "ait.jwt.value", + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "registry-auth.json"), + expect.stringContaining('"refreshToken": "clw_rft_refresh_token"'), + "utf-8", + ); + + expect(result.stdout).toContain(`Agent DID: ${DEFAULT_AGENT_DID}`); + expect(result.stdout).toContain("Expires At: 2030-01-01T00:00:00.000Z"); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when API key is missing", async () => { + mockedResolveConfig.mockResolvedValueOnce({ + registryUrl: "https://registry.clawdentity.com", + }); + + const result = await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("API key is not configured"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("handles registry 401 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(401, { + error: { + message: "Invalid API key", + }, + }), + ); + + const result = await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("authentication failed"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry 400 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(400, { + error: { + message: "name contains invalid characters", + }, + }), + ); + + const result = await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("rejected the request"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry connection 
errors", async () => { + mockFetch.mockRejectedValueOnce(new Error("socket hang up")); + + const result = await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("Unable to connect to the registry"); + expect(result.exitCode).toBe(1); + }); + + it("fails when agent directory already exists", async () => { + mockedAccess.mockResolvedValueOnce(undefined); + + const result = await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("already exists"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("sets 0600 permissions on every identity file", async () => { + await runAgentCommand(["create", DEFAULT_AGENT_NAME]); + + expect(mockedChmod).toHaveBeenCalledTimes(5); + expect(mockedChmod).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "secret.key"), + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "public.key"), + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "identity.json"), + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "ait.jwt"), + 0o600, + ); + expect(mockedChmod).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "registry-auth.json"), + 0o600, + ); + }); + + it("sends optional framework and ttl-days values", async () => { + await runAgentCommand([ + "create", + DEFAULT_AGENT_NAME, + "--framework", + "langgraph", + "--ttl-days", + "45", + ]); + + const request = mockFetch.mock.calls[1] as [string, RequestInit]; + const requestBody = JSON.parse(String(request[1]?.body)) as { + framework?: string; + ttlDays?: number; + challengeId?: string; + challengeSignature?: string; + }; + + expect(requestBody.framework).toBe("langgraph"); + expect(requestBody.ttlDays).toBe(45); + expect(requestBody.challengeId).toBe("01JCHALLENGEID1234567890ABC"); + expect(requestBody.challengeSignature).toBe("challenge-signature-b64url"); + }); + + 
it("rejects dot-segment agent names before hitting the filesystem", async () => { + const result = await runAgentCommand(["create", "."]); + + expect(result.stderr).toContain('Agent name must not be "." or "..".'); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + expect(mockedMkdir).not.toHaveBeenCalled(); + expect(mockedWriteFile).not.toHaveBeenCalled(); + }); +}); diff --git a/apps/cli/src/commands/agent.test/helpers.ts b/apps/cli/src/commands/agent.test/helpers.ts new file mode 100644 index 0000000..92a9edd --- /dev/null +++ b/apps/cli/src/commands/agent.test/helpers.ts @@ -0,0 +1,337 @@ +import { + access, + chmod, + mkdir, + readFile, + rename, + unlink, + writeFile, +} from "node:fs/promises"; +import { Command } from "commander"; +import { vi } from "vitest"; + +vi.mock("node:fs/promises", () => ({ + access: vi.fn(), + chmod: vi.fn(), + mkdir: vi.fn(), + readFile: vi.fn(), + rename: vi.fn(), + unlink: vi.fn(), + writeFile: vi.fn(), +})); + +vi.mock("../../config/manager.js", () => ({ + getConfigDir: vi.fn(() => "/mock-home/.clawdentity"), + resolveConfig: vi.fn(), +})); + +vi.mock("@clawdentity/sdk", () => ({ + createLogger: vi.fn(() => ({ + child: vi.fn(), + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + })), + decodeAIT: vi.fn(), + encodeEd25519SignatureBase64url: vi.fn(), + encodeEd25519KeypairBase64url: vi.fn(), + generateEd25519Keypair: vi.fn(), + nowUtcMs: vi.fn(() => 1_700_000_000_000), + refreshAgentAuthWithClawProof: vi.fn(), + signEd25519: vi.fn(), + toIso: vi.fn((value: Date | string | number) => + new Date(value).toISOString(), + ), +})); + +import { + type DecodedAit, + decodeAIT, + encodeEd25519KeypairBase64url, + encodeEd25519SignatureBase64url, + generateEd25519Keypair, + nowUtcMs, + refreshAgentAuthWithClawProof, + signEd25519, + toIso, +} from "@clawdentity/sdk"; +import { resolveConfig } from "../../config/manager.js"; +import { createAgentCommand } from "../agent.js"; + +export const 
DEFAULT_REGISTRY_URL = "https://registry.clawdentity.com"; +export const DEFAULT_API_KEY = "pat_123"; +export const DEFAULT_AGENT_NAME = "agent-01"; +export const DEFAULT_AGENT_DID = "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"; +export const DEFAULT_AGENT_ID = "01HF7YAT00W6W7CM7N3W5FDXT4"; + +export const mockedAccess = vi.mocked(access); +export const mockedChmod = vi.mocked(chmod); +export const mockedMkdir = vi.mocked(mkdir); +export const mockedReadFile = vi.mocked(readFile); +export const mockedRename = vi.mocked(rename); +export const mockedUnlink = vi.mocked(unlink); +export const mockedWriteFile = vi.mocked(writeFile); +export const mockedResolveConfig = vi.mocked(resolveConfig); +export const mockedGenerateEd25519Keypair = vi.mocked(generateEd25519Keypair); +export const mockedNowUtcMs = vi.mocked(nowUtcMs); +export const mockedRefreshAgentAuthWithClawProof = vi.mocked( + refreshAgentAuthWithClawProof, +); +export const mockedSignEd25519 = vi.mocked(signEd25519); +export const mockedEncodeEd25519SignatureBase64url = vi.mocked( + encodeEd25519SignatureBase64url, +); +export const mockedEncodeEd25519KeypairBase64url = vi.mocked( + encodeEd25519KeypairBase64url, +); +export const mockedDecodeAIT = vi.mocked(decodeAIT); +export const mockedToIso = vi.mocked(toIso); + +export const mockFetch = vi.fn(); + +export const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +export const createJsonResponse = (status: number, body: unknown): Response => { + return { + ok: status >= 200 && status < 300, + status, + json: vi.fn(async () => body), + } as unknown as Response; +}; + +export const agentPath = (agentName: string, fileName: string): string => + `/mock-home/.clawdentity/agents/${agentName}/${fileName}`; + +export const runAgentCommand = async (args: string[]) => { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = 
process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + const command = createAgentCommand(); + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "agent", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +}; + +export const resetAgentTestMocks = () => { + vi.clearAllMocks(); + mockFetch.mockReset(); +}; + +export const stubAgentFetch = () => { + vi.stubGlobal("fetch", mockFetch); +}; + +export const resetProcessExitCode = () => { + process.exitCode = undefined; +}; + +export const cleanupAgentTestGlobals = () => { + process.exitCode = undefined; + vi.unstubAllGlobals(); +}; + +export const setupCreateCommandDefaults = () => { + mockedResolveConfig.mockResolvedValue({ + registryUrl: DEFAULT_REGISTRY_URL, + apiKey: DEFAULT_API_KEY, + }); + + mockedAccess.mockRejectedValue(buildErrnoError("ENOENT")); + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + mockedRename.mockResolvedValue(undefined); + mockedUnlink.mockResolvedValue(undefined); + mockedChmod.mockResolvedValue(undefined); + + mockedGenerateEd25519Keypair.mockResolvedValue({ + publicKey: Uint8Array.from({ length: 32 }, (_, index) => index + 1), + secretKey: Uint8Array.from({ length: 32 }, (_, index) => 64 - index), + }); + 
mockedNowUtcMs.mockReturnValue(1_700_000_000_000); + mockedToIso.mockImplementation((value: Date | string | number) => + new Date(value).toISOString(), + ); + + mockedEncodeEd25519KeypairBase64url.mockReturnValue({ + publicKey: "public-key-b64url", + secretKey: "secret-key-b64url", + }); + + mockedSignEd25519.mockResolvedValue(Uint8Array.from([1, 2, 3])); + mockedEncodeEd25519SignatureBase64url.mockReturnValue( + "challenge-signature-b64url", + ); + + mockFetch.mockImplementation(async (input) => { + const url = String(input); + if (url.endsWith("/v1/agents/challenge")) { + return createJsonResponse(201, { + challengeId: "01JCHALLENGEID1234567890ABC", + nonce: "challenge-nonce-b64url", + ownerDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + expiresAt: "2030-01-01T00:05:00.000Z", + }); + } + + return createJsonResponse(201, { + agent: { + did: DEFAULT_AGENT_DID, + name: DEFAULT_AGENT_NAME, + framework: "openclaw", + expiresAt: "2030-01-01T00:00:00.000Z", + }, + ait: "ait.jwt.value", + agentAuth: { + tokenType: "Bearer", + accessToken: "clw_agt_access_token", + accessExpiresAt: "2030-01-01T00:15:00.000Z", + refreshToken: "clw_rft_refresh_token", + refreshExpiresAt: "2030-01-31T00:00:00.000Z", + }, + }); + }); +}; + +type AuthReadFileOptions = { + missingRegistryAuth?: boolean; + registryUrl?: string; +}; + +export const setupAuthRefreshReadFiles = ( + options: AuthReadFileOptions = {}, +) => { + const { missingRegistryAuth = false, registryUrl = DEFAULT_REGISTRY_URL } = + options; + mockedReadFile.mockImplementation(async (path) => { + const filePath = String(path); + if (filePath.endsWith("/ait.jwt")) { + return "ait.jwt.value"; + } + if (filePath.endsWith("/identity.json")) { + return JSON.stringify({ + did: DEFAULT_AGENT_DID, + registryUrl, + }); + } + if (filePath.endsWith("/secret.key")) { + return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; + } + if (filePath.endsWith("/registry-auth.json")) { + if (missingRegistryAuth) { + throw 
buildErrnoError("ENOENT"); + } + return JSON.stringify({ + tokenType: "Bearer", + accessToken: "clw_agt_old_access", + accessExpiresAt: "2030-01-01T00:15:00.000Z", + refreshToken: "clw_rft_old_refresh", + refreshExpiresAt: "2030-01-31T00:00:00.000Z", + }); + } + + throw buildErrnoError("ENOENT"); + }); +}; + +export const setupAuthRefreshDefaults = () => { + setupAuthRefreshReadFiles(); + mockedRefreshAgentAuthWithClawProof.mockResolvedValue({ + tokenType: "Bearer", + accessToken: "clw_agt_new_access", + accessExpiresAt: "2030-01-02T00:15:00.000Z", + refreshToken: "clw_rft_new_refresh", + refreshExpiresAt: "2030-02-01T00:00:00.000Z", + }); +}; + +export const setupRevokeDefaults = () => { + mockedResolveConfig.mockResolvedValue({ + registryUrl: DEFAULT_REGISTRY_URL, + apiKey: DEFAULT_API_KEY, + }); + + mockedReadFile.mockResolvedValue( + JSON.stringify({ + did: DEFAULT_AGENT_DID, + }), + ); + + mockFetch.mockResolvedValue( + createJsonResponse(204, { + ok: true, + }), + ); +}; + +export const decodedAitFixture: DecodedAit = { + header: { + alg: "EdDSA", + typ: "AIT", + kid: "key-01", + }, + claims: { + iss: "https://registry.clawdentity.dev", + sub: "did:claw:agent:abc", + ownerDid: "did:claw:human:def", + name: "agent-01", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: "pub-key", + }, + }, + iat: 1672531100, + nbf: 1672531100, + exp: 1672531200, + jti: "01HF7YAT00W6W7CM7N3W5FDXT4", + }, +}; + +export const setupInspectDefaults = () => { + mockedReadFile.mockResolvedValue("mock-ait-token"); + mockedDecodeAIT.mockReturnValue(decodedAitFixture); +}; diff --git a/apps/cli/src/commands/agent.test/inspect.test.ts b/apps/cli/src/commands/agent.test/inspect.test.ts new file mode 100644 index 0000000..d79ea8d --- /dev/null +++ b/apps/cli/src/commands/agent.test/inspect.test.ts @@ -0,0 +1,108 @@ +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { + agentPath, + buildErrnoError, + DEFAULT_AGENT_NAME, + 
decodedAitFixture, + mockedDecodeAIT, + mockedReadFile, + resetAgentTestMocks, + resetProcessExitCode, + runAgentCommand, + setupInspectDefaults, +} from "./helpers.js"; + +describe("agent inspect command", () => { + beforeEach(() => { + resetAgentTestMocks(); + setupInspectDefaults(); + }); + + afterEach(() => { + resetProcessExitCode(); + }); + + it("displays all six decoded AIT fields", async () => { + const result = await runAgentCommand(["inspect", DEFAULT_AGENT_NAME]); + + expect(result.stdout).toContain("DID: did:claw:agent:abc"); + expect(result.stdout).toContain("Owner: did:claw:human:def"); + expect(result.stdout).toContain("Expires: 2023-01-01T00:00:00.000Z"); + expect(result.stdout).toContain("Key ID: key-01"); + expect(result.stdout).toContain("Public Key: pub-key"); + expect(result.stdout).toContain("Framework: openclaw"); + expect(result.exitCode).toBeUndefined(); + }); + + it("reads AIT from the expected local file path", async () => { + await runAgentCommand(["inspect", DEFAULT_AGENT_NAME]); + + expect(mockedReadFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "ait.jwt"), + "utf-8", + ); + expect(mockedDecodeAIT).toHaveBeenCalledWith("mock-ait-token"); + }); + + it("fails when the AIT file is missing", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + const result = await runAgentCommand(["inspect", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("not found"); + expect(result.stderr).toContain("ait.jwt"); + expect(result.exitCode).toBe(1); + }); + + it("rejects dot-segment agent names before resolving the AIT path", async () => { + const result = await runAgentCommand(["inspect", ".."]); + + expect(result.stderr).toContain('Agent name must not be "." 
or "..".'); + expect(result.exitCode).toBe(1); + expect(mockedReadFile).not.toHaveBeenCalled(); + }); + + it("fails when the AIT file is empty", async () => { + mockedReadFile.mockResolvedValueOnce(" \n"); + + const result = await runAgentCommand(["inspect", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("empty"); + expect(result.stderr).toContain("ait.jwt"); + expect(result.exitCode).toBe(1); + }); + + it("fails when AIT decoding fails", async () => { + mockedDecodeAIT.mockImplementationOnce(() => { + throw new Error("Invalid AIT payload"); + }); + + const result = await runAgentCommand(["inspect", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("Invalid AIT payload"); + expect(result.exitCode).toBe(1); + }); + + it("fails on invalid agent names", async () => { + const result = await runAgentCommand(["inspect", "agent/../../etc"]); + + expect(result.stderr).toContain("invalid characters"); + expect(mockedReadFile).not.toHaveBeenCalled(); + expect(result.exitCode).toBe(1); + }); + + it("formats exp as ISO-8601", async () => { + mockedDecodeAIT.mockReturnValueOnce({ + ...decodedAitFixture, + claims: { + ...decodedAitFixture.claims, + exp: 1893456000, + }, + }); + + const result = await runAgentCommand(["inspect", DEFAULT_AGENT_NAME]); + + expect(result.stdout).toContain("Expires: 2030-01-01T00:00:00.000Z"); + expect(result.exitCode).toBeUndefined(); + }); +}); diff --git a/apps/cli/src/commands/agent.test/revoke.test.ts b/apps/cli/src/commands/agent.test/revoke.test.ts new file mode 100644 index 0000000..c71d2e1 --- /dev/null +++ b/apps/cli/src/commands/agent.test/revoke.test.ts @@ -0,0 +1,180 @@ +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { + agentPath, + buildErrnoError, + cleanupAgentTestGlobals, + createJsonResponse, + DEFAULT_AGENT_DID, + DEFAULT_AGENT_ID, + DEFAULT_AGENT_NAME, + mockedReadFile, + mockedResolveConfig, + mockFetch, + resetAgentTestMocks, + runAgentCommand, + setupRevokeDefaults, + 
stubAgentFetch, +} from "./helpers.js"; + +describe("agent revoke command", () => { + beforeEach(() => { + resetAgentTestMocks(); + stubAgentFetch(); + setupRevokeDefaults(); + }); + + afterEach(() => { + cleanupAgentTestGlobals(); + }); + + it("revokes agent by local name and prints confirmation", async () => { + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(mockedReadFile).toHaveBeenCalledWith( + agentPath(DEFAULT_AGENT_NAME, "identity.json"), + "utf-8", + ); + expect(mockFetch).toHaveBeenCalledWith( + `https://registry.clawdentity.com/v1/agents/${DEFAULT_AGENT_ID}`, + expect.objectContaining({ + method: "DELETE", + headers: expect.objectContaining({ + authorization: "Bearer pat_123", + }), + }), + ); + + expect(result.stdout).toContain( + `Agent revoked: ${DEFAULT_AGENT_NAME} (${DEFAULT_AGENT_DID})`, + ); + expect(result.stdout).toContain( + "CRL visibility depends on verifier refresh interval.", + ); + expect(result.exitCode).toBeUndefined(); + }); + + it("treats repeat revoke as success (idempotent 204)", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(204, { + ok: true, + }), + ); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stdout).toContain(`Agent revoked: ${DEFAULT_AGENT_NAME}`); + expect(result.exitCode).toBeUndefined(); + }); + + it("fails when API key is missing", async () => { + mockedResolveConfig.mockResolvedValueOnce({ + registryUrl: "https://registry.clawdentity.com", + }); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("API key is not configured"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails when local identity.json does not exist", async () => { + mockedReadFile.mockRejectedValueOnce(buildErrnoError("ENOENT")); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("not 
found"); + expect(result.stderr).toContain("identity.json"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails when identity.json is invalid JSON", async () => { + mockedReadFile.mockResolvedValueOnce("{ did:"); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("identity.json"); + expect(result.stderr).toContain("valid JSON"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("fails when identity did is invalid", async () => { + mockedReadFile.mockResolvedValueOnce( + JSON.stringify({ + did: "invalid-did", + }), + ); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("invalid did"); + expect(result.stderr).toContain("identity.json"); + expect(result.exitCode).toBe(1); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it("handles registry 401 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(401, { + error: { + message: "Invalid API key", + }, + }), + ); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("authentication failed"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry 404 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(404, { + error: { + message: "Agent not found", + }, + }), + ); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("Agent not found"); + expect(result.exitCode).toBe(1); + }); + + it("handles registry 409 responses", async () => { + mockFetch.mockResolvedValueOnce( + createJsonResponse(409, { + error: { + message: "Agent cannot be revoked", + }, + }), + ); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("cannot be revoked"); + expect(result.exitCode).toBe(1); + }); + + 
it("handles registry connection errors", async () => { + mockFetch.mockRejectedValueOnce(new Error("socket hang up")); + + const result = await runAgentCommand(["revoke", DEFAULT_AGENT_NAME]); + + expect(result.stderr).toContain("Unable to connect to the registry"); + expect(result.exitCode).toBe(1); + }); + + it("rejects dot-segment agent names before resolving identity path", async () => { + const result = await runAgentCommand(["revoke", ".."]); + + expect(result.stderr).toContain('Agent name must not be "." or "..".'); + expect(result.exitCode).toBe(1); + expect(mockedReadFile).not.toHaveBeenCalled(); + expect(mockFetch).not.toHaveBeenCalled(); + }); +}); From d860f2d8d31c23aa0ee68426241273d80cea398e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 17:15:39 +0530 Subject: [PATCH 132/190] test(cli,proxy): split oversized pair and auth middleware tests --- apps/cli/src/commands/pair.test.ts | 1005 ----------------- apps/cli/src/commands/pair.test/AGENTS.md | 23 + .../src/commands/pair.test/confirm.test.ts | 246 ++++ apps/cli/src/commands/pair.test/helpers.ts | 166 +++ .../cli/src/commands/pair.test/output.test.ts | 165 +++ apps/cli/src/commands/pair.test/start.test.ts | 227 ++++ .../cli/src/commands/pair.test/status.test.ts | 223 ++++ apps/proxy/src/auth-middleware.test.ts | 944 ---------------- apps/proxy/src/auth-middleware.test/AGENTS.md | 18 + .../auth-middleware.test/agent-access.test.ts | 131 +++ .../src/auth-middleware.test/basic.test.ts | 124 ++ .../proxy/src/auth-middleware.test/helpers.ts | 261 +++++ .../auth-middleware.test/robustness.test.ts | 188 +++ .../src/auth-middleware.test/rotation.test.ts | 192 ++++ 14 files changed, 1964 insertions(+), 1949 deletions(-) delete mode 100644 apps/cli/src/commands/pair.test.ts create mode 100644 apps/cli/src/commands/pair.test/AGENTS.md create mode 100644 apps/cli/src/commands/pair.test/confirm.test.ts create mode 100644 apps/cli/src/commands/pair.test/helpers.ts create mode 100644 
apps/cli/src/commands/pair.test/output.test.ts create mode 100644 apps/cli/src/commands/pair.test/start.test.ts create mode 100644 apps/cli/src/commands/pair.test/status.test.ts delete mode 100644 apps/proxy/src/auth-middleware.test.ts create mode 100644 apps/proxy/src/auth-middleware.test/AGENTS.md create mode 100644 apps/proxy/src/auth-middleware.test/agent-access.test.ts create mode 100644 apps/proxy/src/auth-middleware.test/basic.test.ts create mode 100644 apps/proxy/src/auth-middleware.test/helpers.ts create mode 100644 apps/proxy/src/auth-middleware.test/robustness.test.ts create mode 100644 apps/proxy/src/auth-middleware.test/rotation.test.ts diff --git a/apps/cli/src/commands/pair.test.ts b/apps/cli/src/commands/pair.test.ts deleted file mode 100644 index 25ad1a0..0000000 --- a/apps/cli/src/commands/pair.test.ts +++ /dev/null @@ -1,1005 +0,0 @@ -import { - encodeEd25519KeypairBase64url, - generateEd25519Keypair, -} from "@clawdentity/sdk"; -import { Command } from "commander"; -import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import { resetClawdentityEnv } from "../test-env.js"; -import { - confirmPairing, - createPairCommand, - getPairingStatus, - startPairing, -} from "./pair.js"; - -const buildErrnoError = (code: string): NodeJS.ErrnoException => { - const error = new Error(code) as NodeJS.ErrnoException; - error.code = code; - return error; -}; - -type PairFixture = { - ait: string; - secretKeyBase64url: string; -}; - -const INITIATOR_PROFILE = { - agentName: "alpha", - humanName: "Ravi", -}; - -const RESPONDER_PROFILE = { - agentName: "beta", - humanName: "Ira", -}; - -const createPairFixture = async (): Promise => { - const keypair = await generateEd25519Keypair(); - const encoded = encodeEd25519KeypairBase64url(keypair); - const header = Buffer.from(JSON.stringify({ alg: "EdDSA", typ: "JWT" })) - .toString("base64url") - .trim(); - const payload = Buffer.from( - JSON.stringify({ - sub: 
"did:claw:agent:01HAAA11111111111111111111", - }), - ) - .toString("base64url") - .trim(); - - return { - ait: `${header}.${payload}.sig`, - secretKeyBase64url: encoded.secretKey, - }; -}; - -const createReadFileMock = (fixture: PairFixture) => { - return vi.fn(async (filePath: string, encoding?: BufferEncoding) => { - if (filePath.endsWith("/ait.jwt")) { - return fixture.ait; - } - - if (filePath.endsWith("/secret.key")) { - return fixture.secretKeyBase64url; - } - - if (filePath.endsWith("pair.png")) { - if (encoding) { - return ""; - } - return new Uint8Array([1, 2, 3, 4]); - } - - throw buildErrnoError("ENOENT"); - }); -}; - -const previousEnv = process.env; - -describe("pair command helpers", () => { - beforeEach(() => { - vi.clearAllMocks(); - process.env = resetClawdentityEnv(previousEnv); - }); - - afterEach(() => { - process.env = previousEnv; - }); - - it("starts pairing with local agent proof and configured owner PAT", async () => { - const fixture = await createPairFixture(); - const readFileImpl = createReadFileMock(fixture); - const readdirImpl = vi.fn(async () => [ - "alpha-pair-1699999000.png", - "alpha-pair-1699999500.png", - "notes.txt", - ]); - const unlinkImpl = vi.fn(async () => undefined); - const writeFileImpl = vi.fn( - async ( - _filePath: string, - _data: string | Uint8Array, - _encoding?: BufferEncoding, - ) => undefined, - ); - const mkdirImpl = vi.fn(async () => undefined); - const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - ticket: "clwpair1_eyJ2IjoxfQ", - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }); - - const result = await startPairing( - "alpha", - { - ttlSeconds: "900", - qr: true, - }, - { - 
fetchImpl: fetchImpl as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-start", - readFileImpl: - readFileImpl as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: - writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: - mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, - readdirImpl: - readdirImpl as unknown as typeof import("node:fs/promises").readdir, - unlinkImpl: - unlinkImpl as unknown as typeof import("node:fs/promises").unlink, - qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), - resolveConfigImpl: async () => ({ - registryUrl: "https://dev.registry.clawdentity.com/", - humanName: INITIATOR_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.ticket).toBe("clwpair1_eyJ2IjoxfQ"); - expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); - expect(result.qrPath).toContain( - "/tmp/.clawdentity/pairing/alpha-pair-1700000000.png", - ); - expect(readdirImpl).toHaveBeenCalledTimes(1); - expect(unlinkImpl).toHaveBeenCalledTimes(1); - expect(unlinkImpl).toHaveBeenCalledWith( - "/tmp/.clawdentity/pairing/alpha-pair-1699999000.png", - ); - expect(writeFileImpl).toHaveBeenCalledTimes(1); - expect(mkdirImpl).toHaveBeenCalledTimes(1); - const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; - expect(init?.method).toBe("POST"); - const headers = new Headers(init?.headers); - expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); - expect(headers.get("x-claw-proof")).toBeTruthy(); - expect(headers.get("x-claw-body-sha256")).toBeTruthy(); - expect(headers.get("x-claw-timestamp")).toBe("1700000000"); - expect(headers.get("x-claw-nonce")).toBe("nonce-start"); - expect(String(init?.body ?? "")).toContain("ttlSeconds"); - expect(String(init?.body ?? 
"")).toContain("initiatorProfile"); - }); - - it("uses CLAWDENTITY_PROXY_URL when no proxy override options are present", async () => { - process.env.CLAWDENTITY_PROXY_URL = "https://env.proxy.example"; - const fixture = await createPairFixture(); - - const result = await startPairing( - "alpha", - {}, - { - fetchImpl: (async () => - Response.json( - { - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - ticket: "clwpair1_eyJ2IjoxfQ", - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - )) as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-start", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - resolveConfigImpl: async () => ({ - registryUrl: "https://dev.registry.clawdentity.com/", - humanName: INITIATOR_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.proxyUrl).toBe("https://env.proxy.example/"); - }); - - it("uses registry metadata proxyUrl when env override is omitted", async () => { - const fixture = await createPairFixture(); - - const result = await startPairing( - "alpha", - {}, - { - fetchImpl: (async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://saved.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - ticket: "clwpair1_eyJ2IjoxfQ", - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }) as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-start", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - resolveConfigImpl: async () => ({ - registryUrl: "https://dev.registry.clawdentity.com/", - proxyUrl: 
"https://saved.proxy.example", - humanName: INITIATOR_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.proxyUrl).toBe("https://saved.proxy.example/"); - }); - - it("fails when configured proxyUrl does not match registry metadata", async () => { - const fixture = await createPairFixture(); - - await expect( - startPairing( - "alpha", - {}, - { - fetchImpl: (async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://proxy.clawdentity.com", - }, - { status: 200 }, - ); - } - - return Response.json( - { - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - ticket: "clwpair1_eyJ2IjoxfQ", - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }) as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-start", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - proxyUrl: "https://stale.proxy.clawdentity.com", - humanName: INITIATOR_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ), - ).rejects.toMatchObject({ - code: "CLI_PAIR_PROXY_URL_MISMATCH", - }); - }); - - it("routes confirm to ticket issuer proxy when local proxy origin differs", async () => { - const fixture = await createPairFixture(); - const ticket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const fetchImpl = vi.fn(async (url: string, init?: RequestInit) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://beta.proxy.example", - }, - { status: 200 }, - ); - } - - expect(url).toBe("https://alpha.proxy.example/pair/confirm"); - const requestBody = JSON.parse(String(init?.body ?? 
"{}")) as { - responderProfile?: { proxyOrigin?: string }; - }; - expect(requestBody.responderProfile?.proxyOrigin).toBe( - "https://beta.proxy.example", - ); - - return Response.json( - { - paired: true, - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", - responderProfile: RESPONDER_PROFILE, - }, - { status: 201 }, - ); - }); - - const result = await confirmPairing( - "beta", - { - ticket, - }, - { - fetchImpl: fetchImpl as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-confirm", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").mkdir, - chmodImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").chmod, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - humanName: RESPONDER_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); - }); - - it("normalizes wrapped tickets before pair status request", async () => { - const fixture = await createPairFixture(); - const ticket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const wrappedTicket = `\`\n${ticket.slice(0, 18)}\n${ticket.slice(18)}\n\``; - const fetchImpl = vi.fn(async (url: string, init?: RequestInit) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - const requestBody = JSON.parse(String(init?.body ?? 
"{}")) as { - ticket?: string; - }; - expect(requestBody.ticket).toBe(ticket); - - return Response.json( - { - status: "pending", - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }); - - const result = await getPairingStatus( - "alpha", - { - ticket: wrappedTicket, - }, - { - fetchImpl: fetchImpl as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-status", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.status).toBe("pending"); - }); - - it("confirms pairing with qr-file ticket decode", async () => { - const fixture = await createPairFixture(); - const unlinkImpl = vi.fn(async () => undefined); - const mkdirImpl = vi.fn(async () => undefined); - const writeFileImpl = vi.fn(async () => undefined); - const chmodImpl = vi.fn(async () => undefined); - const qrTicket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - paired: true, - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", - responderProfile: RESPONDER_PROFILE, - }, - { status: 201 }, - ); - }); - - const result = await confirmPairing( - "beta", - { - qrFile: "/tmp/pair.png", - }, - { - fetchImpl: fetchImpl as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - 
nonceFactoryImpl: () => "nonce-confirm", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - unlinkImpl: - unlinkImpl as unknown as typeof import("node:fs/promises").unlink, - mkdirImpl: - mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, - writeFileImpl: - writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, - chmodImpl: - chmodImpl as unknown as typeof import("node:fs/promises").chmod, - qrDecodeImpl: () => qrTicket, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - humanName: RESPONDER_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.paired).toBe(true); - expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); - expect(result.peerAlias).toBe("peer-11111111"); - const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; - const headers = new Headers(init?.headers); - expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); - expect(headers.get("x-claw-proof")).toBeTruthy(); - expect(headers.get("x-claw-body-sha256")).toBeTruthy(); - expect(headers.get("x-claw-owner-pat")).toBeNull(); - expect(headers.get("x-claw-timestamp")).toBe("1700000000"); - expect(headers.get("x-claw-nonce")).toBe("nonce-confirm"); - expect(String(init?.body ?? "")).toContain(qrTicket); - expect(String(init?.body ?? 
"")).toContain("responderProfile"); - expect(unlinkImpl).toHaveBeenCalledTimes(1); - expect(unlinkImpl).toHaveBeenCalledWith("/tmp/pair.png"); - expect(writeFileImpl).toHaveBeenCalledTimes(1); - expect(chmodImpl).toHaveBeenCalledTimes(1); - }); - - it("syncs OpenClaw relay peers snapshot after pair confirm", async () => { - const fixture = await createPairFixture(); - const runtimeConfigPath = "/tmp/.clawdentity/openclaw-relay.json"; - const relayPeersPath = - "/tmp/.openclaw/hooks/transforms/clawdentity-peers.json"; - const ticket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - - const readFileImpl = vi.fn( - async (filePath: string, _encoding?: BufferEncoding) => { - if (filePath.endsWith("/ait.jwt")) { - return fixture.ait; - } - - if (filePath.endsWith("/secret.key")) { - return fixture.secretKeyBase64url; - } - - if (filePath === runtimeConfigPath) { - return JSON.stringify({ - openclawBaseUrl: "http://127.0.0.1:18789", - relayTransformPeersPath: relayPeersPath, - }); - } - - if (filePath === relayPeersPath) { - return JSON.stringify({ peers: {} }); - } - - throw buildErrnoError("ENOENT"); - }, - ); - const writeFileImpl = vi.fn(async () => undefined); - const mkdirImpl = vi.fn(async () => undefined); - const chmodImpl = vi.fn(async () => undefined); - const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - paired: true, - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", - responderProfile: RESPONDER_PROFILE, - }, - { status: 201 }, - ); - }); - - const result = await confirmPairing( - "beta", - { - ticket, - }, - { - fetchImpl: fetchImpl as unknown as typeof fetch, - 
nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-confirm", - readFileImpl: - readFileImpl as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: - writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: - mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, - chmodImpl: - chmodImpl as unknown as typeof import("node:fs/promises").chmod, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - humanName: RESPONDER_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.paired).toBe(true); - expect(result.peerAlias).toBe("peer-11111111"); - expect(writeFileImpl).toHaveBeenCalledWith( - "/tmp/.clawdentity/peers.json", - expect.any(String), - "utf8", - ); - expect(writeFileImpl).toHaveBeenCalledWith( - relayPeersPath, - expect.any(String), - "utf8", - ); - expect(mkdirImpl).toHaveBeenCalledTimes(2); - expect(chmodImpl).toHaveBeenCalledTimes(2); - }); - - it("checks pending pair status without persisting peers", async () => { - const fixture = await createPairFixture(); - const writeFileImpl = vi.fn(async () => undefined); - const mkdirImpl = vi.fn(async () => undefined); - const chmodImpl = vi.fn(async () => undefined); - const ticket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const fetchImpl = vi.fn(async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - status: "pending", - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }); - - const result = await getPairingStatus( - "alpha", - { - ticket, - }, - { - fetchImpl: fetchImpl as unknown as typeof fetch, - 
nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-status", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: - writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: - mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, - chmodImpl: - chmodImpl as unknown as typeof import("node:fs/promises").chmod, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }, - ); - - expect(result.status).toBe("pending"); - expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); - expect(result.peerAlias).toBeUndefined(); - expect(writeFileImpl).toHaveBeenCalledTimes(0); - expect(mkdirImpl).toHaveBeenCalledTimes(0); - expect(chmodImpl).toHaveBeenCalledTimes(0); - }); - - it("polls pair status until confirmed and persists peer for initiator", async () => { - const fixture = await createPairFixture(); - const writeFileImpl = vi.fn( - async ( - _filePath: string, - _data: string | Uint8Array, - _encoding?: BufferEncoding, - ) => undefined, - ); - const mkdirImpl = vi.fn(async () => undefined); - const chmodImpl = vi.fn(async () => undefined); - const sleepImpl = vi.fn(async () => undefined); - const ticket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const statusResponses = [ - { - status: "pending", - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { - status: "confirmed", - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", - responderProfile: { - ...RESPONDER_PROFILE, - proxyOrigin: "https://beta.proxy.example", - }, - expiresAt: "2026-02-18T00:00:00.000Z", - 
confirmedAt: "2026-02-18T00:00:05.000Z", - }, - ]; - let statusIndex = 0; - const fetchImpl = vi.fn(async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - const payload = - statusResponses[Math.min(statusIndex, statusResponses.length - 1)]; - statusIndex += 1; - return Response.json(payload, { status: 200 }); - }); - - const nowSequence = [1_700_000_000, 1_700_000_001, 1_700_000_002]; - const result = await getPairingStatus( - "alpha", - { - ticket, - wait: true, - waitSeconds: "10", - pollIntervalSeconds: "1", - }, - { - fetchImpl: fetchImpl as unknown as typeof fetch, - nowSecondsImpl: () => nowSequence.shift() ?? 1_700_000_003, - nonceFactoryImpl: () => "nonce-status", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: - writeFileImpl as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: - mkdirImpl as unknown as typeof import("node:fs/promises").mkdir, - chmodImpl: - chmodImpl as unknown as typeof import("node:fs/promises").chmod, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - sleepImpl, - }, - ); - - expect(result.status).toBe("confirmed"); - expect(result.peerAlias).toBe("peer-22222222"); - expect(sleepImpl).toHaveBeenCalledTimes(1); - expect(writeFileImpl).toHaveBeenCalledTimes(1); - expect(mkdirImpl).toHaveBeenCalledTimes(1); - expect(chmodImpl).toHaveBeenCalledTimes(1); - const peerWriteCall = writeFileImpl.mock.calls[0]; - const persistedPeers = JSON.parse(String(peerWriteCall?.[1] ?? 
"{}")) as { - peers: { - [key: string]: { - did: string; - proxyUrl: string; - }; - }; - }; - expect(persistedPeers.peers["peer-22222222"]?.proxyUrl).toBe( - "https://beta.proxy.example/hooks/agent", - ); - }); -}); - -const runPairCommand = async ( - args: string[], - command = createPairCommand(), -): Promise<{ - exitCode: number | undefined; - stderr: string; - stdout: string; -}> => { - const stdout: string[] = []; - const stderr: string[] = []; - const previousExitCode = process.exitCode; - - const stdoutSpy = vi - .spyOn(process.stdout, "write") - .mockImplementation((chunk: unknown) => { - stdout.push(String(chunk)); - return true; - }); - - const stderrSpy = vi - .spyOn(process.stderr, "write") - .mockImplementation((chunk: unknown) => { - stderr.push(String(chunk)); - return true; - }); - - process.exitCode = undefined; - - command.configureOutput({ - writeOut: (message) => stdout.push(message), - writeErr: (message) => stderr.push(message), - outputError: (message) => stderr.push(message), - }); - - const root = new Command("clawdentity"); - root.addCommand(command); - - try { - await root.parseAsync(["node", "clawdentity", "pair", ...args]); - } finally { - stdoutSpy.mockRestore(); - stderrSpy.mockRestore(); - } - - const exitCode = process.exitCode; - process.exitCode = previousExitCode; - - return { - exitCode, - stderr: stderr.join(""), - stdout: stdout.join(""), - }; -}; - -describe("pair command output", () => { - beforeEach(() => { - vi.clearAllMocks(); - process.env = resetClawdentityEnv(previousEnv); - }); - - afterEach(() => { - process.env = previousEnv; - }); - - it("prints pairing ticket from pair start", async () => { - const fixture = await createPairFixture(); - const command = createPairCommand({ - fetchImpl: (async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - 
initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - ticket: "clwpair1_eyJ2IjoxfQ", - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }) as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-start", - qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").mkdir, - resolveConfigImpl: async () => ({ - registryUrl: "https://dev.registry.clawdentity.com/", - apiKey: "clw_pat_configured", - humanName: INITIATOR_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }); - - const result = await runPairCommand(["start", "alpha", "--qr"], command); - - expect(result.exitCode).toBeUndefined(); - expect(result.stdout).toContain("Pairing ticket created"); - expect(result.stdout).toContain("Ticket: clwpair1_eyJ2IjoxfQ"); - expect(result.stdout).toContain("QR File: "); - }); - - it("prints saved peer alias from pair confirm", async () => { - const fixture = await createPairFixture(); - const qrTicket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const command = createPairCommand({ - fetchImpl: (async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - paired: true, - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", - responderProfile: RESPONDER_PROFILE, - }, - { status: 201 }, - ); - }) as 
unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-confirm", - readFileImpl: createReadFileMock( - fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - writeFileImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").writeFile, - mkdirImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").mkdir, - chmodImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").chmod, - unlinkImpl: vi.fn( - async () => undefined, - ) as unknown as typeof import("node:fs/promises").unlink, - qrDecodeImpl: () => qrTicket, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - humanName: RESPONDER_PROFILE.humanName, - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }); - - const result = await runPairCommand( - ["confirm", "beta", "--qr-file", "/tmp/pair.png"], - command, - ); - - expect(result.exitCode).toBeUndefined(); - expect(result.stdout).toContain("Pairing confirmed"); - expect(result.stdout).toContain("Peer alias saved: peer-11111111"); - }); - - it("prints pairing status from pair status", async () => { - const fixture = await createPairFixture(); - const ticket = `clwpair1_${Buffer.from( - JSON.stringify({ iss: "https://alpha.proxy.example" }), - ).toString("base64url")}`; - const command = createPairCommand({ - fetchImpl: (async (url: string) => { - if (url.endsWith("/v1/metadata")) { - return Response.json( - { - status: "ok", - proxyUrl: "https://alpha.proxy.example", - }, - { status: 200 }, - ); - } - - return Response.json( - { - status: "pending", - initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", - initiatorProfile: INITIATOR_PROFILE, - expiresAt: "2026-02-18T00:00:00.000Z", - }, - { status: 200 }, - ); - }) as unknown as typeof fetch, - nowSecondsImpl: () => 1_700_000_000, - nonceFactoryImpl: () => "nonce-status", - readFileImpl: createReadFileMock( - 
fixture, - ) as unknown as typeof import("node:fs/promises").readFile, - resolveConfigImpl: async () => ({ - registryUrl: "https://registry.clawdentity.com/", - }), - getConfigDirImpl: () => "/tmp/.clawdentity", - }); - - const result = await runPairCommand( - ["status", "alpha", "--ticket", ticket], - command, - ); - - expect(result.exitCode).toBeUndefined(); - expect(result.stdout).toContain("Status: pending"); - expect(result.stdout).toContain( - "Initiator Agent DID: did:claw:agent:01HAAA11111111111111111111", - ); - }); -}); diff --git a/apps/cli/src/commands/pair.test/AGENTS.md b/apps/cli/src/commands/pair.test/AGENTS.md new file mode 100644 index 0000000..8ddf10b --- /dev/null +++ b/apps/cli/src/commands/pair.test/AGENTS.md @@ -0,0 +1,23 @@ +# AGENTS.md (pair command tests) + +## Purpose +- Keep `pair` command tests modular and deterministic while preserving existing behavior. +- Prevent oversized test files and duplicate mock/setup logic. + +## File Boundaries +- `helpers.ts`: shared fixtures, env reset hooks, typed dependency casts, and CLI command runner. +- `start.test.ts`: `startPairing` behavior and proxy selection cases. +- `confirm.test.ts`: `confirmPairing` behavior, QR decode flow, and relay-peer sync. +- `status.test.ts`: `getPairingStatus` normalization, pending checks, and wait/poll persistence. +- `output.test.ts`: CLI output assertions for `pair start|confirm|status`. + +## Test Splitting Practices +- Keep each test file under 800 LOC. +- Centralize reusable fixture/build helpers in `helpers.ts`; do not copy-paste setup blocks. +- Keep tests hermetic: mock filesystem/network dependencies, avoid host state dependence. +- Preserve exact stdout/stderr and `process.exitCode` assertions for CLI behavior contracts. 
+ +## Validation +- Run before handoff: + - `pnpm -C apps/cli typecheck` + - `pnpm -C apps/cli test -- pair` diff --git a/apps/cli/src/commands/pair.test/confirm.test.ts b/apps/cli/src/commands/pair.test/confirm.test.ts new file mode 100644 index 0000000..e323325 --- /dev/null +++ b/apps/cli/src/commands/pair.test/confirm.test.ts @@ -0,0 +1,246 @@ +import { describe, expect, it, vi } from "vitest"; +import { confirmPairing } from "../pair.js"; +import { + asChmod, + asFetch, + asMkdir, + asReadFile, + asUnlink, + asWriteFile, + buildErrnoError, + createPairFixture, + createPairTicket, + createReadFileMock, + INITIATOR_PROFILE, + PAIR_CONFIG_DIR, + RESPONDER_PROFILE, + setupPairTestEnv, +} from "./helpers.js"; + +describe("pair confirm helpers", () => { + setupPairTestEnv(); + + it("routes confirm to ticket issuer proxy when local proxy origin differs", async () => { + const fixture = await createPairFixture(); + const ticket = createPairTicket("https://alpha.proxy.example"); + const fetchImpl = vi.fn(async (url: string, init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://beta.proxy.example", + }, + { status: 200 }, + ); + } + + expect(url).toBe("https://alpha.proxy.example/pair/confirm"); + const requestBody = JSON.parse(String(init?.body ?? 
"{}")) as { + responderProfile?: { proxyOrigin?: string }; + }; + expect(requestBody.responderProfile?.proxyOrigin).toBe( + "https://beta.proxy.example", + ); + + return Response.json( + { + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + }, + { status: 201 }, + ); + }); + + const result = await confirmPairing( + "beta", + { + ticket, + }, + { + fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: asReadFile(createReadFileMock(fixture)), + writeFileImpl: asWriteFile(vi.fn(async () => undefined)), + mkdirImpl: asMkdir(vi.fn(async () => undefined)), + chmodImpl: asChmod(vi.fn(async () => undefined)), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); + }); + + it("confirms pairing with qr-file ticket decode", async () => { + const fixture = await createPairFixture(); + const unlinkImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const qrTicket = createPairTicket("https://alpha.proxy.example"); + const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + }, + 
{ status: 201 }, + ); + }); + + const result = await confirmPairing( + "beta", + { + qrFile: "/tmp/pair.png", + }, + { + fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + readFileImpl: asReadFile(createReadFileMock(fixture)), + unlinkImpl: asUnlink(unlinkImpl), + mkdirImpl: asMkdir(mkdirImpl), + writeFileImpl: asWriteFile(writeFileImpl), + chmodImpl: asChmod(chmodImpl), + qrDecodeImpl: () => qrTicket, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.paired).toBe(true); + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); + expect(result.peerAlias).toBe("peer-11111111"); + const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; + const headers = new Headers(init?.headers); + expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); + expect(headers.get("x-claw-proof")).toBeTruthy(); + expect(headers.get("x-claw-body-sha256")).toBeTruthy(); + expect(headers.get("x-claw-owner-pat")).toBeNull(); + expect(headers.get("x-claw-timestamp")).toBe("1700000000"); + expect(headers.get("x-claw-nonce")).toBe("nonce-confirm"); + expect(String(init?.body ?? "")).toContain(qrTicket); + expect(String(init?.body ?? 
"")).toContain("responderProfile"); + expect(unlinkImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).toHaveBeenCalledWith("/tmp/pair.png"); + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(chmodImpl).toHaveBeenCalledTimes(1); + }); + + it("syncs OpenClaw relay peers snapshot after pair confirm", async () => { + const fixture = await createPairFixture(); + const runtimeConfigPath = "/tmp/.clawdentity/openclaw-relay.json"; + const relayPeersPath = + "/tmp/.openclaw/hooks/transforms/clawdentity-peers.json"; + const ticket = createPairTicket("https://alpha.proxy.example"); + + const readFileImpl = vi.fn( + async (filePath: string, _encoding?: BufferEncoding) => { + if (filePath.endsWith("/ait.jwt")) { + return fixture.ait; + } + + if (filePath.endsWith("/secret.key")) { + return fixture.secretKeyBase64url; + } + + if (filePath === runtimeConfigPath) { + return JSON.stringify({ + openclawBaseUrl: "http://127.0.0.1:18789", + relayTransformPeersPath: relayPeersPath, + }); + } + + if (filePath === relayPeersPath) { + return JSON.stringify({ peers: {} }); + } + + throw buildErrnoError("ENOENT"); + }, + ); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + }, + { status: 201 }, + ); + }); + + const result = await confirmPairing( + "beta", + { + ticket, + }, + { + fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-confirm", + 
readFileImpl: asReadFile(readFileImpl), + writeFileImpl: asWriteFile(writeFileImpl), + mkdirImpl: asMkdir(mkdirImpl), + chmodImpl: asChmod(chmodImpl), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.paired).toBe(true); + expect(result.peerAlias).toBe("peer-11111111"); + expect(writeFileImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/peers.json", + expect.any(String), + "utf8", + ); + expect(writeFileImpl).toHaveBeenCalledWith( + relayPeersPath, + expect.any(String), + "utf8", + ); + expect(mkdirImpl).toHaveBeenCalledTimes(2); + expect(chmodImpl).toHaveBeenCalledTimes(2); + }); +}); diff --git a/apps/cli/src/commands/pair.test/helpers.ts b/apps/cli/src/commands/pair.test/helpers.ts new file mode 100644 index 0000000..3207a97 --- /dev/null +++ b/apps/cli/src/commands/pair.test/helpers.ts @@ -0,0 +1,166 @@ +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, +} from "@clawdentity/sdk"; +import { Command } from "commander"; +import { afterEach, beforeEach, vi } from "vitest"; +import { resetClawdentityEnv } from "../../test-env.js"; +import { createPairCommand } from "../pair.js"; + +const previousEnv = process.env; + +type FsPromises = typeof import("node:fs/promises"); + +export type PairFixture = { + ait: string; + secretKeyBase64url: string; +}; + +export const INITIATOR_PROFILE = { + agentName: "alpha", + humanName: "Ravi", +}; + +export const RESPONDER_PROFILE = { + agentName: "beta", + humanName: "Ira", +}; + +export const PAIR_CONFIG_DIR = "/tmp/.clawdentity"; + +export const asFetch = (impl: unknown): typeof fetch => impl as typeof fetch; + +export const asReadFile = (impl: unknown): FsPromises["readFile"] => + impl as FsPromises["readFile"]; + +export const asWriteFile = (impl: unknown): FsPromises["writeFile"] => + impl as FsPromises["writeFile"]; + +export const asMkdir = (impl: 
unknown): FsPromises["mkdir"] => + impl as FsPromises["mkdir"]; + +export const asReaddir = (impl: unknown): FsPromises["readdir"] => + impl as FsPromises["readdir"]; + +export const asUnlink = (impl: unknown): FsPromises["unlink"] => + impl as FsPromises["unlink"]; + +export const asChmod = (impl: unknown): FsPromises["chmod"] => + impl as FsPromises["chmod"]; + +export const setupPairTestEnv = () => { + beforeEach(() => { + vi.clearAllMocks(); + process.env = resetClawdentityEnv(previousEnv); + }); + + afterEach(() => { + process.env = previousEnv; + }); +}; + +export const buildErrnoError = (code: string): NodeJS.ErrnoException => { + const error = new Error(code) as NodeJS.ErrnoException; + error.code = code; + return error; +}; + +export const createPairFixture = async (): Promise => { + const keypair = await generateEd25519Keypair(); + const encoded = encodeEd25519KeypairBase64url(keypair); + const header = Buffer.from(JSON.stringify({ alg: "EdDSA", typ: "JWT" })) + .toString("base64url") + .trim(); + const payload = Buffer.from( + JSON.stringify({ + sub: "did:claw:agent:01HAAA11111111111111111111", + }), + ) + .toString("base64url") + .trim(); + + return { + ait: `${header}.${payload}.sig`, + secretKeyBase64url: encoded.secretKey, + }; +}; + +export const createPairTicket = ( + issuer = "https://alpha.proxy.example", +): string => + `clwpair1_${Buffer.from(JSON.stringify({ iss: issuer })).toString("base64url")}`; + +export const createReadFileMock = (fixture: PairFixture) => { + return vi.fn(async (filePath: string, encoding?: BufferEncoding) => { + if (filePath.endsWith("/ait.jwt")) { + return fixture.ait; + } + + if (filePath.endsWith("/secret.key")) { + return fixture.secretKeyBase64url; + } + + if (filePath.endsWith("pair.png")) { + if (encoding) { + return ""; + } + return new Uint8Array([1, 2, 3, 4]); + } + + throw buildErrnoError("ENOENT"); + }); +}; + +export const runPairCommand = async ( + args: string[], + command = createPairCommand(), +): 
Promise<{ + exitCode: number | undefined; + stderr: string; + stdout: string; +}> => { + const stdout: string[] = []; + const stderr: string[] = []; + const previousExitCode = process.exitCode; + + const stdoutSpy = vi + .spyOn(process.stdout, "write") + .mockImplementation((chunk: unknown) => { + stdout.push(String(chunk)); + return true; + }); + + const stderrSpy = vi + .spyOn(process.stderr, "write") + .mockImplementation((chunk: unknown) => { + stderr.push(String(chunk)); + return true; + }); + + process.exitCode = undefined; + + command.configureOutput({ + writeOut: (message) => stdout.push(message), + writeErr: (message) => stderr.push(message), + outputError: (message) => stderr.push(message), + }); + + const root = new Command("clawdentity"); + root.addCommand(command); + + try { + await root.parseAsync(["node", "clawdentity", "pair", ...args]); + } finally { + stdoutSpy.mockRestore(); + stderrSpy.mockRestore(); + } + + const exitCode = process.exitCode; + process.exitCode = previousExitCode; + + return { + exitCode, + stderr: stderr.join(""), + stdout: stdout.join(""), + }; +}; diff --git a/apps/cli/src/commands/pair.test/output.test.ts b/apps/cli/src/commands/pair.test/output.test.ts new file mode 100644 index 0000000..63d0083 --- /dev/null +++ b/apps/cli/src/commands/pair.test/output.test.ts @@ -0,0 +1,165 @@ +import { describe, expect, it, vi } from "vitest"; +import { createPairCommand } from "../pair.js"; +import { + asChmod, + asFetch, + asMkdir, + asReadFile, + asUnlink, + asWriteFile, + createPairFixture, + createPairTicket, + createReadFileMock, + INITIATOR_PROFILE, + PAIR_CONFIG_DIR, + RESPONDER_PROFILE, + runPairCommand, + setupPairTestEnv, +} from "./helpers.js"; + +describe("pair command output", () => { + setupPairTestEnv(); + + it("prints pairing ticket from pair start", async () => { + const fixture = await createPairFixture(); + const command = createPairCommand({ + fetchImpl: asFetch(async (url: string) => { + if 
(url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), + readFileImpl: asReadFile(createReadFileMock(fixture)), + writeFileImpl: asWriteFile(vi.fn(async () => undefined)), + mkdirImpl: asMkdir(vi.fn(async () => undefined)), + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.registry.clawdentity.com/", + apiKey: "clw_pat_configured", + humanName: INITIATOR_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }); + + const result = await runPairCommand(["start", "alpha", "--qr"], command); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Pairing ticket created"); + expect(result.stdout).toContain("Ticket: clwpair1_eyJ2IjoxfQ"); + expect(result.stdout).toContain("QR File: "); + }); + + it("prints saved peer alias from pair confirm", async () => { + const fixture = await createPairFixture(); + const qrTicket = createPairTicket("https://alpha.proxy.example"); + const command = createPairCommand({ + fetchImpl: asFetch(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + paired: true, + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: RESPONDER_PROFILE, + }, + { status: 201 }, + ); + }), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () 
=> "nonce-confirm", + readFileImpl: asReadFile(createReadFileMock(fixture)), + writeFileImpl: asWriteFile(vi.fn(async () => undefined)), + mkdirImpl: asMkdir(vi.fn(async () => undefined)), + chmodImpl: asChmod(vi.fn(async () => undefined)), + unlinkImpl: asUnlink(vi.fn(async () => undefined)), + qrDecodeImpl: () => qrTicket, + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + humanName: RESPONDER_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }); + + const result = await runPairCommand( + ["confirm", "beta", "--qr-file", "/tmp/pair.png"], + command, + ); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Pairing confirmed"); + expect(result.stdout).toContain("Peer alias saved: peer-11111111"); + }); + + it("prints pairing status from pair status", async () => { + const fixture = await createPairFixture(); + const ticket = createPairTicket("https://alpha.proxy.example"); + const command = createPairCommand({ + fetchImpl: asFetch(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: asReadFile(createReadFileMock(fixture)), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }); + + const result = await runPairCommand( + ["status", "alpha", "--ticket", ticket], + command, + ); + + expect(result.exitCode).toBeUndefined(); + expect(result.stdout).toContain("Status: pending"); + expect(result.stdout).toContain( + "Initiator Agent DID: 
did:claw:agent:01HAAA11111111111111111111", + ); + }); +}); diff --git a/apps/cli/src/commands/pair.test/start.test.ts b/apps/cli/src/commands/pair.test/start.test.ts new file mode 100644 index 0000000..dd541ed --- /dev/null +++ b/apps/cli/src/commands/pair.test/start.test.ts @@ -0,0 +1,227 @@ +import { describe, expect, it, vi } from "vitest"; +import { startPairing } from "../pair.js"; +import { + asFetch, + asMkdir, + asReaddir, + asReadFile, + asUnlink, + asWriteFile, + createPairFixture, + createReadFileMock, + INITIATOR_PROFILE, + PAIR_CONFIG_DIR, + setupPairTestEnv, +} from "./helpers.js"; + +describe("pair start helpers", () => { + setupPairTestEnv(); + + it("starts pairing with local agent proof and configured owner PAT", async () => { + const fixture = await createPairFixture(); + const readFileImpl = createReadFileMock(fixture); + const readdirImpl = vi.fn(async () => [ + "alpha-pair-1699999000.png", + "alpha-pair-1699999500.png", + "notes.txt", + ]); + const unlinkImpl = vi.fn(async () => undefined); + const writeFileImpl = vi.fn( + async ( + _filePath: string, + _data: string | Uint8Array, + _encoding?: BufferEncoding, + ) => undefined, + ); + const mkdirImpl = vi.fn(async () => undefined); + const fetchImpl = vi.fn(async (url: string, _init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + const result = await startPairing( + "alpha", + { + ttlSeconds: "900", + qr: true, + }, + { + fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: asReadFile(readFileImpl), + writeFileImpl: asWriteFile(writeFileImpl), + 
mkdirImpl: asMkdir(mkdirImpl), + readdirImpl: asReaddir(readdirImpl), + unlinkImpl: asUnlink(unlinkImpl), + qrEncodeImpl: async () => new Uint8Array([1, 2, 3]), + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.registry.clawdentity.com/", + humanName: INITIATOR_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.ticket).toBe("clwpair1_eyJ2IjoxfQ"); + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); + expect(result.qrPath).toContain( + "/tmp/.clawdentity/pairing/alpha-pair-1700000000.png", + ); + expect(readdirImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).toHaveBeenCalledTimes(1); + expect(unlinkImpl).toHaveBeenCalledWith( + "/tmp/.clawdentity/pairing/alpha-pair-1699999000.png", + ); + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(mkdirImpl).toHaveBeenCalledTimes(1); + const [, init] = fetchImpl.mock.calls[1] as [string, RequestInit]; + expect(init?.method).toBe("POST"); + const headers = new Headers(init?.headers); + expect(headers.get("authorization")).toBe(`Claw ${fixture.ait}`); + expect(headers.get("x-claw-proof")).toBeTruthy(); + expect(headers.get("x-claw-body-sha256")).toBeTruthy(); + expect(headers.get("x-claw-timestamp")).toBe("1700000000"); + expect(headers.get("x-claw-nonce")).toBe("nonce-start"); + expect(String(init?.body ?? "")).toContain("ttlSeconds"); + expect(String(init?.body ?? 
"")).toContain("initiatorProfile"); + }); + + it("uses CLAWDENTITY_PROXY_URL when no proxy override options are present", async () => { + process.env.CLAWDENTITY_PROXY_URL = "https://env.proxy.example"; + const fixture = await createPairFixture(); + + const result = await startPairing( + "alpha", + {}, + { + fetchImpl: asFetch(async () => + Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ), + ), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: asReadFile(createReadFileMock(fixture)), + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.registry.clawdentity.com/", + humanName: INITIATOR_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.proxyUrl).toBe("https://env.proxy.example/"); + }); + + it("uses registry metadata proxyUrl when env override is omitted", async () => { + const fixture = await createPairFixture(); + + const result = await startPairing( + "alpha", + {}, + { + fetchImpl: asFetch(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://saved.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: asReadFile(createReadFileMock(fixture)), + resolveConfigImpl: async () => ({ + registryUrl: "https://dev.registry.clawdentity.com/", + proxyUrl: "https://saved.proxy.example", + humanName: INITIATOR_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + 
expect(result.proxyUrl).toBe("https://saved.proxy.example/"); + }); + + it("fails when configured proxyUrl does not match registry metadata", async () => { + const fixture = await createPairFixture(); + + await expect( + startPairing( + "alpha", + {}, + { + fetchImpl: asFetch(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://proxy.clawdentity.com", + }, + { status: 200 }, + ); + } + + return Response.json( + { + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + ticket: "clwpair1_eyJ2IjoxfQ", + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-start", + readFileImpl: asReadFile(createReadFileMock(fixture)), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + proxyUrl: "https://stale.proxy.clawdentity.com", + humanName: INITIATOR_PROFILE.humanName, + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ), + ).rejects.toMatchObject({ + code: "CLI_PAIR_PROXY_URL_MISMATCH", + }); + }); +}); diff --git a/apps/cli/src/commands/pair.test/status.test.ts b/apps/cli/src/commands/pair.test/status.test.ts new file mode 100644 index 0000000..c99907f --- /dev/null +++ b/apps/cli/src/commands/pair.test/status.test.ts @@ -0,0 +1,223 @@ +import { describe, expect, it, vi } from "vitest"; +import { getPairingStatus } from "../pair.js"; +import { + asChmod, + asFetch, + asMkdir, + asReadFile, + asWriteFile, + createPairFixture, + createPairTicket, + createReadFileMock, + INITIATOR_PROFILE, + PAIR_CONFIG_DIR, + RESPONDER_PROFILE, + setupPairTestEnv, +} from "./helpers.js"; + +describe("pair status helpers", () => { + setupPairTestEnv(); + + it("normalizes wrapped tickets before pair status request", async () => { + const fixture = await createPairFixture(); + const ticket = createPairTicket("https://alpha.proxy.example"); 
+ const wrappedTicket = `\`\n${ticket.slice(0, 18)}\n${ticket.slice(18)}\n\``; + const fetchImpl = vi.fn(async (url: string, init?: RequestInit) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + const requestBody = JSON.parse(String(init?.body ?? "{}")) as { + ticket?: string; + }; + expect(requestBody.ticket).toBe(ticket); + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + const result = await getPairingStatus( + "alpha", + { + ticket: wrappedTicket, + }, + { + fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: asReadFile(createReadFileMock(fixture)), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.status).toBe("pending"); + }); + + it("checks pending pair status without persisting peers", async () => { + const fixture = await createPairFixture(); + const writeFileImpl = vi.fn(async () => undefined); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const ticket = createPairTicket("https://alpha.proxy.example"); + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: "https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + return Response.json( + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { status: 200 }, + ); + }); + + const result = await getPairingStatus( + "alpha", + { + ticket, + }, + { + 
fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => 1_700_000_000, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: asReadFile(createReadFileMock(fixture)), + writeFileImpl: asWriteFile(writeFileImpl), + mkdirImpl: asMkdir(mkdirImpl), + chmodImpl: asChmod(chmodImpl), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + }, + ); + + expect(result.status).toBe("pending"); + expect(result.proxyUrl).toBe("https://alpha.proxy.example/"); + expect(result.peerAlias).toBeUndefined(); + expect(writeFileImpl).toHaveBeenCalledTimes(0); + expect(mkdirImpl).toHaveBeenCalledTimes(0); + expect(chmodImpl).toHaveBeenCalledTimes(0); + }); + + it("polls pair status until confirmed and persists peer for initiator", async () => { + const fixture = await createPairFixture(); + const writeFileImpl = vi.fn( + async ( + _filePath: string, + _data: string | Uint8Array, + _encoding?: BufferEncoding, + ) => undefined, + ); + const mkdirImpl = vi.fn(async () => undefined); + const chmodImpl = vi.fn(async () => undefined); + const sleepImpl = vi.fn(async () => undefined); + const ticket = createPairTicket("https://alpha.proxy.example"); + const statusResponses = [ + { + status: "pending", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + expiresAt: "2026-02-18T00:00:00.000Z", + }, + { + status: "confirmed", + initiatorAgentDid: "did:claw:agent:01HAAA11111111111111111111", + initiatorProfile: INITIATOR_PROFILE, + responderAgentDid: "did:claw:agent:01HBBB22222222222222222222", + responderProfile: { + ...RESPONDER_PROFILE, + proxyOrigin: "https://beta.proxy.example", + }, + expiresAt: "2026-02-18T00:00:00.000Z", + confirmedAt: "2026-02-18T00:00:05.000Z", + }, + ]; + let statusIndex = 0; + const fetchImpl = vi.fn(async (url: string) => { + if (url.endsWith("/v1/metadata")) { + return Response.json( + { + status: "ok", + proxyUrl: 
"https://alpha.proxy.example", + }, + { status: 200 }, + ); + } + + const payload = + statusResponses[Math.min(statusIndex, statusResponses.length - 1)]; + statusIndex += 1; + return Response.json(payload, { status: 200 }); + }); + + const nowSequence = [1_700_000_000, 1_700_000_001, 1_700_000_002]; + const result = await getPairingStatus( + "alpha", + { + ticket, + wait: true, + waitSeconds: "10", + pollIntervalSeconds: "1", + }, + { + fetchImpl: asFetch(fetchImpl), + nowSecondsImpl: () => nowSequence.shift() ?? 1_700_000_003, + nonceFactoryImpl: () => "nonce-status", + readFileImpl: asReadFile(createReadFileMock(fixture)), + writeFileImpl: asWriteFile(writeFileImpl), + mkdirImpl: asMkdir(mkdirImpl), + chmodImpl: asChmod(chmodImpl), + resolveConfigImpl: async () => ({ + registryUrl: "https://registry.clawdentity.com/", + }), + getConfigDirImpl: () => PAIR_CONFIG_DIR, + sleepImpl, + }, + ); + + expect(result.status).toBe("confirmed"); + expect(result.peerAlias).toBe("peer-22222222"); + expect(sleepImpl).toHaveBeenCalledTimes(1); + expect(writeFileImpl).toHaveBeenCalledTimes(1); + expect(mkdirImpl).toHaveBeenCalledTimes(1); + expect(chmodImpl).toHaveBeenCalledTimes(1); + const peerWriteCall = writeFileImpl.mock.calls[0]; + const persistedPeers = JSON.parse(String(peerWriteCall?.[1] ?? 
"{}")) as { + peers: { + [key: string]: { + did: string; + proxyUrl: string; + }; + }; + }; + expect(persistedPeers.peers["peer-22222222"]?.proxyUrl).toBe( + "https://beta.proxy.example/hooks/agent", + ); + }); +}); diff --git a/apps/proxy/src/auth-middleware.test.ts b/apps/proxy/src/auth-middleware.test.ts deleted file mode 100644 index d0412ba..0000000 --- a/apps/proxy/src/auth-middleware.test.ts +++ /dev/null @@ -1,944 +0,0 @@ -import { AGENT_AUTH_VALIDATE_PATH, generateUlid } from "@clawdentity/protocol"; -import { - encodeEd25519KeypairBase64url, - generateEd25519Keypair, - signAIT, - signCRL, - signHttpRequest, -} from "@clawdentity/sdk"; -import { buildTestAitClaims } from "@clawdentity/sdk/testing"; -import { describe, expect, it, vi } from "vitest"; -import { RELAY_RECIPIENT_AGENT_DID_HEADER } from "./agent-hook-route.js"; -import type { AgentRelaySessionNamespace } from "./agent-relay-session.js"; -import { parseProxyConfig } from "./config.js"; -import { PAIR_CONFIRM_PATH, PAIR_STATUS_PATH } from "./pairing-constants.js"; -import { createInMemoryProxyTrustStore } from "./proxy-trust-store.js"; -import { RELAY_CONNECT_PATH } from "./relay-connect-route.js"; -import { createProxyApp } from "./server.js"; - -const REGISTRY_KID = "registry-active-kid"; -const NOW_MS = Date.now(); -const NOW_SECONDS = Math.floor(NOW_MS / 1000); -const ISSUER = "https://registry.clawdentity.com"; -const BODY_JSON = JSON.stringify({ message: "hello" }); -const KNOWN_PEER_DID = "did:claw:agent:known-peer"; - -type AuthHarnessOptions = { - expired?: boolean; - crlStaleBehavior?: "fail-open" | "fail-closed"; - fetchCrlFails?: boolean; - fetchKeysFails?: boolean; - allowCurrentAgent?: boolean; - revoked?: boolean; - validateStatus?: number; -}; - -type AuthHarness = { - app: ReturnType; - claims: ReturnType; - createSignedHeaders: (input?: { - body?: string; - method?: "GET" | "POST"; - nonce?: string; - pathWithQuery?: string; - timestamp?: string; - timestampSeconds?: number; - 
}) => Promise>; -}; - -function resolveRequestUrl(requestInput: unknown): string { - if (typeof requestInput === "string") { - return requestInput; - } - if (requestInput instanceof URL) { - return requestInput.toString(); - } - if ( - typeof requestInput === "object" && - requestInput !== null && - "url" in requestInput && - typeof (requestInput as { url?: unknown }).url === "string" - ) { - return (requestInput as { url: string }).url; - } - - return ""; -} - -function createFetchMock(input: { - crlToken: string; - fetchCrlFails?: boolean; - fetchKeysFails?: boolean; - registryPublicKeyX: string; - validateStatus?: number; -}) { - return vi.fn(async (requestInput: unknown): Promise => { - const url = resolveRequestUrl(requestInput); - - if (url.endsWith("/.well-known/claw-keys.json")) { - if (input.fetchKeysFails) { - throw new Error("keys unavailable"); - } - - return new Response( - JSON.stringify({ - keys: [ - { - kid: REGISTRY_KID, - alg: "EdDSA", - crv: "Ed25519", - x: input.registryPublicKeyX, - status: "active", - }, - ], - }), - { status: 200 }, - ); - } - - if (url.endsWith("/v1/crl")) { - if (input.fetchCrlFails) { - throw new Error("crl unavailable"); - } - - return new Response( - JSON.stringify({ - crl: input.crlToken, - }), - { status: 200 }, - ); - } - - if (url.endsWith(AGENT_AUTH_VALIDATE_PATH)) { - const status = input.validateStatus ?? 204; - return new Response(status === 204 ? 
null : "", { status }); - } - - return new Response("not found", { status: 404 }); - }); -} - -async function createAuthHarness( - options: AuthHarnessOptions = {}, -): Promise { - const registryKeypair = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const encodedRegistry = encodeEd25519KeypairBase64url(registryKeypair); - const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); - const claims = buildTestAitClaims({ - publicKeyX: encodedAgent.publicKey, - issuer: ISSUER, - nowSeconds: NOW_SECONDS - 10, - ttlSeconds: 610, - nbfSkewSeconds: 0, - seedMs: NOW_MS, - }); - if (options.expired) { - claims.exp = NOW_SECONDS - 1; - } - - const ait = await signAIT({ - claims, - signerKid: REGISTRY_KID, - signerKeypair: registryKeypair, - }); - - const revocationJti = options.revoked - ? claims.jti - : generateUlid(NOW_MS + 40); - const crl = await signCRL({ - claims: { - iss: ISSUER, - jti: generateUlid(NOW_MS + 50), - iat: NOW_SECONDS - 10, - exp: NOW_SECONDS + 600, - revocations: [ - { - jti: revocationJti, - agentDid: claims.sub, - revokedAt: NOW_SECONDS - 5, - reason: "manual revoke", - }, - ], - }, - signerKid: REGISTRY_KID, - signerKeypair: registryKeypair, - }); - - const fetchMock = createFetchMock({ - crlToken: crl, - fetchCrlFails: options.fetchCrlFails, - fetchKeysFails: options.fetchKeysFails, - registryPublicKeyX: encodedRegistry.publicKey, - validateStatus: options.validateStatus, - }); - - const trustStore = createInMemoryProxyTrustStore(); - if (options.allowCurrentAgent !== false) { - await trustStore.upsertPair({ - initiatorAgentDid: claims.sub, - responderAgentDid: KNOWN_PEER_DID, - }); - } - - const relaySession = { - fetch: vi.fn(async (request: Request) => { - if (request.method === "POST") { - return Response.json( - { - delivered: true, - connectedSockets: 1, - }, - { status: 202 }, - ); - } - - return new Response(null, { status: 204 }); - }), - }; - const relayNamespace = { - idFromName: 
vi.fn((_name: string) => ({}) as DurableObjectId), - get: vi.fn((_id: DurableObjectId) => relaySession), - } satisfies AgentRelaySessionNamespace; - - const app = createProxyApp({ - config: parseProxyConfig({ - ...(options.crlStaleBehavior - ? { CRL_STALE_BEHAVIOR: options.crlStaleBehavior } - : {}), - }), - trustStore, - auth: { - fetchImpl: fetchMock as typeof fetch, - clock: () => NOW_MS, - }, - hooks: { - resolveSessionNamespace: () => relayNamespace, - now: () => new Date(NOW_MS).toISOString(), - }, - relay: { - resolveSessionNamespace: () => relayNamespace, - }, - registerRoutes: (nextApp) => { - nextApp.post("/protected", (c) => { - const auth = c.get("auth"); - return c.json({ - ok: true, - auth, - }); - }); - }, - }); - - return { - app, - claims, - createSignedHeaders: async (input = {}) => { - const method = input.method ?? "POST"; - const body = input.body ?? (method === "GET" ? "" : BODY_JSON); - const nonce = input.nonce ?? "nonce-1"; - const pathWithQuery = input.pathWithQuery ?? "/protected"; - const timestamp = - input.timestamp ?? String(input.timestampSeconds ?? NOW_SECONDS); - - const signed = await signHttpRequest({ - method, - pathWithQuery, - timestamp, - nonce, - body: new TextEncoder().encode(body), - secretKey: agentKeypair.secretKey, - }); - - return { - authorization: `Claw ${ait}`, - ...(method === "POST" ? 
{ "content-type": "application/json" } : {}), - ...signed.headers, - }; - }, - }; -} - -describe("proxy auth middleware", () => { - it("keeps /health open without auth headers", async () => { - const harness = await createAuthHarness(); - const response = await harness.app.request("/health"); - - expect(response.status).toBe(200); - }); - - it("verifies inbound auth and exposes auth context to downstream handlers", async () => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders(); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(200); - const body = (await response.json()) as { - ok: boolean; - auth: { - agentDid: string; - ownerDid: string; - aitJti: string; - }; - }; - expect(body.ok).toBe(true); - expect(body.auth.agentDid).toBe(harness.claims.sub); - expect(body.auth.ownerDid).toBe(harness.claims.ownerDid); - expect(body.auth.aitJti).toBe(harness.claims.jti); - }); - - it("returns 403 when a verified caller is not trusted by agent DID", async () => { - const harness = await createAuthHarness({ - allowCurrentAgent: false, - }); - const headers = await harness.createSignedHeaders({ - nonce: "nonce-not-trusted", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(403); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); - }); - - it("allows unknown agents to reach /pair/confirm for pairing bootstrap", async () => { - const harness = await createAuthHarness({ - allowCurrentAgent: false, - }); - const requestBody = JSON.stringify({ - ticket: "clwpair1_missing-ticket", - responderProfile: { - agentName: "beta", - humanName: "Ira", - }, - }); - const headers = await harness.createSignedHeaders({ - body: requestBody, - nonce: 
"nonce-pair-confirm-bootstrap", - pathWithQuery: PAIR_CONFIRM_PATH, - }); - - const response = await harness.app.request(PAIR_CONFIRM_PATH, { - method: "POST", - headers, - body: requestBody, - }); - - expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); - }); - - it("allows unknown agents to reach /pair/status for initiator polling bootstrap", async () => { - const harness = await createAuthHarness({ - allowCurrentAgent: false, - }); - const requestBody = JSON.stringify({ ticket: "clwpair1_missing-ticket" }); - const headers = await harness.createSignedHeaders({ - body: requestBody, - nonce: "nonce-pair-status-bootstrap", - pathWithQuery: PAIR_STATUS_PATH, - }); - - const response = await harness.app.request(PAIR_STATUS_PATH, { - method: "POST", - headers, - body: requestBody, - }); - - expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); - }); - - it("rejects /pair/confirm without Authorization", async () => { - const harness = await createAuthHarness({ - allowCurrentAgent: false, - }); - - const response = await harness.app.request(PAIR_CONFIRM_PATH, { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - ticket: "clwpair1_missing-ticket", - }), - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_MISSING_TOKEN"); - }); - - it("refreshes keyset and accepts valid AIT after registry key rotation", async () => { - const oldKid = "registry-old-kid"; - const newKid = "registry-new-kid"; - const oldRegistryKeypair = await generateEd25519Keypair(); - const newRegistryKeypair = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const 
encodedOldRegistry = - encodeEd25519KeypairBase64url(oldRegistryKeypair); - const encodedNewRegistry = - encodeEd25519KeypairBase64url(newRegistryKeypair); - const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); - - const claims = buildTestAitClaims({ - publicKeyX: encodedAgent.publicKey, - issuer: ISSUER, - nowSeconds: NOW_SECONDS - 10, - ttlSeconds: 610, - nbfSkewSeconds: 0, - seedMs: NOW_MS, - }); - const ait = await signAIT({ - claims, - signerKid: newKid, - signerKeypair: newRegistryKeypair, - }); - const crl = await signCRL({ - claims: { - iss: ISSUER, - jti: generateUlid(NOW_MS + 70), - iat: NOW_SECONDS - 10, - exp: NOW_SECONDS + 600, - revocations: [ - { - jti: generateUlid(NOW_MS + 80), - agentDid: claims.sub, - revokedAt: NOW_SECONDS - 5, - reason: "manual revoke", - }, - ], - }, - signerKid: newKid, - signerKeypair: newRegistryKeypair, - }); - - let keyFetchCount = 0; - const fetchMock = vi.fn( - async (requestInput: unknown): Promise => { - const url = resolveRequestUrl(requestInput); - if (url.endsWith("/.well-known/claw-keys.json")) { - keyFetchCount += 1; - const key = - keyFetchCount === 1 - ? 
{ - kid: oldKid, - alg: "EdDSA", - crv: "Ed25519", - x: encodedOldRegistry.publicKey, - status: "active", - } - : { - kid: newKid, - alg: "EdDSA", - crv: "Ed25519", - x: encodedNewRegistry.publicKey, - status: "active", - }; - return new Response( - JSON.stringify({ - keys: [key], - }), - { status: 200 }, - ); - } - - if (url.endsWith("/v1/crl")) { - return new Response( - JSON.stringify({ - crl, - }), - { status: 200 }, - ); - } - - return new Response("not found", { status: 404 }); - }, - ); - - const trustStore = createInMemoryProxyTrustStore(); - await trustStore.upsertPair({ - initiatorAgentDid: claims.sub, - responderAgentDid: KNOWN_PEER_DID, - }); - - const app = createProxyApp({ - config: parseProxyConfig({}), - trustStore, - auth: { - fetchImpl: fetchMock as typeof fetch, - clock: () => NOW_MS, - }, - registerRoutes: (nextApp) => { - nextApp.post("/protected", (c) => c.json({ ok: true })); - }, - }); - - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: "/protected", - timestamp: String(NOW_SECONDS), - nonce: "nonce-rotation", - body: new TextEncoder().encode(BODY_JSON), - secretKey: agentKeypair.secretKey, - }); - const response = await app.request("/protected", { - method: "POST", - headers: { - authorization: `Claw ${ait}`, - "content-type": "application/json", - ...signed.headers, - }, - body: BODY_JSON, - }); - - expect(response.status).toBe(200); - expect(keyFetchCount).toBe(2); - }); - - it("refreshes keyset and verifies CRL after registry CRL key rotation", async () => { - const oldKid = "registry-old-kid"; - const newKid = "registry-new-kid"; - const oldRegistryKeypair = await generateEd25519Keypair(); - const newRegistryKeypair = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const encodedOldRegistry = - encodeEd25519KeypairBase64url(oldRegistryKeypair); - const encodedNewRegistry = - encodeEd25519KeypairBase64url(newRegistryKeypair); - const encodedAgent = 
encodeEd25519KeypairBase64url(agentKeypair); - - const claims = buildTestAitClaims({ - publicKeyX: encodedAgent.publicKey, - issuer: ISSUER, - nowSeconds: NOW_SECONDS - 10, - ttlSeconds: 610, - nbfSkewSeconds: 0, - seedMs: NOW_MS, - }); - const ait = await signAIT({ - claims, - signerKid: oldKid, - signerKeypair: oldRegistryKeypair, - }); - const crl = await signCRL({ - claims: { - iss: ISSUER, - jti: generateUlid(NOW_MS + 90), - iat: NOW_SECONDS - 10, - exp: NOW_SECONDS + 600, - revocations: [ - { - jti: generateUlid(NOW_MS + 100), - agentDid: claims.sub, - revokedAt: NOW_SECONDS - 5, - reason: "manual revoke", - }, - ], - }, - signerKid: newKid, - signerKeypair: newRegistryKeypair, - }); - - let keyFetchCount = 0; - const fetchMock = vi.fn( - async (requestInput: unknown): Promise => { - const url = resolveRequestUrl(requestInput); - if (url.endsWith("/.well-known/claw-keys.json")) { - keyFetchCount += 1; - const key = - keyFetchCount === 1 - ? { - kid: oldKid, - alg: "EdDSA", - crv: "Ed25519", - x: encodedOldRegistry.publicKey, - status: "active", - } - : { - kid: newKid, - alg: "EdDSA", - crv: "Ed25519", - x: encodedNewRegistry.publicKey, - status: "active", - }; - return new Response( - JSON.stringify({ - keys: [key], - }), - { status: 200 }, - ); - } - - if (url.endsWith("/v1/crl")) { - return new Response( - JSON.stringify({ - crl, - }), - { status: 200 }, - ); - } - - return new Response("not found", { status: 404 }); - }, - ); - - const trustStore = createInMemoryProxyTrustStore(); - await trustStore.upsertPair({ - initiatorAgentDid: claims.sub, - responderAgentDid: KNOWN_PEER_DID, - }); - - const app = createProxyApp({ - config: parseProxyConfig({}), - trustStore, - auth: { - fetchImpl: fetchMock as typeof fetch, - clock: () => NOW_MS, - }, - registerRoutes: (nextApp) => { - nextApp.post("/protected", (c) => c.json({ ok: true })); - }, - }); - - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: "/protected", - timestamp: 
String(NOW_SECONDS), - nonce: "nonce-crl-rotation", - body: new TextEncoder().encode(BODY_JSON), - secretKey: agentKeypair.secretKey, - }); - const response = await app.request("/protected", { - method: "POST", - headers: { - authorization: `Claw ${ait}`, - "content-type": "application/json", - ...signed.headers, - }, - body: BODY_JSON, - }); - - expect(response.status).toBe(200); - expect(keyFetchCount).toBe(2); - }); - - it("requires x-claw-agent-access for /hooks/agent", async () => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders({ - pathWithQuery: "/hooks/agent", - nonce: "nonce-hooks-agent-access-required", - }); - const response = await harness.app.request("/hooks/agent", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AGENT_ACCESS_REQUIRED"); - }); - - it("rejects /hooks/agent when registry access-token validation fails", async () => { - const harness = await createAuthHarness({ - validateStatus: 401, - }); - const headers = await harness.createSignedHeaders({ - pathWithQuery: "/hooks/agent", - nonce: "nonce-hooks-agent-access-invalid", - }); - const response = await harness.app.request("/hooks/agent", { - method: "POST", - headers: { - ...headers, - "x-claw-agent-access": "clw_agt_invalid", - }, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AGENT_ACCESS_INVALID"); - }); - - it("accepts /hooks/agent when x-claw-agent-access validates", async () => { - const harness = await createAuthHarness({ - validateStatus: 204, - }); - const headers = await harness.createSignedHeaders({ - pathWithQuery: "/hooks/agent", - nonce: "nonce-hooks-agent-access-valid", - }); - const response = await harness.app.request("/hooks/agent", { - method: 
"POST", - headers: { - ...headers, - "x-claw-agent-access": "clw_agt_validtoken", - [RELAY_RECIPIENT_AGENT_DID_HEADER]: harness.claims.sub, - }, - body: BODY_JSON, - }); - - expect(response.status).toBe(202); - }); - - it("requires x-claw-agent-access for relay websocket connect", async () => { - const harness = await createAuthHarness({ - validateStatus: 204, - }); - const headers = await harness.createSignedHeaders({ - method: "GET", - pathWithQuery: RELAY_CONNECT_PATH, - nonce: "nonce-relay-connect", - }); - const response = await harness.app.request(RELAY_CONNECT_PATH, { - method: "GET", - headers: { - ...headers, - upgrade: "websocket", - }, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AGENT_ACCESS_REQUIRED"); - }); - - it("accepts relay websocket connect when x-claw-agent-access validates", async () => { - const harness = await createAuthHarness({ - validateStatus: 204, - }); - const headers = await harness.createSignedHeaders({ - method: "GET", - pathWithQuery: RELAY_CONNECT_PATH, - nonce: "nonce-relay-connect-agent-access-valid", - }); - const response = await harness.app.request(RELAY_CONNECT_PATH, { - method: "GET", - headers: { - ...headers, - upgrade: "websocket", - "x-claw-agent-access": "clw_agt_validtoken", - }, - }); - - expect(response.status).toBe(204); - }); - - it("allows unknown agents to connect relay websocket when auth validates", async () => { - const harness = await createAuthHarness({ - allowCurrentAgent: false, - validateStatus: 204, - }); - const headers = await harness.createSignedHeaders({ - method: "GET", - pathWithQuery: RELAY_CONNECT_PATH, - nonce: "nonce-relay-connect-unknown-agent", - }); - const response = await harness.app.request(RELAY_CONNECT_PATH, { - method: "GET", - headers: { - ...headers, - upgrade: "websocket", - "x-claw-agent-access": "clw_agt_validtoken", - }, - }); - - expect(response.status).toBe(204); - }); 
- - it("rejects non-health route when Authorization scheme is not Claw", async () => { - const harness = await createAuthHarness(); - const response = await harness.app.request("/protected", { - method: "POST", - headers: { - authorization: "Bearer token", - }, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); - }); - - it("rejects Authorization headers with extra segments", async () => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders({ - nonce: "nonce-auth-extra", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers: { - ...headers, - authorization: `${headers.authorization} extra`, - }, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); - }); - - it("rejects replayed nonce for the same agent", async () => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders({ - nonce: "nonce-replay-1", - }); - - const first = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - const second = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(first.status).toBe(200); - expect(second.status).toBe(401); - const body = (await second.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_REPLAY"); - }); - - it("rejects requests outside the timestamp skew window", async () => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders({ - timestampSeconds: NOW_SECONDS - 301, - nonce: "nonce-old", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: 
BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_TIMESTAMP_SKEW"); - }); - - it.each([ - `${NOW_SECONDS}abc`, - `${NOW_SECONDS}.5`, - ])("rejects malformed X-Claw-Timestamp header: %s", async (malformedTimestamp) => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders({ - timestamp: malformedTimestamp, - nonce: "nonce-invalid-timestamp", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_INVALID_TIMESTAMP"); - }); - - it("rejects proof mismatches when body is tampered", async () => { - const harness = await createAuthHarness(); - const headers = await harness.createSignedHeaders({ - body: BODY_JSON, - nonce: "nonce-tampered", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: JSON.stringify({ message: "tampered" }), - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_INVALID_PROOF"); - }); - - it("rejects revoked AITs", async () => { - const harness = await createAuthHarness({ - revoked: true, - }); - const headers = await harness.createSignedHeaders({ - nonce: "nonce-revoked", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_REVOKED"); - }); - - it("rejects expired AITs", async () => { - const harness = await createAuthHarness({ - expired: true, - }); - const headers = await harness.createSignedHeaders({ 
- nonce: "nonce-expired", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_INVALID_AIT"); - }); - - it("returns 503 when registry signing keys are unavailable", async () => { - const harness = await createAuthHarness({ - fetchKeysFails: true, - }); - const headers = await harness.createSignedHeaders({ - nonce: "nonce-keys-fail", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(503); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_DEPENDENCY_UNAVAILABLE"); - }); - - it("returns 503 when CRL is unavailable in fail-closed mode", async () => { - const harness = await createAuthHarness({ - fetchCrlFails: true, - crlStaleBehavior: "fail-closed", - }); - const headers = await harness.createSignedHeaders({ - nonce: "nonce-crl-fail-closed", - }); - const response = await harness.app.request("/protected", { - method: "POST", - headers, - body: BODY_JSON, - }); - - expect(response.status).toBe(503); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("PROXY_AUTH_DEPENDENCY_UNAVAILABLE"); - }); -}); diff --git a/apps/proxy/src/auth-middleware.test/AGENTS.md b/apps/proxy/src/auth-middleware.test/AGENTS.md new file mode 100644 index 0000000..9fda3ac --- /dev/null +++ b/apps/proxy/src/auth-middleware.test/AGENTS.md @@ -0,0 +1,18 @@ +# AGENTS.md (apps/proxy/src/auth-middleware.test) + +## Purpose +- Keep auth-middleware tests modular, deterministic, and easy to extend. + +## Test Layout +- `helpers.ts`: shared deterministic time constants and auth harness builders. +- `basic.test.ts`: baseline auth flow and pairing bootstrap access rules. 
+- `rotation.test.ts`: registry key/CRL key rotation behavior. +- `agent-access.test.ts`: `/hooks/agent` and `/v1/relay/connect` access-token checks. +- `robustness.test.ts`: malformed/replay/revoked/expired/dependency-failure cases. + +## Best Practices +- Keep each spec file below 800 lines. +- Reuse `createAuthHarness` and shared constants from `helpers.ts`; avoid duplicate setup. +- Preserve deterministic time behavior via `NOW_MS`/`NOW_SECONDS` from helpers. +- Keep assertions explicit for status code and error code in every negative-path test. +- Add new tests to the concern-specific file; only create a new file when concern boundaries become unclear. diff --git a/apps/proxy/src/auth-middleware.test/agent-access.test.ts b/apps/proxy/src/auth-middleware.test/agent-access.test.ts new file mode 100644 index 0000000..1e4e703 --- /dev/null +++ b/apps/proxy/src/auth-middleware.test/agent-access.test.ts @@ -0,0 +1,131 @@ +import { describe, expect, it } from "vitest"; +import { RELAY_RECIPIENT_AGENT_DID_HEADER } from "../agent-hook-route.js"; +import { RELAY_CONNECT_PATH } from "../relay-connect-route.js"; +import { BODY_JSON, createAuthHarness } from "./helpers.js"; + +describe("proxy auth middleware", () => { + it("requires x-claw-agent-access for /hooks/agent", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + pathWithQuery: "/hooks/agent", + nonce: "nonce-hooks-agent-access-required", + }); + const response = await harness.app.request("/hooks/agent", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AGENT_ACCESS_REQUIRED"); + }); + + it("rejects /hooks/agent when registry access-token validation fails", async () => { + const harness = await createAuthHarness({ + validateStatus: 401, + }); + const headers = await harness.createSignedHeaders({ + 
pathWithQuery: "/hooks/agent", + nonce: "nonce-hooks-agent-access-invalid", + }); + const response = await harness.app.request("/hooks/agent", { + method: "POST", + headers: { + ...headers, + "x-claw-agent-access": "clw_agt_invalid", + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AGENT_ACCESS_INVALID"); + }); + + it("accepts /hooks/agent when x-claw-agent-access validates", async () => { + const harness = await createAuthHarness({ + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + pathWithQuery: "/hooks/agent", + nonce: "nonce-hooks-agent-access-valid", + }); + const response = await harness.app.request("/hooks/agent", { + method: "POST", + headers: { + ...headers, + "x-claw-agent-access": "clw_agt_validtoken", + [RELAY_RECIPIENT_AGENT_DID_HEADER]: harness.claims.sub, + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(202); + }); + + it("requires x-claw-agent-access for relay websocket connect", async () => { + const harness = await createAuthHarness({ + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + method: "GET", + pathWithQuery: RELAY_CONNECT_PATH, + nonce: "nonce-relay-connect", + }); + const response = await harness.app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + ...headers, + upgrade: "websocket", + }, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AGENT_ACCESS_REQUIRED"); + }); + + it("accepts relay websocket connect when x-claw-agent-access validates", async () => { + const harness = await createAuthHarness({ + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + method: "GET", + pathWithQuery: RELAY_CONNECT_PATH, + nonce: "nonce-relay-connect-agent-access-valid", + }); + const response = await 
harness.app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + ...headers, + upgrade: "websocket", + "x-claw-agent-access": "clw_agt_validtoken", + }, + }); + + expect(response.status).toBe(204); + }); + + it("allows unknown agents to connect relay websocket when auth validates", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + validateStatus: 204, + }); + const headers = await harness.createSignedHeaders({ + method: "GET", + pathWithQuery: RELAY_CONNECT_PATH, + nonce: "nonce-relay-connect-unknown-agent", + }); + const response = await harness.app.request(RELAY_CONNECT_PATH, { + method: "GET", + headers: { + ...headers, + upgrade: "websocket", + "x-claw-agent-access": "clw_agt_validtoken", + }, + }); + + expect(response.status).toBe(204); + }); +}); diff --git a/apps/proxy/src/auth-middleware.test/basic.test.ts b/apps/proxy/src/auth-middleware.test/basic.test.ts new file mode 100644 index 0000000..129a35c --- /dev/null +++ b/apps/proxy/src/auth-middleware.test/basic.test.ts @@ -0,0 +1,124 @@ +import { describe, expect, it } from "vitest"; +import { PAIR_CONFIRM_PATH, PAIR_STATUS_PATH } from "../pairing-constants.js"; +import { BODY_JSON, createAuthHarness } from "./helpers.js"; + +describe("proxy auth middleware", () => { + it("keeps /health open without auth headers", async () => { + const harness = await createAuthHarness(); + const response = await harness.app.request("/health"); + + expect(response.status).toBe(200); + }); + + it("verifies inbound auth and exposes auth context to downstream handlers", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders(); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + ok: boolean; + auth: { + agentDid: string; + ownerDid: string; + aitJti: string; + }; + }; + 
expect(body.ok).toBe(true); + expect(body.auth.agentDid).toBe(harness.claims.sub); + expect(body.auth.ownerDid).toBe(harness.claims.ownerDid); + expect(body.auth.aitJti).toBe(harness.claims.jti); + }); + + it("returns 403 when a verified caller is not trusted by agent DID", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-not-trusted", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(403); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_FORBIDDEN"); + }); + + it("allows unknown agents to reach /pair/confirm for pairing bootstrap", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + const requestBody = JSON.stringify({ + ticket: "clwpair1_missing-ticket", + responderProfile: { + agentName: "beta", + humanName: "Ira", + }, + }); + const headers = await harness.createSignedHeaders({ + body: requestBody, + nonce: "nonce-pair-confirm-bootstrap", + pathWithQuery: PAIR_CONFIRM_PATH, + }); + + const response = await harness.app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers, + body: requestBody, + }); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); + }); + + it("allows unknown agents to reach /pair/status for initiator polling bootstrap", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + const requestBody = JSON.stringify({ ticket: "clwpair1_missing-ticket" }); + const headers = await harness.createSignedHeaders({ + body: requestBody, + nonce: "nonce-pair-status-bootstrap", + pathWithQuery: PAIR_STATUS_PATH, + }); + + const response = await 
harness.app.request(PAIR_STATUS_PATH, { + method: "POST", + headers, + body: requestBody, + }); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_PAIR_TICKET_INVALID_FORMAT"); + }); + + it("rejects /pair/confirm without Authorization", async () => { + const harness = await createAuthHarness({ + allowCurrentAgent: false, + }); + + const response = await harness.app.request(PAIR_CONFIRM_PATH, { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + ticket: "clwpair1_missing-ticket", + }), + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_MISSING_TOKEN"); + }); +}); diff --git a/apps/proxy/src/auth-middleware.test/helpers.ts b/apps/proxy/src/auth-middleware.test/helpers.ts new file mode 100644 index 0000000..d4a693b --- /dev/null +++ b/apps/proxy/src/auth-middleware.test/helpers.ts @@ -0,0 +1,261 @@ +import { AGENT_AUTH_VALIDATE_PATH, generateUlid } from "@clawdentity/protocol"; +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, + signAIT, + signCRL, + signHttpRequest, +} from "@clawdentity/sdk"; +import { buildTestAitClaims } from "@clawdentity/sdk/testing"; +import { vi } from "vitest"; +import type { AgentRelaySessionNamespace } from "../agent-relay-session.js"; +import { parseProxyConfig } from "../config.js"; +import { createInMemoryProxyTrustStore } from "../proxy-trust-store.js"; +import { createProxyApp } from "../server.js"; + +export const REGISTRY_KID = "registry-active-kid"; +export const NOW_MS = Date.now(); +export const NOW_SECONDS = Math.floor(NOW_MS / 1000); +export const ISSUER = "https://registry.clawdentity.com"; +export const BODY_JSON = JSON.stringify({ message: "hello" }); +export const KNOWN_PEER_DID = "did:claw:agent:known-peer"; + +type SignedHeadersInput = { + body?: string; 
+ method?: "GET" | "POST"; + nonce?: string; + pathWithQuery?: string; + timestamp?: string; + timestampSeconds?: number; +}; + +export type AuthHarnessOptions = { + expired?: boolean; + crlStaleBehavior?: "fail-open" | "fail-closed"; + fetchCrlFails?: boolean; + fetchKeysFails?: boolean; + allowCurrentAgent?: boolean; + revoked?: boolean; + validateStatus?: number; +}; + +export type AuthHarness = { + app: ReturnType<typeof createProxyApp>; + claims: ReturnType<typeof buildTestAitClaims>; + createSignedHeaders: ( + input?: SignedHeadersInput, + ) => Promise<Record<string, string>>; +}; + +export function resolveRequestUrl(requestInput: unknown): string { + if (typeof requestInput === "string") { + return requestInput; + } + if (requestInput instanceof URL) { + return requestInput.toString(); + } + if ( + typeof requestInput === "object" && + requestInput !== null && + "url" in requestInput && + typeof (requestInput as { url?: unknown }).url === "string" + ) { + return (requestInput as { url: string }).url; + } + + return ""; +} + +function createFetchMock(input: { + crlToken: string; + fetchCrlFails?: boolean; + fetchKeysFails?: boolean; + registryPublicKeyX: string; + validateStatus?: number; +}) { + return vi.fn(async (requestInput: unknown): Promise<Response> => { + const url = resolveRequestUrl(requestInput); + + if (url.endsWith("/.well-known/claw-keys.json")) { + if (input.fetchKeysFails) { + throw new Error("keys unavailable"); + } + + return new Response( + JSON.stringify({ + keys: [ + { + kid: REGISTRY_KID, + alg: "EdDSA", + crv: "Ed25519", + x: input.registryPublicKeyX, + status: "active", + }, + ], + }), + { status: 200 }, + ); + } + + if (url.endsWith("/v1/crl")) { + if (input.fetchCrlFails) { + throw new Error("crl unavailable"); + } + + return new Response( + JSON.stringify({ + crl: input.crlToken, + }), + { status: 200 }, + ); + } + + if (url.endsWith(AGENT_AUTH_VALIDATE_PATH)) { + const status = input.validateStatus ?? 204; + return new Response(status === 204 ? 
null : "", { status }); + } + + return new Response("not found", { status: 404 }); + }); +} + +export async function createAuthHarness( + options: AuthHarnessOptions = {}, +): Promise<AuthHarness> { + const registryKeypair = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const encodedRegistry = encodeEd25519KeypairBase64url(registryKeypair); + const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); + const claims = buildTestAitClaims({ + publicKeyX: encodedAgent.publicKey, + issuer: ISSUER, + nowSeconds: NOW_SECONDS - 10, + ttlSeconds: 610, + nbfSkewSeconds: 0, + seedMs: NOW_MS, + }); + if (options.expired) { + claims.exp = NOW_SECONDS - 1; + } + + const ait = await signAIT({ + claims, + signerKid: REGISTRY_KID, + signerKeypair: registryKeypair, + }); + + const revocationJti = options.revoked + ? claims.jti + : generateUlid(NOW_MS + 40); + const crl = await signCRL({ + claims: { + iss: ISSUER, + jti: generateUlid(NOW_MS + 50), + iat: NOW_SECONDS - 10, + exp: NOW_SECONDS + 600, + revocations: [ + { + jti: revocationJti, + agentDid: claims.sub, + revokedAt: NOW_SECONDS - 5, + reason: "manual revoke", + }, + ], + }, + signerKid: REGISTRY_KID, + signerKeypair: registryKeypair, + }); + + const fetchMock = createFetchMock({ + crlToken: crl, + fetchCrlFails: options.fetchCrlFails, + fetchKeysFails: options.fetchKeysFails, + registryPublicKeyX: encodedRegistry.publicKey, + validateStatus: options.validateStatus, + }); + + const trustStore = createInMemoryProxyTrustStore(); + if (options.allowCurrentAgent !== false) { + await trustStore.upsertPair({ + initiatorAgentDid: claims.sub, + responderAgentDid: KNOWN_PEER_DID, + }); + } + + const relaySession = { + fetch: vi.fn(async (request: Request) => { + if (request.method === "POST") { + return Response.json( + { + delivered: true, + connectedSockets: 1, + }, + { status: 202 }, + ); + } + + return new Response(null, { status: 204 }); + }), + }; + const relayNamespace = { + idFromName: 
vi.fn((_name: string) => ({}) as DurableObjectId), + get: vi.fn((_id: DurableObjectId) => relaySession), + } satisfies AgentRelaySessionNamespace; + + const app = createProxyApp({ + config: parseProxyConfig({ + ...(options.crlStaleBehavior + ? { CRL_STALE_BEHAVIOR: options.crlStaleBehavior } + : {}), + }), + trustStore, + auth: { + fetchImpl: fetchMock as typeof fetch, + clock: () => NOW_MS, + }, + hooks: { + resolveSessionNamespace: () => relayNamespace, + now: () => new Date(NOW_MS).toISOString(), + }, + relay: { + resolveSessionNamespace: () => relayNamespace, + }, + registerRoutes: (nextApp) => { + nextApp.post("/protected", (c) => { + const auth = c.get("auth"); + return c.json({ + ok: true, + auth, + }); + }); + }, + }); + + return { + app, + claims, + createSignedHeaders: async (input = {}) => { + const method = input.method ?? "POST"; + const body = input.body ?? (method === "GET" ? "" : BODY_JSON); + const nonce = input.nonce ?? "nonce-1"; + const pathWithQuery = input.pathWithQuery ?? "/protected"; + const timestamp = + input.timestamp ?? String(input.timestampSeconds ?? NOW_SECONDS); + + const signed = await signHttpRequest({ + method, + pathWithQuery, + timestamp, + nonce, + body: new TextEncoder().encode(body), + secretKey: agentKeypair.secretKey, + }); + + return { + authorization: `Claw ${ait}`, + ...(method === "POST" ? 
{ "content-type": "application/json" } : {}), + ...signed.headers, + }; + }, + }; +} diff --git a/apps/proxy/src/auth-middleware.test/robustness.test.ts b/apps/proxy/src/auth-middleware.test/robustness.test.ts new file mode 100644 index 0000000..2fc8e7e --- /dev/null +++ b/apps/proxy/src/auth-middleware.test/robustness.test.ts @@ -0,0 +1,188 @@ +import { describe, expect, it } from "vitest"; +import { BODY_JSON, createAuthHarness, NOW_SECONDS } from "./helpers.js"; + +describe("proxy auth middleware", () => { + it("rejects non-health route when Authorization scheme is not Claw", async () => { + const harness = await createAuthHarness(); + const response = await harness.app.request("/protected", { + method: "POST", + headers: { + authorization: "Bearer token", + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); + }); + + it("rejects Authorization headers with extra segments", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-auth-extra", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers: { + ...headers, + authorization: `${headers.authorization} extra`, + }, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_SCHEME"); + }); + + it("rejects replayed nonce for the same agent", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-replay-1", + }); + + const first = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + const second = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + 
expect(first.status).toBe(200); + expect(second.status).toBe(401); + const body = (await second.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_REPLAY"); + }); + + it("rejects requests outside the timestamp skew window", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + timestampSeconds: NOW_SECONDS - 301, + nonce: "nonce-old", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_TIMESTAMP_SKEW"); + }); + + it.each([ + `${NOW_SECONDS}abc`, + `${NOW_SECONDS}.5`, + ])("rejects malformed X-Claw-Timestamp header: %s", async (malformedTimestamp) => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + timestamp: malformedTimestamp, + nonce: "nonce-invalid-timestamp", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_TIMESTAMP"); + }); + + it("rejects proof mismatches when body is tampered", async () => { + const harness = await createAuthHarness(); + const headers = await harness.createSignedHeaders({ + body: BODY_JSON, + nonce: "nonce-tampered", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: JSON.stringify({ message: "tampered" }), + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_PROOF"); + }); + + it("rejects revoked AITs", async () => { + const harness = await createAuthHarness({ + revoked: true, + }); + 
const headers = await harness.createSignedHeaders({ + nonce: "nonce-revoked", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_REVOKED"); + }); + + it("rejects expired AITs", async () => { + const harness = await createAuthHarness({ + expired: true, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-expired", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_INVALID_AIT"); + }); + + it("returns 503 when registry signing keys are unavailable", async () => { + const harness = await createAuthHarness({ + fetchKeysFails: true, + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-keys-fail", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_DEPENDENCY_UNAVAILABLE"); + }); + + it("returns 503 when CRL is unavailable in fail-closed mode", async () => { + const harness = await createAuthHarness({ + fetchCrlFails: true, + crlStaleBehavior: "fail-closed", + }); + const headers = await harness.createSignedHeaders({ + nonce: "nonce-crl-fail-closed", + }); + const response = await harness.app.request("/protected", { + method: "POST", + headers, + body: BODY_JSON, + }); + + expect(response.status).toBe(503); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("PROXY_AUTH_DEPENDENCY_UNAVAILABLE"); + }); +}); diff --git 
a/apps/proxy/src/auth-middleware.test/rotation.test.ts b/apps/proxy/src/auth-middleware.test/rotation.test.ts new file mode 100644 index 0000000..2b23e7e --- /dev/null +++ b/apps/proxy/src/auth-middleware.test/rotation.test.ts @@ -0,0 +1,192 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { + encodeEd25519KeypairBase64url, + generateEd25519Keypair, + signAIT, + signCRL, + signHttpRequest, +} from "@clawdentity/sdk"; +import { buildTestAitClaims } from "@clawdentity/sdk/testing"; +import { describe, expect, it, vi } from "vitest"; +import { parseProxyConfig } from "../config.js"; +import { createInMemoryProxyTrustStore } from "../proxy-trust-store.js"; +import { createProxyApp } from "../server.js"; +import { + BODY_JSON, + ISSUER, + KNOWN_PEER_DID, + NOW_MS, + NOW_SECONDS, + resolveRequestUrl, +} from "./helpers.js"; + +type RotationCase = { + aitSignedBy: "old" | "new"; + crlSignedBy: "old" | "new"; + nonce: string; +}; + +async function runRotationCase(input: RotationCase) { + const oldKid = "registry-old-kid"; + const newKid = "registry-new-kid"; + const oldRegistryKeypair = await generateEd25519Keypair(); + const newRegistryKeypair = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const encodedOldRegistry = encodeEd25519KeypairBase64url(oldRegistryKeypair); + const encodedNewRegistry = encodeEd25519KeypairBase64url(newRegistryKeypair); + const encodedAgent = encodeEd25519KeypairBase64url(agentKeypair); + + const claims = buildTestAitClaims({ + publicKeyX: encodedAgent.publicKey, + issuer: ISSUER, + nowSeconds: NOW_SECONDS - 10, + ttlSeconds: 610, + nbfSkewSeconds: 0, + seedMs: NOW_MS, + }); + + const signerByKey = { + old: { + kid: oldKid, + keypair: oldRegistryKeypair, + }, + new: { + kid: newKid, + keypair: newRegistryKeypair, + }, + } as const; + + const aitSigner = signerByKey[input.aitSignedBy]; + const crlSigner = signerByKey[input.crlSignedBy]; + + const ait = await signAIT({ + claims, + 
signerKid: aitSigner.kid, + signerKeypair: aitSigner.keypair, + }); + const crl = await signCRL({ + claims: { + iss: ISSUER, + jti: generateUlid(NOW_MS + 70), + iat: NOW_SECONDS - 10, + exp: NOW_SECONDS + 600, + revocations: [ + { + jti: generateUlid(NOW_MS + 80), + agentDid: claims.sub, + revokedAt: NOW_SECONDS - 5, + reason: "manual revoke", + }, + ], + }, + signerKid: crlSigner.kid, + signerKeypair: crlSigner.keypair, + }); + + let keyFetchCount = 0; + const fetchMock = vi.fn(async (requestInput: unknown): Promise => { + const url = resolveRequestUrl(requestInput); + + if (url.endsWith("/.well-known/claw-keys.json")) { + keyFetchCount += 1; + const key = + keyFetchCount === 1 + ? { + kid: oldKid, + alg: "EdDSA", + crv: "Ed25519", + x: encodedOldRegistry.publicKey, + status: "active", + } + : { + kid: newKid, + alg: "EdDSA", + crv: "Ed25519", + x: encodedNewRegistry.publicKey, + status: "active", + }; + return new Response( + JSON.stringify({ + keys: [key], + }), + { status: 200 }, + ); + } + + if (url.endsWith("/v1/crl")) { + return new Response( + JSON.stringify({ + crl, + }), + { status: 200 }, + ); + } + + return new Response("not found", { status: 404 }); + }); + + const trustStore = createInMemoryProxyTrustStore(); + await trustStore.upsertPair({ + initiatorAgentDid: claims.sub, + responderAgentDid: KNOWN_PEER_DID, + }); + + const app = createProxyApp({ + config: parseProxyConfig({}), + trustStore, + auth: { + fetchImpl: fetchMock as typeof fetch, + clock: () => NOW_MS, + }, + registerRoutes: (nextApp) => { + nextApp.post("/protected", (c) => c.json({ ok: true })); + }, + }); + + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: "/protected", + timestamp: String(NOW_SECONDS), + nonce: input.nonce, + body: new TextEncoder().encode(BODY_JSON), + secretKey: agentKeypair.secretKey, + }); + const response = await app.request("/protected", { + method: "POST", + headers: { + authorization: `Claw ${ait}`, + "content-type": 
"application/json", + ...signed.headers, + }, + body: BODY_JSON, + }); + + return { + keyFetchCount, + response, + }; +} + +describe("proxy auth middleware", () => { + it("refreshes keyset and accepts valid AIT after registry key rotation", async () => { + const { keyFetchCount, response } = await runRotationCase({ + aitSignedBy: "new", + crlSignedBy: "new", + nonce: "nonce-rotation", + }); + + expect(response.status).toBe(200); + expect(keyFetchCount).toBe(2); + }); + + it("refreshes keyset and verifies CRL after registry CRL key rotation", async () => { + const { keyFetchCount, response } = await runRotationCase({ + aitSignedBy: "old", + crlSignedBy: "new", + nonce: "nonce-crl-rotation", + }); + + expect(response.status).toBe(200); + expect(keyFetchCount).toBe(2); + }); +}); From 630a91fa4b23aa00ffe8bcca8a01fb35e136e92a Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 17:34:25 +0530 Subject: [PATCH 133/190] test(cli,registry): split openclaw and server tests into modules --- apps/cli/src/commands/openclaw.test.ts | 1962 ----- apps/cli/src/commands/openclaw.test/AGENTS.md | 24 + .../src/commands/openclaw.test/doctor.test.ts | 535 ++ .../cli/src/commands/openclaw.test/helpers.ts | 150 + .../src/commands/openclaw.test/invite.test.ts | 28 + .../src/commands/openclaw.test/relay.test.ts | 305 + .../commands/openclaw.test/setup-core.test.ts | 465 ++ .../openclaw.test/setup-runtime.test.ts | 404 + apps/registry/src/server.test.ts | 6973 ----------------- apps/registry/src/server.test/AGENTS.md | 36 + .../server.test/agent-auth-refresh.test.ts | 374 + .../agent-auth-validate-revoke.test.ts | 384 + .../agent-registration-challenge.test.ts | 111 + .../agent-registration-create.test.ts | 779 ++ .../server.test/agents-delete-reissue.test.ts | 683 ++ .../server.test/agents-list-ownership.test.ts | 522 ++ .../server.test/health-metadata-admin.test.ts | 476 ++ apps/registry/src/server.test/helpers.ts | 8 + .../src/server.test/helpers/AGENTS.md | 21 + 
.../src/server.test/helpers/claims.ts | 16 + .../src/server.test/helpers/crypto.ts | 69 + .../src/server.test/helpers/db/index.ts | 2 + .../src/server.test/helpers/db/mock.ts | 542 ++ .../src/server.test/helpers/db/parse.ts | 143 + .../src/server.test/helpers/db/resolvers.ts | 668 ++ .../helpers/db/run-handlers-phase-one.ts | 414 + .../helpers/db/run-handlers-phase-two.ts | 407 + .../helpers/db/run-handlers-types.ts | 9 + .../server.test/helpers/db/run-handlers.ts | 38 + .../src/server.test/helpers/db/types.ts | 182 + .../registry/src/server.test/helpers/index.ts | 8 + apps/registry/src/server.test/helpers/pat.ts | 24 + apps/registry/src/server.test/invites.test.ts | 392 + .../registry/src/server.test/keys-crl.test.ts | 409 + .../src/server.test/me-api-keys.test.ts | 359 + .../src/server.test/resolve-me.test.ts | 252 + 36 files changed, 9239 insertions(+), 8935 deletions(-) delete mode 100644 apps/cli/src/commands/openclaw.test.ts create mode 100644 apps/cli/src/commands/openclaw.test/AGENTS.md create mode 100644 apps/cli/src/commands/openclaw.test/doctor.test.ts create mode 100644 apps/cli/src/commands/openclaw.test/helpers.ts create mode 100644 apps/cli/src/commands/openclaw.test/invite.test.ts create mode 100644 apps/cli/src/commands/openclaw.test/relay.test.ts create mode 100644 apps/cli/src/commands/openclaw.test/setup-core.test.ts create mode 100644 apps/cli/src/commands/openclaw.test/setup-runtime.test.ts delete mode 100644 apps/registry/src/server.test.ts create mode 100644 apps/registry/src/server.test/AGENTS.md create mode 100644 apps/registry/src/server.test/agent-auth-refresh.test.ts create mode 100644 apps/registry/src/server.test/agent-auth-validate-revoke.test.ts create mode 100644 apps/registry/src/server.test/agent-registration-challenge.test.ts create mode 100644 apps/registry/src/server.test/agent-registration-create.test.ts create mode 100644 apps/registry/src/server.test/agents-delete-reissue.test.ts create mode 100644 
apps/registry/src/server.test/agents-list-ownership.test.ts create mode 100644 apps/registry/src/server.test/health-metadata-admin.test.ts create mode 100644 apps/registry/src/server.test/helpers.ts create mode 100644 apps/registry/src/server.test/helpers/AGENTS.md create mode 100644 apps/registry/src/server.test/helpers/claims.ts create mode 100644 apps/registry/src/server.test/helpers/crypto.ts create mode 100644 apps/registry/src/server.test/helpers/db/index.ts create mode 100644 apps/registry/src/server.test/helpers/db/mock.ts create mode 100644 apps/registry/src/server.test/helpers/db/parse.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers.ts create mode 100644 apps/registry/src/server.test/helpers/db/run-handlers-phase-one.ts create mode 100644 apps/registry/src/server.test/helpers/db/run-handlers-phase-two.ts create mode 100644 apps/registry/src/server.test/helpers/db/run-handlers-types.ts create mode 100644 apps/registry/src/server.test/helpers/db/run-handlers.ts create mode 100644 apps/registry/src/server.test/helpers/db/types.ts create mode 100644 apps/registry/src/server.test/helpers/index.ts create mode 100644 apps/registry/src/server.test/helpers/pat.ts create mode 100644 apps/registry/src/server.test/invites.test.ts create mode 100644 apps/registry/src/server.test/keys-crl.test.ts create mode 100644 apps/registry/src/server.test/me-api-keys.test.ts create mode 100644 apps/registry/src/server.test/resolve-me.test.ts diff --git a/apps/cli/src/commands/openclaw.test.ts b/apps/cli/src/commands/openclaw.test.ts deleted file mode 100644 index 5927a42..0000000 --- a/apps/cli/src/commands/openclaw.test.ts +++ /dev/null @@ -1,1962 +0,0 @@ -import { - mkdirSync, - mkdtempSync, - readFileSync, - rmSync, - writeFileSync, -} from "node:fs"; -import { tmpdir } from "node:os"; -import { dirname, join } from "node:path"; -import { describe, expect, it } from "vitest"; -import { getConfigDir } from "../config/manager.js"; -import { - 
createOpenclawCommand, - createOpenclawInviteCode, - decodeOpenclawInviteCode, - runOpenclawDoctor, - runOpenclawRelayTest, - runOpenclawRelayWebsocketTest, - setupOpenclawRelay, - setupOpenclawRelayFromInvite, - setupOpenclawSelfReady, -} from "./openclaw.js"; - -type OpenclawSandbox = { - cleanup: () => void; - homeDir: string; - openclawDir: string; - transformSourcePath: string; -}; - -function createSandbox(): OpenclawSandbox { - const root = mkdtempSync(join(tmpdir(), "clawdentity-cli-openclaw-")); - const homeDir = join(root, "home"); - const openclawDir = join(root, "openclaw"); - const transformSourcePath = join(root, "relay-to-peer.mjs"); - - mkdirSync(homeDir, { recursive: true }); - mkdirSync(openclawDir, { recursive: true }); - - writeFileSync( - join(openclawDir, "openclaw.json"), - JSON.stringify( - { - hooks: { - enabled: false, - mappings: [], - }, - }, - null, - 2, - ), - "utf8", - ); - - writeFileSync( - transformSourcePath, - "export default async function relay(ctx){ return ctx?.payload ?? 
null; }\n", - "utf8", - ); - - return { - cleanup: () => { - rmSync(root, { recursive: true, force: true }); - }, - homeDir, - openclawDir, - transformSourcePath, - }; -} - -function resolveCliStateDir(homeDir: string): string { - return getConfigDir({ homeDir }); -} - -function seedLocalAgentCredentials(homeDir: string, agentName: string): void { - const agentDir = join(resolveCliStateDir(homeDir), "agents", agentName); - mkdirSync(agentDir, { recursive: true }); - writeFileSync(join(agentDir, "secret.key"), "secret-key-value", "utf8"); - writeFileSync(join(agentDir, "ait.jwt"), "mock.ait.jwt", "utf8"); -} - -function seedPeersConfig( - homeDir: string, - peers: Record< - string, - { did: string; proxyUrl: string; agentName?: string; humanName?: string } - >, -): void { - const peersPath = join(resolveCliStateDir(homeDir), "peers.json"); - mkdirSync(dirname(peersPath), { recursive: true }); - writeFileSync(peersPath, `${JSON.stringify({ peers }, null, 2)}\n`, "utf8"); -} - -function connectorReadyFetch(): typeof fetch { - return async () => - new Response( - JSON.stringify({ - status: "ok", - websocket: { - connected: true, - }, - inbound: { - pending: { - pendingCount: 0, - pendingBytes: 0, - }, - deadLetter: { - deadLetterCount: 0, - deadLetterBytes: 0, - }, - replay: { - replayerActive: false, - }, - openclawHook: { - url: "http://127.0.0.1:18789/hooks/agent", - lastAttemptStatus: "ok", - }, - }, - }), - { - status: 200, - headers: { - "content-type": "application/json", - }, - }, - ); -} - -describe("openclaw command helpers", () => { - it("creates and decodes invite codes", () => { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - agentName: "beta", - humanName: "Ira", - }); - - expect(invite.code.startsWith("clawd1_")).toBe(true); - - const decoded = decodeOpenclawInviteCode(invite.code); - expect(decoded.v).toBe(1); - 
expect(decoded.did).toBe("did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"); - expect(decoded.proxyUrl).toBe("https://beta.example.com/hooks/agent"); - expect(decoded.alias).toBe("beta"); - expect(decoded.agentName).toBe("beta"); - expect(decoded.humanName).toBe("Ira"); - expect(decoded.issuedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); - }); - - it("applies relay setup and patches OpenClaw config", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const copiedTransform = readFileSync(result.transformTargetPath, "utf8"); - expect(copiedTransform).toContain("relay(ctx)"); - expect(result.openclawConfigChanged).toBe(true); - - const openclawConfig = JSON.parse( - readFileSync(result.openclawConfigPath, "utf8"), - ) as { - gateway?: { - auth?: { - mode?: string; - token?: string; - }; - }; - hooks: { - enabled?: boolean; - token?: string; - defaultSessionKey?: string; - allowRequestSessionKey?: boolean; - allowedSessionKeyPrefixes?: string[]; - mappings?: Array>; - }; - }; - - expect(openclawConfig.hooks.enabled).toBe(true); - expect(typeof openclawConfig.hooks.token).toBe("string"); - expect(openclawConfig.hooks.token?.length ?? 0).toBeGreaterThan(0); - expect(openclawConfig.hooks.defaultSessionKey).toBe("main"); - expect(openclawConfig.hooks.allowRequestSessionKey).toBe(false); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("main"); - expect(openclawConfig.gateway?.auth?.mode).toBe("token"); - expect(typeof openclawConfig.gateway?.auth?.token).toBe("string"); - expect(openclawConfig.gateway?.auth?.token?.length ?? 
0).toBeGreaterThan( - 0, - ); - expect( - openclawConfig.hooks.mappings?.some( - (mapping) => - mapping.id === "clawdentity-send-to-peer" && - (mapping.match as { path?: string })?.path === "send-to-peer" && - mapping.action === "agent" && - mapping.wakeMode === "now" && - (mapping.transform as { module?: string })?.module === - "relay-to-peer.mjs", - ), - ).toBe(true); - - const peers = JSON.parse( - readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "peers.json"), - "utf8", - ), - ) as { - peers: Record< - string, - { - did: string; - proxyUrl: string; - agentName?: string; - humanName?: string; - } - >; - }; - expect(peers.peers).toEqual({}); - - const selectedAgent = readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "openclaw-agent-name"), - "utf8", - ).trim(); - expect(selectedAgent).toBe("alpha"); - - expect(result.openclawBaseUrl).toBe("http://127.0.0.1:18789"); - expect(result.connectorBaseUrl).toBe("http://127.0.0.1:19400"); - expect(readFileSync(result.relayTransformRuntimePath, "utf8")).toContain( - '"connectorBaseUrl": "http://host.docker.internal:19400"', - ); - expect(readFileSync(result.relayTransformPeersPath, "utf8")).toContain( - '"peers": {}', - ); - const relayRuntimeConfig = JSON.parse( - readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), - "utf8", - ), - ) as { - openclawBaseUrl: string; - openclawHookToken?: string; - relayTransformPeersPath?: string; - updatedAt: string; - }; - expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:18789"); - expect(relayRuntimeConfig.openclawHookToken).toBe( - openclawConfig.hooks.token, - ); - expect(relayRuntimeConfig.relayTransformPeersPath).toBe( - result.relayTransformPeersPath, - ); - expect(relayRuntimeConfig.updatedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); - - const connectorAssignments = JSON.parse( - readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "openclaw-connectors.json"), - "utf8", - ), - ) as { - agents: Record; - }; - 
expect(connectorAssignments.agents.alpha.connectorBaseUrl).toBe( - "http://127.0.0.1:19400", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("does not rewrite OpenClaw config when setup state is already current", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousGatewayToken = process.env.OPENCLAW_GATEWAY_TOKEN; - delete process.env.OPENCLAW_GATEWAY_TOKEN; - const preconfiguredOpenclawJson = - '{"hooks":{"enabled":true,"token":"hook-token","defaultSessionKey":"main","allowRequestSessionKey":false,"allowedSessionKeyPrefixes":["hook:","main"],"mappings":[{"id":"clawdentity-send-to-peer","match":{"path":"send-to-peer"},"action":"agent","wakeMode":"now","transform":{"module":"relay-to-peer.mjs"}}]},"gateway":{"auth":{"mode":"token","token":"gateway-token"}}}\n'; - writeFileSync( - join(sandbox.openclawDir, "openclaw.json"), - preconfiguredOpenclawJson, - "utf8", - ); - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - expect(result.openclawConfigChanged).toBe(false); - expect(readFileSync(result.openclawConfigPath, "utf8")).toBe( - preconfiguredOpenclawJson, - ); - } finally { - if (previousGatewayToken === undefined) { - delete process.env.OPENCLAW_GATEWAY_TOKEN; - } else { - process.env.OPENCLAW_GATEWAY_TOKEN = previousGatewayToken; - } - sandbox.cleanup(); - } - }); - - it("supports setup-only mode without runtime startup", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const result = await setupOpenclawSelfReady("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - noRuntimeStart: true, - }); - - expect(result.runtimeMode).toBe("none"); - expect(result.runtimeStatus).toBe("skipped"); - 
expect(result.websocketStatus).toBe("skipped"); - expect(readFileSync(result.transformTargetPath, "utf8")).toContain( - "relay(ctx)", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("syncs gateway auth token from OPENCLAW_GATEWAY_TOKEN during setup", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousGatewayToken = process.env.OPENCLAW_GATEWAY_TOKEN; - process.env.OPENCLAW_GATEWAY_TOKEN = "gateway-token-from-env"; - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfig = JSON.parse( - readFileSync(result.openclawConfigPath, "utf8"), - ) as { - gateway?: { - auth?: { - mode?: string; - token?: string; - }; - }; - }; - - expect(openclawConfig.gateway?.auth?.mode).toBe("token"); - expect(openclawConfig.gateway?.auth?.token).toBe( - "gateway-token-from-env", - ); - } finally { - if (previousGatewayToken === undefined) { - delete process.env.OPENCLAW_GATEWAY_TOKEN; - } else { - process.env.OPENCLAW_GATEWAY_TOKEN = previousGatewayToken; - } - sandbox.cleanup(); - } - }); - - it("auto-recovers setup checklist when OpenClaw has pending gateway device approvals", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const pendingPath = join(sandbox.openclawDir, "devices", "pending.json"); - mkdirSync(dirname(pendingPath), { recursive: true }); - writeFileSync( - pendingPath, - JSON.stringify( - { - "request-1": { - requestId: "request-1", - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - const approvedRequestIds: string[] = []; - const result = await setupOpenclawSelfReady("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - noRuntimeStart: true, - gatewayDeviceApprovalRunner: async ({ requestId }) => { - 
approvedRequestIds.push(requestId); - writeFileSync(pendingPath, JSON.stringify({}, null, 2), "utf8"); - return { - ok: true, - }; - }, - }); - - expect(result.runtimeMode).toBe("none"); - expect(approvedRequestIds).toEqual(["request-1"]); - const pendingAfterRecovery = JSON.parse( - readFileSync(pendingPath, "utf8"), - ) as Record; - expect(Object.keys(pendingAfterRecovery)).toHaveLength(0); - } finally { - sandbox.cleanup(); - } - }); - - it("fails setup checklist when gateway approval runner is unavailable", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const pendingPath = join(sandbox.openclawDir, "devices", "pending.json"); - mkdirSync(dirname(pendingPath), { recursive: true }); - writeFileSync( - pendingPath, - JSON.stringify( - { - "request-1": { - requestId: "request-1", - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - await expect( - setupOpenclawSelfReady("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - noRuntimeStart: true, - gatewayDeviceApprovalRunner: async () => ({ - ok: false, - unavailable: true, - errorMessage: "spawn openclaw ENOENT", - }), - }), - ).rejects.toMatchObject({ - code: "CLI_OPENCLAW_SETUP_CHECKLIST_FAILED", - }); - } finally { - sandbox.cleanup(); - } - }); - - it("preserves explicit hook request session key (including subagent keys)", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - writeFileSync( - join(sandbox.openclawDir, "openclaw.json"), - JSON.stringify( - { - hooks: { - enabled: true, - token: "existing-token", - defaultSessionKey: "subagent:planner", - allowedSessionKeyPrefixes: ["hook:"], - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - const 
openclawConfig = JSON.parse( - readFileSync(result.openclawConfigPath, "utf8"), - ) as { - hooks: { - token?: string; - defaultSessionKey?: string; - allowedSessionKeyPrefixes?: string[]; - }; - }; - - expect(openclawConfig.hooks.token).toBe("existing-token"); - expect(openclawConfig.hooks.defaultSessionKey).toBe("subagent:planner"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( - "subagent:planner", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("normalizes legacy canonical hook default session key to request format", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - writeFileSync( - join(sandbox.openclawDir, "openclaw.json"), - JSON.stringify( - { - hooks: { - enabled: true, - token: "existing-token", - defaultSessionKey: "agent:ops:subagent:planner", - allowedSessionKeyPrefixes: ["hook:"], - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - const openclawConfig = JSON.parse( - readFileSync(result.openclawConfigPath, "utf8"), - ) as { - hooks: { - token?: string; - defaultSessionKey?: string; - allowedSessionKeyPrefixes?: string[]; - }; - }; - - expect(openclawConfig.hooks.token).toBe("existing-token"); - expect(openclawConfig.hooks.defaultSessionKey).toBe("subagent:planner"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( - "subagent:planner", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("derives hook default session key from OpenClaw session scope and main key", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - writeFileSync( - join(sandbox.openclawDir, 
"openclaw.json"), - JSON.stringify( - { - session: { mainKey: "work" }, - agents: { - list: [{ id: "main" }, { id: "ops-team", default: true }], - }, - hooks: { - enabled: false, - mappings: [], - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - const openclawConfig = JSON.parse( - readFileSync(result.openclawConfigPath, "utf8"), - ) as { - hooks: { - defaultSessionKey?: string; - allowedSessionKeyPrefixes?: string[]; - }; - }; - - expect(openclawConfig.hooks.defaultSessionKey).toBe("work"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("work"); - } finally { - sandbox.cleanup(); - } - }); - - it("uses global hook default session when OpenClaw session scope is global", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - writeFileSync( - join(sandbox.openclawDir, "openclaw.json"), - JSON.stringify( - { - session: { scope: "global" }, - hooks: { - enabled: false, - mappings: [], - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - const result = await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - const openclawConfig = JSON.parse( - readFileSync(result.openclawConfigPath, "utf8"), - ) as { - hooks: { - defaultSessionKey?: string; - allowedSessionKeyPrefixes?: string[]; - }; - }; - - expect(openclawConfig.hooks.defaultSessionKey).toBe("global"); - expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( - "global", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("stores explicit OpenClaw base URL in relay runtime config", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: 
"did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - const result = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - openclawBaseUrl: "http://127.0.0.1:19001", - }); - - expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19001"); - const relayRuntimeConfig = JSON.parse( - readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), - "utf8", - ), - ) as { - openclawBaseUrl: string; - }; - expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:19001"); - } finally { - sandbox.cleanup(); - } - }); - - it("uses OPENCLAW_BASE_URL env when setup option is omitted", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousBaseUrl = process.env.OPENCLAW_BASE_URL; - process.env.OPENCLAW_BASE_URL = "http://127.0.0.1:19555"; - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - const result = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19555"); - const relayRuntimeConfig = JSON.parse( - readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), - "utf8", - ), - ) as { - openclawBaseUrl: string; - }; - expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:19555"); - } finally { - if (previousBaseUrl === undefined) { - delete process.env.OPENCLAW_BASE_URL; - } else { - process.env.OPENCLAW_BASE_URL = previousBaseUrl; - } - sandbox.cleanup(); - } - }); - - it("resolves OpenClaw state/config paths from env 
variables", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousStateDir = process.env.OPENCLAW_STATE_DIR; - const previousConfigPath = process.env.OPENCLAW_CONFIG_PATH; - - try { - const customStateDir = join(sandbox.homeDir, ".openclaw-custom"); - const customConfigPath = join(customStateDir, "openclaw.custom.json"); - mkdirSync(customStateDir, { recursive: true }); - writeFileSync( - customConfigPath, - JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), - "utf8", - ); - - process.env.OPENCLAW_STATE_DIR = customStateDir; - process.env.OPENCLAW_CONFIG_PATH = customConfigPath; - - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - const result = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - transformSource: sandbox.transformSourcePath, - }); - - expect(result.openclawConfigPath).toBe(customConfigPath); - expect(result.transformTargetPath).toBe( - join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), - ); - } finally { - if (previousStateDir === undefined) { - delete process.env.OPENCLAW_STATE_DIR; - } else { - process.env.OPENCLAW_STATE_DIR = previousStateDir; - } - if (previousConfigPath === undefined) { - delete process.env.OPENCLAW_CONFIG_PATH; - } else { - process.env.OPENCLAW_CONFIG_PATH = previousConfigPath; - } - sandbox.cleanup(); - } - }); - - it("resolves OpenClaw state/config paths from legacy CLAWDBOT_* env aliases", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousStateDir = process.env.CLAWDBOT_STATE_DIR; - const previousConfigPath = process.env.CLAWDBOT_CONFIG_PATH; - const previousOpenclawStateDir = process.env.OPENCLAW_STATE_DIR; - const previousOpenclawConfigPath = process.env.OPENCLAW_CONFIG_PATH; - 
- try { - const customStateDir = join(sandbox.homeDir, ".clawdbot-custom"); - const customConfigPath = join(customStateDir, "clawdbot.custom.json"); - mkdirSync(customStateDir, { recursive: true }); - writeFileSync( - customConfigPath, - JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), - "utf8", - ); - - delete process.env.OPENCLAW_STATE_DIR; - delete process.env.OPENCLAW_CONFIG_PATH; - process.env.CLAWDBOT_STATE_DIR = customStateDir; - process.env.CLAWDBOT_CONFIG_PATH = customConfigPath; - - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - const result = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - transformSource: sandbox.transformSourcePath, - }); - - expect(result.openclawConfigPath).toBe(customConfigPath); - expect(result.transformTargetPath).toBe( - join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), - ); - } finally { - if (previousStateDir === undefined) { - delete process.env.CLAWDBOT_STATE_DIR; - } else { - process.env.CLAWDBOT_STATE_DIR = previousStateDir; - } - if (previousConfigPath === undefined) { - delete process.env.CLAWDBOT_CONFIG_PATH; - } else { - process.env.CLAWDBOT_CONFIG_PATH = previousConfigPath; - } - if (previousOpenclawStateDir === undefined) { - delete process.env.OPENCLAW_STATE_DIR; - } else { - process.env.OPENCLAW_STATE_DIR = previousOpenclawStateDir; - } - if (previousOpenclawConfigPath === undefined) { - delete process.env.OPENCLAW_CONFIG_PATH; - } else { - process.env.OPENCLAW_CONFIG_PATH = previousOpenclawConfigPath; - } - sandbox.cleanup(); - } - }); - - it("resolves default OpenClaw state from OPENCLAW_HOME", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousOpenclawHome = process.env.OPENCLAW_HOME; - const previousOpenclawStateDir = 
process.env.OPENCLAW_STATE_DIR; - const previousOpenclawConfigPath = process.env.OPENCLAW_CONFIG_PATH; - const previousClawdbotStateDir = process.env.CLAWDBOT_STATE_DIR; - const previousClawdbotConfigPath = process.env.CLAWDBOT_CONFIG_PATH; - - try { - const customHome = join(sandbox.homeDir, "openclaw-home"); - const customStateDir = join(customHome, ".openclaw"); - const customConfigPath = join(customStateDir, "openclaw.json"); - mkdirSync(customStateDir, { recursive: true }); - writeFileSync( - customConfigPath, - JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), - "utf8", - ); - - process.env.OPENCLAW_HOME = customHome; - delete process.env.OPENCLAW_STATE_DIR; - delete process.env.OPENCLAW_CONFIG_PATH; - delete process.env.CLAWDBOT_STATE_DIR; - delete process.env.CLAWDBOT_CONFIG_PATH; - - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - const result = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - transformSource: sandbox.transformSourcePath, - }); - - expect(result.openclawConfigPath).toBe(customConfigPath); - expect(result.transformTargetPath).toBe( - join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), - ); - } finally { - if (previousOpenclawHome === undefined) { - delete process.env.OPENCLAW_HOME; - } else { - process.env.OPENCLAW_HOME = previousOpenclawHome; - } - if (previousOpenclawStateDir === undefined) { - delete process.env.OPENCLAW_STATE_DIR; - } else { - process.env.OPENCLAW_STATE_DIR = previousOpenclawStateDir; - } - if (previousOpenclawConfigPath === undefined) { - delete process.env.OPENCLAW_CONFIG_PATH; - } else { - process.env.OPENCLAW_CONFIG_PATH = previousOpenclawConfigPath; - } - if (previousClawdbotStateDir === undefined) { - delete process.env.CLAWDBOT_STATE_DIR; - } else { - process.env.CLAWDBOT_STATE_DIR = 
previousClawdbotStateDir; - } - if (previousClawdbotConfigPath === undefined) { - delete process.env.CLAWDBOT_CONFIG_PATH; - } else { - process.env.CLAWDBOT_CONFIG_PATH = previousClawdbotConfigPath; - } - sandbox.cleanup(); - } - }); - - it("allocates distinct connector base URLs per local agent", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - seedLocalAgentCredentials(sandbox.homeDir, "beta"); - - try { - const alphaInvite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - const betaInvite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8", - proxyUrl: "https://alpha.example.com/hooks/agent", - peerAlias: "alpha", - }); - - const alphaResult = await setupOpenclawRelayFromInvite("alpha", { - inviteCode: alphaInvite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const betaOpenclawDir = join(sandbox.homeDir, "openclaw-beta"); - mkdirSync(betaOpenclawDir, { recursive: true }); - writeFileSync( - join(betaOpenclawDir, "openclaw.json"), - JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), - "utf8", - ); - - const betaResult = await setupOpenclawRelayFromInvite("beta", { - inviteCode: betaInvite.code, - homeDir: sandbox.homeDir, - openclawDir: betaOpenclawDir, - transformSource: sandbox.transformSourcePath, - }); - - expect(alphaResult.connectorBaseUrl).toBe("http://127.0.0.1:19400"); - expect(betaResult.connectorBaseUrl).toBe("http://127.0.0.1:19401"); - } finally { - sandbox.cleanup(); - } - }); - - it("keeps send-to-peer mapping idempotent across repeated setup", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - 
proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfig = JSON.parse( - readFileSync(join(sandbox.openclawDir, "openclaw.json"), "utf8"), - ) as { - hooks: { mappings?: Array> }; - }; - - const relayMappings = (openclawConfig.hooks.mappings ?? []).filter( - (mapping) => - mapping.id === "clawdentity-send-to-peer" || - (mapping.match as { path?: string })?.path === "send-to-peer", - ); - expect(relayMappings).toHaveLength(1); - } finally { - sandbox.cleanup(); - } - }); - - it("preserves existing OpenClaw hooks token and mirrors it to relay runtime config", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); - writeFileSync( - openclawConfigPath, - JSON.stringify( - { - hooks: { - enabled: true, - token: "existing-hook-token", - mappings: [], - }, - }, - null, - 2, - ), - "utf8", - ); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfig = JSON.parse( - readFileSync(openclawConfigPath, "utf8"), - ) as { - hooks: { token?: string }; - }; - expect(openclawConfig.hooks.token).toBe("existing-hook-token"); - - const relayRuntimeConfig = JSON.parse( - readFileSync( - 
join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), - "utf8", - ), - ) as { - openclawHookToken?: string; - }; - expect(relayRuntimeConfig.openclawHookToken).toBe("existing-hook-token"); - } finally { - sandbox.cleanup(); - } - }); - - it("supports self setup without peer routing details", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const peers = JSON.parse( - readFileSync( - join(resolveCliStateDir(sandbox.homeDir), "peers.json"), - "utf8", - ), - ) as { - peers: Record< - string, - { - did: string; - proxyUrl: string; - agentName?: string; - humanName?: string; - } - >; - }; - expect(peers.peers).toEqual({}); - } finally { - sandbox.cleanup(); - } - }); - - it("reports healthy doctor status when relay setup is complete", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("healthy"); - expect(result.checks.every((check) => check.status === "pass")).toBe( - true, - ); - } finally { - sandbox.cleanup(); - } - }); - - it("reports healthy doctor status when setup is complete 
without peers", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("healthy"); - expect( - result.checks.some( - (check) => - check.id === "state.peers" && - check.status === "pass" && - check.message === - "no peers are configured yet (optional until pairing)", - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("reports missing peer alias in doctor output", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - peerAlias: "gamma", - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.peers" && - check.status === "fail" && - check.message.includes("peer alias is missing: gamma"), - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("does not 
throw when CLI config resolution fails", async () => { - const sandbox = createSandbox(); - - try { - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - resolveConfigImpl: async () => { - throw new Error("invalid config"); - }, - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "config.registry" && - check.status === "fail" && - check.message === "unable to resolve CLI config", - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("fails doctor hook mapping check when mapping path is wrong", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); - const openclawConfig = JSON.parse( - readFileSync(openclawConfigPath, "utf8"), - ) as { - hooks: { mappings?: Array> }; - }; - const mappings = openclawConfig.hooks.mappings ?? 
[]; - const targetMapping = mappings.find( - (mapping) => mapping.id === "clawdentity-send-to-peer", - ); - if (targetMapping === undefined) { - throw new Error("expected clawdentity-send-to-peer mapping"); - } - targetMapping.match = { path: "not-send-to-peer" }; - writeFileSync(openclawConfigPath, JSON.stringify(openclawConfig), "utf8"); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.hookMapping" && check.status === "fail", - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("fails doctor hook session routing check when hook session constraints drift", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); - const openclawConfig = JSON.parse( - readFileSync(openclawConfigPath, "utf8"), - ) as { - hooks: { - allowRequestSessionKey?: boolean; - allowedSessionKeyPrefixes?: string[]; - }; - }; - - openclawConfig.hooks.allowRequestSessionKey = true; - openclawConfig.hooks.allowedSessionKeyPrefixes = ["hook:"]; - writeFileSync( - openclawConfigPath, - `${JSON.stringify(openclawConfig, null, 2)}\n`, - "utf8", - ); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - 
expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.hookSessionRouting" && - check.status === "fail" && - check.message.includes("hooks.allowRequestSessionKey is not false"), - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("fails doctor hook session routing check when default session uses canonical agent format", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); - const openclawConfig = JSON.parse( - readFileSync(openclawConfigPath, "utf8"), - ) as { - hooks: { - defaultSessionKey?: string; - allowedSessionKeyPrefixes?: string[]; - }; - }; - - openclawConfig.hooks.defaultSessionKey = "agent:main:main"; - openclawConfig.hooks.allowedSessionKeyPrefixes = [ - "hook:", - "agent:main:main", - ]; - writeFileSync( - openclawConfigPath, - `${JSON.stringify(openclawConfig, null, 2)}\n`, - "utf8", - ); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.hookSessionRouting" && - check.status === "fail" && - check.message.includes("canonical agent format"), - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("fails doctor when OpenClaw has pending gateway device approvals", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - 
homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const pendingPath = join(sandbox.openclawDir, "devices", "pending.json"); - mkdirSync(dirname(pendingPath), { recursive: true }); - writeFileSync( - pendingPath, - JSON.stringify( - { - "request-1": { - requestId: "request-1", - }, - }, - null, - 2, - ), - "utf8", - ); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.gatewayDevicePairing" && - check.status === "fail" && - check.message.includes("pending gateway device approvals: 1"), - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("fails doctor when gateway auth token mode is configured without token", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); - const openclawConfig = JSON.parse( - readFileSync(openclawConfigPath, "utf8"), - ) as { - gateway?: { - auth?: { - mode?: string; - token?: string; - }; - }; - }; - openclawConfig.gateway = { - ...(openclawConfig.gateway ?? {}), - auth: { - ...(openclawConfig.gateway?.auth ?? 
{}), - mode: "token", - }, - }; - if (openclawConfig.gateway?.auth) { - delete openclawConfig.gateway.auth.token; - } - writeFileSync( - openclawConfigPath, - `${JSON.stringify(openclawConfig, null, 2)}\n`, - "utf8", - ); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.gatewayAuth" && - check.status === "fail" && - check.message.includes("gateway.auth.token is missing"), - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("fails doctor hook health check when connector reports replay failures", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const unhealthyConnectorFetch: typeof fetch = async () => - new Response( - JSON.stringify({ - status: "ok", - websocket: { - connected: true, - }, - inbound: { - pending: { - pendingCount: 2, - pendingBytes: 512, - oldestPendingAt: "2026-01-01T00:00:00.000Z", - }, - deadLetter: { - deadLetterCount: 0, - deadLetterBytes: 0, - }, - replay: { - lastReplayError: - "Local OpenClaw hook rejected payload with status 500", - replayerActive: false, - }, - openclawHook: { - url: "http://127.0.0.1:18789/hooks/agent", - lastAttemptStatus: "failed", - lastAttemptAt: "2026-01-01T00:00:00.000Z", - }, - }, - }), - { - status: 200, - headers: { - "content-type": "application/json", - }, - }, - ); - - const result = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: unhealthyConnectorFetch, - 
resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("unhealthy"); - expect( - result.checks.some( - (check) => - check.id === "state.openclawHookHealth" && - check.status === "fail" && - check.message.includes( - "connector replay to local OpenClaw hook is failing", - ), - ), - ).toBe(true); - } finally { - sandbox.cleanup(); - } - }); - - it("applies --peer filter for doctor command", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const originalHome = process.env.HOME; - const originalExitCode = process.exitCode; - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - - const configPath = join( - resolveCliStateDir(sandbox.homeDir), - "config.json", - ); - mkdirSync(dirname(configPath), { recursive: true }); - writeFileSync( - configPath, - JSON.stringify( - { - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }, - null, - 2, - ), - "utf8", - ); - - const baseline = await runOpenclawDoctor({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - expect(baseline.status).toBe("healthy"); - - process.env.HOME = sandbox.homeDir; - process.exitCode = undefined; - - const command = createOpenclawCommand(); - await command.parseAsync( - ["doctor", "--peer", "gamma", "--openclaw-dir", 
sandbox.openclawDir], - { from: "user" }, - ); - - expect(process.exitCode).toBe(1); - } finally { - process.env.HOME = originalHome; - process.exitCode = originalExitCode; - sandbox.cleanup(); - } - }); - - it("returns relay test success for accepted probe", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - const result = await runOpenclawRelayTest({ - peer: "beta", - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: async () => new Response(null, { status: 204 }), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("success"); - expect(result.httpStatus).toBe(204); - expect(result.endpoint).toBe("http://127.0.0.1:18789/hooks/send-to-peer"); - } finally { - sandbox.cleanup(); - } - }); - - it("auto-selects peer for relay test when exactly one peer is configured", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - const result = await 
runOpenclawRelayTest({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: async () => new Response(null, { status: 204 }), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("success"); - expect(result.peerAlias).toBe("beta"); - } finally { - sandbox.cleanup(); - } - }); - - it("uses hook token from relay runtime config when relay test option/env is unset", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - const previousHookToken = process.env.OPENCLAW_HOOK_TOKEN; - delete process.env.OPENCLAW_HOOK_TOKEN; - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - let sentHookToken: string | undefined; - await runOpenclawRelayTest({ - peer: "beta", - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: async (_input, init) => { - const headers = new Headers(init?.headers); - sentHookToken = headers.get("x-openclaw-token") ?? undefined; - return new Response(null, { status: 204 }); - }, - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(typeof sentHookToken).toBe("string"); - expect(sentHookToken?.length ?? 
0).toBeGreaterThan(0); - } finally { - if (previousHookToken === undefined) { - delete process.env.OPENCLAW_HOOK_TOKEN; - } else { - process.env.OPENCLAW_HOOK_TOKEN = previousHookToken; - } - sandbox.cleanup(); - } - }); - - it("returns relay test failure when probe is rejected", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - const result = await runOpenclawRelayTest({ - peer: "beta", - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: async () => - new Response("connector offline", { status: 500 }), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("failure"); - expect(result.httpStatus).toBe(500); - expect(result.message).toBe( - "Relay probe failed inside local relay pipeline", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("returns relay websocket test success when connector websocket is connected", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - const invite = createOpenclawInviteCode({ - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - peerAlias: "beta", - }); - - await setupOpenclawRelayFromInvite("alpha", { - inviteCode: invite.code, - homeDir: sandbox.homeDir, - openclawDir: 
sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - const result = await runOpenclawRelayWebsocketTest({ - peer: "beta", - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("success"); - expect(result.message).toBe( - "Connector websocket is connected for paired relay", - ); - expect(result.connectorStatusUrl).toBe( - "http://127.0.0.1:19400/v1/status", - ); - } finally { - sandbox.cleanup(); - } - }); - - it("auto-selects peer for relay websocket test when exactly one peer is configured", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - const result = await runOpenclawRelayWebsocketTest({ - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: connectorReadyFetch(), - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("success"); - expect(result.peerAlias).toBe("beta"); - } finally { - sandbox.cleanup(); - } - }); - - it("returns relay websocket test failure when connector websocket is disconnected", async () => { - const sandbox = createSandbox(); - seedLocalAgentCredentials(sandbox.homeDir, "alpha"); - - try { - 
await setupOpenclawRelay("alpha", { - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - transformSource: sandbox.transformSourcePath, - }); - seedPeersConfig(sandbox.homeDir, { - beta: { - did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", - proxyUrl: "https://beta.example.com/hooks/agent", - }, - }); - - const disconnectedConnectorFetch: typeof fetch = async () => - new Response( - JSON.stringify({ - status: "ok", - websocket: { - connected: false, - }, - }), - { - status: 200, - headers: { - "content-type": "application/json", - }, - }, - ); - - const result = await runOpenclawRelayWebsocketTest({ - peer: "beta", - homeDir: sandbox.homeDir, - openclawDir: sandbox.openclawDir, - fetchImpl: disconnectedConnectorFetch, - resolveConfigImpl: async () => ({ - registryUrl: "https://api.example.com", - proxyUrl: "https://proxy.example.com", - apiKey: "test-api-key", - }), - }); - - expect(result.status).toBe("failure"); - expect(result.message).toBe("Connector websocket is not connected"); - expect(result.remediationHint).toBe( - "Run: clawdentity openclaw setup ", - ); - } finally { - sandbox.cleanup(); - } - }); -}); diff --git a/apps/cli/src/commands/openclaw.test/AGENTS.md b/apps/cli/src/commands/openclaw.test/AGENTS.md new file mode 100644 index 0000000..ba59b05 --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/AGENTS.md @@ -0,0 +1,24 @@ +# AGENTS.md (openclaw command tests) + +## Purpose +- Keep `openclaw` tests modular, deterministic, and behavior-preserving. +- Avoid oversized test files and duplicated sandbox/env setup code. + +## File Boundaries +- `helpers.ts`: shared sandbox builders, state seeders, env restore helper, and common config/fetch fixtures. +- `invite.test.ts`: invite-code encode/decode behavior. +- `setup-core.test.ts`: core setup patching, idempotency, checklist recovery, and hook-session normalization. +- `setup-runtime.test.ts`: setup behavior tied to env/path resolution, connector allocation, and runtime config persistence. 
+- `doctor.test.ts`: diagnostic checks and CLI `doctor` command filtering behavior. +- `relay.test.ts`: relay probe and websocket diagnostic behavior. + +## Splitting Practices +- Keep each test file under 800 LOC. +- Move reusable sandbox/env helpers into `helpers.ts`; avoid repeated inline fixtures. +- Keep tests hermetic: no real network/process dependencies, no host state assumptions. +- Preserve existing assertions for stdout/stderr semantics, status codes, and `process.exitCode` behavior. + +## Validation +- Run before handoff: + - `pnpm -C apps/cli typecheck` + - `pnpm -C apps/cli test -- openclaw` diff --git a/apps/cli/src/commands/openclaw.test/doctor.test.ts b/apps/cli/src/commands/openclaw.test/doctor.test.ts new file mode 100644 index 0000000..f5be245 --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/doctor.test.ts @@ -0,0 +1,535 @@ +import { mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { dirname, join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { + createOpenclawCommand, + createOpenclawInviteCode, + runOpenclawDoctor, + setupOpenclawRelay, + setupOpenclawRelayFromInvite, +} from "../openclaw.js"; +import { + connectorReadyFetch, + createSandbox, + resolveCliStateDir, + resolveConfigFixture, + seedLocalAgentCredentials, + seedPendingGatewayApprovals, +} from "./helpers.js"; + +describe("openclaw doctor helpers", () => { + it("reports healthy doctor status when relay setup is complete", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await 
runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("healthy"); + expect(result.checks.every((check) => check.status === "pass")).toBe( + true, + ); + } finally { + sandbox.cleanup(); + } + }); + + it("reports healthy doctor status when setup is complete without peers", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("healthy"); + expect( + result.checks.some( + (check) => + check.id === "state.peers" && + check.status === "pass" && + check.message === + "no peers are configured yet (optional until pairing)", + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("reports missing peer alias in doctor output", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + peerAlias: "gamma", + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === 
"state.peers" && + check.status === "fail" && + check.message.includes("peer alias is missing: gamma"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("does not throw when CLI config resolution fails", async () => { + const sandbox = createSandbox(); + + try { + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + resolveConfigImpl: async () => { + throw new Error("invalid config"); + }, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "config.registry" && + check.status === "fail" && + check.message === "unable to resolve CLI config", + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook mapping check when mapping path is wrong", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { mappings?: Array> }; + }; + const mappings = openclawConfig.hooks.mappings ?? 
[]; + const targetMapping = mappings.find( + (mapping) => mapping.id === "clawdentity-send-to-peer", + ); + if (targetMapping === undefined) { + throw new Error("expected clawdentity-send-to-peer mapping"); + } + targetMapping.match = { path: "not-send-to-peer" }; + writeFileSync(openclawConfigPath, JSON.stringify(openclawConfig), "utf8"); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.hookMapping" && check.status === "fail", + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook session routing check when hook session constraints drift", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { + allowRequestSessionKey?: boolean; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + openclawConfig.hooks.allowRequestSessionKey = true; + openclawConfig.hooks.allowedSessionKeyPrefixes = ["hook:"]; + writeFileSync( + openclawConfigPath, + `${JSON.stringify(openclawConfig, null, 2)}\n`, + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.hookSessionRouting" && + check.status === "fail" && + check.message.includes("hooks.allowRequestSessionKey is not false"), + ), + 
).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook session routing check when default session uses canonical agent format", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + openclawConfig.hooks.defaultSessionKey = "agent:main:main"; + openclawConfig.hooks.allowedSessionKeyPrefixes = [ + "hook:", + "agent:main:main", + ]; + writeFileSync( + openclawConfigPath, + `${JSON.stringify(openclawConfig, null, 2)}\n`, + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.hookSessionRouting" && + check.status === "fail" && + check.message.includes("canonical agent format"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor when OpenClaw has pending gateway device approvals", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + seedPendingGatewayApprovals(sandbox.openclawDir); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: 
resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.gatewayDevicePairing" && + check.status === "fail" && + check.message.includes("pending gateway device approvals: 1"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor when gateway auth token mode is configured without token", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + gateway?: { + auth?: { + mode?: string; + token?: string; + }; + }; + }; + openclawConfig.gateway = { + ...(openclawConfig.gateway ?? {}), + auth: { + ...(openclawConfig.gateway?.auth ?? 
{}), + mode: "token", + }, + }; + if (openclawConfig.gateway?.auth) { + delete openclawConfig.gateway.auth.token; + } + writeFileSync( + openclawConfigPath, + `${JSON.stringify(openclawConfig, null, 2)}\n`, + "utf8", + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + result.checks.some( + (check) => + check.id === "state.gatewayAuth" && + check.status === "fail" && + check.message.includes("gateway.auth.token is missing"), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("fails doctor hook health check when connector reports replay failures", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const unhealthyConnectorFetch: typeof fetch = async () => + new Response( + JSON.stringify({ + status: "ok", + websocket: { + connected: true, + }, + inbound: { + pending: { + pendingCount: 2, + pendingBytes: 512, + oldestPendingAt: "2026-01-01T00:00:00.000Z", + }, + deadLetter: { + deadLetterCount: 0, + deadLetterBytes: 0, + }, + replay: { + lastReplayError: + "Local OpenClaw hook rejected payload with status 500", + replayerActive: false, + }, + openclawHook: { + url: "http://127.0.0.1:18789/hooks/agent", + lastAttemptStatus: "failed", + lastAttemptAt: "2026-01-01T00:00:00.000Z", + }, + }, + }), + { + status: 200, + headers: { + "content-type": "application/json", + }, + }, + ); + + const result = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: unhealthyConnectorFetch, + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("unhealthy"); + expect( + 
result.checks.some( + (check) => + check.id === "state.openclawHookHealth" && + check.status === "fail" && + check.message.includes( + "connector replay to local OpenClaw hook is failing", + ), + ), + ).toBe(true); + } finally { + sandbox.cleanup(); + } + }); + + it("applies --peer filter for doctor command", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const originalHome = process.env.HOME; + const originalExitCode = process.exitCode; + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const configPath = join( + resolveCliStateDir(sandbox.homeDir), + "config.json", + ); + mkdirSync(dirname(configPath), { recursive: true }); + writeFileSync( + configPath, + JSON.stringify( + { + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", + }, + null, + 2, + ), + "utf8", + ); + + const baseline = await runOpenclawDoctor({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + expect(baseline.status).toBe("healthy"); + + process.env.HOME = sandbox.homeDir; + process.exitCode = undefined; + + const command = createOpenclawCommand(); + await command.parseAsync( + ["doctor", "--peer", "gamma", "--openclaw-dir", sandbox.openclawDir], + { from: "user" }, + ); + + expect(process.exitCode).toBe(1); + } finally { + process.env.HOME = originalHome; + process.exitCode = originalExitCode; + sandbox.cleanup(); + } + }); +}); diff --git a/apps/cli/src/commands/openclaw.test/helpers.ts b/apps/cli/src/commands/openclaw.test/helpers.ts new file mode 100644 
index 0000000..f777671 --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/helpers.ts @@ -0,0 +1,150 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { dirname, join } from "node:path"; +import { getConfigDir } from "../../config/manager.js"; + +export type OpenclawSandbox = { + cleanup: () => void; + homeDir: string; + openclawDir: string; + transformSourcePath: string; +}; + +export function createSandbox(): OpenclawSandbox { + const root = mkdtempSync(join(tmpdir(), "clawdentity-cli-openclaw-")); + const homeDir = join(root, "home"); + const openclawDir = join(root, "openclaw"); + const transformSourcePath = join(root, "relay-to-peer.mjs"); + + mkdirSync(homeDir, { recursive: true }); + mkdirSync(openclawDir, { recursive: true }); + + writeFileSync( + join(openclawDir, "openclaw.json"), + JSON.stringify( + { + hooks: { + enabled: false, + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + writeFileSync( + transformSourcePath, + "export default async function relay(ctx){ return ctx?.payload ?? 
null; }\n", + "utf8", + ); + + return { + cleanup: () => { + rmSync(root, { recursive: true, force: true }); + }, + homeDir, + openclawDir, + transformSourcePath, + }; +} + +export function resolveCliStateDir(homeDir: string): string { + return getConfigDir({ homeDir }); +} + +export function seedLocalAgentCredentials( + homeDir: string, + agentName: string, +): void { + const agentDir = join(resolveCliStateDir(homeDir), "agents", agentName); + mkdirSync(agentDir, { recursive: true }); + writeFileSync(join(agentDir, "secret.key"), "secret-key-value", "utf8"); + writeFileSync(join(agentDir, "ait.jwt"), "mock.ait.jwt", "utf8"); +} + +export function seedPeersConfig( + homeDir: string, + peers: Record< + string, + { did: string; proxyUrl: string; agentName?: string; humanName?: string } + >, +): void { + const peersPath = join(resolveCliStateDir(homeDir), "peers.json"); + mkdirSync(dirname(peersPath), { recursive: true }); + writeFileSync(peersPath, `${JSON.stringify({ peers }, null, 2)}\n`, "utf8"); +} + +export function seedPendingGatewayApprovals( + openclawDir: string, + requestIds: string[] = ["request-1"], +): string { + const pendingPath = join(openclawDir, "devices", "pending.json"); + mkdirSync(dirname(pendingPath), { recursive: true }); + + const pending = Object.fromEntries( + requestIds.map((requestId) => [ + requestId, + { + requestId, + }, + ]), + ); + + writeFileSync(pendingPath, JSON.stringify(pending, null, 2), "utf8"); + return pendingPath; +} + +export function connectorReadyFetch(): typeof fetch { + return async () => + new Response( + JSON.stringify({ + status: "ok", + websocket: { + connected: true, + }, + inbound: { + pending: { + pendingCount: 0, + pendingBytes: 0, + }, + deadLetter: { + deadLetterCount: 0, + deadLetterBytes: 0, + }, + replay: { + replayerActive: false, + }, + openclawHook: { + url: "http://127.0.0.1:18789/hooks/agent", + lastAttemptStatus: "ok", + }, + }, + }), + { + status: 200, + headers: { + "content-type": 
"application/json", + }, + }, + ); +} + +export const resolveConfigFixture = async () => ({ + registryUrl: "https://api.example.com", + proxyUrl: "https://proxy.example.com", + apiKey: "test-api-key", +}); + +export function restoreEnvVar( + key: keyof NodeJS.ProcessEnv, + previous: string | undefined, +): void { + if (previous === undefined) { + delete process.env[key]; + return; + } + + process.env[key] = previous; +} diff --git a/apps/cli/src/commands/openclaw.test/invite.test.ts b/apps/cli/src/commands/openclaw.test/invite.test.ts new file mode 100644 index 0000000..04a00cf --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/invite.test.ts @@ -0,0 +1,28 @@ +import { describe, expect, it } from "vitest"; +import { + createOpenclawInviteCode, + decodeOpenclawInviteCode, +} from "../openclaw.js"; + +describe("openclaw invite helpers", () => { + it("creates and decodes invite codes", () => { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + agentName: "beta", + humanName: "Ira", + }); + + expect(invite.code.startsWith("clawd1_")).toBe(true); + + const decoded = decodeOpenclawInviteCode(invite.code); + expect(decoded.v).toBe(1); + expect(decoded.did).toBe("did:claw:agent:01HF7YAT00W6W7CM7N3W5FDXT4"); + expect(decoded.proxyUrl).toBe("https://beta.example.com/hooks/agent"); + expect(decoded.alias).toBe("beta"); + expect(decoded.agentName).toBe("beta"); + expect(decoded.humanName).toBe("Ira"); + expect(decoded.issuedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); +}); diff --git a/apps/cli/src/commands/openclaw.test/relay.test.ts b/apps/cli/src/commands/openclaw.test/relay.test.ts new file mode 100644 index 0000000..cef8db2 --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/relay.test.ts @@ -0,0 +1,305 @@ +import { describe, expect, it } from "vitest"; +import { + createOpenclawInviteCode, + runOpenclawRelayTest, + runOpenclawRelayWebsocketTest, + 
setupOpenclawRelay, + setupOpenclawRelayFromInvite, +} from "../openclaw.js"; +import { + connectorReadyFetch, + createSandbox, + resolveConfigFixture, + restoreEnvVar, + seedLocalAgentCredentials, + seedPeersConfig, +} from "./helpers.js"; + +describe("openclaw relay diagnostics", () => { + it("returns relay test success for accepted probe", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async () => new Response(null, { status: 204 }), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("success"); + expect(result.httpStatus).toBe(204); + expect(result.endpoint).toBe("http://127.0.0.1:18789/hooks/send-to-peer"); + } finally { + sandbox.cleanup(); + } + }); + + it("auto-selects peer for relay test when exactly one peer is configured", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayTest({ + 
homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async () => new Response(null, { status: 204 }), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("success"); + expect(result.peerAlias).toBe("beta"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses hook token from relay runtime config when relay test option/env is unset", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousHookToken = process.env.OPENCLAW_HOOK_TOKEN; + delete process.env.OPENCLAW_HOOK_TOKEN; + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + let sentHookToken: string | undefined; + await runOpenclawRelayTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async (_input, init) => { + const headers = new Headers(init?.headers); + sentHookToken = headers.get("x-openclaw-token") ?? undefined; + return new Response(null, { status: 204 }); + }, + resolveConfigImpl: resolveConfigFixture, + }); + + expect(typeof sentHookToken).toBe("string"); + expect(sentHookToken?.length ?? 
0).toBeGreaterThan(0); + } finally { + restoreEnvVar("OPENCLAW_HOOK_TOKEN", previousHookToken); + sandbox.cleanup(); + } + }); + + it("returns relay test failure when probe is rejected", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: async () => + new Response("connector offline", { status: 500 }), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("failure"); + expect(result.httpStatus).toBe(500); + expect(result.message).toBe( + "Relay probe failed inside local relay pipeline", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("returns relay websocket test success when connector websocket is connected", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: 
"https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayWebsocketTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("success"); + expect(result.message).toBe( + "Connector websocket is connected for paired relay", + ); + expect(result.connectorStatusUrl).toBe( + "http://127.0.0.1:19400/v1/status", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("auto-selects peer for relay websocket test when exactly one peer is configured", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const result = await runOpenclawRelayWebsocketTest({ + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: connectorReadyFetch(), + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("success"); + expect(result.peerAlias).toBe("beta"); + } finally { + sandbox.cleanup(); + } + }); + + it("returns relay websocket test failure when connector websocket is disconnected", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + seedPeersConfig(sandbox.homeDir, { + beta: { + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + }, + }); + + const disconnectedConnectorFetch: typeof fetch = async () => + new Response( + 
JSON.stringify({ + status: "ok", + websocket: { + connected: false, + }, + }), + { + status: 200, + headers: { + "content-type": "application/json", + }, + }, + ); + + const result = await runOpenclawRelayWebsocketTest({ + peer: "beta", + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + fetchImpl: disconnectedConnectorFetch, + resolveConfigImpl: resolveConfigFixture, + }); + + expect(result.status).toBe("failure"); + expect(result.message).toBe("Connector websocket is not connected"); + expect(result.remediationHint).toBe( + "Run: clawdentity openclaw setup ", + ); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/apps/cli/src/commands/openclaw.test/setup-core.test.ts b/apps/cli/src/commands/openclaw.test/setup-core.test.ts new file mode 100644 index 0000000..f03b19e --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/setup-core.test.ts @@ -0,0 +1,465 @@ +import { readFileSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { setupOpenclawRelay, setupOpenclawSelfReady } from "../openclaw.js"; +import { + createSandbox, + resolveCliStateDir, + restoreEnvVar, + seedLocalAgentCredentials, + seedPendingGatewayApprovals, +} from "./helpers.js"; + +describe("openclaw setup helpers (core)", () => { + it("applies relay setup and patches OpenClaw config", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const copiedTransform = readFileSync(result.transformTargetPath, "utf8"); + expect(copiedTransform).toContain("relay(ctx)"); + expect(result.openclawConfigChanged).toBe(true); + + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + gateway?: { + auth?: { + mode?: string; + token?: string; 
+ }; + }; + hooks: { + enabled?: boolean; + token?: string; + defaultSessionKey?: string; + allowRequestSessionKey?: boolean; + allowedSessionKeyPrefixes?: string[]; + mappings?: Array>; + }; + }; + + expect(openclawConfig.hooks.enabled).toBe(true); + expect(typeof openclawConfig.hooks.token).toBe("string"); + expect(openclawConfig.hooks.token?.length ?? 0).toBeGreaterThan(0); + expect(openclawConfig.hooks.defaultSessionKey).toBe("main"); + expect(openclawConfig.hooks.allowRequestSessionKey).toBe(false); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("main"); + expect(openclawConfig.gateway?.auth?.mode).toBe("token"); + expect(typeof openclawConfig.gateway?.auth?.token).toBe("string"); + expect(openclawConfig.gateway?.auth?.token?.length ?? 0).toBeGreaterThan( + 0, + ); + expect( + openclawConfig.hooks.mappings?.some( + (mapping) => + mapping.id === "clawdentity-send-to-peer" && + (mapping.match as { path?: string })?.path === "send-to-peer" && + mapping.action === "agent" && + mapping.wakeMode === "now" && + (mapping.transform as { module?: string })?.module === + "relay-to-peer.mjs", + ), + ).toBe(true); + + const peers = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "peers.json"), + "utf8", + ), + ) as { + peers: Record< + string, + { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; + } + >; + }; + expect(peers.peers).toEqual({}); + + const selectedAgent = readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "openclaw-agent-name"), + "utf8", + ).trim(); + expect(selectedAgent).toBe("alpha"); + + expect(result.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + expect(result.connectorBaseUrl).toBe("http://127.0.0.1:19400"); + expect(readFileSync(result.relayTransformRuntimePath, "utf8")).toContain( + '"connectorBaseUrl": "http://host.docker.internal:19400"', + ); + 
expect(readFileSync(result.relayTransformPeersPath, "utf8")).toContain( + '"peers": {}', + ); + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawBaseUrl: string; + openclawHookToken?: string; + relayTransformPeersPath?: string; + updatedAt: string; + }; + expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:18789"); + expect(relayRuntimeConfig.openclawHookToken).toBe( + openclawConfig.hooks.token, + ); + expect(relayRuntimeConfig.relayTransformPeersPath).toBe( + result.relayTransformPeersPath, + ); + expect(relayRuntimeConfig.updatedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + + const connectorAssignments = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "openclaw-connectors.json"), + "utf8", + ), + ) as { + agents: Record; + }; + expect(connectorAssignments.agents.alpha.connectorBaseUrl).toBe( + "http://127.0.0.1:19400", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("does not rewrite OpenClaw config when setup state is already current", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousGatewayToken = process.env.OPENCLAW_GATEWAY_TOKEN; + delete process.env.OPENCLAW_GATEWAY_TOKEN; + const preconfiguredOpenclawJson = + '{"hooks":{"enabled":true,"token":"hook-token","defaultSessionKey":"main","allowRequestSessionKey":false,"allowedSessionKeyPrefixes":["hook:","main"],"mappings":[{"id":"clawdentity-send-to-peer","match":{"path":"send-to-peer"},"action":"agent","wakeMode":"now","transform":{"module":"relay-to-peer.mjs"}}]},"gateway":{"auth":{"mode":"token","token":"gateway-token"}}}\n'; + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + preconfiguredOpenclawJson, + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: 
sandbox.transformSourcePath, + }); + + expect(result.openclawConfigChanged).toBe(false); + expect(readFileSync(result.openclawConfigPath, "utf8")).toBe( + preconfiguredOpenclawJson, + ); + } finally { + restoreEnvVar("OPENCLAW_GATEWAY_TOKEN", previousGatewayToken); + sandbox.cleanup(); + } + }); + + it("supports setup-only mode without runtime startup", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const result = await setupOpenclawSelfReady("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + noRuntimeStart: true, + }); + + expect(result.runtimeMode).toBe("none"); + expect(result.runtimeStatus).toBe("skipped"); + expect(result.websocketStatus).toBe("skipped"); + expect(readFileSync(result.transformTargetPath, "utf8")).toContain( + "relay(ctx)", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("syncs gateway auth token from OPENCLAW_GATEWAY_TOKEN during setup", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousGatewayToken = process.env.OPENCLAW_GATEWAY_TOKEN; + process.env.OPENCLAW_GATEWAY_TOKEN = "gateway-token-from-env"; + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + gateway?: { + auth?: { + mode?: string; + token?: string; + }; + }; + }; + + expect(openclawConfig.gateway?.auth?.mode).toBe("token"); + expect(openclawConfig.gateway?.auth?.token).toBe( + "gateway-token-from-env", + ); + } finally { + restoreEnvVar("OPENCLAW_GATEWAY_TOKEN", previousGatewayToken); + sandbox.cleanup(); + } + }); + + it("auto-recovers setup checklist when OpenClaw has pending gateway device approvals", async () => { + const sandbox = 
createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const pendingPath = seedPendingGatewayApprovals(sandbox.openclawDir); + + try { + const approvedRequestIds: string[] = []; + const result = await setupOpenclawSelfReady("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + noRuntimeStart: true, + gatewayDeviceApprovalRunner: async ({ requestId }) => { + approvedRequestIds.push(requestId); + writeFileSync(pendingPath, JSON.stringify({}, null, 2), "utf8"); + return { + ok: true, + }; + }, + }); + + expect(result.runtimeMode).toBe("none"); + expect(approvedRequestIds).toEqual(["request-1"]); + const pendingAfterRecovery = JSON.parse( + readFileSync(pendingPath, "utf8"), + ) as Record; + expect(Object.keys(pendingAfterRecovery)).toHaveLength(0); + } finally { + sandbox.cleanup(); + } + }); + + it("fails setup checklist when gateway approval runner is unavailable", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + seedPendingGatewayApprovals(sandbox.openclawDir); + + try { + await expect( + setupOpenclawSelfReady("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + noRuntimeStart: true, + gatewayDeviceApprovalRunner: async () => ({ + ok: false, + unavailable: true, + errorMessage: "spawn openclaw ENOENT", + }), + }), + ).rejects.toMatchObject({ + code: "CLI_OPENCLAW_SETUP_CHECKLIST_FAILED", + }); + } finally { + sandbox.cleanup(); + } + }); + + it("preserves explicit hook request session key (including subagent keys)", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + hooks: { + enabled: true, + token: "existing-token", + defaultSessionKey: "subagent:planner", + allowedSessionKeyPrefixes: ["hook:"], + }, + }, + 
null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + token?: string; + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.token).toBe("existing-token"); + expect(openclawConfig.hooks.defaultSessionKey).toBe("subagent:planner"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( + "subagent:planner", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("normalizes legacy canonical hook default session key to request format", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + hooks: { + enabled: true, + token: "existing-token", + defaultSessionKey: "agent:ops:subagent:planner", + allowedSessionKeyPrefixes: ["hook:"], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + token?: string; + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.token).toBe("existing-token"); + expect(openclawConfig.hooks.defaultSessionKey).toBe("subagent:planner"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("hook:"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( + "subagent:planner", + ); + } finally { + sandbox.cleanup(); + } + }); + + it("derives hook 
default session key from OpenClaw session scope and main key", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + session: { mainKey: "work" }, + agents: { + list: [{ id: "main" }, { id: "ops-team", default: true }], + }, + hooks: { + enabled: false, + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.defaultSessionKey).toBe("work"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain("work"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses global hook default session when OpenClaw session scope is global", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + writeFileSync( + join(sandbox.openclawDir, "openclaw.json"), + JSON.stringify( + { + session: { scope: "global" }, + hooks: { + enabled: false, + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const result = await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + const openclawConfig = JSON.parse( + readFileSync(result.openclawConfigPath, "utf8"), + ) as { + hooks: { + defaultSessionKey?: string; + allowedSessionKeyPrefixes?: string[]; + }; + }; + + expect(openclawConfig.hooks.defaultSessionKey).toBe("global"); + expect(openclawConfig.hooks.allowedSessionKeyPrefixes).toContain( + "global", + ); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git 
a/apps/cli/src/commands/openclaw.test/setup-runtime.test.ts b/apps/cli/src/commands/openclaw.test/setup-runtime.test.ts new file mode 100644 index 0000000..1745c05 --- /dev/null +++ b/apps/cli/src/commands/openclaw.test/setup-runtime.test.ts @@ -0,0 +1,404 @@ +import { mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; +import { describe, expect, it } from "vitest"; +import { + createOpenclawInviteCode, + setupOpenclawRelay, + setupOpenclawRelayFromInvite, +} from "../openclaw.js"; +import { + createSandbox, + resolveCliStateDir, + restoreEnvVar, + seedLocalAgentCredentials, +} from "./helpers.js"; + +describe("openclaw setup helpers (runtime + env)", () => { + it("stores explicit OpenClaw base URL in relay runtime config", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + openclawBaseUrl: "http://127.0.0.1:19001", + }); + + expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19001"); + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawBaseUrl: string; + }; + expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:19001"); + } finally { + sandbox.cleanup(); + } + }); + + it("uses OPENCLAW_BASE_URL env when setup option is omitted", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousBaseUrl = process.env.OPENCLAW_BASE_URL; + process.env.OPENCLAW_BASE_URL = "http://127.0.0.1:19555"; + + try { + const 
invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawBaseUrl).toBe("http://127.0.0.1:19555"); + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawBaseUrl: string; + }; + expect(relayRuntimeConfig.openclawBaseUrl).toBe("http://127.0.0.1:19555"); + } finally { + restoreEnvVar("OPENCLAW_BASE_URL", previousBaseUrl); + sandbox.cleanup(); + } + }); + + it("resolves OpenClaw state/config paths from env variables", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousStateDir = process.env.OPENCLAW_STATE_DIR; + const previousConfigPath = process.env.OPENCLAW_CONFIG_PATH; + + try { + const customStateDir = join(sandbox.homeDir, ".openclaw-custom"); + const customConfigPath = join(customStateDir, "openclaw.custom.json"); + mkdirSync(customStateDir, { recursive: true }); + writeFileSync( + customConfigPath, + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + process.env.OPENCLAW_STATE_DIR = customStateDir; + process.env.OPENCLAW_CONFIG_PATH = customConfigPath; + + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigPath).toBe(customConfigPath); + expect(result.transformTargetPath).toBe( + 
join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), + ); + } finally { + restoreEnvVar("OPENCLAW_STATE_DIR", previousStateDir); + restoreEnvVar("OPENCLAW_CONFIG_PATH", previousConfigPath); + sandbox.cleanup(); + } + }); + + it("resolves OpenClaw state/config paths from legacy CLAWDBOT_* env aliases", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousStateDir = process.env.CLAWDBOT_STATE_DIR; + const previousConfigPath = process.env.CLAWDBOT_CONFIG_PATH; + const previousOpenclawStateDir = process.env.OPENCLAW_STATE_DIR; + const previousOpenclawConfigPath = process.env.OPENCLAW_CONFIG_PATH; + + try { + const customStateDir = join(sandbox.homeDir, ".clawdbot-custom"); + const customConfigPath = join(customStateDir, "clawdbot.custom.json"); + mkdirSync(customStateDir, { recursive: true }); + writeFileSync( + customConfigPath, + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + delete process.env.OPENCLAW_STATE_DIR; + delete process.env.OPENCLAW_CONFIG_PATH; + process.env.CLAWDBOT_STATE_DIR = customStateDir; + process.env.CLAWDBOT_CONFIG_PATH = customConfigPath; + + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigPath).toBe(customConfigPath); + expect(result.transformTargetPath).toBe( + join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), + ); + } finally { + restoreEnvVar("CLAWDBOT_STATE_DIR", previousStateDir); + restoreEnvVar("CLAWDBOT_CONFIG_PATH", previousConfigPath); + restoreEnvVar("OPENCLAW_STATE_DIR", previousOpenclawStateDir); + restoreEnvVar("OPENCLAW_CONFIG_PATH", previousOpenclawConfigPath); + 
sandbox.cleanup(); + } + }); + + it("resolves default OpenClaw state from OPENCLAW_HOME", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const previousOpenclawHome = process.env.OPENCLAW_HOME; + const previousOpenclawStateDir = process.env.OPENCLAW_STATE_DIR; + const previousOpenclawConfigPath = process.env.OPENCLAW_CONFIG_PATH; + const previousClawdbotStateDir = process.env.CLAWDBOT_STATE_DIR; + const previousClawdbotConfigPath = process.env.CLAWDBOT_CONFIG_PATH; + + try { + const customHome = join(sandbox.homeDir, "openclaw-home"); + const customStateDir = join(customHome, ".openclaw"); + const customConfigPath = join(customStateDir, "openclaw.json"); + mkdirSync(customStateDir, { recursive: true }); + writeFileSync( + customConfigPath, + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + process.env.OPENCLAW_HOME = customHome; + delete process.env.OPENCLAW_STATE_DIR; + delete process.env.OPENCLAW_CONFIG_PATH; + delete process.env.CLAWDBOT_STATE_DIR; + delete process.env.CLAWDBOT_CONFIG_PATH; + + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + const result = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(result.openclawConfigPath).toBe(customConfigPath); + expect(result.transformTargetPath).toBe( + join(customStateDir, "hooks", "transforms", "relay-to-peer.mjs"), + ); + } finally { + restoreEnvVar("OPENCLAW_HOME", previousOpenclawHome); + restoreEnvVar("OPENCLAW_STATE_DIR", previousOpenclawStateDir); + restoreEnvVar("OPENCLAW_CONFIG_PATH", previousOpenclawConfigPath); + restoreEnvVar("CLAWDBOT_STATE_DIR", previousClawdbotStateDir); + restoreEnvVar("CLAWDBOT_CONFIG_PATH", previousClawdbotConfigPath); + sandbox.cleanup(); + } 
+ }); + + it("allocates distinct connector base URLs per local agent", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + seedLocalAgentCredentials(sandbox.homeDir, "beta"); + + try { + const alphaInvite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + const betaInvite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8", + proxyUrl: "https://alpha.example.com/hooks/agent", + peerAlias: "alpha", + }); + + const alphaResult = await setupOpenclawRelayFromInvite("alpha", { + inviteCode: alphaInvite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const betaOpenclawDir = join(sandbox.homeDir, "openclaw-beta"); + mkdirSync(betaOpenclawDir, { recursive: true }); + writeFileSync( + join(betaOpenclawDir, "openclaw.json"), + JSON.stringify({ hooks: { enabled: false, mappings: [] } }, null, 2), + "utf8", + ); + + const betaResult = await setupOpenclawRelayFromInvite("beta", { + inviteCode: betaInvite.code, + homeDir: sandbox.homeDir, + openclawDir: betaOpenclawDir, + transformSource: sandbox.transformSourcePath, + }); + + expect(alphaResult.connectorBaseUrl).toBe("http://127.0.0.1:19400"); + expect(betaResult.connectorBaseUrl).toBe("http://127.0.0.1:19401"); + } finally { + sandbox.cleanup(); + } + }); + + it("keeps send-to-peer mapping idempotent across repeated setup", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + 
transformSource: sandbox.transformSourcePath, + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfig = JSON.parse( + readFileSync(join(sandbox.openclawDir, "openclaw.json"), "utf8"), + ) as { + hooks: { mappings?: Array> }; + }; + + const relayMappings = (openclawConfig.hooks.mappings ?? []).filter( + (mapping) => + mapping.id === "clawdentity-send-to-peer" || + (mapping.match as { path?: string })?.path === "send-to-peer", + ); + expect(relayMappings).toHaveLength(1); + } finally { + sandbox.cleanup(); + } + }); + + it("preserves existing OpenClaw hooks token and mirrors it to relay runtime config", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + const openclawConfigPath = join(sandbox.openclawDir, "openclaw.json"); + writeFileSync( + openclawConfigPath, + JSON.stringify( + { + hooks: { + enabled: true, + token: "existing-hook-token", + mappings: [], + }, + }, + null, + 2, + ), + "utf8", + ); + + try { + const invite = createOpenclawInviteCode({ + did: "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7", + proxyUrl: "https://beta.example.com/hooks/agent", + peerAlias: "beta", + }); + + await setupOpenclawRelayFromInvite("alpha", { + inviteCode: invite.code, + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const openclawConfig = JSON.parse( + readFileSync(openclawConfigPath, "utf8"), + ) as { + hooks: { token?: string }; + }; + expect(openclawConfig.hooks.token).toBe("existing-hook-token"); + + const relayRuntimeConfig = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "openclaw-relay.json"), + "utf8", + ), + ) as { + openclawHookToken?: string; + }; + expect(relayRuntimeConfig.openclawHookToken).toBe("existing-hook-token"); + } finally { + 
sandbox.cleanup(); + } + }); + + it("supports self setup without peer routing details", async () => { + const sandbox = createSandbox(); + seedLocalAgentCredentials(sandbox.homeDir, "alpha"); + + try { + await setupOpenclawRelay("alpha", { + homeDir: sandbox.homeDir, + openclawDir: sandbox.openclawDir, + transformSource: sandbox.transformSourcePath, + }); + + const peers = JSON.parse( + readFileSync( + join(resolveCliStateDir(sandbox.homeDir), "peers.json"), + "utf8", + ), + ) as { + peers: Record< + string, + { + did: string; + proxyUrl: string; + agentName?: string; + humanName?: string; + } + >; + }; + expect(peers.peers).toEqual({}); + } finally { + sandbox.cleanup(); + } + }); +}); diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts deleted file mode 100644 index b7d4ed8..0000000 --- a/apps/registry/src/server.test.ts +++ /dev/null @@ -1,6973 +0,0 @@ -import { - ADMIN_BOOTSTRAP_PATH, - ADMIN_INTERNAL_SERVICES_PATH, - AGENT_AUTH_REFRESH_PATH, - AGENT_AUTH_VALIDATE_PATH, - AGENT_REGISTRATION_CHALLENGE_PATH, - canonicalizeAgentRegistrationProof, - encodeBase64url, - generateUlid, - INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, - INVITES_PATH, - INVITES_REDEEM_PATH, - ME_API_KEYS_PATH, - makeAgentDid, - makeHumanDid, - REGISTRY_METADATA_PATH, -} from "@clawdentity/protocol"; -import { - encodeEd25519SignatureBase64url, - generateEd25519Keypair, - REQUEST_ID_HEADER, - signAIT, - signEd25519, - signHttpRequest, - verifyAIT, - verifyCRL, -} from "@clawdentity/sdk"; -import { buildTestAitClaims } from "@clawdentity/sdk/testing"; -import { describe, expect, it } from "vitest"; -import { DEFAULT_AGENT_LIST_LIMIT } from "./agent-list.js"; -import { - DEFAULT_AGENT_FRAMEWORK, - DEFAULT_AGENT_TTL_DAYS, -} from "./agent-registration.js"; -import { - deriveAccessTokenLookupPrefix, - deriveRefreshTokenLookupPrefix, - hashAgentToken, -} from "./auth/agent-auth-token.js"; -import { - deriveApiKeyLookupPrefix, - hashApiKeyToken, -} from 
"./auth/api-key-auth.js"; -import { RESOLVE_RATE_LIMIT_MAX_REQUESTS } from "./rate-limit.js"; -import app, { createRegistryApp } from "./server.js"; - -function makeAitClaims(publicKey: Uint8Array) { - return buildTestAitClaims({ - publicKeyX: encodeBase64url(publicKey), - issuer: "https://registry.clawdentity.dev", - nowSeconds: Math.floor(Date.now() / 1000), - ttlSeconds: 3600, - nbfSkewSeconds: 5, - seedMs: 1_700_100_000_000, - name: "agent-registry-01", - framework: "openclaw", - description: "registry key publishing verification path", - }); -} - -type FakeD1Row = { - apiKeyId: string; - keyPrefix: string; - keyHash: string; - apiKeyStatus: "active" | "revoked"; - apiKeyName: string; - humanId: string; - humanDid: string; - humanDisplayName: string; - humanRole: "admin" | "user"; - humanStatus: "active" | "suspended"; -}; - -type FakeHumanRow = { - id: string; - did: string; - displayName: string; - role: "admin" | "user"; - status: "active" | "suspended"; - createdAt: string; - updatedAt: string; -}; - -type FakeApiKeyRow = { - id: string; - humanId: string; - keyHash: string; - keyPrefix: string; - name: string; - status: "active" | "revoked"; - createdAt: string; - lastUsedAt: string | null; -}; - -type FakeAgentAuthSessionRow = { - id: string; - agentId: string; - refreshKeyHash: string; - refreshKeyPrefix: string; - refreshIssuedAt: string; - refreshExpiresAt: string; - refreshLastUsedAt: string | null; - accessKeyHash: string; - accessKeyPrefix: string; - accessIssuedAt: string; - accessExpiresAt: string; - accessLastUsedAt: string | null; - status: "active" | "revoked"; - revokedAt: string | null; - createdAt: string; - updatedAt: string; -}; - -type FakeAgentAuthEventInsertRow = Record; -type FakeAgentAuthSessionInsertRow = Record; -type FakeAgentAuthSessionUpdateRow = Record; -type FakeApiKeySelectRow = { - id: string; - human_id: string; - key_hash: string; - key_prefix: string; - name: string; - status: "active" | "revoked"; - created_at: string; - 
last_used_at: string | null; -}; - -type FakeAgentInsertRow = Record; -type FakeHumanInsertRow = Record; -type FakeApiKeyInsertRow = Record; -type FakeAgentUpdateRow = Record; -type FakeRevocationInsertRow = Record; -type FakeAgentRegistrationChallengeInsertRow = Record; -type FakeAgentRegistrationChallengeUpdateRow = Record; -type FakeInviteInsertRow = Record; -type FakeInviteUpdateRow = Record; -type FakeRevocationRow = { - id: string; - jti: string; - agentId: string; - reason: string | null; - revokedAt: string; -}; -type FakeAgentRow = { - id: string; - did: string; - ownerId: string; - name: string; - framework: string | null; - publicKey?: string; - status: "active" | "revoked"; - expiresAt: string | null; - currentJti?: string | null; - createdAt?: string; - updatedAt?: string; -}; -type FakeAgentRegistrationChallengeRow = { - id: string; - ownerId: string; - publicKey: string; - nonce: string; - status: "pending" | "used"; - expiresAt: string; - usedAt: string | null; - createdAt: string; - updatedAt: string; -}; -type FakeInviteRow = { - id: string; - code: string; - createdBy: string; - redeemedBy: string | null; - agentId: string | null; - expiresAt: string | null; - createdAt: string; -}; - -type FakeAgentSelectRow = { - id: string; - did: string; - owner_id: string; - owner_did: string; - name: string; - framework: string | null; - public_key: string; - status: "active" | "revoked"; - expires_at: string | null; - current_jti: string | null; - created_at: string; - updated_at: string; -}; - -type FakeDbOptions = { - beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; - beforeFirstAgentAuthSessionUpdate?: ( - sessionRows: FakeAgentAuthSessionRow[], - ) => void; - failApiKeyInsertCount?: number; - failBeginTransaction?: boolean; - inviteRows?: FakeInviteRow[]; - revocationRows?: FakeRevocationRow[]; - registrationChallengeRows?: FakeAgentRegistrationChallengeRow[]; - agentAuthSessionRows?: FakeAgentAuthSessionRow[]; -}; - -type FakeCrlSelectRow 
= { - id: string; - jti: string; - reason: string | null; - revoked_at: string; - agent_did: string; - did: string; -}; - -function parseInsertColumns(query: string, tableName: string): string[] { - const match = query.match( - new RegExp(`insert\\s+into\\s+"?${tableName}"?\\s*\\(([^)]+)\\)`, "i"), - ); - if (!match) { - return []; - } - - const columns = match[1]?.split(",") ?? []; - return columns.map((column) => column.replace(/["`\s]/g, "")); -} - -function parseUpdateSetColumns(query: string, tableName: string): string[] { - const match = query.match( - new RegExp(`update\\s+"?${tableName}"?\\s+set\\s+(.+?)\\s+where`, "i"), - ); - if (!match) { - return []; - } - - const assignments = match[1]?.split(",") ?? []; - return assignments - .map((assignment) => assignment.split("=")[0] ?? "") - .map((column) => column.replace(/["`\s]/g, "")) - .filter((column) => column.length > 0); -} - -function extractWhereClause(query: string): string { - const normalized = query.toLowerCase(); - const whereIndex = normalized.indexOf(" where "); - if (whereIndex < 0) { - return ""; - } - - const orderByIndex = normalized.indexOf(" order by ", whereIndex + 7); - const limitIndex = normalized.indexOf(" limit ", whereIndex + 7); - const endIndex = - orderByIndex >= 0 - ? orderByIndex - : limitIndex >= 0 - ? 
limitIndex - : normalized.length; - - return normalized.slice(whereIndex, endIndex); -} - -function hasFilter( - whereClause: string, - column: string, - operator = "=", -): boolean { - const escapedColumn = column.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); - const escapedOperator = operator.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); - const quotedPattern = new RegExp( - `"${escapedColumn}"\\s*${escapedOperator}\\s*\\?`, - ); - const barePattern = new RegExp( - `\\b${escapedColumn}\\b\\s*${escapedOperator}\\s*\\?`, - ); - return quotedPattern.test(whereClause) || barePattern.test(whereClause); -} - -function isDefined(value: T | undefined): value is T { - return value !== undefined; -} - -function parseWhereEqualityParams(options: { - whereClause: string; - params: unknown[]; -}): { values: Record; consumedParams: number } { - const values: Record = {}; - const pattern = /"?([a-zA-Z0-9_]+)"?\s*=\s*\?/g; - let parameterIndex = 0; - - let match = pattern.exec(options.whereClause); - while (match !== null) { - const column = match[1]?.toLowerCase(); - if (!column) { - match = pattern.exec(options.whereClause); - continue; - } - - const entries = values[column] ?? 
[]; - entries.push(options.params[parameterIndex]); - values[column] = entries; - parameterIndex += 1; - match = pattern.exec(options.whereClause); - } - - return { values, consumedParams: parameterIndex }; -} - -function parseSelectedColumns(query: string): string[] { - const normalized = query.toLowerCase(); - const selectIndex = normalized.indexOf("select "); - const fromIndex = normalized.indexOf(" from "); - if (selectIndex < 0 || fromIndex < 0 || fromIndex <= selectIndex) { - return []; - } - - const selectClause = query.slice(selectIndex + 7, fromIndex); - return selectClause - .split(",") - .map((column) => column.trim()) - .map((column) => { - const normalizedColumn = column.toLowerCase(); - if ( - normalizedColumn.includes(`"humans"."did"`) || - normalizedColumn.includes("humans.did") - ) { - return "owner_did"; - } - - if ( - normalizedColumn.includes(`"agents"."did"`) || - normalizedColumn.includes("agents.did") - ) { - return "did"; - } - - const aliasMatch = column.match(/\s+as\s+"?([a-zA-Z0-9_]+)"?\s*$/i); - if (aliasMatch?.[1]) { - return aliasMatch[1].toLowerCase(); - } - - const quotedMatch = column.match(/"([a-zA-Z0-9_]+)"\s*$/); - if (quotedMatch?.[1]) { - return quotedMatch[1].toLowerCase(); - } - - const bare = - column - .split(".") - .pop() - ?.replace(/["`\s]/g, "") ?? ""; - return bare.toLowerCase(); - }) - .filter((column) => column.length > 0); -} - -function createFakePublicKey(agentId: string): string { - const seed = agentId.length > 0 ? 
agentId : "agent"; - const bytes = new Uint8Array(32); - - for (let index = 0; index < bytes.length; index += 1) { - bytes[index] = seed.charCodeAt(index % seed.length) & 0xff; - } - - return encodeBase64url(bytes); -} - -function getAgentSelectColumnValue( - row: FakeAgentSelectRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "did") { - return row.did; - } - if (column === "owner_id") { - return row.owner_id; - } - if (column === "owner_did") { - return row.owner_did; - } - if (column === "name") { - return row.name; - } - if (column === "framework") { - return row.framework; - } - if (column === "public_key") { - return row.public_key; - } - if (column === "status") { - return row.status; - } - if (column === "expires_at") { - return row.expires_at; - } - if (column === "current_jti") { - return row.current_jti; - } - if (column === "created_at") { - return row.created_at; - } - if (column === "updated_at") { - return row.updated_at; - } - return undefined; -} - -function getAgentRegistrationChallengeSelectColumnValue( - row: FakeAgentRegistrationChallengeRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "owner_id") { - return row.ownerId; - } - if (column === "public_key") { - return row.publicKey; - } - if (column === "nonce") { - return row.nonce; - } - if (column === "status") { - return row.status; - } - if (column === "expires_at") { - return row.expiresAt; - } - if (column === "used_at") { - return row.usedAt; - } - if (column === "created_at") { - return row.createdAt; - } - if (column === "updated_at") { - return row.updatedAt; - } - return undefined; -} - -function getHumanSelectColumnValue(row: FakeHumanRow, column: string): unknown { - if (column === "id") { - return row.id; - } - if (column === "did") { - return row.did; - } - if (column === "display_name") { - return row.displayName; - } - if (column === "role") { - return row.role; - } - if (column 
=== "status") { - return row.status; - } - if (column === "created_at") { - return row.createdAt; - } - if (column === "updated_at") { - return row.updatedAt; - } - return undefined; -} - -function resolveHumanSelectRows(options: { - query: string; - params: unknown[]; - humanRows: FakeHumanRow[]; -}): FakeHumanRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - - const roleFilter = - typeof equalityParams.values.role?.[0] === "string" - ? String(equalityParams.values.role[0]) - : undefined; - const statusFilter = - typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const didFilter = - typeof equalityParams.values.did?.[0] === "string" - ? String(equalityParams.values.did[0]) - : undefined; - - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.humanRows.length; - - return options.humanRows - .filter((row) => (roleFilter ? row.role === roleFilter : true)) - .filter((row) => (statusFilter ? row.status === statusFilter : true)) - .filter((row) => (idFilter ? row.id === idFilter : true)) - .filter((row) => (didFilter ? 
row.did === didFilter : true)) - .slice(0, limit); -} - -function getApiKeySelectColumnValue( - row: FakeApiKeySelectRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "human_id") { - return row.human_id; - } - if (column === "key_hash") { - return row.key_hash; - } - if (column === "key_prefix") { - return row.key_prefix; - } - if (column === "name") { - return row.name; - } - if (column === "status") { - return row.status; - } - if (column === "created_at") { - return row.created_at; - } - if (column === "last_used_at") { - return row.last_used_at; - } - return undefined; -} - -function resolveApiKeySelectRows(options: { - query: string; - params: unknown[]; - apiKeyRows: FakeApiKeyRow[]; -}): FakeApiKeySelectRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasHumanIdFilter = hasFilter(whereClause, "human_id"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasPrefixFilter = hasFilter(whereClause, "key_prefix"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - const orderByCreatedAtDesc = - options.query.toLowerCase().includes("order by") && - options.query.toLowerCase().includes("created_at") && - options.query.toLowerCase().includes("desc"); - - const humanId = - hasHumanIdFilter && typeof equalityParams.values.human_id?.[0] === "string" - ? String(equalityParams.values.human_id[0]) - : undefined; - const id = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const status = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const keyPrefix = - hasPrefixFilter && typeof equalityParams.values.key_prefix?.[0] === "string" - ? 
String(equalityParams.values.key_prefix[0]) - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.apiKeyRows.length; - - const rows = options.apiKeyRows - .filter((row) => (humanId ? row.humanId === humanId : true)) - .filter((row) => (id ? row.id === id : true)) - .filter((row) => (status ? row.status === status : true)) - .filter((row) => (keyPrefix ? row.keyPrefix === keyPrefix : true)) - .map((row) => ({ - id: row.id, - human_id: row.humanId, - key_hash: row.keyHash, - key_prefix: row.keyPrefix, - name: row.name, - status: row.status, - created_at: row.createdAt, - last_used_at: row.lastUsedAt, - })); - - if (orderByCreatedAtDesc) { - rows.sort((left, right) => { - const createdAtCompare = right.created_at.localeCompare(left.created_at); - if (createdAtCompare !== 0) { - return createdAtCompare; - } - return right.id.localeCompare(left.id); - }); - } - - return rows.slice(0, limit); -} - -function getAgentAuthSessionSelectColumnValue( - row: FakeAgentAuthSessionRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "agent_id") { - return row.agentId; - } - if (column === "refresh_key_hash") { - return row.refreshKeyHash; - } - if (column === "refresh_key_prefix") { - return row.refreshKeyPrefix; - } - if (column === "refresh_issued_at") { - return row.refreshIssuedAt; - } - if (column === "refresh_expires_at") { - return row.refreshExpiresAt; - } - if (column === "refresh_last_used_at") { - return row.refreshLastUsedAt; - } - if (column === "access_key_hash") { - return row.accessKeyHash; - } - if (column === "access_key_prefix") { - return row.accessKeyPrefix; - } - if (column === "access_issued_at") { - return row.accessIssuedAt; - } - if (column === "access_expires_at") { - return row.accessExpiresAt; - } - if (column === "access_last_used_at") { - return row.accessLastUsedAt; 
- } - if (column === "status") { - return row.status; - } - if (column === "revoked_at") { - return row.revokedAt; - } - if (column === "created_at") { - return row.createdAt; - } - if (column === "updated_at") { - return row.updatedAt; - } - return undefined; -} - -function resolveAgentAuthSessionSelectRows(options: { - query: string; - params: unknown[]; - sessionRows: FakeAgentAuthSessionRow[]; -}): FakeAgentAuthSessionRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasAgentIdFilter = hasFilter(whereClause, "agent_id"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasRefreshPrefixFilter = hasFilter(whereClause, "refresh_key_prefix"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - - const agentId = - hasAgentIdFilter && typeof equalityParams.values.agent_id?.[0] === "string" - ? String(equalityParams.values.agent_id[0]) - : undefined; - const id = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const status = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const refreshPrefix = - hasRefreshPrefixFilter && - typeof equalityParams.values.refresh_key_prefix?.[0] === "string" - ? String(equalityParams.values.refresh_key_prefix[0]) - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.sessionRows.length; - - return options.sessionRows - .filter((row) => (agentId ? row.agentId === agentId : true)) - .filter((row) => (id ? row.id === id : true)) - .filter((row) => (status ? row.status === status : true)) - .filter((row) => - refreshPrefix ? 
row.refreshKeyPrefix === refreshPrefix : true, - ) - .slice(0, limit); -} - -function resolveAgentSelectRows(options: { - query: string; - params: unknown[]; - authRows: FakeD1Row[]; - agentRows: FakeAgentRow[]; -}): FakeAgentSelectRow[] { - const normalizedQuery = options.query.toLowerCase(); - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasOwnerFilter = hasFilter(whereClause, "owner_id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasFrameworkFilter = hasFilter(whereClause, "framework"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasDidFilter = hasFilter(whereClause, "did"); - const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); - const hasCursorFilter = hasFilter(whereClause, "id", "<"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - const requiresHumanJoin = - normalizedQuery.includes('join "humans"') || - normalizedQuery.includes("join humans"); - - const ownerId = - hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" - ? String(equalityParams.values.owner_id?.[0]) - : undefined; - const statusFilter = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status?.[0]) - : undefined; - const frameworkFilter = - hasFrameworkFilter && - typeof equalityParams.values.framework?.[0] === "string" - ? String(equalityParams.values.framework?.[0]) - : undefined; - const idFilter = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id?.[0]) - : undefined; - const didFilter = - hasDidFilter && typeof equalityParams.values.did?.[0] === "string" - ? String(equalityParams.values.did?.[0]) - : undefined; - const currentJtiFilter = hasCurrentJtiFilter - ? 
(equalityParams.values.current_jti?.[0] as string | null | undefined) - : undefined; - const cursorFilter = hasCursorFilter - ? String(options.params[equalityParams.consumedParams] ?? "") - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.agentRows.length; - - const filteredRows = options.agentRows - .filter((row) => (ownerId ? row.ownerId === ownerId : true)) - .filter((row) => (statusFilter ? row.status === statusFilter : true)) - .filter((row) => - frameworkFilter ? row.framework === frameworkFilter : true, - ) - .filter((row) => (idFilter ? row.id === idFilter : true)) - .filter((row) => (didFilter ? row.did === didFilter : true)) - .filter((row) => - currentJtiFilter !== undefined - ? (row.currentJti ?? null) === currentJtiFilter - : true, - ) - .filter((row) => (cursorFilter ? row.id < cursorFilter : true)) - .sort((left, right) => right.id.localeCompare(left.id)) - .map((row) => { - const ownerDid = options.authRows.find( - (authRow) => authRow.humanId === row.ownerId, - )?.humanDid; - - return { - id: row.id, - did: row.did, - owner_id: row.ownerId, - owner_did: ownerDid ?? "", - name: row.name, - framework: row.framework, - public_key: row.publicKey ?? createFakePublicKey(row.id), - status: row.status, - expires_at: row.expiresAt, - current_jti: row.currentJti ?? null, - created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", - updated_at: row.updatedAt ?? "2026-01-01T00:00:00.000Z", - }; - }) - .filter((row) => (requiresHumanJoin ? 
row.owner_did.length > 0 : true)) - .slice(0, limit); - - return filteredRows; -} - -function resolveAgentRegistrationChallengeSelectRows(options: { - query: string; - params: unknown[]; - challengeRows: FakeAgentRegistrationChallengeRow[]; -}): FakeAgentRegistrationChallengeRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasOwnerFilter = hasFilter(whereClause, "owner_id"); - const hasChallengeIdFilter = hasFilter(whereClause, "id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - - const ownerId = - hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" - ? String(equalityParams.values.owner_id[0]) - : undefined; - const challengeId = - hasChallengeIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const status = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.challengeRows.length; - - return options.challengeRows - .filter((row) => (ownerId ? row.ownerId === ownerId : true)) - .filter((row) => (challengeId ? row.id === challengeId : true)) - .filter((row) => (status ? 
row.status === status : true)) - .slice(0, limit); -} - -function getInviteSelectColumnValue( - row: FakeInviteRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "code") { - return row.code; - } - if (column === "created_by") { - return row.createdBy; - } - if (column === "redeemed_by") { - return row.redeemedBy; - } - if (column === "agent_id") { - return row.agentId; - } - if (column === "expires_at") { - return row.expiresAt; - } - if (column === "created_at") { - return row.createdAt; - } - return undefined; -} - -function resolveInviteSelectRows(options: { - query: string; - params: unknown[]; - inviteRows: FakeInviteRow[]; -}): FakeInviteRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasCodeFilter = hasFilter(whereClause, "code"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasRedeemedByFilter = hasFilter(whereClause, "redeemed_by"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - - const codeFilter = - hasCodeFilter && typeof equalityParams.values.code?.[0] === "string" - ? String(equalityParams.values.code[0]) - : undefined; - const idFilter = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const redeemedByFilter = hasRedeemedByFilter - ? (equalityParams.values.redeemed_by?.[0] as string | null | undefined) - : undefined; - - const requiresRedeemedByNull = - whereClause.includes("redeemed_by") && whereClause.includes("is null"); - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.inviteRows.length; - - return options.inviteRows - .filter((row) => (codeFilter ? row.code === codeFilter : true)) - .filter((row) => (idFilter ? 
row.id === idFilter : true)) - .filter((row) => - redeemedByFilter !== undefined - ? row.redeemedBy === redeemedByFilter - : true, - ) - .filter((row) => (requiresRedeemedByNull ? row.redeemedBy === null : true)) - .slice(0, limit); -} - -function getCrlSelectColumnValue( - row: FakeCrlSelectRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "jti") { - return row.jti; - } - if (column === "reason") { - return row.reason; - } - if (column === "revoked_at") { - return row.revoked_at; - } - if (column === "revokedat") { - return row.revoked_at; - } - if (column === "agent_did") { - return row.agent_did; - } - if (column === "agentdid" || column === "did") { - return row.did; - } - return undefined; -} - -function resolveCrlSelectRows(options: { - agentRows: FakeAgentRow[]; - revocationRows: FakeRevocationRow[]; -}): FakeCrlSelectRow[] { - return options.revocationRows - .map((row) => { - const agent = options.agentRows.find( - (agentRow) => agentRow.id === row.agentId, - ); - if (!agent) { - return null; - } - - return { - id: row.id, - jti: row.jti, - reason: row.reason, - revoked_at: row.revokedAt, - agent_did: agent.did, - did: agent.did, - }; - }) - .filter((row): row is FakeCrlSelectRow => row !== null) - .sort((left, right) => { - const timestampCompare = right.revoked_at.localeCompare(left.revoked_at); - if (timestampCompare !== 0) { - return timestampCompare; - } - return right.id.localeCompare(left.id); - }); -} - -function createFakeDb( - rows: FakeD1Row[], - agentRows: FakeAgentRow[] = [], - options: FakeDbOptions = {}, -) { - const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; - const humanInserts: FakeHumanInsertRow[] = []; - const apiKeyInserts: FakeApiKeyInsertRow[] = []; - const agentInserts: FakeAgentInsertRow[] = []; - const agentUpdates: FakeAgentUpdateRow[] = []; - const revocationInserts: FakeRevocationInsertRow[] = []; - const agentRegistrationChallengeInserts: 
FakeAgentRegistrationChallengeInsertRow[] = - []; - const agentRegistrationChallengeUpdates: FakeAgentRegistrationChallengeUpdateRow[] = - []; - const agentAuthSessionInserts: FakeAgentAuthSessionInsertRow[] = []; - const agentAuthSessionUpdates: FakeAgentAuthSessionUpdateRow[] = []; - const agentAuthEventInserts: FakeAgentAuthEventInsertRow[] = []; - const inviteInserts: FakeInviteInsertRow[] = []; - const inviteUpdates: FakeInviteUpdateRow[] = []; - const revocationRows = [...(options.revocationRows ?? [])]; - const registrationChallengeRows = [ - ...(options.registrationChallengeRows ?? []), - ]; - const agentAuthSessionRows = [...(options.agentAuthSessionRows ?? [])]; - const inviteRows = [...(options.inviteRows ?? [])]; - const humanRows = rows.reduce((acc, row) => { - if (acc.some((item) => item.id === row.humanId)) { - return acc; - } - - acc.push({ - id: row.humanId, - did: row.humanDid, - displayName: row.humanDisplayName, - role: row.humanRole, - status: row.humanStatus, - createdAt: "2026-01-01T00:00:00.000Z", - updatedAt: "2026-01-01T00:00:00.000Z", - }); - return acc; - }, []); - const apiKeyRows: FakeApiKeyRow[] = rows.map((row) => ({ - id: row.apiKeyId, - humanId: row.humanId, - keyHash: row.keyHash, - keyPrefix: row.keyPrefix, - name: row.apiKeyName, - status: row.apiKeyStatus, - createdAt: "2026-01-01T00:00:00.000Z", - lastUsedAt: null, - })); - let beforeFirstAgentUpdateApplied = false; - let beforeFirstAgentAuthSessionUpdateApplied = false; - let remainingApiKeyInsertFailures = options.failApiKeyInsertCount ?? 
0; - - const database: D1Database = { - prepare(query: string) { - let params: unknown[] = []; - const normalizedQuery = query.toLowerCase(); - - return { - bind(...values: unknown[]) { - params = values; - return this; - }, - async all() { - if ( - normalizedQuery.includes('from "api_keys"') || - normalizedQuery.includes("from api_keys") - ) { - const requiresHumanJoin = - normalizedQuery.includes('join "humans"') || - normalizedQuery.includes("join humans"); - - if (requiresHumanJoin) { - const requestedKeyPrefix = - typeof params[0] === "string" ? params[0] : ""; - const matchingRows = apiKeyRows.filter( - (row) => row.keyPrefix === requestedKeyPrefix, - ); - - return { - results: matchingRows - .map((row) => { - const human = humanRows.find( - (humanRow) => humanRow.id === row.humanId, - ); - if (!human) { - return undefined; - } - - return { - api_key_id: row.id, - key_hash: row.keyHash, - api_key_status: row.status, - api_key_name: row.name, - human_id: human.id, - human_did: human.did, - human_display_name: human.displayName, - human_role: human.role, - human_status: human.status, - }; - }) - .filter(isDefined), - }; - } - - const resultRows = resolveApiKeySelectRows({ - query, - params, - apiKeyRows, - }); - const selectedColumns = parseSelectedColumns(query); - return { - results: resultRows.map((row) => { - if (selectedColumns.length === 0) { - return row; - } - - return selectedColumns.reduce>( - (acc, column) => { - acc[column] = getApiKeySelectColumnValue(row, column); - return acc; - }, - {}, - ); - }), - }; - } - if ( - (normalizedQuery.includes('from "humans"') || - normalizedQuery.includes("from humans")) && - normalizedQuery.includes("select") - ) { - const resultRows = resolveHumanSelectRows({ - query, - params, - humanRows, - }); - const selectedColumns = parseSelectedColumns(query); - - return { - results: resultRows.map((row) => { - if (selectedColumns.length === 0) { - return row; - } - - return selectedColumns.reduce>( - (acc, column) => { - 
acc[column] = getHumanSelectColumnValue(row, column); - return acc; - }, - {}, - ); - }), - }; - } - if ( - (normalizedQuery.includes('from "agents"') || - normalizedQuery.includes("from agents")) && - (normalizedQuery.includes("select") || - normalizedQuery.includes("returning")) - ) { - const resultRows = resolveAgentSelectRows({ - query, - params, - authRows: rows, - agentRows, - }); - const selectedColumns = parseSelectedColumns(query); - - return { - results: resultRows.map((row) => { - if (selectedColumns.length === 0) { - return row; - } - - return selectedColumns.reduce>( - (acc, column) => { - acc[column] = getAgentSelectColumnValue(row, column); - return acc; - }, - {}, - ); - }), - }; - } - if ( - (normalizedQuery.includes('from "agent_registration_challenges"') || - normalizedQuery.includes("from agent_registration_challenges")) && - (normalizedQuery.includes("select") || - normalizedQuery.includes("returning")) - ) { - const resultRows = resolveAgentRegistrationChallengeSelectRows({ - query, - params, - challengeRows: registrationChallengeRows, - }); - const selectedColumns = parseSelectedColumns(query); - - return { - results: resultRows.map((row) => { - if (selectedColumns.length === 0) { - return row; - } - - return selectedColumns.reduce>( - (acc, column) => { - acc[column] = - getAgentRegistrationChallengeSelectColumnValue( - row, - column, - ); - return acc; - }, - {}, - ); - }), - }; - } - if ( - (normalizedQuery.includes('from "agent_auth_sessions"') || - normalizedQuery.includes("from agent_auth_sessions")) && - (normalizedQuery.includes("select") || - normalizedQuery.includes("returning")) - ) { - const resultRows = resolveAgentAuthSessionSelectRows({ - query, - params, - sessionRows: agentAuthSessionRows, - }); - const selectedColumns = parseSelectedColumns(query); - - return { - results: resultRows.map((row) => { - if (selectedColumns.length === 0) { - return row; - } - - return selectedColumns.reduce>( - (acc, column) => { - acc[column] = 
getAgentAuthSessionSelectColumnValue( - row, - column, - ); - return acc; - }, - {}, - ); - }), - }; - } - if ( - (normalizedQuery.includes('from "invites"') || - normalizedQuery.includes("from invites")) && - (normalizedQuery.includes("select") || - normalizedQuery.includes("returning")) - ) { - const resultRows = resolveInviteSelectRows({ - query, - params, - inviteRows, - }); - const selectedColumns = parseSelectedColumns(query); - - return { - results: resultRows.map((row) => { - if (selectedColumns.length === 0) { - return row; - } - - return selectedColumns.reduce>( - (acc, column) => { - acc[column] = getInviteSelectColumnValue(row, column); - return acc; - }, - {}, - ); - }), - }; - } - if ( - (normalizedQuery.includes('from "revocations"') || - normalizedQuery.includes("from revocations")) && - normalizedQuery.includes("select") - ) { - return { - results: resolveCrlSelectRows({ - agentRows, - revocationRows, - }), - }; - } - return { results: [] }; - }, - async raw() { - if ( - normalizedQuery.includes('from "api_keys"') || - normalizedQuery.includes("from api_keys") - ) { - const requiresHumanJoin = - normalizedQuery.includes('join "humans"') || - normalizedQuery.includes("join humans"); - - if (requiresHumanJoin) { - const requestedKeyPrefix = - typeof params[0] === "string" ? 
params[0] : ""; - const matchingRows = apiKeyRows.filter( - (row) => row.keyPrefix === requestedKeyPrefix, - ); - - return matchingRows - .map((row) => { - const human = humanRows.find( - (humanRow) => humanRow.id === row.humanId, - ); - if (!human) { - return undefined; - } - - return [ - row.id, - row.keyHash, - row.status, - row.name, - human.id, - human.did, - human.displayName, - human.role, - human.status, - ]; - }) - .filter(isDefined); - } - - const resultRows = resolveApiKeySelectRows({ - query, - params, - apiKeyRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getApiKeySelectColumnValue(row, column), - ), - ); - } - if ( - normalizedQuery.includes('from "agent_auth_sessions"') || - normalizedQuery.includes("from agent_auth_sessions") - ) { - const resultRows = resolveAgentAuthSessionSelectRows({ - query, - params, - sessionRows: agentAuthSessionRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getAgentAuthSessionSelectColumnValue(row, column), - ), - ); - } - if ( - normalizedQuery.includes('from "humans"') || - normalizedQuery.includes("from humans") - ) { - const resultRows = resolveHumanSelectRows({ - query, - params, - humanRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getHumanSelectColumnValue(row, column), - ), - ); - } - if ( - normalizedQuery.includes('from "agents"') || - normalizedQuery.includes("from agents") - ) { - const resultRows = resolveAgentSelectRows({ - query, - params, - authRows: rows, - agentRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getAgentSelectColumnValue(row, column), - ), - ); - } - if ( - normalizedQuery.includes('from "agent_registration_challenges"') || - 
normalizedQuery.includes("from agent_registration_challenges") - ) { - const resultRows = resolveAgentRegistrationChallengeSelectRows({ - query, - params, - challengeRows: registrationChallengeRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getAgentRegistrationChallengeSelectColumnValue(row, column), - ), - ); - } - if ( - normalizedQuery.includes('from "invites"') || - normalizedQuery.includes("from invites") - ) { - const resultRows = resolveInviteSelectRows({ - query, - params, - inviteRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getInviteSelectColumnValue(row, column), - ), - ); - } - if ( - normalizedQuery.includes('from "revocations"') || - normalizedQuery.includes("from revocations") - ) { - const resultRows = resolveCrlSelectRows({ - agentRows, - revocationRows, - }); - const selectedColumns = parseSelectedColumns(query); - return resultRows.map((row) => - selectedColumns.map((column) => - getCrlSelectColumnValue(row, column), - ), - ); - } - return []; - }, - async run() { - if ( - options.failBeginTransaction && - normalizedQuery.trim() === "begin" - ) { - throw new Error("Failed query: begin"); - } - - let changes = 0; - - if ( - normalizedQuery.includes('update "api_keys"') || - normalizedQuery.includes("update api_keys") - ) { - const setColumns = parseUpdateSetColumns(query, "api_keys"); - const nextValues = setColumns.reduce>( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - const whereClause = extractWhereClause(query); - const whereParams = params.slice(setColumns.length); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: whereParams, - }); - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? 
String(equalityParams.values.id[0]) - : undefined; - const humanIdFilter = - typeof equalityParams.values.human_id?.[0] === "string" - ? String(equalityParams.values.human_id[0]) - : undefined; - const statusFilter = - typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - - let matchedRows = 0; - for (const row of apiKeyRows) { - if (idFilter && row.id !== idFilter) { - continue; - } - if (humanIdFilter && row.humanId !== humanIdFilter) { - continue; - } - if (statusFilter && row.status !== statusFilter) { - continue; - } - - matchedRows += 1; - if ( - nextValues.status === "active" || - nextValues.status === "revoked" - ) { - row.status = nextValues.status; - } - if ( - typeof nextValues.last_used_at === "string" || - nextValues.last_used_at === null - ) { - row.lastUsedAt = nextValues.last_used_at; - } - } - - if (typeof nextValues.last_used_at === "string" && idFilter) { - updates.push({ - lastUsedAt: nextValues.last_used_at, - apiKeyId: idFilter, - }); - } - changes = matchedRows; - } - if ( - normalizedQuery.includes('insert into "humans"') || - normalizedQuery.includes("insert into humans") - ) { - const columns = parseInsertColumns(query, "humans"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - humanInserts.push(row); - - const nextHumanId = typeof row.id === "string" ? row.id : ""; - const nextHumanDid = typeof row.did === "string" ? 
row.did : ""; - const conflict = humanRows.some( - (humanRow) => - humanRow.id === nextHumanId || humanRow.did === nextHumanDid, - ); - - if (!conflict) { - if ( - (row.role === "admin" || row.role === "user") && - (row.status === "active" || row.status === "suspended") && - typeof row.display_name === "string" && - typeof row.created_at === "string" && - typeof row.updated_at === "string" - ) { - humanRows.push({ - id: nextHumanId, - did: nextHumanDid, - displayName: row.display_name, - role: row.role, - status: row.status, - createdAt: row.created_at, - updatedAt: row.updated_at, - }); - } - - changes = 1; - } else { - changes = 0; - } - } - if ( - normalizedQuery.includes('insert into "api_keys"') || - normalizedQuery.includes("insert into api_keys") - ) { - if (remainingApiKeyInsertFailures > 0) { - remainingApiKeyInsertFailures -= 1; - throw new Error("api key insert failed"); - } - - const columns = parseInsertColumns(query, "api_keys"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - apiKeyInserts.push(row); - - if ( - typeof row.id === "string" && - typeof row.human_id === "string" && - typeof row.key_hash === "string" && - typeof row.key_prefix === "string" && - typeof row.name === "string" && - (row.status === "active" || row.status === "revoked") && - typeof row.created_at === "string" - ) { - apiKeyRows.push({ - id: row.id, - humanId: row.human_id, - keyHash: row.key_hash, - keyPrefix: row.key_prefix, - name: row.name, - status: row.status, - createdAt: row.created_at, - lastUsedAt: - typeof row.last_used_at === "string" - ? 
row.last_used_at - : null, - }); - } - - changes = 1; - } - if ( - normalizedQuery.includes('insert into "agent_auth_sessions"') || - normalizedQuery.includes("insert into agent_auth_sessions") - ) { - const columns = parseInsertColumns(query, "agent_auth_sessions"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - agentAuthSessionInserts.push(row); - - if ( - typeof row.id === "string" && - typeof row.agent_id === "string" && - typeof row.refresh_key_hash === "string" && - typeof row.refresh_key_prefix === "string" && - typeof row.refresh_issued_at === "string" && - typeof row.refresh_expires_at === "string" && - typeof row.access_key_hash === "string" && - typeof row.access_key_prefix === "string" && - typeof row.access_issued_at === "string" && - typeof row.access_expires_at === "string" && - (row.status === "active" || row.status === "revoked") && - typeof row.created_at === "string" && - typeof row.updated_at === "string" - ) { - const existingIndex = agentAuthSessionRows.findIndex( - (sessionRow) => sessionRow.agentId === row.agent_id, - ); - const nextSession: FakeAgentAuthSessionRow = { - id: row.id, - agentId: row.agent_id, - refreshKeyHash: row.refresh_key_hash, - refreshKeyPrefix: row.refresh_key_prefix, - refreshIssuedAt: row.refresh_issued_at, - refreshExpiresAt: row.refresh_expires_at, - refreshLastUsedAt: - typeof row.refresh_last_used_at === "string" - ? row.refresh_last_used_at - : null, - accessKeyHash: row.access_key_hash, - accessKeyPrefix: row.access_key_prefix, - accessIssuedAt: row.access_issued_at, - accessExpiresAt: row.access_expires_at, - accessLastUsedAt: - typeof row.access_last_used_at === "string" - ? row.access_last_used_at - : null, - status: row.status, - revokedAt: - typeof row.revoked_at === "string" ? 
row.revoked_at : null, - createdAt: row.created_at, - updatedAt: row.updated_at, - }; - if (existingIndex >= 0) { - agentAuthSessionRows.splice(existingIndex, 1, nextSession); - } else { - agentAuthSessionRows.push(nextSession); - } - } - - changes = 1; - } - if ( - normalizedQuery.includes('insert into "agent_auth_events"') || - normalizedQuery.includes("insert into agent_auth_events") - ) { - const columns = parseInsertColumns(query, "agent_auth_events"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - agentAuthEventInserts.push(row); - changes = 1; - } - if ( - normalizedQuery.includes('update "agent_auth_sessions"') || - normalizedQuery.includes("update agent_auth_sessions") - ) { - if ( - !beforeFirstAgentAuthSessionUpdateApplied && - options.beforeFirstAgentAuthSessionUpdate - ) { - options.beforeFirstAgentAuthSessionUpdate(agentAuthSessionRows); - beforeFirstAgentAuthSessionUpdateApplied = true; - } - - const setColumns = parseUpdateSetColumns( - query, - "agent_auth_sessions", - ); - const nextValues = setColumns.reduce>( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - const whereClause = extractWhereClause(query); - const whereParams = params.slice(setColumns.length); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: whereParams, - }); - - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const agentIdFilter = - typeof equalityParams.values.agent_id?.[0] === "string" - ? String(equalityParams.values.agent_id[0]) - : undefined; - const statusFilter = - typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const refreshHashFilter = - typeof equalityParams.values.refresh_key_hash?.[0] === "string" - ? 
String(equalityParams.values.refresh_key_hash[0]) - : undefined; - const accessHashFilter = - typeof equalityParams.values.access_key_hash?.[0] === "string" - ? String(equalityParams.values.access_key_hash[0]) - : undefined; - - let matchedRows = 0; - for (const row of agentAuthSessionRows) { - if (idFilter && row.id !== idFilter) { - continue; - } - if (agentIdFilter && row.agentId !== agentIdFilter) { - continue; - } - if (statusFilter && row.status !== statusFilter) { - continue; - } - if ( - refreshHashFilter && - row.refreshKeyHash !== refreshHashFilter - ) { - continue; - } - if (accessHashFilter && row.accessKeyHash !== accessHashFilter) { - continue; - } - - matchedRows += 1; - if (typeof nextValues.refresh_key_hash === "string") { - row.refreshKeyHash = nextValues.refresh_key_hash; - } - if (typeof nextValues.refresh_key_prefix === "string") { - row.refreshKeyPrefix = nextValues.refresh_key_prefix; - } - if (typeof nextValues.refresh_issued_at === "string") { - row.refreshIssuedAt = nextValues.refresh_issued_at; - } - if (typeof nextValues.refresh_expires_at === "string") { - row.refreshExpiresAt = nextValues.refresh_expires_at; - } - if ( - typeof nextValues.refresh_last_used_at === "string" || - nextValues.refresh_last_used_at === null - ) { - row.refreshLastUsedAt = nextValues.refresh_last_used_at; - } - if (typeof nextValues.access_key_hash === "string") { - row.accessKeyHash = nextValues.access_key_hash; - } - if (typeof nextValues.access_key_prefix === "string") { - row.accessKeyPrefix = nextValues.access_key_prefix; - } - if (typeof nextValues.access_issued_at === "string") { - row.accessIssuedAt = nextValues.access_issued_at; - } - if (typeof nextValues.access_expires_at === "string") { - row.accessExpiresAt = nextValues.access_expires_at; - } - if ( - typeof nextValues.access_last_used_at === "string" || - nextValues.access_last_used_at === null - ) { - row.accessLastUsedAt = nextValues.access_last_used_at; - } - if ( - nextValues.status === 
"active" || - nextValues.status === "revoked" - ) { - row.status = nextValues.status; - } - if ( - typeof nextValues.revoked_at === "string" || - nextValues.revoked_at === null - ) { - row.revokedAt = nextValues.revoked_at; - } - if (typeof nextValues.updated_at === "string") { - row.updatedAt = nextValues.updated_at; - } - } - - agentAuthSessionUpdates.push({ - ...nextValues, - id: idFilter, - agent_id: agentIdFilter, - status_where: statusFilter, - refresh_key_hash_where: refreshHashFilter, - access_key_hash_where: accessHashFilter, - matched_rows: matchedRows, - }); - changes = matchedRows; - } - if ( - normalizedQuery.includes('delete from "agent_auth_sessions"') || - normalizedQuery.includes("delete from agent_auth_sessions") - ) { - const whereClause = extractWhereClause(query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params, - }); - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - - if (idFilter) { - for ( - let index = agentAuthSessionRows.length - 1; - index >= 0; - index -= 1 - ) { - if (agentAuthSessionRows[index]?.id === idFilter) { - agentAuthSessionRows.splice(index, 1); - changes += 1; - } - } - } - } - if ( - normalizedQuery.includes('insert into "invites"') || - normalizedQuery.includes("insert into invites") - ) { - const columns = parseInsertColumns(query, "invites"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - inviteInserts.push(row); - - if ( - typeof row.id === "string" && - typeof row.code === "string" && - typeof row.created_by === "string" && - typeof row.created_at === "string" - ) { - inviteRows.push({ - id: row.id, - code: row.code, - createdBy: row.created_by, - redeemedBy: - typeof row.redeemed_by === "string" ? row.redeemed_by : null, - agentId: typeof row.agent_id === "string" ? row.agent_id : null, - expiresAt: - typeof row.expires_at === "string" ? 
row.expires_at : null, - createdAt: row.created_at, - }); - } - - changes = 1; - } - if ( - normalizedQuery.includes('update "invites"') || - normalizedQuery.includes("update invites") - ) { - const setColumns = parseUpdateSetColumns(query, "invites"); - const nextValues = setColumns.reduce>( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - const whereClause = extractWhereClause(query); - const whereParams = params.slice(setColumns.length); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: whereParams, - }); - - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const redeemedByFilter = hasFilter(whereClause, "redeemed_by") - ? (equalityParams.values.redeemed_by?.[0] as - | string - | null - | undefined) - : undefined; - const requiresRedeemedByNull = - whereClause.includes("redeemed_by") && - whereClause.includes("is null"); - - let matchedRows = 0; - for (const row of inviteRows) { - if (idFilter && row.id !== idFilter) { - continue; - } - if (requiresRedeemedByNull && row.redeemedBy !== null) { - continue; - } - if ( - redeemedByFilter !== undefined && - row.redeemedBy !== redeemedByFilter - ) { - continue; - } - - matchedRows += 1; - if ( - typeof nextValues.redeemed_by === "string" || - nextValues.redeemed_by === null - ) { - row.redeemedBy = nextValues.redeemed_by; - } - } - - inviteUpdates.push({ - ...nextValues, - id: idFilter, - redeemed_by_where: redeemedByFilter, - redeemed_by_is_null_where: requiresRedeemedByNull, - matched_rows: matchedRows, - }); - changes = matchedRows; - } - if ( - normalizedQuery.includes('delete from "humans"') || - normalizedQuery.includes("delete from humans") - ) { - const whereClause = extractWhereClause(query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params, - }); - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? 
String(equalityParams.values.id[0]) - : ""; - - if (idFilter.length > 0) { - for (let index = humanRows.length - 1; index >= 0; index -= 1) { - if (humanRows[index]?.id === idFilter) { - humanRows.splice(index, 1); - changes += 1; - } - } - - for (let index = apiKeyRows.length - 1; index >= 0; index -= 1) { - if (apiKeyRows[index]?.humanId === idFilter) { - apiKeyRows.splice(index, 1); - } - } - } - } - if ( - normalizedQuery.includes('insert into "agents"') || - normalizedQuery.includes("insert into agents") - ) { - const columns = parseInsertColumns(query, "agents"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - agentInserts.push(row); - changes = 1; - } - if ( - normalizedQuery.includes( - 'insert into "agent_registration_challenges"', - ) || - normalizedQuery.includes( - "insert into agent_registration_challenges", - ) - ) { - const columns = parseInsertColumns( - query, - "agent_registration_challenges", - ); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - agentRegistrationChallengeInserts.push(row); - - if ( - typeof row.id === "string" && - typeof row.owner_id === "string" && - typeof row.public_key === "string" && - typeof row.nonce === "string" && - (row.status === "pending" || row.status === "used") && - typeof row.expires_at === "string" && - typeof row.created_at === "string" && - typeof row.updated_at === "string" - ) { - registrationChallengeRows.push({ - id: row.id, - ownerId: row.owner_id, - publicKey: row.public_key, - nonce: row.nonce, - status: row.status, - expiresAt: row.expires_at, - usedAt: - typeof row.used_at === "string" ? 
String(row.used_at) : null, - createdAt: row.created_at, - updatedAt: row.updated_at, - }); - } - - changes = 1; - } - if ( - normalizedQuery.includes( - 'update "agent_registration_challenges"', - ) || - normalizedQuery.includes("update agent_registration_challenges") - ) { - const setColumns = parseUpdateSetColumns( - query, - "agent_registration_challenges", - ); - const nextValues = setColumns.reduce>( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - const whereClause = extractWhereClause(query); - const whereParams = params.slice(setColumns.length); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: whereParams, - }); - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const ownerFilter = - typeof equalityParams.values.owner_id?.[0] === "string" - ? String(equalityParams.values.owner_id[0]) - : undefined; - const statusFilter = - typeof equalityParams.values.status?.[0] === "string" - ? 
String(equalityParams.values.status[0]) - : undefined; - - let matchedRows = 0; - for (const row of registrationChallengeRows) { - if (idFilter && row.id !== idFilter) { - continue; - } - if (ownerFilter && row.ownerId !== ownerFilter) { - continue; - } - if (statusFilter && row.status !== statusFilter) { - continue; - } - - matchedRows += 1; - if ( - nextValues.status === "pending" || - nextValues.status === "used" - ) { - row.status = nextValues.status; - } - if ( - typeof nextValues.used_at === "string" || - nextValues.used_at === null - ) { - row.usedAt = nextValues.used_at; - } - if (typeof nextValues.updated_at === "string") { - row.updatedAt = nextValues.updated_at; - } - } - - agentRegistrationChallengeUpdates.push({ - ...nextValues, - id: idFilter, - owner_id: ownerFilter, - status_where: statusFilter, - matched_rows: matchedRows, - }); - changes = matchedRows; - } - if ( - normalizedQuery.includes('update "agents"') || - normalizedQuery.includes("update agents") - ) { - if ( - !beforeFirstAgentUpdateApplied && - options.beforeFirstAgentUpdate - ) { - options.beforeFirstAgentUpdate(agentRows); - beforeFirstAgentUpdateApplied = true; - } - - const setColumns = parseUpdateSetColumns(query, "agents"); - const nextValues = setColumns.reduce>( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - const whereClause = extractWhereClause(query); - const whereParams = params.slice(setColumns.length); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: whereParams, - }); - const ownerFilter = - typeof equalityParams.values.owner_id?.[0] === "string" - ? String(equalityParams.values.owner_id?.[0]) - : undefined; - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id?.[0]) - : undefined; - const statusFilter = - typeof equalityParams.values.status?.[0] === "string" - ? 
String(equalityParams.values.status?.[0]) - : undefined; - const currentJtiFilter = equalityParams.values.current_jti?.[0] as - | string - | null - | undefined; - - let matchedRows = 0; - - for (const row of agentRows) { - if (ownerFilter && row.ownerId !== ownerFilter) { - continue; - } - if (idFilter && row.id !== idFilter) { - continue; - } - if ( - statusFilter && - row.status !== (statusFilter as "active" | "revoked") - ) { - continue; - } - if ( - currentJtiFilter !== undefined && - (row.currentJti ?? null) !== currentJtiFilter - ) { - continue; - } - - matchedRows += 1; - - if ( - nextValues.status === "active" || - nextValues.status === "revoked" - ) { - row.status = nextValues.status; - } - if (typeof nextValues.updated_at === "string") { - row.updatedAt = nextValues.updated_at; - } - if ( - typeof nextValues.current_jti === "string" || - nextValues.current_jti === null - ) { - row.currentJti = nextValues.current_jti; - } - if ( - typeof nextValues.expires_at === "string" || - nextValues.expires_at === null - ) { - row.expiresAt = nextValues.expires_at; - } - } - - agentUpdates.push({ - ...nextValues, - owner_id: ownerFilter, - id: idFilter, - status_where: statusFilter, - current_jti_where: currentJtiFilter, - matched_rows: matchedRows, - }); - changes = matchedRows; - } - if ( - normalizedQuery.includes('insert into "revocations"') || - normalizedQuery.includes("insert into revocations") - ) { - const columns = parseInsertColumns(query, "revocations"); - const row = columns.reduce( - (acc, column, index) => { - acc[column] = params[index]; - return acc; - }, - {}, - ); - revocationInserts.push(row); - if ( - typeof row.id === "string" && - typeof row.jti === "string" && - typeof row.agent_id === "string" && - typeof row.revoked_at === "string" - ) { - revocationRows.push({ - id: row.id, - jti: row.jti, - agentId: row.agent_id, - reason: typeof row.reason === "string" ? 
row.reason : null, - revokedAt: row.revoked_at, - }); - } - changes = 1; - } - return { success: true, meta: { changes } } as D1Result; - }, - } as D1PreparedStatement; - }, - } as D1Database; - - return { - database, - updates, - humanRows, - humanInserts, - apiKeyInserts, - agentAuthSessionRows, - agentAuthSessionInserts, - agentAuthSessionUpdates, - agentAuthEventInserts, - agentInserts, - agentUpdates, - agentRegistrationChallengeInserts, - agentRegistrationChallengeUpdates, - inviteInserts, - inviteUpdates, - inviteRows, - revocationInserts, - registrationChallengeRows, - }; -} - -function makeValidPatContext(token = "clw_pat_valid-token-value") { - return hashApiKeyToken(token).then((tokenHash) => { - const authRow: FakeD1Row = { - apiKeyId: "key-1", - keyPrefix: deriveApiKeyLookupPrefix(token), - keyHash: tokenHash, - apiKeyStatus: "active", - apiKeyName: "ci", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }; - - return { token, authRow }; - }); -} - -async function signRegistrationChallenge(options: { - challengeId: string; - nonce: string; - ownerDid: string; - publicKey: string; - name: string; - secretKey: Uint8Array; - framework?: string; - ttlDays?: number; -}): Promise { - const canonical = canonicalizeAgentRegistrationProof({ - challengeId: options.challengeId, - nonce: options.nonce, - ownerDid: options.ownerDid, - publicKey: options.publicKey, - name: options.name, - framework: options.framework, - ttlDays: options.ttlDays, - }); - const signature = await signEd25519( - new TextEncoder().encode(canonical), - options.secretKey, - ); - return encodeEd25519SignatureBase64url(signature); -} - -async function createSignedAgentRefreshRequest(options: { - ait: string; - secretKey: Uint8Array; - refreshToken: string; - timestamp?: string; - nonce?: string; -}): Promise<{ - body: string; - headers: Record; -}> { - const timestamp = options.timestamp 
?? String(Math.floor(Date.now() / 1000)); - const nonce = options.nonce ?? "nonce-agent-refresh"; - const body = JSON.stringify({ - refreshToken: options.refreshToken, - }); - const signed = await signHttpRequest({ - method: "POST", - pathWithQuery: AGENT_AUTH_REFRESH_PATH, - timestamp, - nonce, - body: new TextEncoder().encode(body), - secretKey: options.secretKey, - }); - - return { - body, - headers: { - authorization: `Claw ${options.ait}`, - "content-type": "application/json", - ...signed.headers, - }, - }; -} - -describe("GET /health", () => { - it("returns status ok with fallback version", async () => { - const res = await app.request( - "/health", - {}, - { DB: {}, ENVIRONMENT: "test" }, - ); - expect(res.status).toBe(200); - const body = await res.json(); - expect(body).toEqual({ - status: "ok", - version: "0.0.0", - environment: "test", - }); - expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); - }); - - it("returns APP_VERSION when provided by runtime bindings", async () => { - const res = await createRegistryApp().request( - "/health", - {}, - { DB: {}, ENVIRONMENT: "test", APP_VERSION: "sha-1234567890" }, - ); - - expect(res.status).toBe(200); - const body = await res.json(); - expect(body).toEqual({ - status: "ok", - version: "sha-1234567890", - environment: "test", - }); - }); - - it("returns config validation error for invalid environment", async () => { - const res = await createRegistryApp().request( - "/health", - {}, - { DB: {}, ENVIRONMENT: "local" }, - ); - expect(res.status).toBe(500); - expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); - const body = (await res.json()) as { - error: { code: string; message: string }; - }; - expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); - expect(body.error.message).toBe("Registry configuration is invalid"); - }); -}); - -describe(`GET ${REGISTRY_METADATA_PATH}`, () => { - it("returns environment metadata including resolved proxy URL", async () => { - const res = await 
createRegistryApp().request( - `https://registry.example.test${REGISTRY_METADATA_PATH}`, - {}, - { - DB: {} as D1Database, - ENVIRONMENT: "development", - APP_VERSION: "sha-meta-123", - PROXY_URL: "https://dev.proxy.clawdentity.com", - REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com", - EVENT_BUS_BACKEND: "memory", - BOOTSTRAP_SECRET: "bootstrap-secret", - REGISTRY_SIGNING_KEY: "test-signing-key", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(200); - const body = (await res.json()) as { - status: string; - environment: string; - version: string; - registryUrl: string; - proxyUrl: string; - }; - expect(body).toEqual({ - status: "ok", - environment: "development", - version: "sha-meta-123", - registryUrl: "https://registry.example.test", - proxyUrl: "https://dev.proxy.clawdentity.com", - }); - }); -}); - -describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { - it("returns 503 when bootstrap secret is not configured", async () => { - const { database } = createFakeDb([]); - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({}), - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(response.status).toBe(503); - const body = (await response.json()) as { - error: { - code: string; - message: string; - }; - }; - expect(body.error.code).toBe("ADMIN_BOOTSTRAP_DISABLED"); - expect(body.error.message).toBe("Admin bootstrap is disabled"); - }); - - it("returns 401 when bootstrap secret header is missing", async () => { - const { database } = createFakeDb([]); - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - 
body: JSON.stringify({}), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("ADMIN_BOOTSTRAP_UNAUTHORIZED"); - }); - - it("returns 401 when bootstrap secret is invalid", async () => { - const { database } = createFakeDb([]); - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "wrong-secret", - }, - body: JSON.stringify({}), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("ADMIN_BOOTSTRAP_UNAUTHORIZED"); - }); - - it("returns 400 when payload is not valid JSON", async () => { - const { database } = createFakeDb([]); - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: "{not-valid-json", - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("ADMIN_BOOTSTRAP_INVALID"); - }); - - it("returns 400 when payload fields are invalid", async () => { - const { database } = createFakeDb([]); - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({ - displayName: 123, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - 
expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("ADMIN_BOOTSTRAP_INVALID"); - }); - - it("returns 409 when an admin already exists", async () => { - const { authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({}), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(response.status).toBe(409); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("ADMIN_BOOTSTRAP_ALREADY_COMPLETED"); - }); - - it("creates admin human and PAT token once", async () => { - const { database, humanInserts, apiKeyInserts } = createFakeDb([]); - - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({ - displayName: "Primary Admin", - apiKeyName: "prod-admin-key", - }), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(response.status).toBe(201); - - const body = (await response.json()) as { - human: { - id: string; - did: string; - displayName: string; - role: string; - status: string; - }; - apiKey: { - id: string; - name: string; - token: string; - }; - }; - - expect(body.human.id).toBe("00000000000000000000000000"); - expect(body.human.did).toBe("did:claw:human:00000000000000000000000000"); - expect(body.human.displayName).toBe("Primary Admin"); - expect(body.human.role).toBe("admin"); - expect(body.human.status).toBe("active"); - expect(body.apiKey.name).toBe("prod-admin-key"); - 
expect(body.apiKey.token.startsWith("clw_pat_")).toBe(true); - - expect(humanInserts).toHaveLength(1); - expect(apiKeyInserts).toHaveLength(1); - expect(apiKeyInserts[0]?.key_prefix).toBe( - deriveApiKeyLookupPrefix(body.apiKey.token), - ); - expect(apiKeyInserts[0]?.key_hash).toBe( - await hashApiKeyToken(body.apiKey.token), - ); - }); - - it("returns PAT that authenticates GET /v1/me on same app and database", async () => { - const { database } = createFakeDb([]); - const appInstance = createRegistryApp(); - - const bootstrapResponse = await appInstance.request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({ - displayName: "Primary Admin", - apiKeyName: "prod-admin-key", - }), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(bootstrapResponse.status).toBe(201); - const bootstrapBody = (await bootstrapResponse.json()) as { - human: { - id: string; - did: string; - displayName: string; - role: string; - }; - apiKey: { - id: string; - name: string; - token: string; - }; - }; - - const meResponse = await appInstance.request( - "/v1/me", - { - headers: { - Authorization: `Bearer ${bootstrapBody.apiKey.token}`, - }, - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(meResponse.status).toBe(200); - const meBody = (await meResponse.json()) as { - human: { - id: string; - did: string; - displayName: string; - role: string; - apiKey: { - id: string; - name: string; - }; - }; - }; - expect(meBody.human).toEqual({ - id: bootstrapBody.human.id, - did: bootstrapBody.human.did, - displayName: bootstrapBody.human.displayName, - role: bootstrapBody.human.role, - apiKey: { - id: bootstrapBody.apiKey.id, - name: bootstrapBody.apiKey.name, - }, - }); - }); - - it("falls back to manual mutation when transactions are unavailable", async () => { - const { database, humanInserts, 
apiKeyInserts } = createFakeDb([], [], { - failBeginTransaction: true, - }); - - const response = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({ - displayName: "Primary Admin", - apiKeyName: "prod-admin-key", - }), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(response.status).toBe(201); - expect(humanInserts).toHaveLength(1); - expect(apiKeyInserts).toHaveLength(1); - }); - - it("rolls back admin insert when fallback api key insert fails", async () => { - const { database, humanRows } = createFakeDb([], [], { - failBeginTransaction: true, - failApiKeyInsertCount: 1, - }); - - const firstResponse = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({ - displayName: "Primary Admin", - apiKeyName: "prod-admin-key", - }), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(firstResponse.status).toBe(500); - expect(humanRows).toHaveLength(0); - - const secondResponse = await createRegistryApp().request( - ADMIN_BOOTSTRAP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-bootstrap-secret": "bootstrap-secret", - }, - body: JSON.stringify({ - displayName: "Primary Admin", - apiKeyName: "prod-admin-key", - }), - }, - { - DB: database, - ENVIRONMENT: "test", - BOOTSTRAP_SECRET: "bootstrap-secret", - }, - ); - - expect(secondResponse.status).toBe(201); - expect(humanRows).toHaveLength(1); - }); -}); - -describe("GET /.well-known/claw-keys.json", () => { - it("returns configured registry signing keys with cache headers", async () => { - const res = await createRegistryApp().request( - "/.well-known/claw-keys.json", - 
{}, - { - DB: {} as D1Database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(200); - expect(res.headers.get("Cache-Control")).toBe( - "public, max-age=300, s-maxage=300, stale-while-revalidate=60", - ); - - const body = (await res.json()) as { - keys: Array<{ - kid: string; - alg: string; - crv: string; - x: string; - status: string; - }>; - }; - expect(body.keys).toEqual([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: "active", - }, - ]); - }); - - it("supports fetch-and-verify AIT flow using published keys", async () => { - const signer = await generateEd25519Keypair(); - const claims = makeAitClaims(signer.publicKey); - const token = await signAIT({ - claims, - signerKid: "reg-key-1", - signerKeypair: signer, - }); - - const keysResponse = await createRegistryApp().request( - "/.well-known/claw-keys.json", - {}, - { - DB: {} as D1Database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - const keysBody = (await keysResponse.json()) as { - keys: Array<{ - kid: string; - alg: "EdDSA"; - crv: "Ed25519"; - x: string; - status: "active" | "revoked"; - }>; - }; - - const verifiedClaims = await verifyAIT({ - token, - expectedIssuer: claims.iss, - registryKeys: keysBody.keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP" as const, - crv: key.crv, - x: key.x, - }, - })), - }); - - expect(verifiedClaims).toEqual(claims); - }); - - it("does not verify AIT when published key status is revoked", async () => { - const signer = await generateEd25519Keypair(); - const claims = 
makeAitClaims(signer.publicKey); - const token = await signAIT({ - claims, - signerKid: "reg-key-1", - signerKeypair: signer, - }); - - const keysResponse = await createRegistryApp().request( - "/.well-known/claw-keys.json", - {}, - { - DB: {} as D1Database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "revoked", - }, - ]), - }, - ); - - const keysBody = (await keysResponse.json()) as { - keys: Array<{ - kid: string; - alg: "EdDSA"; - crv: "Ed25519"; - x: string; - status: "active" | "revoked"; - }>; - }; - - await expect( - verifyAIT({ - token, - expectedIssuer: claims.iss, - registryKeys: keysBody.keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP" as const, - crv: key.crv, - x: key.x, - }, - })), - }), - ).rejects.toThrow(/kid/i); - }); -}); - -describe("GET /v1/crl", () => { - it("returns signed CRL snapshot with cache headers", async () => { - const signer = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - const signingKeyset = JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]); - const agentIdOne = generateUlid(1700400000000); - const agentIdTwo = generateUlid(1700400000100); - const revocationJtiOne = generateUlid(1700400000200); - const revocationJtiTwo = generateUlid(1700400000300); - const { database } = createFakeDb( - [], - [ - { - id: agentIdOne, - did: makeAgentDid(agentIdOne), - ownerId: "human-1", - name: "revoked-one", - framework: "openclaw", - status: "revoked", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - { - id: agentIdTwo, - did: makeAgentDid(agentIdTwo), - ownerId: "human-2", - name: "revoked-two", - framework: "langchain", - status: "revoked", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - ], - { - revocationRows: [ - { - id: 
generateUlid(1700400000400), - jti: revocationJtiOne, - agentId: agentIdOne, - reason: null, - revokedAt: "2026-02-11T10:00:00.000Z", - }, - { - id: generateUlid(1700400000500), - jti: revocationJtiTwo, - agentId: agentIdTwo, - reason: "manual revoke", - revokedAt: "2026-02-11T11:00:00.000Z", - }, - ], - }, - ); - - const response = await appInstance.request( - "/v1/crl", - {}, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - - expect(response.status).toBe(200); - expect(response.headers.get("Cache-Control")).toBe( - "public, max-age=300, s-maxage=300, stale-while-revalidate=60", - ); - const body = (await response.json()) as { crl: string }; - expect(body.crl).toEqual(expect.any(String)); - - const keysResponse = await appInstance.request( - "/.well-known/claw-keys.json", - {}, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - const keysBody = (await keysResponse.json()) as { - keys: Array<{ - kid: string; - alg: "EdDSA"; - crv: "Ed25519"; - x: string; - status: "active" | "revoked"; - }>; - }; - - const claims = await verifyCRL({ - token: body.crl, - expectedIssuer: "https://dev.registry.clawdentity.com", - registryKeys: keysBody.keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP" as const, - crv: key.crv, - x: key.x, - }, - })), - }); - - expect(claims.revocations).toHaveLength(2); - expect(claims.revocations).toEqual( - expect.arrayContaining([ - { - jti: revocationJtiOne, - agentDid: makeAgentDid(agentIdOne), - revokedAt: Math.floor(Date.parse("2026-02-11T10:00:00.000Z") / 1000), - }, - { - jti: revocationJtiTwo, - agentDid: makeAgentDid(agentIdTwo), - reason: "manual revoke", - revokedAt: Math.floor(Date.parse("2026-02-11T11:00:00.000Z") / 1000), - }, - ]), - ); - 
expect(claims.exp).toBeGreaterThan(claims.iat); - expect(claims.exp - claims.iat).toBe(390); - }); - - it("returns 404 when no revocations are available", async () => { - const { database } = createFakeDb([]); - const response = await createRegistryApp().request( - "/v1/crl", - {}, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(404); - const body = (await response.json()) as { - error: { - code: string; - message: string; - }; - }; - expect(body.error.code).toBe("CRL_NOT_FOUND"); - expect(body.error.message).toBe("CRL snapshot is not available"); - }); - - it("returns 429 when rate limit is exceeded for the same client", async () => { - const { database } = createFakeDb([]); - const appInstance = createRegistryApp({ - rateLimit: { - crlMaxRequests: 2, - crlWindowMs: 60_000, - }, - }); - - for (let index = 0; index < 2; index += 1) { - const response = await appInstance.request( - "/v1/crl", - { - headers: { - "CF-Connecting-IP": "203.0.113.77", - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(404); - } - - const rateLimited = await appInstance.request( - "/v1/crl", - { - headers: { - "CF-Connecting-IP": "203.0.113.77", - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(rateLimited.status).toBe(429); - const body = (await rateLimited.json()) as { error: { code: string } }; - expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); - }); - - it("returns 500 when CRL signing configuration is missing", async () => { - const agentId = generateUlid(1700400000600); - const { database } = createFakeDb( - [], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "revoked-agent", - framework: "openclaw", - status: "revoked", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - ], - { - revocationRows: [ - { - id: generateUlid(1700400000700), - jti: generateUlid(1700400000800), - agentId, - reason: null, - revokedAt: "2026-02-11T12:00:00.000Z", - }, - ], - }, - ); - - 
const response = await createRegistryApp().request( - "/v1/crl", - {}, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(500); - const body = (await response.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); - expect(body.error.message).toBe("Registry configuration is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - REGISTRY_SIGNING_KEYS: expect.any(Array), - }); - }); -}); - -describe("GET /v1/resolve/:id", () => { - it("returns public profile fields without requiring auth", async () => { - const { authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700500000000); - const { database } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "resolve-me", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/resolve/${agentId}`, - {}, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(200); - const body = (await res.json()) as { - did: string; - name: string; - framework: string; - status: "active" | "revoked"; - ownerDid: string; - email?: string; - displayName?: string; - }; - expect(body).toEqual({ - did: makeAgentDid(agentId), - name: "resolve-me", - framework: "openclaw", - status: "active", - ownerDid: authRow.humanDid, - }); - expect(body).not.toHaveProperty("email"); - expect(body).not.toHaveProperty("displayName"); - }); - - it("falls back framework to openclaw when stored framework is null", async () => { - const { authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700500000100); - const { database } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "legacy-framework-null", - framework: null, - status: 
"active", - expiresAt: "2026-04-01T00:00:00.000Z", - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/resolve/${agentId}`, - {}, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(200); - const body = (await res.json()) as { framework: string }; - expect(body.framework).toBe("openclaw"); - }); - - it("returns 400 for invalid id path", async () => { - const res = await createRegistryApp().request( - "/v1/resolve/not-a-ulid", - {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_RESOLVE_INVALID_PATH"); - expect(body.error.details?.fieldErrors?.id).toEqual([ - "id must be a valid ULID", - ]); - }); - - it("returns 404 when agent does not exist", async () => { - const { authRow } = await makeValidPatContext(); - const missingAgentId = generateUlid(1700500000200); - const { database } = createFakeDb([authRow], []); - - const res = await createRegistryApp().request( - `/v1/resolve/${missingAgentId}`, - {}, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - }); - - it("returns 429 when rate limit is exceeded for the same client", async () => { - const { authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700500000300); - const { database } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "rate-limited-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - }, - ], - ); - const appInstance = createRegistryApp(); - - for (let index = 0; index < RESOLVE_RATE_LIMIT_MAX_REQUESTS; index += 1) { - const response = await appInstance.request( - `/v1/resolve/${agentId}`, - { - 
headers: { - "CF-Connecting-IP": "203.0.113.10", - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(200); - } - - const rateLimited = await appInstance.request( - `/v1/resolve/${agentId}`, - { - headers: { - "CF-Connecting-IP": "203.0.113.10", - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(rateLimited.status).toBe(429); - const body = (await rateLimited.json()) as { error: { code: string } }; - expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); - }); -}); - -describe("GET /v1/me", () => { - it("returns 401 when PAT is missing", async () => { - const res = await createRegistryApp().request( - "/v1/me", - {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 401 for invalid PAT", async () => { - const { authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/me", - { - headers: { Authorization: "Bearer clw_pat_invalid-token-value" }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_INVALID"); - }); - - it("returns 401 when PAT contains only marker", async () => { - const res = await createRegistryApp().request( - "/v1/me", - { - headers: { Authorization: "Bearer clw_pat_" }, - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_INVALID"); - }); - - it("authenticates valid PAT and injects ctx.human", async () => { - const { token: validToken, authRow } = await makeValidPatContext(); - const { database, updates } = createFakeDb([authRow]); - - const 
res = await createRegistryApp().request( - "/v1/me", - { - headers: { Authorization: `Bearer ${validToken}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(200); - const body = (await res.json()) as { - human: { - id: string; - did: string; - displayName: string; - role: string; - apiKey: { id: string; name: string }; - }; - }; - expect(body.human).toEqual({ - id: "human-1", - did: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - displayName: "Ravi", - role: "admin", - apiKey: { - id: "key-1", - name: "ci", - }, - }); - expect(updates).toHaveLength(1); - expect(updates[0]?.apiKeyId).toBe("key-1"); - }); -}); - -describe(`POST ${INVITES_PATH}`, () => { - it("returns 401 when PAT is missing", async () => { - const response = await createRegistryApp().request( - INVITES_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 403 when PAT owner is not an admin", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([ - { - ...authRow, - humanRole: "user", - }, - ]); - - const response = await createRegistryApp().request( - INVITES_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({}), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(403); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("INVITE_CREATE_FORBIDDEN"); - }); - - it("returns 400 when payload is invalid", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const response = await 
createRegistryApp().request( - INVITES_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - expiresAt: "not-an-iso-date", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - const body = (await response.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("INVITE_CREATE_INVALID"); - expect(body.error.details?.fieldErrors?.expiresAt).toEqual([ - "expiresAt must be a valid ISO-8601 datetime", - ]); - }); - - it("creates invite code and persists invite row", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database, inviteInserts } = createFakeDb([authRow]); - const expiresAt = new Date(Date.now() + 60 * 60 * 1000).toISOString(); - - const response = await createRegistryApp().request( - INVITES_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - expiresAt, - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(201); - const body = (await response.json()) as { - invite: { - id: string; - code: string; - createdBy: string; - expiresAt: string | null; - createdAt: string; - }; - }; - expect(body.invite.code.startsWith("clw_inv_")).toBe(true); - expect(body.invite.createdBy).toBe("human-1"); - expect(body.invite.expiresAt).toBe(expiresAt); - expect(body.invite.createdAt).toEqual(expect.any(String)); - - expect(inviteInserts).toHaveLength(1); - expect(inviteInserts[0]?.id).toBe(body.invite.id); - expect(inviteInserts[0]?.code).toBe(body.invite.code); - expect(inviteInserts[0]?.created_by).toBe("human-1"); - expect(inviteInserts[0]?.expires_at).toBe(expiresAt); - }); -}); - -describe(`POST ${INVITES_REDEEM_PATH}`, () => { - it("returns 400 when payload is invalid", async () => { - const response = await 
createRegistryApp().request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - const body = (await response.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("INVITE_REDEEM_INVALID"); - expect(body.error.details?.fieldErrors?.code).toEqual(["code is required"]); - }); - - it("returns 400 when invite code does not exist", async () => { - const { database } = createFakeDb([]); - - const response = await createRegistryApp().request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - code: "clw_inv_missing", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("INVITE_REDEEM_CODE_INVALID"); - }); - - it("returns 400 when invite is expired", async () => { - const { authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow], [], { - inviteRows: [ - { - id: generateUlid(1700700000000), - code: "clw_inv_expired", - createdBy: "human-1", - redeemedBy: null, - agentId: null, - expiresAt: new Date(Date.now() - 60 * 1000).toISOString(), - createdAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - - const response = await createRegistryApp().request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - code: "clw_inv_expired", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("INVITE_REDEEM_EXPIRED"); - }); - - it("returns 409 when invite is already redeemed", async () => 
{ - const { authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow], [], { - inviteRows: [ - { - id: generateUlid(1700700001000), - code: "clw_inv_redeemed", - createdBy: "human-1", - redeemedBy: "human-2", - agentId: null, - expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), - createdAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - - const response = await createRegistryApp().request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - code: "clw_inv_redeemed", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(409); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("INVITE_REDEEM_ALREADY_USED"); - }); - - it("redeems invite and returns PAT that authenticates /v1/me", async () => { - const { authRow } = await makeValidPatContext(); - const inviteCode = "clw_inv_redeem_success"; - const { database, humanInserts, apiKeyInserts, inviteRows, inviteUpdates } = - createFakeDb([authRow], [], { - inviteRows: [ - { - id: generateUlid(1700700002000), - code: inviteCode, - createdBy: "human-1", - redeemedBy: null, - agentId: null, - expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), - createdAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - const appInstance = createRegistryApp(); - - const redeemResponse = await appInstance.request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - code: inviteCode, - displayName: "Invitee Alpha", - apiKeyName: "primary-invite-key", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(redeemResponse.status).toBe(201); - const redeemBody = (await redeemResponse.json()) as { - human: { - id: string; - did: string; - displayName: string; - role: "admin" | "user"; - status: "active" | "suspended"; - }; - apiKey: { - id: string; - 
name: string; - token: string; - }; - proxyUrl: string; - }; - expect(redeemBody.human.displayName).toBe("Invitee Alpha"); - expect(redeemBody.human.role).toBe("user"); - expect(redeemBody.apiKey.name).toBe("primary-invite-key"); - expect(redeemBody.apiKey.token.startsWith("clw_pat_")).toBe(true); - expect(redeemBody.proxyUrl).toBe("https://dev.proxy.clawdentity.com"); - - expect(humanInserts).toHaveLength(1); - expect(apiKeyInserts).toHaveLength(1); - expect(apiKeyInserts[0]?.human_id).toBe(redeemBody.human.id); - expect(inviteUpdates).toHaveLength(1); - expect(inviteRows[0]?.redeemedBy).toBe(redeemBody.human.id); - - const meResponse = await appInstance.request( - "/v1/me", - { - headers: { - Authorization: `Bearer ${redeemBody.apiKey.token}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(meResponse.status).toBe(200); - const meBody = (await meResponse.json()) as { - human: { - id: string; - displayName: string; - role: "admin" | "user"; - }; - }; - expect(meBody.human.id).toBe(redeemBody.human.id); - expect(meBody.human.displayName).toBe("Invitee Alpha"); - expect(meBody.human.role).toBe("user"); - }); - - it("rolls back fallback mutations when api key insert fails", async () => { - const { authRow } = await makeValidPatContext(); - const inviteCode = "clw_inv_fallback_rollback"; - const { database, humanRows, inviteRows } = createFakeDb([authRow], [], { - failBeginTransaction: true, - failApiKeyInsertCount: 1, - inviteRows: [ - { - id: generateUlid(1700700003000), - code: inviteCode, - createdBy: "human-1", - redeemedBy: null, - agentId: null, - expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), - createdAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - const appInstance = createRegistryApp(); - - const firstResponse = await appInstance.request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - code: inviteCode, - displayName: "Fallback Invitee", - 
}), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(firstResponse.status).toBe(500); - expect(humanRows).toHaveLength(1); - expect(inviteRows[0]?.redeemedBy).toBeNull(); - - const secondResponse = await appInstance.request( - INVITES_REDEEM_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - code: inviteCode, - displayName: "Fallback Invitee", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(secondResponse.status).toBe(201); - expect(humanRows).toHaveLength(2); - expect(inviteRows[0]?.redeemedBy).toEqual(expect.any(String)); - }); -}); - -describe(`POST ${ME_API_KEYS_PATH}`, () => { - it("returns 401 when PAT is missing", async () => { - const response = await createRegistryApp().request( - ME_API_KEYS_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ name: "workstation" }), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("creates key and returns plaintext token once", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database, apiKeyInserts } = createFakeDb([authRow]); - - const response = await createRegistryApp().request( - ME_API_KEYS_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "workstation", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(201); - const body = (await response.json()) as { - apiKey: { - id: string; - name: string; - status: "active" | "revoked"; - createdAt: string; - lastUsedAt: string | null; - token: string; - }; - }; - expect(body.apiKey.name).toBe("workstation"); - expect(body.apiKey.status).toBe("active"); - 
expect(body.apiKey.token).toMatch(/^clw_pat_/); - expect(body.apiKey.lastUsedAt).toBeNull(); - - expect(apiKeyInserts).toHaveLength(1); - expect(apiKeyInserts[0]?.name).toBe("workstation"); - expect(apiKeyInserts[0]?.key_hash).not.toBe(body.apiKey.token); - expect(apiKeyInserts[0]?.key_prefix).toBe( - deriveApiKeyLookupPrefix(body.apiKey.token), - ); - }); - - it("accepts empty body and uses default key name", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database, apiKeyInserts } = createFakeDb([authRow]); - - const response = await createRegistryApp().request( - ME_API_KEYS_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(201); - const body = (await response.json()) as { - apiKey: { - name: string; - token: string; - }; - }; - expect(body.apiKey.name).toBe("api-key"); - expect(body.apiKey.token).toMatch(/^clw_pat_/); - expect(apiKeyInserts).toHaveLength(1); - expect(apiKeyInserts[0]?.name).toBe("api-key"); - }); -}); - -describe(`GET ${ME_API_KEYS_PATH}`, () => { - it("returns metadata for caller-owned keys only", async () => { - const authToken = "clw_pat_valid-token-value"; - const authTokenHash = await hashApiKeyToken(authToken); - const revokedToken = "clw_pat_revoked-token-value"; - const revokedTokenHash = await hashApiKeyToken(revokedToken); - const foreignToken = "clw_pat_foreign-token-value"; - const foreignTokenHash = await hashApiKeyToken(foreignToken); - - const authRow: FakeD1Row = { - apiKeyId: "01KJ0000000000000000000001", - keyPrefix: deriveApiKeyLookupPrefix(authToken), - keyHash: authTokenHash, - apiKeyStatus: "active", - apiKeyName: "primary", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }; - const revokedOwnedRow: FakeD1Row = { - apiKeyId: "01KJ0000000000000000000002", - keyPrefix: 
deriveApiKeyLookupPrefix(revokedToken), - keyHash: revokedTokenHash, - apiKeyStatus: "revoked", - apiKeyName: "old-laptop", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }; - const foreignRow: FakeD1Row = { - apiKeyId: "01KJ0000000000000000000003", - keyPrefix: deriveApiKeyLookupPrefix(foreignToken), - keyHash: foreignTokenHash, - apiKeyStatus: "active", - apiKeyName: "foreign", - humanId: "human-2", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB8", - humanDisplayName: "Ira", - humanRole: "user", - humanStatus: "active", - }; - const { database } = createFakeDb([authRow, revokedOwnedRow, foreignRow]); - - const response = await createRegistryApp().request( - ME_API_KEYS_PATH, - { - headers: { - Authorization: `Bearer ${authToken}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(200); - const body = (await response.json()) as { - apiKeys: Array<{ - id: string; - name: string; - status: "active" | "revoked"; - createdAt: string; - lastUsedAt: string | null; - token?: string; - keyHash?: string; - keyPrefix?: string; - }>; - }; - expect(body.apiKeys).toEqual([ - { - id: "01KJ0000000000000000000002", - name: "old-laptop", - status: "revoked", - createdAt: "2026-01-01T00:00:00.000Z", - lastUsedAt: null, - }, - { - id: "01KJ0000000000000000000001", - name: "primary", - status: "active", - createdAt: "2026-01-01T00:00:00.000Z", - lastUsedAt: expect.any(String), - }, - ]); - for (const apiKey of body.apiKeys) { - expect(apiKey).not.toHaveProperty("token"); - expect(apiKey).not.toHaveProperty("keyHash"); - expect(apiKey).not.toHaveProperty("keyPrefix"); - } - }); -}); - -describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { - it("returns 400 for invalid id path", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const response = await 
createRegistryApp().request( - `${ME_API_KEYS_PATH}/invalid-id`, - { - method: "DELETE", - headers: { - Authorization: `Bearer ${token}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("API_KEY_REVOKE_INVALID_PATH"); - }); - - it("returns 404 when key is not found for owner", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const response = await createRegistryApp().request( - `${ME_API_KEYS_PATH}/${generateUlid(1700300000000)}`, - { - method: "DELETE", - headers: { - Authorization: `Bearer ${token}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(404); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("API_KEY_NOT_FOUND"); - }); - - it("revokes target key but keeps unrelated key active", async () => { - const authToken = "clw_pat_valid-token-value"; - const authTokenHash = await hashApiKeyToken(authToken); - const rotateToken = "clw_pat_rotation-token-value"; - const rotateTokenHash = await hashApiKeyToken(rotateToken); - const targetApiKeyId = generateUlid(1700300000000); - - const authRow: FakeD1Row = { - apiKeyId: "01KJ0000000000000000001001", - keyPrefix: deriveApiKeyLookupPrefix(authToken), - keyHash: authTokenHash, - apiKeyStatus: "active", - apiKeyName: "primary", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }; - const revokableRow: FakeD1Row = { - apiKeyId: targetApiKeyId, - keyPrefix: deriveApiKeyLookupPrefix(rotateToken), - keyHash: rotateTokenHash, - apiKeyStatus: "active", - apiKeyName: "rotate-me", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - 
humanStatus: "active", - }; - const { database } = createFakeDb([authRow, revokableRow]); - const appInstance = createRegistryApp(); - - const revokeResponse = await appInstance.request( - `${ME_API_KEYS_PATH}/${targetApiKeyId}`, - { - method: "DELETE", - headers: { - Authorization: `Bearer ${authToken}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(revokeResponse.status).toBe(204); - - const revokedAuth = await appInstance.request( - "/v1/me", - { - headers: { - Authorization: `Bearer ${rotateToken}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(revokedAuth.status).toBe(401); - const revokedBody = (await revokedAuth.json()) as { - error: { code: string }; - }; - expect(revokedBody.error.code).toBe("API_KEY_REVOKED"); - - const activeAuth = await appInstance.request( - "/v1/me", - { - headers: { - Authorization: `Bearer ${authToken}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(activeAuth.status).toBe(200); - }); - - it("returns 204 when key is already revoked", async () => { - const authToken = "clw_pat_valid-token-value"; - const authTokenHash = await hashApiKeyToken(authToken); - const revokedToken = "clw_pat_already-revoked-token-value"; - const revokedTokenHash = await hashApiKeyToken(revokedToken); - const targetApiKeyId = generateUlid(1700300000100); - - const authRow: FakeD1Row = { - apiKeyId: "01KJ0000000000000000002001", - keyPrefix: deriveApiKeyLookupPrefix(authToken), - keyHash: authTokenHash, - apiKeyStatus: "active", - apiKeyName: "primary", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }; - const alreadyRevokedRow: FakeD1Row = { - apiKeyId: targetApiKeyId, - keyPrefix: deriveApiKeyLookupPrefix(revokedToken), - keyHash: revokedTokenHash, - apiKeyStatus: "revoked", - apiKeyName: "already-revoked", - humanId: "human-1", - humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", - 
humanDisplayName: "Ravi", - humanRole: "admin", - humanStatus: "active", - }; - const { database } = createFakeDb([authRow, alreadyRevokedRow]); - - const response = await createRegistryApp().request( - `${ME_API_KEYS_PATH}/${targetApiKeyId}`, - { - method: "DELETE", - headers: { - Authorization: `Bearer ${authToken}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(204); - }); -}); - -describe("GET /v1/agents", () => { - it("returns 401 when PAT is missing", async () => { - const res = await createRegistryApp().request( - "/v1/agents", - {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns only caller-owned agents with minimal fields", async () => { - const { token, authRow } = await makeValidPatContext(); - const ownerAgentNewId = generateUlid(1700100010000); - const ownerAgentOldId = generateUlid(1700100005000); - const foreignAgentId = generateUlid(1700100015000); - const { database } = createFakeDb( - [authRow], - [ - { - id: ownerAgentNewId, - did: makeAgentDid(ownerAgentNewId), - ownerId: "human-1", - name: "owner-agent-new", - framework: "openclaw", - status: "active", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - { - id: ownerAgentOldId, - did: makeAgentDid(ownerAgentOldId), - ownerId: "human-1", - name: "owner-agent-old", - framework: "langchain", - status: "revoked", - expiresAt: "2026-02-20T00:00:00.000Z", - }, - { - id: foreignAgentId, - did: makeAgentDid(foreignAgentId), - ownerId: "human-2", - name: "foreign-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - }, - ], - ); - - const res = await createRegistryApp().request( - "/v1/agents", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(200); - const body 
= (await res.json()) as { - agents: Array<{ - id: string; - did: string; - name: string; - status: "active" | "revoked"; - expires: string | null; - }>; - pagination: { - limit: number; - nextCursor: string | null; - }; - }; - - expect(body.agents).toEqual([ - { - id: ownerAgentNewId, - did: makeAgentDid(ownerAgentNewId), - name: "owner-agent-new", - status: "active", - expires: "2026-03-01T00:00:00.000Z", - }, - { - id: ownerAgentOldId, - did: makeAgentDid(ownerAgentOldId), - name: "owner-agent-old", - status: "revoked", - expires: "2026-02-20T00:00:00.000Z", - }, - ]); - expect(body.pagination).toEqual({ - limit: DEFAULT_AGENT_LIST_LIMIT, - nextCursor: null, - }); - expect(body.agents[0]).not.toHaveProperty("framework"); - expect(body.agents[0]).not.toHaveProperty("ownerId"); - }); - - it("applies status and framework filters", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentIdOne = generateUlid(1700100010000); - const agentIdTwo = generateUlid(1700100011000); - const { database } = createFakeDb( - [authRow], - [ - { - id: agentIdOne, - did: makeAgentDid(agentIdOne), - ownerId: "human-1", - name: "owner-openclaw-active", - framework: "openclaw", - status: "active", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - { - id: agentIdTwo, - did: makeAgentDid(agentIdTwo), - ownerId: "human-1", - name: "owner-langchain-revoked", - framework: "langchain", - status: "revoked", - expiresAt: "2026-03-05T00:00:00.000Z", - }, - ], - ); - - const statusRes = await createRegistryApp().request( - "/v1/agents?status=revoked", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(statusRes.status).toBe(200); - const statusBody = (await statusRes.json()) as { - agents: Array<{ - id: string; - did: string; - name: string; - status: "active" | "revoked"; - expires: string | null; - }>; - }; - expect(statusBody.agents).toEqual([ - { - id: agentIdTwo, - did: makeAgentDid(agentIdTwo), - name: 
"owner-langchain-revoked", - status: "revoked", - expires: "2026-03-05T00:00:00.000Z", - }, - ]); - - const frameworkRes = await createRegistryApp().request( - "/v1/agents?framework=openclaw", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(frameworkRes.status).toBe(200); - const frameworkBody = (await frameworkRes.json()) as { - agents: Array<{ - id: string; - did: string; - name: string; - status: "active" | "revoked"; - expires: string | null; - }>; - }; - expect(frameworkBody.agents).toEqual([ - { - id: agentIdOne, - did: makeAgentDid(agentIdOne), - name: "owner-openclaw-active", - status: "active", - expires: "2026-03-01T00:00:00.000Z", - }, - ]); - }); - - it("supports cursor pagination and returns nextCursor", async () => { - const { token, authRow } = await makeValidPatContext(); - const newestId = generateUlid(1700100012000); - const olderId = generateUlid(1700100011000); - const oldestId = generateUlid(1700100010000); - const { database } = createFakeDb( - [authRow], - [ - { - id: newestId, - did: makeAgentDid(newestId), - ownerId: "human-1", - name: "newest", - framework: "openclaw", - status: "active", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - { - id: olderId, - did: makeAgentDid(olderId), - ownerId: "human-1", - name: "older", - framework: "openclaw", - status: "active", - expiresAt: "2026-02-28T00:00:00.000Z", - }, - { - id: oldestId, - did: makeAgentDid(oldestId), - ownerId: "human-1", - name: "oldest", - framework: "openclaw", - status: "active", - expiresAt: "2026-02-27T00:00:00.000Z", - }, - ], - ); - - const firstPage = await createRegistryApp().request( - "/v1/agents?limit=1", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(firstPage.status).toBe(200); - const firstBody = (await firstPage.json()) as { - agents: Array<{ - id: string; - did: string; - name: string; - status: "active" | "revoked"; - expires: 
string | null; - }>; - pagination: { limit: number; nextCursor: string | null }; - }; - expect(firstBody.agents).toEqual([ - { - id: newestId, - did: makeAgentDid(newestId), - name: "newest", - status: "active", - expires: "2026-03-01T00:00:00.000Z", - }, - ]); - expect(firstBody.pagination).toEqual({ - limit: 1, - nextCursor: newestId, - }); - - const secondPage = await createRegistryApp().request( - `/v1/agents?limit=1&cursor=${newestId}`, - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(secondPage.status).toBe(200); - const secondBody = (await secondPage.json()) as { - agents: Array<{ - id: string; - did: string; - name: string; - status: "active" | "revoked"; - expires: string | null; - }>; - pagination: { limit: number; nextCursor: string | null }; - }; - expect(secondBody.agents).toEqual([ - { - id: olderId, - did: makeAgentDid(olderId), - name: "older", - status: "active", - expires: "2026-02-28T00:00:00.000Z", - }, - ]); - expect(secondBody.pagination).toEqual({ - limit: 1, - nextCursor: olderId, - }); - }); - - it("returns verbose query validation errors in non-production", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents?status=invalid", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_LIST_INVALID_QUERY"); - expect(body.error.message).toBe("Agent list query is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - status: expect.any(Array), - }); - }); - - it("returns generic query validation errors in production", async () => { - const { token, authRow } = await makeValidPatContext(); 
- const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents?cursor=not-a-ulid", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { - DB: database, - ENVIRONMENT: "production", - PROXY_URL: "https://proxy.clawdentity.com", - REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", - EVENT_BUS_BACKEND: "memory", - BOOTSTRAP_SECRET: "bootstrap-secret", - REGISTRY_SIGNING_KEY: "test-signing-key", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: Record; - }; - }; - expect(body.error.code).toBe("AGENT_LIST_INVALID_QUERY"); - expect(body.error.message).toBe("Request could not be processed"); - expect(body.error.details).toBeUndefined(); - }); -}); - -describe("GET /v1/agents/:id/ownership", () => { - it("returns 401 when PAT is missing", async () => { - const agentId = generateUlid(1700100017000); - const res = await createRegistryApp().request( - `/v1/agents/${agentId}/ownership`, - {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns ownsAgent=true when caller owns the agent", async () => { - const { token, authRow } = await makeValidPatContext(); - const ownedAgentId = generateUlid(1700100017100); - const { database } = createFakeDb( - [authRow], - [ - { - id: ownedAgentId, - did: makeAgentDid(ownedAgentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${ownedAgentId}/ownership`, - { - headers: 
{ Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(200); - const body = (await res.json()) as { ownsAgent: boolean }; - expect(body).toEqual({ ownsAgent: true }); - }); - - it("returns ownsAgent=false for non-owned or missing agent ids", async () => { - const { token, authRow } = await makeValidPatContext(); - const foreignAgentId = generateUlid(1700100017200); - const missingAgentId = generateUlid(1700100017300); - const { database } = createFakeDb( - [authRow], - [ - { - id: foreignAgentId, - did: makeAgentDid(foreignAgentId), - ownerId: "human-2", - name: "foreign-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-03-01T00:00:00.000Z", - }, - ], - ); - - const foreignRes = await createRegistryApp().request( - `/v1/agents/${foreignAgentId}/ownership`, - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(foreignRes.status).toBe(200); - expect((await foreignRes.json()) as { ownsAgent: boolean }).toEqual({ - ownsAgent: false, - }); - - const missingRes = await createRegistryApp().request( - `/v1/agents/${missingAgentId}/ownership`, - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(missingRes.status).toBe(200); - expect((await missingRes.json()) as { ownsAgent: boolean }).toEqual({ - ownsAgent: false, - }); - }); - - it("returns path validation errors for invalid ids", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents/not-a-ulid/ownership", - { - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - 
expect(body.error.code).toBe("AGENT_OWNERSHIP_INVALID_PATH"); - expect(body.error.message).toBe("Agent ownership path is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - id: expect.any(Array), - }); - }); -}); - -describe("internal service-auth routes", () => { - it("returns 401 when internal service credential headers are missing", async () => { - const res = await createRegistryApp().request( - INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("INTERNAL_SERVICE_UNAUTHORIZED"); - }); - - // Service-scope and payload-validation integration is covered by - // dedicated auth + route-level tests that exercise real D1-backed flows. - it("requires PAT auth for admin internal service endpoints", async () => { - const res = await createRegistryApp().request( - ADMIN_INTERNAL_SERVICES_PATH, - { - method: "GET", - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - expect(res.status).toBe(401); - }); -}); - -describe("DELETE /v1/agents/:id", () => { - it("returns 401 when PAT is missing", async () => { - const agentId = generateUlid(1700200000000); - const res = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 404 when agent does not exist", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database, agentUpdates, revocationInserts } = createFakeDb([ - authRow, - ]); - const agentId = generateUlid(1700200000100); - - const res = await 
createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { - error: { code: string; message: string }; - }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("returns 404 when agent is owned by another human", async () => { - const { token, authRow } = await makeValidPatContext(); - const foreignAgentId = generateUlid(1700200000200); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: foreignAgentId, - did: makeAgentDid(foreignAgentId), - ownerId: "human-2", - name: "foreign-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: generateUlid(1700200000201), - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${foreignAgentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("revokes owned agent and inserts revocation record", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700200000300); - const agentJti = generateUlid(1700200000301); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: agentJti, - }, - ], - ); - - const res = await 
createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(204); - expect(agentUpdates).toHaveLength(1); - expect(agentUpdates[0]).toMatchObject({ - id: agentId, - status: "revoked", - updated_at: expect.any(String), - }); - expect(revocationInserts).toHaveLength(1); - expect(revocationInserts[0]).toMatchObject({ - agent_id: agentId, - jti: agentJti, - reason: null, - revoked_at: expect.any(String), - }); - }); - - it("is idempotent for repeat revoke requests", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700200000400); - const agentJti = generateUlid(1700200000401); - const { database, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: agentJti, - }, - ], - ); - - const first = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - const second = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(first.status).toBe(204); - expect(second.status).toBe(204); - expect(revocationInserts).toHaveLength(1); - }); - - it("returns 409 when owned agent has missing current_jti", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700200000500); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - 
status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: null, - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(409); - const body = (await res.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REVOKE_INVALID_STATE"); - expect(body.error.details?.fieldErrors).toMatchObject({ - currentJti: expect.any(Array), - }); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); -}); - -describe("POST /v1/agents/:id/reissue", () => { - it("returns 401 when PAT is missing", async () => { - const agentId = generateUlid(1700300000000); - const res = await createRegistryApp().request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 404 when agent does not exist", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database, agentUpdates, revocationInserts } = createFakeDb([ - authRow, - ]); - const agentId = generateUlid(1700300000100); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("returns 404 when agent is owned by another human", async () => { - const { token, authRow } = await 
makeValidPatContext(); - const foreignAgentId = generateUlid(1700300000200); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: foreignAgentId, - did: makeAgentDid(foreignAgentId), - ownerId: "human-2", - name: "foreign-agent", - framework: "openclaw", - status: "active", - publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: generateUlid(1700300000201), - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${foreignAgentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("returns 409 when agent is revoked", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700300000300); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "revoked-agent", - framework: "openclaw", - status: "revoked", - publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: generateUlid(1700300000301), - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(409); - const body = (await res.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); - expect(body.error.details?.fieldErrors).toMatchObject({ - status: expect.any(Array), - 
}); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("returns 409 when owned agent has missing current_jti", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700300000400); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - status: "active", - publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: null, - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(409); - const body = (await res.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); - expect(body.error.details?.fieldErrors).toMatchObject({ - currentJti: expect.any(Array), - }); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("reissues owned agent, revokes old jti, and returns verifiable AIT", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700300000500); - const previousJti = generateUlid(1700300000501); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const signingKeyset = JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - publicKey: 
encodeBase64url(agentKeypair.publicKey), - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: previousJti, - }, - ], - ); - const appInstance = createRegistryApp(); - - const res = await appInstance.request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - - expect(res.status).toBe(200); - const body = (await res.json()) as { - agent: { - id: string; - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - status: string; - expiresAt: string; - updatedAt: string; - }; - ait: string; - }; - expect(body.agent.id).toBe(agentId); - expect(body.agent.did).toBe(makeAgentDid(agentId)); - expect(body.agent.ownerDid).toBe(authRow.humanDid); - expect(body.agent.framework).toBe("openclaw"); - expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); - expect(body.agent.currentJti).not.toBe(previousJti); - expect(body.agent.status).toBe("active"); - expect(body.ait).toEqual(expect.any(String)); - - expect(agentUpdates).toHaveLength(1); - expect(agentUpdates[0]).toMatchObject({ - id: agentId, - status: "active", - status_where: "active", - current_jti_where: previousJti, - matched_rows: 1, - current_jti: body.agent.currentJti, - expires_at: body.agent.expiresAt, - updated_at: body.agent.updatedAt, - }); - - expect(revocationInserts).toHaveLength(1); - expect(revocationInserts[0]).toMatchObject({ - agent_id: agentId, - jti: previousJti, - reason: "reissued", - revoked_at: expect.any(String), - }); - - const keysRes = await appInstance.request( - "/.well-known/claw-keys.json", - {}, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - const keysBody = (await 
keysRes.json()) as { - keys: Array<{ - kid: string; - alg: "EdDSA"; - crv: "Ed25519"; - x: string; - status: "active" | "revoked"; - }>; - }; - - const claims = await verifyAIT({ - token: body.ait, - expectedIssuer: "https://dev.registry.clawdentity.com", - registryKeys: keysBody.keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP" as const, - crv: key.crv, - x: key.x, - }, - })), - }); - expect(claims.sub).toBe(body.agent.did); - expect(claims.ownerDid).toBe(body.agent.ownerDid); - expect(claims.name).toBe(body.agent.name); - expect(claims.framework).toBe(body.agent.framework); - expect(claims.cnf.jwk.x).toBe(body.agent.publicKey); - expect(claims.jti).toBe(body.agent.currentJti); - expect(claims.jti).not.toBe(previousJti); - }); - - it("returns 409 when guarded reissue update matches zero rows", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700300000550); - const previousJti = generateUlid(1700300000551); - const racedJti = generateUlid(1700300000552); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const signingKeyset = JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - publicKey: encodeBase64url(agentKeypair.publicKey), - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: previousJti, - }, - ], - { - beforeFirstAgentUpdate: (rows) => { - if (rows[0]) { - rows[0].currentJti = racedJti; - } - }, - }, - ); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { - DB: 
database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - - expect(res.status).toBe(409); - const body = (await res.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); - expect(body.error.details?.fieldErrors).toMatchObject({ - currentJti: expect.any(Array), - }); - expect(agentUpdates).toHaveLength(1); - expect(agentUpdates[0]).toMatchObject({ - id: agentId, - status_where: "active", - current_jti_where: previousJti, - matched_rows: 0, - }); - expect(revocationInserts).toHaveLength(0); - }); - - it("does not extend expiry when reissuing a near-expiry token", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700300000560); - const previousJti = generateUlid(1700300000561); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const signingKeyset = JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]); - const previousExpiresAt = new Date( - Date.now() + 5 * 60 * 1000, - ).toISOString(); - const { database } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - publicKey: encodeBase64url(agentKeypair.publicKey), - status: "active", - expiresAt: previousExpiresAt, - currentJti: previousJti, - }, - ], - ); - - const appInstance = createRegistryApp(); - const res = await appInstance.request( - `/v1/agents/${agentId}/reissue`, - { - method: "POST", - headers: { Authorization: `Bearer ${token}` }, - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - - expect(res.status).toBe(200); - const body 
= (await res.json()) as { - agent: { - expiresAt: string; - }; - ait: string; - }; - expect(Date.parse(body.agent.expiresAt)).toBeLessThanOrEqual( - Date.parse(previousExpiresAt), - ); - - const claims = await verifyAIT({ - token: body.ait, - expectedIssuer: "https://dev.registry.clawdentity.com", - registryKeys: [ - { - kid: "reg-key-1", - jwk: { - kty: "OKP", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - }, - }, - ], - }); - expect(claims.exp).toBeLessThanOrEqual( - Math.floor(Date.parse(previousExpiresAt) / 1000), - ); - expect(claims.exp).toBe( - Math.floor(Date.parse(body.agent.expiresAt) / 1000), - ); - }); -}); - -describe(`POST ${AGENT_REGISTRATION_CHALLENGE_PATH}`, () => { - it("returns 401 when PAT is missing", async () => { - const res = await createRegistryApp().request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { "content-type": "application/json" }, - body: JSON.stringify({ - publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - }), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 400 when payload is invalid", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: "not-base64url", - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_INVALID"); - expect(body.error.details?.fieldErrors).toMatchObject({ - 
publicKey: expect.any(Array), - }); - }); - - it("creates and persists challenge for authenticated owner", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database, agentRegistrationChallengeInserts } = createFakeDb([ - authRow, - ]); - const agentKeypair = await generateEd25519Keypair(); - - const res = await createRegistryApp().request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(201); - const body = (await res.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - expiresAt: string; - algorithm: string; - messageTemplate: string; - }; - expect(body.challengeId).toEqual(expect.any(String)); - expect(body.nonce).toEqual(expect.any(String)); - expect(body.ownerDid).toBe(authRow.humanDid); - expect(body.algorithm).toBe("Ed25519"); - expect(body.messageTemplate).toContain("challengeId:{challengeId}"); - expect(Date.parse(body.expiresAt)).toBeGreaterThan(Date.now()); - - expect(agentRegistrationChallengeInserts).toHaveLength(1); - expect(agentRegistrationChallengeInserts[0]).toMatchObject({ - id: body.challengeId, - owner_id: "human-1", - public_key: encodeBase64url(agentKeypair.publicKey), - nonce: body.nonce, - status: "pending", - used_at: null, - }); - }); -}); - -describe("POST /v1/agents", () => { - it("returns 401 when PAT is missing", async () => { - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { "content-type": "application/json" }, - body: JSON.stringify({ - name: "agent-01", - publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - }), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: 
string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 400 when request payload is invalid", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "!!!", - framework: "", - publicKey: "not-base64url", - ttlDays: 0, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Agent registration payload is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - name: expect.any(Array), - framework: expect.any(Array), - publicKey: expect.any(Array), - ttlDays: expect.any(Array), - challengeId: expect.any(Array), - challengeSignature: expect.any(Array), - }); - }); - - it("returns verbose malformed-json error in test", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: '{"name":"agent-01"', - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: 
string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Request body must be valid JSON"); - expect(body.error.details).toBeUndefined(); - }); - - it("returns generic malformed-json error in production", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: '{"name":"agent-01"', - }, - { - DB: database, - ENVIRONMENT: "production", - PROXY_URL: "https://proxy.clawdentity.com", - REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", - EVENT_BUS_BACKEND: "memory", - BOOTSTRAP_SECRET: "bootstrap-secret", - REGISTRY_SIGNING_KEY: "test-signing-key", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Request could not be processed"); - expect(body.error.details).toBeUndefined(); - }); - - it("returns generic validation error details in production", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "!!!", - publicKey: "not-base64url", - }), - }, - { - DB: database, - ENVIRONMENT: "production", - 
PROXY_URL: "https://proxy.clawdentity.com", - REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", - EVENT_BUS_BACKEND: "memory", - BOOTSTRAP_SECRET: "bootstrap-secret", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Request could not be processed"); - expect(body.error.details).toBeUndefined(); - }); - - it("returns 400 when registration challenge is missing", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const challengeSignature = encodeEd25519SignatureBase64url( - Uint8Array.from({ length: 64 }, (_, index) => index + 1), - ); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-missing-challenge", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: generateUlid(1700000000000), - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { error: { code: string } }; - 
expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_NOT_FOUND"); - }); - - it("returns 400 when challenge signature is invalid", async () => { - const { token, authRow } = await makeValidPatContext(); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const challengeId = generateUlid(1700000010000); - const challengeNonce = encodeBase64url( - Uint8Array.from({ length: 24 }, (_, index) => index + 3), - ); - const { database } = createFakeDb([authRow], [], { - registrationChallengeRows: [ - { - id: challengeId, - ownerId: "human-1", - publicKey: encodeBase64url(agentKeypair.publicKey), - nonce: challengeNonce, - status: "pending", - expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - usedAt: null, - createdAt: "2026-01-01T00:00:00.000Z", - updatedAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - const invalidSignature = await signRegistrationChallenge({ - challengeId, - nonce: challengeNonce, - ownerDid: authRow.humanDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "wrong-name", - secretKey: agentKeypair.secretKey, - }); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-proof-invalid", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId, - challengeSignature: invalidSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_REGISTRATION_PROOF_INVALID"); - }); - - it("returns 400 when challenge has already been 
used", async () => { - const { token, authRow } = await makeValidPatContext(); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const challengeId = generateUlid(1700000011000); - const challengeNonce = encodeBase64url( - Uint8Array.from({ length: 24 }, (_, index) => index + 5), - ); - const { database } = createFakeDb([authRow], [], { - registrationChallengeRows: [ - { - id: challengeId, - ownerId: "human-1", - publicKey: encodeBase64url(agentKeypair.publicKey), - nonce: challengeNonce, - status: "used", - expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - usedAt: new Date(Date.now() - 60 * 1000).toISOString(), - createdAt: "2026-01-01T00:00:00.000Z", - updatedAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - const signature = await signRegistrationChallenge({ - challengeId, - nonce: challengeNonce, - ownerDid: authRow.humanDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "agent-challenge-replayed", - secretKey: agentKeypair.secretKey, - }); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-challenge-replayed", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId, - challengeSignature: signature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_REPLAYED"); - }); - - it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { - const { token, authRow } = 
await makeValidPatContext(); - const { - database, - agentInserts, - agentAuthSessionInserts, - agentAuthEventInserts, - } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - - const challengeResponse = await appInstance.request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - expect(challengeResponse.status).toBe(201); - const challengeBody = (await challengeResponse.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - }; - const challengeSignature = await signRegistrationChallenge({ - challengeId: challengeBody.challengeId, - nonce: challengeBody.nonce, - ownerDid: challengeBody.ownerDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "agent-01", - secretKey: agentKeypair.secretKey, - }); - - const res = await appInstance.request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-01", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: challengeBody.challengeId, - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - 
expect(res.status).toBe(201); - const body = (await res.json()) as { - agent: { - id: string; - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - ttlDays: number; - status: string; - expiresAt: string; - createdAt: string; - updatedAt: string; - }; - ait: string; - agentAuth: { - tokenType: string; - accessToken: string; - accessExpiresAt: string; - refreshToken: string; - refreshExpiresAt: string; - }; - }; - - expect(body.agent.name).toBe("agent-01"); - expect(body.agent.framework).toBe(DEFAULT_AGENT_FRAMEWORK); - expect(body.agent.ttlDays).toBe(DEFAULT_AGENT_TTL_DAYS); - expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); - expect(body.agent.status).toBe("active"); - expect(body.ait).toEqual(expect.any(String)); - expect(body.agentAuth.tokenType).toBe("Bearer"); - expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); - expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); - expect(Date.parse(body.agentAuth.accessExpiresAt)).toBeGreaterThan( - Date.now(), - ); - expect(Date.parse(body.agentAuth.refreshExpiresAt)).toBeGreaterThan( - Date.now(), - ); - - expect(agentInserts).toHaveLength(1); - const inserted = agentInserts[0]; - expect(inserted?.owner_id).toBe("human-1"); - expect(inserted?.name).toBe("agent-01"); - expect(inserted?.framework).toBe(DEFAULT_AGENT_FRAMEWORK); - expect(inserted?.public_key).toBe(encodeBase64url(agentKeypair.publicKey)); - expect(inserted?.current_jti).toBe(body.agent.currentJti); - expect(inserted?.expires_at).toBe(body.agent.expiresAt); - expect(agentAuthSessionInserts).toHaveLength(1); - expect(agentAuthSessionInserts[0]).toMatchObject({ - agent_id: body.agent.id, - status: "active", - }); - expect(agentAuthEventInserts).toHaveLength(1); - expect(agentAuthEventInserts[0]).toMatchObject({ - agent_id: body.agent.id, - event_type: "issued", - }); - }); - - it("returns verifiable AIT using published keyset", async () 
=> { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - const signingKeyset = JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]); - - const challengeResponse = await appInstance.request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - expect(challengeResponse.status).toBe(201); - const challengeBody = (await challengeResponse.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - }; - const challengeSignature = await signRegistrationChallenge({ - challengeId: challengeBody.challengeId, - nonce: challengeBody.nonce, - ownerDid: challengeBody.ownerDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "agent-registry-verify", - framework: "openclaw", - ttlDays: 10, - secretKey: agentKeypair.secretKey, - }); - - const registerResponse = await appInstance.request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-registry-verify", - framework: "openclaw", - ttlDays: 10, - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: challengeBody.challengeId, - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - - 
expect(registerResponse.status).toBe(201); - const registerBody = (await registerResponse.json()) as { - agent: { - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - }; - ait: string; - }; - - const keysResponse = await appInstance.request( - "/.well-known/claw-keys.json", - {}, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - const keysBody = (await keysResponse.json()) as { - keys: Array<{ - kid: string; - alg: "EdDSA"; - crv: "Ed25519"; - x: string; - status: "active" | "revoked"; - }>; - }; - - const claims = await verifyAIT({ - token: registerBody.ait, - expectedIssuer: "https://dev.registry.clawdentity.com", - registryKeys: keysBody.keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP" as const, - crv: key.crv, - x: key.x, - }, - })), - }); - - expect(claims.iss).toBe("https://dev.registry.clawdentity.com"); - expect(claims.sub).toBe(registerBody.agent.did); - expect(claims.ownerDid).toBe(registerBody.agent.ownerDid); - expect(claims.name).toBe(registerBody.agent.name); - expect(claims.framework).toBe(registerBody.agent.framework); - expect(claims.cnf.jwk.x).toBe(registerBody.agent.publicKey); - expect(claims.jti).toBe(registerBody.agent.currentJti); - }); - - it("returns 500 when signer secret does not match any active published key", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const wrongPublishedKey = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - - const challengeResponse = await appInstance.request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": 
"application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - expect(challengeResponse.status).toBe(201); - const challengeBody = (await challengeResponse.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - }; - const challengeSignature = await signRegistrationChallenge({ - challengeId: challengeBody.challengeId, - nonce: challengeBody.nonce, - ownerDid: challengeBody.ownerDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "agent-signer-mismatch", - secretKey: agentKeypair.secretKey, - }); - - const res = await appInstance.request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-signer-mismatch", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: challengeBody.challengeId, - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-2", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(wrongPublishedKey.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(500); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); - expect(body.error.message).toBe("Registry configuration is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - REGISTRY_SIGNING_KEYS: expect.any(Array), - }); - }); -}); - -describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { - async function buildRefreshFixture() { - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const nowSeconds = Math.floor(Date.now() / 1000); - const agentId = 
generateUlid(Date.now()); - const agentDid = makeAgentDid(agentId); - const aitJti = generateUlid(Date.now() + 1); - const refreshToken = - "clw_rft_fixture_refresh_token_value_for_registry_tests"; - const refreshTokenHash = await hashAgentToken(refreshToken); - const ait = await signAIT({ - claims: { - iss: "https://dev.registry.clawdentity.com", - sub: agentDid, - ownerDid: makeHumanDid(generateUlid(Date.now() + 2)), - name: "agent-refresh-01", - framework: "openclaw", - cnf: { - jwk: { - kty: "OKP", - crv: "Ed25519", - x: encodeBase64url(agentKeypair.publicKey), - }, - }, - iat: nowSeconds - 10, - nbf: nowSeconds - 10, - exp: nowSeconds + 3600, - jti: aitJti, - }, - signerKid: "reg-key-1", - signerKeypair: signer, - }); - - return { - signer, - agentKeypair, - agentId, - agentDid, - aitJti, - ait, - refreshToken, - refreshTokenHash, - }; - } - - it("rotates refresh credentials and returns a new agent auth bundle", async () => { - const fixture = await buildRefreshFixture(); - const nowIso = new Date().toISOString(); - const refreshExpiresAt = new Date(Date.now() + 60_000).toISOString(); - const { - database, - agentAuthSessionRows, - agentAuthSessionUpdates, - agentAuthEventInserts, - } = createFakeDb( - [], - [ - { - id: fixture.agentId, - did: fixture.agentDid, - ownerId: "human-1", - name: "agent-refresh-01", - framework: "openclaw", - publicKey: encodeBase64url(fixture.agentKeypair.publicKey), - status: "active", - expiresAt: null, - currentJti: fixture.aitJti, - }, - ], - { - agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 3), - agentId: fixture.agentId, - refreshKeyHash: fixture.refreshTokenHash, - refreshKeyPrefix: deriveRefreshTokenLookupPrefix( - fixture.refreshToken, - ), - refreshIssuedAt: nowIso, - refreshExpiresAt, - refreshLastUsedAt: null, - accessKeyHash: "old-access-hash", - accessKeyPrefix: "clw_agt_old", - accessIssuedAt: nowIso, - accessExpiresAt: refreshExpiresAt, - accessLastUsedAt: null, - status: "active", - revokedAt: null, - 
createdAt: nowIso, - updatedAt: nowIso, - }, - ], - }, - ); - const request = await createSignedAgentRefreshRequest({ - ait: fixture.ait, - secretKey: fixture.agentKeypair.secretKey, - refreshToken: fixture.refreshToken, - }); - - const response = await createRegistryApp().request( - AGENT_AUTH_REFRESH_PATH, - { - method: "POST", - headers: request.headers, - body: request.body, - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(fixture.signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(response.status).toBe(200); - const body = (await response.json()) as { - agentAuth: { - tokenType: string; - accessToken: string; - accessExpiresAt: string; - refreshToken: string; - refreshExpiresAt: string; - }; - }; - expect(body.agentAuth.tokenType).toBe("Bearer"); - expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); - expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); - expect(body.agentAuth.refreshToken).not.toBe(fixture.refreshToken); - expect(agentAuthSessionUpdates).toHaveLength(1); - expect(agentAuthSessionRows[0]?.refreshKeyPrefix).toBe( - deriveRefreshTokenLookupPrefix(body.agentAuth.refreshToken), - ); - expect(agentAuthEventInserts).toEqual( - expect.arrayContaining([ - expect.objectContaining({ event_type: "refreshed" }), - ]), - ); - }); - - it("rejects refresh when session is revoked", async () => { - const fixture = await buildRefreshFixture(); - const nowIso = new Date().toISOString(); - const request = await createSignedAgentRefreshRequest({ - ait: fixture.ait, - secretKey: fixture.agentKeypair.secretKey, - refreshToken: fixture.refreshToken, - }); - const { database } = createFakeDb( - [], - [ - { - id: fixture.agentId, - did: fixture.agentDid, - ownerId: "human-1", - name: "agent-refresh-01", - framework: "openclaw", - 
publicKey: encodeBase64url(fixture.agentKeypair.publicKey), - status: "active", - expiresAt: null, - currentJti: fixture.aitJti, - }, - ], - { - agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 4), - agentId: fixture.agentId, - refreshKeyHash: fixture.refreshTokenHash, - refreshKeyPrefix: deriveRefreshTokenLookupPrefix( - fixture.refreshToken, - ), - refreshIssuedAt: nowIso, - refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), - refreshLastUsedAt: null, - accessKeyHash: "old-access-hash", - accessKeyPrefix: "clw_agt_old", - accessIssuedAt: nowIso, - accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), - accessLastUsedAt: null, - status: "revoked", - revokedAt: nowIso, - createdAt: nowIso, - updatedAt: nowIso, - }, - ], - }, - ); - - const response = await createRegistryApp().request( - AGENT_AUTH_REFRESH_PATH, - { - method: "POST", - headers: request.headers, - body: request.body, - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(fixture.signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_AUTH_REFRESH_REVOKED"); - }); - - it("marks expired refresh credentials revoked and returns expired error", async () => { - const fixture = await buildRefreshFixture(); - const nowIso = new Date().toISOString(); - const { - database, - agentAuthSessionRows, - agentAuthEventInserts, - agentAuthSessionUpdates, - } = createFakeDb( - [], - [ - { - id: fixture.agentId, - did: fixture.agentDid, - ownerId: "human-1", - name: "agent-refresh-01", - framework: "openclaw", - publicKey: encodeBase64url(fixture.agentKeypair.publicKey), - status: "active", - expiresAt: null, - currentJti: fixture.aitJti, - }, - ], - { - 
agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 5), - agentId: fixture.agentId, - refreshKeyHash: fixture.refreshTokenHash, - refreshKeyPrefix: deriveRefreshTokenLookupPrefix( - fixture.refreshToken, - ), - refreshIssuedAt: nowIso, - refreshExpiresAt: new Date(Date.now() - 60_000).toISOString(), - refreshLastUsedAt: null, - accessKeyHash: "old-access-hash", - accessKeyPrefix: "clw_agt_old", - accessIssuedAt: nowIso, - accessExpiresAt: new Date(Date.now() - 60_000).toISOString(), - accessLastUsedAt: null, - status: "active", - revokedAt: null, - createdAt: nowIso, - updatedAt: nowIso, - }, - ], - }, - ); - const request = await createSignedAgentRefreshRequest({ - ait: fixture.ait, - secretKey: fixture.agentKeypair.secretKey, - refreshToken: fixture.refreshToken, - }); - - const response = await createRegistryApp().request( - AGENT_AUTH_REFRESH_PATH, - { - method: "POST", - headers: request.headers, - body: request.body, - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(fixture.signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_AUTH_REFRESH_EXPIRED"); - expect(agentAuthSessionRows[0]?.status).toBe("revoked"); - expect(agentAuthSessionUpdates).toHaveLength(1); - expect(agentAuthEventInserts).toEqual( - expect.arrayContaining([ - expect.objectContaining({ event_type: "revoked" }), - ]), - ); - }); - - it("returns 429 when refresh rate limit is exceeded for the same client", async () => { - const appInstance = createRegistryApp({ - rateLimit: { - agentAuthRefreshMaxRequests: 2, - agentAuthRefreshWindowMs: 60_000, - }, - }); - - for (let index = 0; index < 2; index += 1) { - const response = await appInstance.request( - 
AGENT_AUTH_REFRESH_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "CF-Connecting-IP": "203.0.113.88", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - } - - const rateLimited = await appInstance.request( - AGENT_AUTH_REFRESH_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "CF-Connecting-IP": "203.0.113.88", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(rateLimited.status).toBe(429); - const body = (await rateLimited.json()) as { error: { code: string } }; - expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); - }); -}); - -describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { - it("validates active access token and updates access_last_used_at", async () => { - const nowIso = new Date().toISOString(); - const accessToken = "clw_agt_fixture_access_token_value_for_registry_tests"; - const accessTokenHash = await hashAgentToken(accessToken); - const agentId = generateUlid(Date.now() + 200); - const agentDid = makeAgentDid(agentId); - const aitJti = generateUlid(Date.now() + 201); - const { database, agentAuthSessionRows, agentAuthSessionUpdates } = - createFakeDb( - [], - [ - { - id: agentId, - did: agentDid, - ownerId: "human-1", - name: "agent-access-validate-01", - framework: "openclaw", - publicKey: encodeBase64url(new Uint8Array(32)), - status: "active", - expiresAt: null, - currentJti: aitJti, - }, - ], - { - agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 202), - agentId, - refreshKeyHash: "refresh-hash", - refreshKeyPrefix: "clw_rft_fixture", - refreshIssuedAt: nowIso, - refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), - refreshLastUsedAt: null, - accessKeyHash: accessTokenHash, - accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), - accessIssuedAt: nowIso, - accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), - 
accessLastUsedAt: null, - status: "active", - revokedAt: null, - createdAt: nowIso, - updatedAt: nowIso, - }, - ], - }, - ); - - const response = await createRegistryApp().request( - AGENT_AUTH_VALIDATE_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-claw-agent-access": accessToken, - }, - body: JSON.stringify({ - agentDid, - aitJti, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(response.status).toBe(204); - expect(agentAuthSessionUpdates).toHaveLength(1); - expect(agentAuthSessionRows[0]?.accessLastUsedAt).not.toBeNull(); - }); - - it("rejects validation when x-claw-agent-access is missing", async () => { - const response = await createRegistryApp().request( - AGENT_AUTH_VALIDATE_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - agentDid: makeAgentDid(generateUlid(Date.now() + 203)), - aitJti: generateUlid(Date.now() + 204), - }), - }, - { - DB: {}, - ENVIRONMENT: "test", - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_UNAUTHORIZED"); - }); - - it("rejects validation for expired access token", async () => { - const nowIso = new Date().toISOString(); - const accessToken = - "clw_agt_fixture_expired_access_token_for_registry_tests"; - const accessTokenHash = await hashAgentToken(accessToken); - const agentId = generateUlid(Date.now() + 205); - const agentDid = makeAgentDid(agentId); - const aitJti = generateUlid(Date.now() + 206); - const { database } = createFakeDb( - [], - [ - { - id: agentId, - did: agentDid, - ownerId: "human-1", - name: "agent-access-validate-expired", - framework: "openclaw", - publicKey: encodeBase64url(new Uint8Array(32)), - status: "active", - expiresAt: null, - currentJti: aitJti, - }, - ], - { - agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 207), - agentId, - refreshKeyHash: 
"refresh-hash", - refreshKeyPrefix: "clw_rft_fixture", - refreshIssuedAt: nowIso, - refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), - refreshLastUsedAt: null, - accessKeyHash: accessTokenHash, - accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), - accessIssuedAt: nowIso, - accessExpiresAt: new Date(Date.now() - 60_000).toISOString(), - accessLastUsedAt: null, - status: "active", - revokedAt: null, - createdAt: nowIso, - updatedAt: nowIso, - }, - ], - }, - ); - - const response = await createRegistryApp().request( - AGENT_AUTH_VALIDATE_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-claw-agent-access": accessToken, - }, - body: JSON.stringify({ - agentDid, - aitJti, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_EXPIRED"); - }); - - it("rejects validation when guarded session update matches zero rows", async () => { - const nowIso = new Date().toISOString(); - const accessToken = - "clw_agt_fixture_race_window_access_token_for_registry_tests"; - const accessTokenHash = await hashAgentToken(accessToken); - const agentId = generateUlid(Date.now() + 208); - const agentDid = makeAgentDid(agentId); - const aitJti = generateUlid(Date.now() + 209); - const { database, agentAuthSessionUpdates } = createFakeDb( - [], - [ - { - id: agentId, - did: agentDid, - ownerId: "human-1", - name: "agent-access-validate-race", - framework: "openclaw", - publicKey: encodeBase64url(new Uint8Array(32)), - status: "active", - expiresAt: null, - currentJti: aitJti, - }, - ], - { - agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 210), - agentId, - refreshKeyHash: "refresh-hash", - refreshKeyPrefix: "clw_rft_fixture", - refreshIssuedAt: nowIso, - refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), - refreshLastUsedAt: null, - 
accessKeyHash: accessTokenHash, - accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), - accessIssuedAt: nowIso, - accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), - accessLastUsedAt: null, - status: "active", - revokedAt: null, - createdAt: nowIso, - updatedAt: nowIso, - }, - ], - beforeFirstAgentAuthSessionUpdate: (rows) => { - if (rows[0]) { - rows[0].status = "revoked"; - } - }, - }, - ); - - const response = await createRegistryApp().request( - AGENT_AUTH_VALIDATE_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "x-claw-agent-access": accessToken, - }, - body: JSON.stringify({ - agentDid, - aitJti, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(response.status).toBe(401); - const body = (await response.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_UNAUTHORIZED"); - expect(agentAuthSessionUpdates).toEqual( - expect.arrayContaining([expect.objectContaining({ matched_rows: 0 })]), - ); - }); - - it("returns 429 when validate rate limit is exceeded for the same client", async () => { - const appInstance = createRegistryApp({ - rateLimit: { - agentAuthValidateMaxRequests: 2, - agentAuthValidateWindowMs: 60_000, - }, - }); - - for (let index = 0; index < 2; index += 1) { - const response = await appInstance.request( - AGENT_AUTH_VALIDATE_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "CF-Connecting-IP": "203.0.113.99", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(response.status).toBe(400); - } - - const rateLimited = await appInstance.request( - AGENT_AUTH_VALIDATE_PATH, - { - method: "POST", - headers: { - "content-type": "application/json", - "CF-Connecting-IP": "203.0.113.99", - }, - body: JSON.stringify({}), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(rateLimited.status).toBe(429); - const body = (await 
rateLimited.json()) as { error: { code: string } }; - expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); - }); -}); - -describe("DELETE /v1/agents/:id/auth/revoke", () => { - it("revokes active session for owned agent and is idempotent", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(Date.now() + 10); - const nowIso = new Date().toISOString(); - const { database, agentAuthSessionRows, agentAuthEventInserts } = - createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: authRow.humanId, - name: "agent-auth-revoke", - framework: "openclaw", - publicKey: encodeBase64url(new Uint8Array(32)), - status: "active", - expiresAt: null, - currentJti: generateUlid(Date.now() + 11), - }, - ], - { - agentAuthSessionRows: [ - { - id: generateUlid(Date.now() + 12), - agentId, - refreshKeyHash: "refresh-hash", - refreshKeyPrefix: "clw_rft_test", - refreshIssuedAt: nowIso, - refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), - refreshLastUsedAt: null, - accessKeyHash: "access-hash", - accessKeyPrefix: "clw_agt_test", - accessIssuedAt: nowIso, - accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), - accessLastUsedAt: null, - status: "active", - revokedAt: null, - createdAt: nowIso, - updatedAt: nowIso, - }, - ], - }, - ); - - const appInstance = createRegistryApp(); - const firstResponse = await appInstance.request( - `/v1/agents/${agentId}/auth/revoke`, - { - method: "DELETE", - headers: { - Authorization: `Bearer ${token}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(firstResponse.status).toBe(204); - expect(agentAuthSessionRows[0]?.status).toBe("revoked"); - expect(agentAuthEventInserts).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - event_type: "revoked", - reason: "owner_auth_revoke", - }), - ]), - ); - - const secondResponse = await appInstance.request( - `/v1/agents/${agentId}/auth/revoke`, - { - method: "DELETE", - headers: { 
- Authorization: `Bearer ${token}`, - }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - expect(secondResponse.status).toBe(204); - }); -}); diff --git a/apps/registry/src/server.test/AGENTS.md b/apps/registry/src/server.test/AGENTS.md new file mode 100644 index 0000000..144c117 --- /dev/null +++ b/apps/registry/src/server.test/AGENTS.md @@ -0,0 +1,36 @@ +# AGENTS.md - `apps/registry/src/server.test` + +## Purpose +- Keep registry server tests modular, deterministic, and easy to maintain. +- Preserve behavior while allowing focused edits by route/concern. + +## Organization Rules +- Keep each `*.test.ts` file focused on one route or tightly related route concern. +- Keep each `*.test.ts` file under 800 lines. +- Keep `helpers.ts` as a thin public export shim used by tests. +- Place shared helper implementation in `helpers/**` with focused modules (`claims`, `crypto`, `pat`, `db/*`); do not duplicate harness logic across test files. +- Prefer adding small helper functions in the appropriate `helpers/**` module when setup repeats 3+ times. +- Keep every file under `server.test` (including `helpers/**`) below 800 lines. + +## Change Rules +- Preserve existing assertions and response contracts when refactoring test structure. +- When adding tests, keep test names explicit about endpoint, auth mode, and expected status. +- Favor deterministic fixtures (fixed IDs/timestamps/nonces) over random values. +- Avoid coupling tests to execution order; each test must be independently runnable. 
+ +## Route Coverage +- Maintain separate coverage for: + - health/metadata/admin bootstrap + - key publication + CRL + - resolve + me + - invites + - me API keys + - agents listing/ownership/internal auth + - agent lifecycle (delete/reissue) + - registration challenge/create + - agent auth refresh/validate/revoke + +## Validation +- For server test changes, run: + - `pnpm -C apps/registry typecheck` + - `pnpm -C apps/registry test -- server` diff --git a/apps/registry/src/server.test/agent-auth-refresh.test.ts b/apps/registry/src/server.test/agent-auth-refresh.test.ts new file mode 100644 index 0000000..8ef0655 --- /dev/null +++ b/apps/registry/src/server.test/agent-auth-refresh.test.ts @@ -0,0 +1,374 @@ +import { + AGENT_AUTH_REFRESH_PATH, + encodeBase64url, + generateUlid, + makeAgentDid, + makeHumanDid, +} from "@clawdentity/protocol"; +import { generateEd25519Keypair, signAIT } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { + deriveRefreshTokenLookupPrefix, + hashAgentToken, +} from "../auth/agent-auth-token.js"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, createSignedAgentRefreshRequest } from "./helpers.js"; + +describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { + async function buildRefreshFixture() { + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const nowSeconds = Math.floor(Date.now() / 1000); + const agentId = generateUlid(Date.now()); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 1); + const refreshToken = + "clw_rft_fixture_refresh_token_value_for_registry_tests"; + const refreshTokenHash = await hashAgentToken(refreshToken); + const ait = await signAIT({ + claims: { + iss: "https://dev.registry.clawdentity.com", + sub: agentDid, + ownerDid: makeHumanDid(generateUlid(Date.now() + 2)), + name: "agent-refresh-01", + framework: "openclaw", + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + 
x: encodeBase64url(agentKeypair.publicKey), + }, + }, + iat: nowSeconds - 10, + nbf: nowSeconds - 10, + exp: nowSeconds + 3600, + jti: aitJti, + }, + signerKid: "reg-key-1", + signerKeypair: signer, + }); + + return { + signer, + agentKeypair, + agentId, + agentDid, + aitJti, + ait, + refreshToken, + refreshTokenHash, + }; + } + + it("rotates refresh credentials and returns a new agent auth bundle", async () => { + const fixture = await buildRefreshFixture(); + const nowIso = new Date().toISOString(); + const refreshExpiresAt = new Date(Date.now() + 60_000).toISOString(); + const { + database, + agentAuthSessionRows, + agentAuthSessionUpdates, + agentAuthEventInserts, + } = createFakeDb( + [], + [ + { + id: fixture.agentId, + did: fixture.agentDid, + ownerId: "human-1", + name: "agent-refresh-01", + framework: "openclaw", + publicKey: encodeBase64url(fixture.agentKeypair.publicKey), + status: "active", + expiresAt: null, + currentJti: fixture.aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 3), + agentId: fixture.agentId, + refreshKeyHash: fixture.refreshTokenHash, + refreshKeyPrefix: deriveRefreshTokenLookupPrefix( + fixture.refreshToken, + ), + refreshIssuedAt: nowIso, + refreshExpiresAt, + refreshLastUsedAt: null, + accessKeyHash: "old-access-hash", + accessKeyPrefix: "clw_agt_old", + accessIssuedAt: nowIso, + accessExpiresAt: refreshExpiresAt, + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + const request = await createSignedAgentRefreshRequest({ + ait: fixture.ait, + secretKey: fixture.agentKeypair.secretKey, + refreshToken: fixture.refreshToken, + }); + + const response = await createRegistryApp().request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: request.headers, + body: request.body, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), + REGISTRY_SIGNING_KEYS: 
JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(fixture.signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + agentAuth: { + tokenType: string; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; + }; + }; + expect(body.agentAuth.tokenType).toBe("Bearer"); + expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); + expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); + expect(body.agentAuth.refreshToken).not.toBe(fixture.refreshToken); + expect(agentAuthSessionUpdates).toHaveLength(1); + expect(agentAuthSessionRows[0]?.refreshKeyPrefix).toBe( + deriveRefreshTokenLookupPrefix(body.agentAuth.refreshToken), + ); + expect(agentAuthEventInserts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ event_type: "refreshed" }), + ]), + ); + }); + + it("rejects refresh when session is revoked", async () => { + const fixture = await buildRefreshFixture(); + const nowIso = new Date().toISOString(); + const request = await createSignedAgentRefreshRequest({ + ait: fixture.ait, + secretKey: fixture.agentKeypair.secretKey, + refreshToken: fixture.refreshToken, + }); + const { database } = createFakeDb( + [], + [ + { + id: fixture.agentId, + did: fixture.agentDid, + ownerId: "human-1", + name: "agent-refresh-01", + framework: "openclaw", + publicKey: encodeBase64url(fixture.agentKeypair.publicKey), + status: "active", + expiresAt: null, + currentJti: fixture.aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 4), + agentId: fixture.agentId, + refreshKeyHash: fixture.refreshTokenHash, + refreshKeyPrefix: deriveRefreshTokenLookupPrefix( + fixture.refreshToken, + ), + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: "old-access-hash", + 
accessKeyPrefix: "clw_agt_old", + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "revoked", + revokedAt: nowIso, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: request.headers, + body: request.body, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(fixture.signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_REFRESH_REVOKED"); + }); + + it("marks expired refresh credentials revoked and returns expired error", async () => { + const fixture = await buildRefreshFixture(); + const nowIso = new Date().toISOString(); + const { + database, + agentAuthSessionRows, + agentAuthEventInserts, + agentAuthSessionUpdates, + } = createFakeDb( + [], + [ + { + id: fixture.agentId, + did: fixture.agentDid, + ownerId: "human-1", + name: "agent-refresh-01", + framework: "openclaw", + publicKey: encodeBase64url(fixture.agentKeypair.publicKey), + status: "active", + expiresAt: null, + currentJti: fixture.aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 5), + agentId: fixture.agentId, + refreshKeyHash: fixture.refreshTokenHash, + refreshKeyPrefix: deriveRefreshTokenLookupPrefix( + fixture.refreshToken, + ), + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() - 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: "old-access-hash", + accessKeyPrefix: "clw_agt_old", + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() - 60_000).toISOString(), + accessLastUsedAt: null, + 
status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + const request = await createSignedAgentRefreshRequest({ + ait: fixture.ait, + secretKey: fixture.agentKeypair.secretKey, + refreshToken: fixture.refreshToken, + }); + + const response = await createRegistryApp().request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: request.headers, + body: request.body, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(fixture.signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_REFRESH_EXPIRED"); + expect(agentAuthSessionRows[0]?.status).toBe("revoked"); + expect(agentAuthSessionUpdates).toHaveLength(1); + expect(agentAuthEventInserts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ event_type: "revoked" }), + ]), + ); + }); + + it("returns 429 when refresh rate limit is exceeded for the same client", async () => { + const appInstance = createRegistryApp({ + rateLimit: { + agentAuthRefreshMaxRequests: 2, + agentAuthRefreshWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await appInstance.request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.88", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + } + + const rateLimited = await appInstance.request( + AGENT_AUTH_REFRESH_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.88", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, 
ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); +}); diff --git a/apps/registry/src/server.test/agent-auth-validate-revoke.test.ts b/apps/registry/src/server.test/agent-auth-validate-revoke.test.ts new file mode 100644 index 0000000..f885c94 --- /dev/null +++ b/apps/registry/src/server.test/agent-auth-validate-revoke.test.ts @@ -0,0 +1,384 @@ +import { + AGENT_AUTH_VALIDATE_PATH, + encodeBase64url, + generateUlid, + makeAgentDid, +} from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { + deriveAccessTokenLookupPrefix, + hashAgentToken, +} from "../auth/agent-auth-token.js"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { + it("validates active access token and updates access_last_used_at", async () => { + const nowIso = new Date().toISOString(); + const accessToken = "clw_agt_fixture_access_token_value_for_registry_tests"; + const accessTokenHash = await hashAgentToken(accessToken); + const agentId = generateUlid(Date.now() + 200); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 201); + const { database, agentAuthSessionRows, agentAuthSessionUpdates } = + createFakeDb( + [], + [ + { + id: agentId, + did: agentDid, + ownerId: "human-1", + name: "agent-access-validate-01", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 202), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_fixture", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: accessTokenHash, 
+ accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid, + aitJti, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(204); + expect(agentAuthSessionUpdates).toHaveLength(1); + expect(agentAuthSessionRows[0]?.accessLastUsedAt).not.toBeNull(); + }); + + it("rejects validation when x-claw-agent-access is missing", async () => { + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + agentDid: makeAgentDid(generateUlid(Date.now() + 203)), + aitJti: generateUlid(Date.now() + 204), + }), + }, + { + DB: {}, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_UNAUTHORIZED"); + }); + + it("rejects validation for expired access token", async () => { + const nowIso = new Date().toISOString(); + const accessToken = + "clw_agt_fixture_expired_access_token_for_registry_tests"; + const accessTokenHash = await hashAgentToken(accessToken); + const agentId = generateUlid(Date.now() + 205); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 206); + const { database } = createFakeDb( + [], + [ + { + id: agentId, + did: agentDid, + ownerId: "human-1", + name: "agent-access-validate-expired", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + 
expiresAt: null, + currentJti: aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 207), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_fixture", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: accessTokenHash, + accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() - 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid, + aitJti, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_EXPIRED"); + }); + + it("rejects validation when guarded session update matches zero rows", async () => { + const nowIso = new Date().toISOString(); + const accessToken = + "clw_agt_fixture_race_window_access_token_for_registry_tests"; + const accessTokenHash = await hashAgentToken(accessToken); + const agentId = generateUlid(Date.now() + 208); + const agentDid = makeAgentDid(agentId); + const aitJti = generateUlid(Date.now() + 209); + const { database, agentAuthSessionUpdates } = createFakeDb( + [], + [ + { + id: agentId, + did: agentDid, + ownerId: "human-1", + name: "agent-access-validate-race", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: aitJti, + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 210), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: 
"clw_rft_fixture", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: accessTokenHash, + accessKeyPrefix: deriveAccessTokenLookupPrefix(accessToken), + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + beforeFirstAgentAuthSessionUpdate: (rows) => { + if (rows[0]) { + rows[0].status = "revoked"; + } + }, + }, + ); + + const response = await createRegistryApp().request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-claw-agent-access": accessToken, + }, + body: JSON.stringify({ + agentDid, + aitJti, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_AUTH_VALIDATE_UNAUTHORIZED"); + expect(agentAuthSessionUpdates).toEqual( + expect.arrayContaining([expect.objectContaining({ matched_rows: 0 })]), + ); + }); + + it("returns 429 when validate rate limit is exceeded for the same client", async () => { + const appInstance = createRegistryApp({ + rateLimit: { + agentAuthValidateMaxRequests: 2, + agentAuthValidateWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await appInstance.request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.99", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + } + + const rateLimited = await appInstance.request( + AGENT_AUTH_VALIDATE_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "CF-Connecting-IP": "203.0.113.99", + }, + body: JSON.stringify({}), 
+ }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); +}); + +describe("DELETE /v1/agents/:id/auth/revoke", () => { + it("revokes active session for owned agent and is idempotent", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(Date.now() + 10); + const nowIso = new Date().toISOString(); + const { database, agentAuthSessionRows, agentAuthEventInserts } = + createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: authRow.humanId, + name: "agent-auth-revoke", + framework: "openclaw", + publicKey: encodeBase64url(new Uint8Array(32)), + status: "active", + expiresAt: null, + currentJti: generateUlid(Date.now() + 11), + }, + ], + { + agentAuthSessionRows: [ + { + id: generateUlid(Date.now() + 12), + agentId, + refreshKeyHash: "refresh-hash", + refreshKeyPrefix: "clw_rft_test", + refreshIssuedAt: nowIso, + refreshExpiresAt: new Date(Date.now() + 60_000).toISOString(), + refreshLastUsedAt: null, + accessKeyHash: "access-hash", + accessKeyPrefix: "clw_agt_test", + accessIssuedAt: nowIso, + accessExpiresAt: new Date(Date.now() + 60_000).toISOString(), + accessLastUsedAt: null, + status: "active", + revokedAt: null, + createdAt: nowIso, + updatedAt: nowIso, + }, + ], + }, + ); + + const appInstance = createRegistryApp(); + const firstResponse = await appInstance.request( + `/v1/agents/${agentId}/auth/revoke`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(firstResponse.status).toBe(204); + expect(agentAuthSessionRows[0]?.status).toBe("revoked"); + expect(agentAuthEventInserts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + event_type: "revoked", + reason: "owner_auth_revoke", + }), + ]), + ); + + 
const secondResponse = await appInstance.request( + `/v1/agents/${agentId}/auth/revoke`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(secondResponse.status).toBe(204); + }); +}); diff --git a/apps/registry/src/server.test/agent-registration-challenge.test.ts b/apps/registry/src/server.test/agent-registration-challenge.test.ts new file mode 100644 index 0000000..66bd257 --- /dev/null +++ b/apps/registry/src/server.test/agent-registration-challenge.test.ts @@ -0,0 +1,111 @@ +import { + AGENT_REGISTRATION_CHALLENGE_PATH, + encodeBase64url, +} from "@clawdentity/protocol"; +import { generateEd25519Keypair } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe(`POST ${AGENT_REGISTRATION_CHALLENGE_PATH}`, () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 400 when payload is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: "not-base64url", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); 
+ const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_INVALID"); + expect(body.error.details?.fieldErrors).toMatchObject({ + publicKey: expect.any(Array), + }); + }); + + it("creates and persists challenge for authenticated owner", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentRegistrationChallengeInserts } = createFakeDb([ + authRow, + ]); + const agentKeypair = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(201); + const body = (await res.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + expiresAt: string; + algorithm: string; + messageTemplate: string; + }; + expect(body.challengeId).toEqual(expect.any(String)); + expect(body.nonce).toEqual(expect.any(String)); + expect(body.ownerDid).toBe(authRow.humanDid); + expect(body.algorithm).toBe("Ed25519"); + expect(body.messageTemplate).toContain("challengeId:{challengeId}"); + expect(Date.parse(body.expiresAt)).toBeGreaterThan(Date.now()); + + expect(agentRegistrationChallengeInserts).toHaveLength(1); + expect(agentRegistrationChallengeInserts[0]).toMatchObject({ + id: body.challengeId, + owner_id: "human-1", + public_key: encodeBase64url(agentKeypair.publicKey), + nonce: body.nonce, + status: "pending", + used_at: null, + }); + }); +}); diff --git a/apps/registry/src/server.test/agent-registration-create.test.ts b/apps/registry/src/server.test/agent-registration-create.test.ts new file mode 100644 index 0000000..80315ce --- /dev/null +++ 
b/apps/registry/src/server.test/agent-registration-create.test.ts @@ -0,0 +1,779 @@ +import { + AGENT_REGISTRATION_CHALLENGE_PATH, + encodeBase64url, + generateUlid, +} from "@clawdentity/protocol"; +import { + encodeEd25519SignatureBase64url, + generateEd25519Keypair, + verifyAIT, +} from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { + DEFAULT_AGENT_FRAMEWORK, + DEFAULT_AGENT_TTL_DAYS, +} from "../agent-registration.js"; +import { createRegistryApp } from "../server.js"; +import { + createFakeDb, + makeValidPatContext, + signRegistrationChallenge, +} from "./helpers.js"; + +describe("POST /v1/agents", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + name: "agent-01", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 400 when request payload is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "!!!", + framework: "", + publicKey: "not-base64url", + ttlDays: 0, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + 
expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Agent registration payload is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + name: expect.any(Array), + framework: expect.any(Array), + publicKey: expect.any(Array), + ttlDays: expect.any(Array), + challengeId: expect.any(Array), + challengeSignature: expect.any(Array), + }); + }); + + it("returns verbose malformed-json error in test", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: '{"name":"agent-01"', + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request body must be valid JSON"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns generic malformed-json error in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: '{"name":"agent-01"', + }, + { + DB: database, + ENVIRONMENT: "production", + PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + 
REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns generic validation error details in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "!!!", + publicKey: "not-base64url", + }), + }, + { + DB: database, + ENVIRONMENT: "production", + PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns 400 when registration challenge is missing", async () => { + const { token, authRow } = 
await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const challengeSignature = encodeEd25519SignatureBase64url( + Uint8Array.from({ length: 64 }, (_, index) => index + 1), + ); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-missing-challenge", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: generateUlid(1700000000000), + challengeSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_NOT_FOUND"); + }); + + it("returns 400 when challenge signature is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const challengeId = generateUlid(1700000010000); + const challengeNonce = encodeBase64url( + Uint8Array.from({ length: 24 }, (_, index) => index + 3), + ); + const { database } = createFakeDb([authRow], [], { + registrationChallengeRows: [ + { + id: challengeId, + ownerId: "human-1", + publicKey: encodeBase64url(agentKeypair.publicKey), + nonce: challengeNonce, + status: "pending", + expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), + usedAt: null, + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const invalidSignature = await 
signRegistrationChallenge({ + challengeId, + nonce: challengeNonce, + ownerDid: authRow.humanDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "wrong-name", + secretKey: agentKeypair.secretKey, + }); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-proof-invalid", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId, + challengeSignature: invalidSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_PROOF_INVALID"); + }); + + it("returns 400 when challenge has already been used", async () => { + const { token, authRow } = await makeValidPatContext(); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const challengeId = generateUlid(1700000011000); + const challengeNonce = encodeBase64url( + Uint8Array.from({ length: 24 }, (_, index) => index + 5), + ); + const { database } = createFakeDb([authRow], [], { + registrationChallengeRows: [ + { + id: challengeId, + ownerId: "human-1", + publicKey: encodeBase64url(agentKeypair.publicKey), + nonce: challengeNonce, + status: "used", + expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), + usedAt: new Date(Date.now() - 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const signature = await signRegistrationChallenge({ + challengeId, + nonce: challengeNonce, + ownerDid: authRow.humanDid, + 
publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-challenge-replayed", + secretKey: agentKeypair.secretKey, + }); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-challenge-replayed", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId, + challengeSignature: signature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_REPLAYED"); + }); + + it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { + const { token, authRow } = await makeValidPatContext(); + const { + database, + agentInserts, + agentAuthSessionInserts, + agentAuthEventInserts, + } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + + const challengeResponse = await appInstance.request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + 
expect(challengeResponse.status).toBe(201); + const challengeBody = (await challengeResponse.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + }; + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-01", + secretKey: agentKeypair.secretKey, + }); + + const res = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-01", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(201); + const body = (await res.json()) as { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: string; + expiresAt: string; + createdAt: string; + updatedAt: string; + }; + ait: string; + agentAuth: { + tokenType: string; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; + }; + }; + + expect(body.agent.name).toBe("agent-01"); + expect(body.agent.framework).toBe(DEFAULT_AGENT_FRAMEWORK); + expect(body.agent.ttlDays).toBe(DEFAULT_AGENT_TTL_DAYS); + expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(body.agent.status).toBe("active"); + expect(body.ait).toEqual(expect.any(String)); + expect(body.agentAuth.tokenType).toBe("Bearer"); + 
expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); + expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); + expect(Date.parse(body.agentAuth.accessExpiresAt)).toBeGreaterThan( + Date.now(), + ); + expect(Date.parse(body.agentAuth.refreshExpiresAt)).toBeGreaterThan( + Date.now(), + ); + + expect(agentInserts).toHaveLength(1); + const inserted = agentInserts[0]; + expect(inserted?.owner_id).toBe("human-1"); + expect(inserted?.name).toBe("agent-01"); + expect(inserted?.framework).toBe(DEFAULT_AGENT_FRAMEWORK); + expect(inserted?.public_key).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(inserted?.current_jti).toBe(body.agent.currentJti); + expect(inserted?.expires_at).toBe(body.agent.expiresAt); + expect(agentAuthSessionInserts).toHaveLength(1); + expect(agentAuthSessionInserts[0]).toMatchObject({ + agent_id: body.agent.id, + status: "active", + }); + expect(agentAuthEventInserts).toHaveLength(1); + expect(agentAuthEventInserts[0]).toMatchObject({ + agent_id: body.agent.id, + event_type: "issued", + }); + }); + + it("returns verifiable AIT using published keyset", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + + const challengeResponse = await appInstance.request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: 
signingKeyset, + }, + ); + expect(challengeResponse.status).toBe(201); + const challengeBody = (await challengeResponse.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + }; + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-registry-verify", + framework: "openclaw", + ttlDays: 10, + secretKey: agentKeypair.secretKey, + }); + + const registerResponse = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-registry-verify", + framework: "openclaw", + ttlDays: 10, + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(registerResponse.status).toBe(201); + const registerBody = (await registerResponse.json()) as { + agent: { + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + }; + ait: string; + }; + + const keysResponse = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyAIT({ + token: registerBody.ait, + expectedIssuer: "https://dev.registry.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: 
key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(claims.iss).toBe("https://dev.registry.clawdentity.com"); + expect(claims.sub).toBe(registerBody.agent.did); + expect(claims.ownerDid).toBe(registerBody.agent.ownerDid); + expect(claims.name).toBe(registerBody.agent.name); + expect(claims.framework).toBe(registerBody.agent.framework); + expect(claims.cnf.jwk.x).toBe(registerBody.agent.publicKey); + expect(claims.jti).toBe(registerBody.agent.currentJti); + }); + + it("returns 500 when signer secret does not match any active published key", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const signer = await generateEd25519Keypair(); + const wrongPublishedKey = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + + const challengeResponse = await appInstance.request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: encodeBase64url(agentKeypair.publicKey), + }), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + expect(challengeResponse.status).toBe(201); + const challengeBody = (await challengeResponse.json()) as { + challengeId: string; + nonce: string; + ownerDid: string; + }; + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-signer-mismatch", + secretKey: agentKeypair.secretKey, + }); + + const res = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-signer-mismatch", + publicKey: 
encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-2", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(wrongPublishedKey.publicKey), + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(500); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + REGISTRY_SIGNING_KEYS: expect.any(Array), + }); + }); +}); diff --git a/apps/registry/src/server.test/agents-delete-reissue.test.ts b/apps/registry/src/server.test/agents-delete-reissue.test.ts new file mode 100644 index 0000000..ac6d18b --- /dev/null +++ b/apps/registry/src/server.test/agents-delete-reissue.test.ts @@ -0,0 +1,683 @@ +import { + encodeBase64url, + generateUlid, + makeAgentDid, +} from "@clawdentity/protocol"; +import { generateEd25519Keypair, verifyAIT } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe("DELETE /v1/agents/:id", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700200000000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 404 when agent does not exist", async () => { + const { token, authRow } = await 
makeValidPatContext(); + const { database, agentUpdates, revocationInserts } = createFakeDb([ + authRow, + ]); + const agentId = generateUlid(1700200000100); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string; message: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 404 when agent is owned by another human", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700200000200); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700200000201), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("revokes owned agent and inserts revocation record", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000300); + const agentJti = generateUlid(1700200000301); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: 
"owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: agentJti, + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(204); + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status: "revoked", + updated_at: expect.any(String), + }); + expect(revocationInserts).toHaveLength(1); + expect(revocationInserts[0]).toMatchObject({ + agent_id: agentId, + jti: agentJti, + reason: null, + revoked_at: expect.any(String), + }); + }); + + it("is idempotent for repeat revoke requests", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000400); + const agentJti = generateUlid(1700200000401); + const { database, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: agentJti, + }, + ], + ); + + const first = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + const second = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(first.status).toBe(204); + expect(second.status).toBe(204); + expect(revocationInserts).toHaveLength(1); + }); + + it("returns 409 when owned agent has missing current_jti", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000500); + const { database, agentUpdates, revocationInserts 
} = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: null, + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REVOKE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + currentJti: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); +}); + +describe("POST /v1/agents/:id/reissue", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700300000000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 404 when agent does not exist", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentUpdates, revocationInserts } = createFakeDb([ + authRow, + ]); + const agentId = generateUlid(1700300000100); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + 
expect(revocationInserts).toHaveLength(0); + }); + + it("returns 404 when agent is owned by another human", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700300000200); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700300000201), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 409 when agent is revoked", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000300); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "revoked-agent", + framework: "openclaw", + status: "revoked", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700300000301), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; 
+ }; + expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + status: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 409 when owned agent has missing current_jti", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000400); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: null, + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + currentJti: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("reissues owned agent, revokes old jti, and returns verifiable AIT", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000500); + const previousJti = generateUlid(1700300000501); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const { database, agentUpdates, revocationInserts } = createFakeDb( + 
[authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + publicKey: encodeBase64url(agentKeypair.publicKey), + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: previousJti, + }, + ], + ); + const appInstance = createRegistryApp(); + + const res = await appInstance.request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + status: string; + expiresAt: string; + updatedAt: string; + }; + ait: string; + }; + expect(body.agent.id).toBe(agentId); + expect(body.agent.did).toBe(makeAgentDid(agentId)); + expect(body.agent.ownerDid).toBe(authRow.humanDid); + expect(body.agent.framework).toBe("openclaw"); + expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(body.agent.currentJti).not.toBe(previousJti); + expect(body.agent.status).toBe("active"); + expect(body.ait).toEqual(expect.any(String)); + + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status: "active", + status_where: "active", + current_jti_where: previousJti, + matched_rows: 1, + current_jti: body.agent.currentJti, + expires_at: body.agent.expiresAt, + updated_at: body.agent.updatedAt, + }); + + expect(revocationInserts).toHaveLength(1); + expect(revocationInserts[0]).toMatchObject({ + agent_id: agentId, + jti: previousJti, + reason: "reissued", + revoked_at: expect.any(String), + }); + + const keysRes = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + { + DB: database, + ENVIRONMENT: "test", 
+ REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + const keysBody = (await keysRes.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyAIT({ + token: body.ait, + expectedIssuer: "https://dev.registry.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + expect(claims.sub).toBe(body.agent.did); + expect(claims.ownerDid).toBe(body.agent.ownerDid); + expect(claims.name).toBe(body.agent.name); + expect(claims.framework).toBe(body.agent.framework); + expect(claims.cnf.jwk.x).toBe(body.agent.publicKey); + expect(claims.jti).toBe(body.agent.currentJti); + expect(claims.jti).not.toBe(previousJti); + }); + + it("returns 409 when guarded reissue update matches zero rows", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000550); + const previousJti = generateUlid(1700300000551); + const racedJti = generateUlid(1700300000552); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + publicKey: encodeBase64url(agentKeypair.publicKey), + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: previousJti, + }, + ], + { + beforeFirstAgentUpdate: (rows) => { + if (rows[0]) { + rows[0].currentJti = racedJti; + } + }, + }, + ); + + const res = await 
createRegistryApp().request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(res.status).toBe(409); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REISSUE_INVALID_STATE"); + expect(body.error.details?.fieldErrors).toMatchObject({ + currentJti: expect.any(Array), + }); + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status_where: "active", + current_jti_where: previousJti, + matched_rows: 0, + }); + expect(revocationInserts).toHaveLength(0); + }); + + it("does not extend expiry when reissuing a near-expiry token", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700300000560); + const previousJti = generateUlid(1700300000561); + const signer = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const previousExpiresAt = new Date( + Date.now() + 5 * 60 * 1000, + ).toISOString(); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + publicKey: encodeBase64url(agentKeypair.publicKey), + status: "active", + expiresAt: previousExpiresAt, + currentJti: previousJti, + }, + ], + ); + + const appInstance = createRegistryApp(); + const res = await appInstance.request( + `/v1/agents/${agentId}/reissue`, + { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "test", + 
REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + agent: { + expiresAt: string; + }; + ait: string; + }; + expect(Date.parse(body.agent.expiresAt)).toBeLessThanOrEqual( + Date.parse(previousExpiresAt), + ); + + const claims = await verifyAIT({ + token: body.ait, + expectedIssuer: "https://dev.registry.clawdentity.com", + registryKeys: [ + { + kid: "reg-key-1", + jwk: { + kty: "OKP", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + }, + }, + ], + }); + expect(claims.exp).toBeLessThanOrEqual( + Math.floor(Date.parse(previousExpiresAt) / 1000), + ); + expect(claims.exp).toBe( + Math.floor(Date.parse(body.agent.expiresAt) / 1000), + ); + }); +}); diff --git a/apps/registry/src/server.test/agents-list-ownership.test.ts b/apps/registry/src/server.test/agents-list-ownership.test.ts new file mode 100644 index 0000000..9a29671 --- /dev/null +++ b/apps/registry/src/server.test/agents-list-ownership.test.ts @@ -0,0 +1,522 @@ +import { + ADMIN_INTERNAL_SERVICES_PATH, + generateUlid, + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, + makeAgentDid, +} from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { DEFAULT_AGENT_LIST_LIMIT } from "../agent-list.js"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe("GET /v1/agents", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/agents", + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns only caller-owned agents with minimal fields", async () => { + const { token, authRow } = await makeValidPatContext(); + const ownerAgentNewId = 
generateUlid(1700100010000); + const ownerAgentOldId = generateUlid(1700100005000); + const foreignAgentId = generateUlid(1700100015000); + const { database } = createFakeDb( + [authRow], + [ + { + id: ownerAgentNewId, + did: makeAgentDid(ownerAgentNewId), + ownerId: "human-1", + name: "owner-agent-new", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: ownerAgentOldId, + did: makeAgentDid(ownerAgentOldId), + ownerId: "human-1", + name: "owner-agent-old", + framework: "langchain", + status: "revoked", + expiresAt: "2026-02-20T00:00:00.000Z", + }, + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + "/v1/agents", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + pagination: { + limit: number; + nextCursor: string | null; + }; + }; + + expect(body.agents).toEqual([ + { + id: ownerAgentNewId, + did: makeAgentDid(ownerAgentNewId), + name: "owner-agent-new", + status: "active", + expires: "2026-03-01T00:00:00.000Z", + }, + { + id: ownerAgentOldId, + did: makeAgentDid(ownerAgentOldId), + name: "owner-agent-old", + status: "revoked", + expires: "2026-02-20T00:00:00.000Z", + }, + ]); + expect(body.pagination).toEqual({ + limit: DEFAULT_AGENT_LIST_LIMIT, + nextCursor: null, + }); + expect(body.agents[0]).not.toHaveProperty("framework"); + expect(body.agents[0]).not.toHaveProperty("ownerId"); + }); + + it("applies status and framework filters", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentIdOne = generateUlid(1700100010000); + 
const agentIdTwo = generateUlid(1700100011000); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentIdOne, + did: makeAgentDid(agentIdOne), + ownerId: "human-1", + name: "owner-openclaw-active", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: agentIdTwo, + did: makeAgentDid(agentIdTwo), + ownerId: "human-1", + name: "owner-langchain-revoked", + framework: "langchain", + status: "revoked", + expiresAt: "2026-03-05T00:00:00.000Z", + }, + ], + ); + + const statusRes = await createRegistryApp().request( + "/v1/agents?status=revoked", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(statusRes.status).toBe(200); + const statusBody = (await statusRes.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + }; + expect(statusBody.agents).toEqual([ + { + id: agentIdTwo, + did: makeAgentDid(agentIdTwo), + name: "owner-langchain-revoked", + status: "revoked", + expires: "2026-03-05T00:00:00.000Z", + }, + ]); + + const frameworkRes = await createRegistryApp().request( + "/v1/agents?framework=openclaw", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(frameworkRes.status).toBe(200); + const frameworkBody = (await frameworkRes.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + }; + expect(frameworkBody.agents).toEqual([ + { + id: agentIdOne, + did: makeAgentDid(agentIdOne), + name: "owner-openclaw-active", + status: "active", + expires: "2026-03-01T00:00:00.000Z", + }, + ]); + }); + + it("supports cursor pagination and returns nextCursor", async () => { + const { token, authRow } = await makeValidPatContext(); + const newestId = generateUlid(1700100012000); + const olderId = generateUlid(1700100011000); + const 
oldestId = generateUlid(1700100010000); + const { database } = createFakeDb( + [authRow], + [ + { + id: newestId, + did: makeAgentDid(newestId), + ownerId: "human-1", + name: "newest", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: olderId, + did: makeAgentDid(olderId), + ownerId: "human-1", + name: "older", + framework: "openclaw", + status: "active", + expiresAt: "2026-02-28T00:00:00.000Z", + }, + { + id: oldestId, + did: makeAgentDid(oldestId), + ownerId: "human-1", + name: "oldest", + framework: "openclaw", + status: "active", + expiresAt: "2026-02-27T00:00:00.000Z", + }, + ], + ); + + const firstPage = await createRegistryApp().request( + "/v1/agents?limit=1", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(firstPage.status).toBe(200); + const firstBody = (await firstPage.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + pagination: { limit: number; nextCursor: string | null }; + }; + expect(firstBody.agents).toEqual([ + { + id: newestId, + did: makeAgentDid(newestId), + name: "newest", + status: "active", + expires: "2026-03-01T00:00:00.000Z", + }, + ]); + expect(firstBody.pagination).toEqual({ + limit: 1, + nextCursor: newestId, + }); + + const secondPage = await createRegistryApp().request( + `/v1/agents?limit=1&cursor=${newestId}`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(secondPage.status).toBe(200); + const secondBody = (await secondPage.json()) as { + agents: Array<{ + id: string; + did: string; + name: string; + status: "active" | "revoked"; + expires: string | null; + }>; + pagination: { limit: number; nextCursor: string | null }; + }; + expect(secondBody.agents).toEqual([ + { + id: olderId, + did: makeAgentDid(olderId), + name: "older", + status: "active", + expires: 
"2026-02-28T00:00:00.000Z", + }, + ]); + expect(secondBody.pagination).toEqual({ + limit: 1, + nextCursor: olderId, + }); + }); + + it("returns verbose query validation errors in non-production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents?status=invalid", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_LIST_INVALID_QUERY"); + expect(body.error.message).toBe("Agent list query is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + status: expect.any(Array), + }); + }); + + it("returns generic query validation errors in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents?cursor=not-a-ulid", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { + DB: database, + ENVIRONMENT: "production", + PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: Record; + }; + }; + expect(body.error.code).toBe("AGENT_LIST_INVALID_QUERY"); + expect(body.error.message).toBe("Request could not be processed"); + 
expect(body.error.details).toBeUndefined(); + }); +}); + +describe("GET /v1/agents/:id/ownership", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700100017000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}/ownership`, + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns ownsAgent=true when caller owns the agent", async () => { + const { token, authRow } = await makeValidPatContext(); + const ownedAgentId = generateUlid(1700100017100); + const { database } = createFakeDb( + [authRow], + [ + { + id: ownedAgentId, + did: makeAgentDid(ownedAgentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${ownedAgentId}/ownership`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { ownsAgent: boolean }; + expect(body).toEqual({ ownsAgent: true }); + }); + + it("returns ownsAgent=false for non-owned or missing agent ids", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700100017200); + const missingAgentId = generateUlid(1700100017300); + const { database } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + ); + + const foreignRes = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}/ownership`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, 
ENVIRONMENT: "test" }, + ); + expect(foreignRes.status).toBe(200); + expect((await foreignRes.json()) as { ownsAgent: boolean }).toEqual({ + ownsAgent: false, + }); + + const missingRes = await createRegistryApp().request( + `/v1/agents/${missingAgentId}/ownership`, + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(missingRes.status).toBe(200); + expect((await missingRes.json()) as { ownsAgent: boolean }).toEqual({ + ownsAgent: false, + }); + }); + + it("returns path validation errors for invalid ids", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents/not-a-ulid/ownership", + { + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_OWNERSHIP_INVALID_PATH"); + expect(body.error.message).toBe("Agent ownership path is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + id: expect.any(Array), + }); + }); +}); + +describe("internal service-auth routes", () => { + it("returns 401 when internal service credential headers are missing", async () => { + const res = await createRegistryApp().request( + INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("INTERNAL_SERVICE_UNAUTHORIZED"); + }); + + // Service-scope and payload-validation integration is covered by + // dedicated auth + route-level tests that exercise real D1-backed 
flows. + it("requires PAT auth for admin internal service endpoints", async () => { + const res = await createRegistryApp().request( + ADMIN_INTERNAL_SERVICES_PATH, + { + method: "GET", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + expect(res.status).toBe(401); + }); +}); diff --git a/apps/registry/src/server.test/health-metadata-admin.test.ts b/apps/registry/src/server.test/health-metadata-admin.test.ts new file mode 100644 index 0000000..31b1d26 --- /dev/null +++ b/apps/registry/src/server.test/health-metadata-admin.test.ts @@ -0,0 +1,476 @@ +import { + ADMIN_BOOTSTRAP_PATH, + REGISTRY_METADATA_PATH, +} from "@clawdentity/protocol"; +import { REQUEST_ID_HEADER } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { + deriveApiKeyLookupPrefix, + hashApiKeyToken, +} from "../auth/api-key-auth.js"; +import app, { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe("GET /health", () => { + it("returns status ok with fallback version", async () => { + const res = await app.request( + "/health", + {}, + { DB: {}, ENVIRONMENT: "test" }, + ); + expect(res.status).toBe(200); + const body = await res.json(); + expect(body).toEqual({ + status: "ok", + version: "0.0.0", + environment: "test", + }); + expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); + }); + + it("returns APP_VERSION when provided by runtime bindings", async () => { + const res = await createRegistryApp().request( + "/health", + {}, + { DB: {}, ENVIRONMENT: "test", APP_VERSION: "sha-1234567890" }, + ); + + expect(res.status).toBe(200); + const body = await res.json(); + expect(body).toEqual({ + status: "ok", + version: "sha-1234567890", + environment: "test", + }); + }); + + it("returns config validation error for invalid environment", async () => { + const res = await createRegistryApp().request( + "/health", + {}, + { DB: {}, ENVIRONMENT: "local" }, + ); + expect(res.status).toBe(500); + 
expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); + const body = (await res.json()) as { + error: { code: string; message: string }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is invalid"); + }); +}); + +describe(`GET ${REGISTRY_METADATA_PATH}`, () => { + it("returns environment metadata including resolved proxy URL", async () => { + const res = await createRegistryApp().request( + `https://registry.example.test${REGISTRY_METADATA_PATH}`, + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "development", + APP_VERSION: "sha-meta-123", + PROXY_URL: "https://dev.proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + status: string; + environment: string; + version: string; + registryUrl: string; + proxyUrl: string; + }; + expect(body).toEqual({ + status: "ok", + environment: "development", + version: "sha-meta-123", + registryUrl: "https://registry.example.test", + proxyUrl: "https://dev.proxy.clawdentity.com", + }); + }); +}); + +describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { + it("returns 503 when bootstrap secret is not configured", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(response.status).toBe(503); + const body = (await response.json()) as { + error: { + code: 
string; + message: string; + }; + }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_DISABLED"); + expect(body.error.message).toBe("Admin bootstrap is disabled"); + }); + + it("returns 401 when bootstrap secret header is missing", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_UNAUTHORIZED"); + }); + + it("returns 401 when bootstrap secret is invalid", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "wrong-secret", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_UNAUTHORIZED"); + }); + + it("returns 400 when payload is not valid JSON", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: "{not-valid-json", + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_INVALID"); + }); + + it("returns 400 when 
payload fields are invalid", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: 123, + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_INVALID"); + }); + + it("returns 409 when an admin already exists", async () => { + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({}), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(409); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("ADMIN_BOOTSTRAP_ALREADY_COMPLETED"); + }); + + it("creates admin human and PAT token once", async () => { + const { database, humanInserts, apiKeyInserts } = createFakeDb([]); + + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(201); + + const body = (await response.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + status: string; + }; + 
apiKey: { + id: string; + name: string; + token: string; + }; + }; + + expect(body.human.id).toBe("00000000000000000000000000"); + expect(body.human.did).toBe("did:claw:human:00000000000000000000000000"); + expect(body.human.displayName).toBe("Primary Admin"); + expect(body.human.role).toBe("admin"); + expect(body.human.status).toBe("active"); + expect(body.apiKey.name).toBe("prod-admin-key"); + expect(body.apiKey.token.startsWith("clw_pat_")).toBe(true); + + expect(humanInserts).toHaveLength(1); + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.key_prefix).toBe( + deriveApiKeyLookupPrefix(body.apiKey.token), + ); + expect(apiKeyInserts[0]?.key_hash).toBe( + await hashApiKeyToken(body.apiKey.token), + ); + }); + + it("returns PAT that authenticates GET /v1/me on same app and database", async () => { + const { database } = createFakeDb([]); + const appInstance = createRegistryApp(); + + const bootstrapResponse = await appInstance.request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(bootstrapResponse.status).toBe(201); + const bootstrapBody = (await bootstrapResponse.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + }; + apiKey: { + id: string; + name: string; + token: string; + }; + }; + + const meResponse = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${bootstrapBody.apiKey.token}`, + }, + }, + { + DB: database, + ENVIRONMENT: "test", + }, + ); + + expect(meResponse.status).toBe(200); + const meBody = (await meResponse.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + apiKey: { + id: string; + name: string; + }; + }; + }; + 
expect(meBody.human).toEqual({ + id: bootstrapBody.human.id, + did: bootstrapBody.human.did, + displayName: bootstrapBody.human.displayName, + role: bootstrapBody.human.role, + apiKey: { + id: bootstrapBody.apiKey.id, + name: bootstrapBody.apiKey.name, + }, + }); + }); + + it("falls back to manual mutation when transactions are unavailable", async () => { + const { database, humanInserts, apiKeyInserts } = createFakeDb([], [], { + failBeginTransaction: true, + }); + + const response = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(response.status).toBe(201); + expect(humanInserts).toHaveLength(1); + expect(apiKeyInserts).toHaveLength(1); + }); + + it("rolls back admin insert when fallback api key insert fails", async () => { + const { database, humanRows } = createFakeDb([], [], { + failBeginTransaction: true, + failApiKeyInsertCount: 1, + }); + + const firstResponse = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(firstResponse.status).toBe(500); + expect(humanRows).toHaveLength(0); + + const secondResponse = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: 
database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(secondResponse.status).toBe(201); + expect(humanRows).toHaveLength(1); + }); +}); diff --git a/apps/registry/src/server.test/helpers.ts b/apps/registry/src/server.test/helpers.ts new file mode 100644 index 0000000..fe04a6b --- /dev/null +++ b/apps/registry/src/server.test/helpers.ts @@ -0,0 +1,8 @@ +export type { FakeD1Row } from "./helpers/index.js"; +export { + createFakeDb, + createSignedAgentRefreshRequest, + makeAitClaims, + makeValidPatContext, + signRegistrationChallenge, +} from "./helpers/index.js"; diff --git a/apps/registry/src/server.test/helpers/AGENTS.md b/apps/registry/src/server.test/helpers/AGENTS.md new file mode 100644 index 0000000..32e775b --- /dev/null +++ b/apps/registry/src/server.test/helpers/AGENTS.md @@ -0,0 +1,21 @@ +# AGENTS.md - `apps/registry/src/server.test/helpers` + +## Purpose +- Keep server-test helpers deterministic, modular, and easy to evolve without changing test behavior. + +## Structure Rules +- Keep `../helpers.ts` as a stable export-only shim for tests. +- Group helper implementations by concern: + - `claims.ts`, `crypto.ts`, `pat.ts` for top-level helper APIs. + - `db/types.ts`, `db/parse.ts`, `db/resolvers.ts`, `db/mock.ts`, `db/run-handlers*.ts` for fake D1 behavior. +- Keep each helper file under 800 lines; split further when a file approaches the limit. + +## Behavior Rules +- Preserve SQL parsing/matching semantics in fake DB helpers unless a test explicitly requires a change. +- Reuse shared parser/resolver/run-handler utilities; avoid duplicated query handling logic. +- Keep fixtures deterministic (fixed timestamps/IDs/nonces) and avoid randomization. 
+ +## Validation +- For helper changes, run: + - `pnpm -C apps/registry typecheck` + - `pnpm -C apps/registry test -- server` diff --git a/apps/registry/src/server.test/helpers/claims.ts b/apps/registry/src/server.test/helpers/claims.ts new file mode 100644 index 0000000..3fff019 --- /dev/null +++ b/apps/registry/src/server.test/helpers/claims.ts @@ -0,0 +1,16 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { buildTestAitClaims } from "@clawdentity/sdk/testing"; + +export function makeAitClaims(publicKey: Uint8Array) { + return buildTestAitClaims({ + publicKeyX: encodeBase64url(publicKey), + issuer: "https://registry.clawdentity.dev", + nowSeconds: Math.floor(Date.now() / 1000), + ttlSeconds: 3600, + nbfSkewSeconds: 5, + seedMs: 1_700_100_000_000, + name: "agent-registry-01", + framework: "openclaw", + description: "registry key publishing verification path", + }); +} diff --git a/apps/registry/src/server.test/helpers/crypto.ts b/apps/registry/src/server.test/helpers/crypto.ts new file mode 100644 index 0000000..94ae9b4 --- /dev/null +++ b/apps/registry/src/server.test/helpers/crypto.ts @@ -0,0 +1,69 @@ +import { + AGENT_AUTH_REFRESH_PATH, + canonicalizeAgentRegistrationProof, +} from "@clawdentity/protocol"; +import { + encodeEd25519SignatureBase64url, + signEd25519, + signHttpRequest, +} from "@clawdentity/sdk"; + +export async function signRegistrationChallenge(options: { + challengeId: string; + nonce: string; + ownerDid: string; + publicKey: string; + name: string; + secretKey: Uint8Array; + framework?: string; + ttlDays?: number; +}): Promise { + const canonical = canonicalizeAgentRegistrationProof({ + challengeId: options.challengeId, + nonce: options.nonce, + ownerDid: options.ownerDid, + publicKey: options.publicKey, + name: options.name, + framework: options.framework, + ttlDays: options.ttlDays, + }); + const signature = await signEd25519( + new TextEncoder().encode(canonical), + options.secretKey, + ); + return 
encodeEd25519SignatureBase64url(signature); +} + +export async function createSignedAgentRefreshRequest(options: { + ait: string; + secretKey: Uint8Array; + refreshToken: string; + timestamp?: string; + nonce?: string; +}): Promise<{ + body: string; + headers: Record; +}> { + const timestamp = options.timestamp ?? String(Math.floor(Date.now() / 1000)); + const nonce = options.nonce ?? "nonce-agent-refresh"; + const body = JSON.stringify({ + refreshToken: options.refreshToken, + }); + const signed = await signHttpRequest({ + method: "POST", + pathWithQuery: AGENT_AUTH_REFRESH_PATH, + timestamp, + nonce, + body: new TextEncoder().encode(body), + secretKey: options.secretKey, + }); + + return { + body, + headers: { + authorization: `Claw ${options.ait}`, + "content-type": "application/json", + ...signed.headers, + }, + }; +} diff --git a/apps/registry/src/server.test/helpers/db/index.ts b/apps/registry/src/server.test/helpers/db/index.ts new file mode 100644 index 0000000..43df786 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/index.ts @@ -0,0 +1,2 @@ +export { createFakeDb } from "./mock.js"; +export type { FakeD1Row } from "./types.js"; diff --git a/apps/registry/src/server.test/helpers/db/mock.ts b/apps/registry/src/server.test/helpers/db/mock.ts new file mode 100644 index 0000000..7cd03e8 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/mock.ts @@ -0,0 +1,542 @@ +import { isDefined, parseSelectedColumns } from "./parse.js"; +import { + getAgentAuthSessionSelectColumnValue, + getAgentRegistrationChallengeSelectColumnValue, + getAgentSelectColumnValue, + getApiKeySelectColumnValue, + getCrlSelectColumnValue, + getHumanSelectColumnValue, + getInviteSelectColumnValue, + resolveAgentAuthSessionSelectRows, + resolveAgentRegistrationChallengeSelectRows, + resolveAgentSelectRows, + resolveApiKeySelectRows, + resolveCrlSelectRows, + resolveHumanSelectRows, + resolveInviteSelectRows, +} from "./resolvers.js"; +import { handleRunQuery } from 
"./run-handlers.js"; +import type { + FakeAgentAuthEventInsertRow, + FakeAgentAuthSessionInsertRow, + FakeAgentAuthSessionUpdateRow, + FakeAgentInsertRow, + FakeAgentRegistrationChallengeInsertRow, + FakeAgentRegistrationChallengeUpdateRow, + FakeAgentRow, + FakeAgentUpdateRow, + FakeApiKeyInsertRow, + FakeApiKeyRow, + FakeD1Row, + FakeDbOptions, + FakeDbState, + FakeHumanInsertRow, + FakeHumanRow, + FakeInviteInsertRow, + FakeInviteUpdateRow, + FakeRevocationInsertRow, +} from "./types.js"; + +export function createFakeDb( + rows: FakeD1Row[], + agentRows: FakeAgentRow[] = [], + options: FakeDbOptions = {}, +) { + const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; + const humanInserts: FakeHumanInsertRow[] = []; + const apiKeyInserts: FakeApiKeyInsertRow[] = []; + const agentInserts: FakeAgentInsertRow[] = []; + const agentUpdates: FakeAgentUpdateRow[] = []; + const revocationInserts: FakeRevocationInsertRow[] = []; + const agentRegistrationChallengeInserts: FakeAgentRegistrationChallengeInsertRow[] = + []; + const agentRegistrationChallengeUpdates: FakeAgentRegistrationChallengeUpdateRow[] = + []; + const agentAuthSessionInserts: FakeAgentAuthSessionInsertRow[] = []; + const agentAuthSessionUpdates: FakeAgentAuthSessionUpdateRow[] = []; + const agentAuthEventInserts: FakeAgentAuthEventInsertRow[] = []; + const inviteInserts: FakeInviteInsertRow[] = []; + const inviteUpdates: FakeInviteUpdateRow[] = []; + const revocationRows = [...(options.revocationRows ?? [])]; + const registrationChallengeRows = [ + ...(options.registrationChallengeRows ?? []), + ]; + const agentAuthSessionRows = [...(options.agentAuthSessionRows ?? [])]; + const inviteRows = [...(options.inviteRows ?? 
[])]; + const humanRows = rows.reduce((acc, row) => { + if (acc.some((item) => item.id === row.humanId)) { + return acc; + } + + acc.push({ + id: row.humanId, + did: row.humanDid, + displayName: row.humanDisplayName, + role: row.humanRole, + status: row.humanStatus, + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }); + return acc; + }, []); + const apiKeyRows: FakeApiKeyRow[] = rows.map((row) => ({ + id: row.apiKeyId, + humanId: row.humanId, + keyHash: row.keyHash, + keyPrefix: row.keyPrefix, + name: row.apiKeyName, + status: row.apiKeyStatus, + createdAt: "2026-01-01T00:00:00.000Z", + lastUsedAt: null, + })); + const state: FakeDbState = { + authRows: rows, + agentRows, + options, + updates, + humanInserts, + apiKeyInserts, + agentInserts, + agentUpdates, + revocationInserts, + agentRegistrationChallengeInserts, + agentRegistrationChallengeUpdates, + agentAuthSessionInserts, + agentAuthSessionUpdates, + agentAuthEventInserts, + inviteInserts, + inviteUpdates, + revocationRows, + registrationChallengeRows, + agentAuthSessionRows, + inviteRows, + humanRows, + apiKeyRows, + beforeFirstAgentUpdateApplied: false, + beforeFirstAgentAuthSessionUpdateApplied: false, + remainingApiKeyInsertFailures: options.failApiKeyInsertCount ?? 0, + }; + + const database: D1Database = { + prepare(query: string) { + let params: unknown[] = []; + const normalizedQuery = query.toLowerCase(); + + return { + bind(...values: unknown[]) { + params = values; + return this; + }, + async all() { + if ( + normalizedQuery.includes('from "api_keys"') || + normalizedQuery.includes("from api_keys") + ) { + const requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + normalizedQuery.includes("join humans"); + + if (requiresHumanJoin) { + const requestedKeyPrefix = + typeof params[0] === "string" ? 
params[0] : ""; + const matchingRows = apiKeyRows.filter( + (row) => row.keyPrefix === requestedKeyPrefix, + ); + + return { + results: matchingRows + .map((row) => { + const human = humanRows.find( + (humanRow) => humanRow.id === row.humanId, + ); + if (!human) { + return undefined; + } + + return { + api_key_id: row.id, + key_hash: row.keyHash, + api_key_status: row.status, + api_key_name: row.name, + human_id: human.id, + human_did: human.did, + human_display_name: human.displayName, + human_role: human.role, + human_status: human.status, + }; + }) + .filter(isDefined), + }; + } + + const resultRows = resolveApiKeySelectRows({ + query, + params, + apiKeyRows, + }); + const selectedColumns = parseSelectedColumns(query); + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getApiKeySelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), + }; + } + if ( + (normalizedQuery.includes('from "humans"') || + normalizedQuery.includes("from humans")) && + normalizedQuery.includes("select") + ) { + const resultRows = resolveHumanSelectRows({ + query, + params, + humanRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getHumanSelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), + }; + } + if ( + (normalizedQuery.includes('from "agents"') || + normalizedQuery.includes("from agents")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveAgentSelectRows({ + query, + params, + authRows: rows, + agentRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + 
return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getAgentSelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), + }; + } + if ( + (normalizedQuery.includes('from "agent_registration_challenges"') || + normalizedQuery.includes("from agent_registration_challenges")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveAgentRegistrationChallengeSelectRows({ + query, + params, + challengeRows: registrationChallengeRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = + getAgentRegistrationChallengeSelectColumnValue( + row, + column, + ); + return acc; + }, + {}, + ); + }), + }; + } + if ( + (normalizedQuery.includes('from "agent_auth_sessions"') || + normalizedQuery.includes("from agent_auth_sessions")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveAgentAuthSessionSelectRows({ + query, + params, + sessionRows: agentAuthSessionRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return selectedColumns.reduce>( + (acc, column) => { + acc[column] = getAgentAuthSessionSelectColumnValue( + row, + column, + ); + return acc; + }, + {}, + ); + }), + }; + } + if ( + (normalizedQuery.includes('from "invites"') || + normalizedQuery.includes("from invites")) && + (normalizedQuery.includes("select") || + normalizedQuery.includes("returning")) + ) { + const resultRows = resolveInviteSelectRows({ + query, + params, + inviteRows, + }); + const selectedColumns = parseSelectedColumns(query); + + return { + results: resultRows.map((row) => { + if (selectedColumns.length === 0) { + return row; + } + + return 
selectedColumns.reduce>( + (acc, column) => { + acc[column] = getInviteSelectColumnValue(row, column); + return acc; + }, + {}, + ); + }), + }; + } + if ( + (normalizedQuery.includes('from "revocations"') || + normalizedQuery.includes("from revocations")) && + normalizedQuery.includes("select") + ) { + return { + results: resolveCrlSelectRows({ + agentRows, + revocationRows, + }), + }; + } + return { results: [] }; + }, + async raw() { + if ( + normalizedQuery.includes('from "api_keys"') || + normalizedQuery.includes("from api_keys") + ) { + const requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + normalizedQuery.includes("join humans"); + + if (requiresHumanJoin) { + const requestedKeyPrefix = + typeof params[0] === "string" ? params[0] : ""; + const matchingRows = apiKeyRows.filter( + (row) => row.keyPrefix === requestedKeyPrefix, + ); + + return matchingRows + .map((row) => { + const human = humanRows.find( + (humanRow) => humanRow.id === row.humanId, + ); + if (!human) { + return undefined; + } + + return [ + row.id, + row.keyHash, + row.status, + row.name, + human.id, + human.did, + human.displayName, + human.role, + human.status, + ]; + }) + .filter(isDefined); + } + + const resultRows = resolveApiKeySelectRows({ + query, + params, + apiKeyRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getApiKeySelectColumnValue(row, column), + ), + ); + } + if ( + normalizedQuery.includes('from "agent_auth_sessions"') || + normalizedQuery.includes("from agent_auth_sessions") + ) { + const resultRows = resolveAgentAuthSessionSelectRows({ + query, + params, + sessionRows: agentAuthSessionRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getAgentAuthSessionSelectColumnValue(row, column), + ), + ); + } + if ( + normalizedQuery.includes('from "humans"') || + normalizedQuery.includes("from 
humans") + ) { + const resultRows = resolveHumanSelectRows({ + query, + params, + humanRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getHumanSelectColumnValue(row, column), + ), + ); + } + if ( + normalizedQuery.includes('from "agents"') || + normalizedQuery.includes("from agents") + ) { + const resultRows = resolveAgentSelectRows({ + query, + params, + authRows: rows, + agentRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getAgentSelectColumnValue(row, column), + ), + ); + } + if ( + normalizedQuery.includes('from "agent_registration_challenges"') || + normalizedQuery.includes("from agent_registration_challenges") + ) { + const resultRows = resolveAgentRegistrationChallengeSelectRows({ + query, + params, + challengeRows: registrationChallengeRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getAgentRegistrationChallengeSelectColumnValue(row, column), + ), + ); + } + if ( + normalizedQuery.includes('from "invites"') || + normalizedQuery.includes("from invites") + ) { + const resultRows = resolveInviteSelectRows({ + query, + params, + inviteRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getInviteSelectColumnValue(row, column), + ), + ); + } + if ( + normalizedQuery.includes('from "revocations"') || + normalizedQuery.includes("from revocations") + ) { + const resultRows = resolveCrlSelectRows({ + agentRows, + revocationRows, + }); + const selectedColumns = parseSelectedColumns(query); + return resultRows.map((row) => + selectedColumns.map((column) => + getCrlSelectColumnValue(row, column), + ), + ); + } + return []; + }, + async run() { + return handleRunQuery({ + query, + normalizedQuery, + params, + state, + }); + }, + 
} as D1PreparedStatement; + }, + } as D1Database; + + return { + database, + updates, + humanRows, + humanInserts, + apiKeyInserts, + agentAuthSessionRows, + agentAuthSessionInserts, + agentAuthSessionUpdates, + agentAuthEventInserts, + agentInserts, + agentUpdates, + agentRegistrationChallengeInserts, + agentRegistrationChallengeUpdates, + inviteInserts, + inviteUpdates, + inviteRows, + revocationInserts, + registrationChallengeRows, + }; +} diff --git a/apps/registry/src/server.test/helpers/db/parse.ts b/apps/registry/src/server.test/helpers/db/parse.ts new file mode 100644 index 0000000..ce41d2b --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/parse.ts @@ -0,0 +1,143 @@ +// SQL helper parsers for the fake D1 database. +export function parseInsertColumns(query: string, tableName: string): string[] { + const match = query.match( + new RegExp(`insert\\s+into\\s+"?${tableName}"?\\s*\\(([^)]+)\\)`, "i"), + ); + if (!match) { + return []; + } + + const columns = match[1]?.split(",") ?? []; + return columns.map((column) => column.replace(/["`\s]/g, "")); +} + +export function parseUpdateSetColumns( + query: string, + tableName: string, +): string[] { + const match = query.match( + new RegExp(`update\\s+"?${tableName}"?\\s+set\\s+(.+?)\\s+where`, "i"), + ); + if (!match) { + return []; + } + + const assignments = match[1]?.split(",") ?? []; + return assignments + .map((assignment) => assignment.split("=")[0] ?? "") + .map((column) => column.replace(/["`\s]/g, "")) + .filter((column) => column.length > 0); +} + +export function extractWhereClause(query: string): string { + const normalized = query.toLowerCase(); + const whereIndex = normalized.indexOf(" where "); + if (whereIndex < 0) { + return ""; + } + + const orderByIndex = normalized.indexOf(" order by ", whereIndex + 7); + const limitIndex = normalized.indexOf(" limit ", whereIndex + 7); + const endIndex = + orderByIndex >= 0 + ? orderByIndex + : limitIndex >= 0 + ? 
limitIndex + : normalized.length; + + return normalized.slice(whereIndex, endIndex); +} + +export function hasFilter( + whereClause: string, + column: string, + operator = "=", +): boolean { + const escapedColumn = column.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); + const escapedOperator = operator.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); + const quotedPattern = new RegExp( + `"${escapedColumn}"\\s*${escapedOperator}\\s*\\?`, + ); + const barePattern = new RegExp( + `\\b${escapedColumn}\\b\\s*${escapedOperator}\\s*\\?`, + ); + return quotedPattern.test(whereClause) || barePattern.test(whereClause); +} + +export function isDefined(value: T | undefined): value is T { + return value !== undefined; +} + +export function parseWhereEqualityParams(options: { + whereClause: string; + params: unknown[]; +}): { values: Record; consumedParams: number } { + const values: Record = {}; + const pattern = /"?([a-zA-Z0-9_]+)"?\s*=\s*\?/g; + let parameterIndex = 0; + + let match = pattern.exec(options.whereClause); + while (match !== null) { + const column = match[1]?.toLowerCase(); + if (!column) { + match = pattern.exec(options.whereClause); + continue; + } + + const entries = values[column] ?? 
[]; + entries.push(options.params[parameterIndex]); + values[column] = entries; + parameterIndex += 1; + match = pattern.exec(options.whereClause); + } + + return { values, consumedParams: parameterIndex }; +} + +export function parseSelectedColumns(query: string): string[] { + const normalized = query.toLowerCase(); + const selectIndex = normalized.indexOf("select "); + const fromIndex = normalized.indexOf(" from "); + if (selectIndex < 0 || fromIndex < 0 || fromIndex <= selectIndex) { + return []; + } + + const selectClause = query.slice(selectIndex + 7, fromIndex); + return selectClause + .split(",") + .map((column) => column.trim()) + .map((column) => { + const normalizedColumn = column.toLowerCase(); + if ( + normalizedColumn.includes(`"humans"."did"`) || + normalizedColumn.includes("humans.did") + ) { + return "owner_did"; + } + + if ( + normalizedColumn.includes(`"agents"."did"`) || + normalizedColumn.includes("agents.did") + ) { + return "did"; + } + + const aliasMatch = column.match(/\s+as\s+"?([a-zA-Z0-9_]+)"?\s*$/i); + if (aliasMatch?.[1]) { + return aliasMatch[1].toLowerCase(); + } + + const quotedMatch = column.match(/"([a-zA-Z0-9_]+)"\s*$/); + if (quotedMatch?.[1]) { + return quotedMatch[1].toLowerCase(); + } + + const bare = + column + .split(".") + .pop() + ?.replace(/["`\s]/g, "") ?? 
""; + return bare.toLowerCase(); + }) + .filter((column) => column.length > 0); +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers.ts b/apps/registry/src/server.test/helpers/db/resolvers.ts new file mode 100644 index 0000000..7eb7f9a --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers.ts @@ -0,0 +1,668 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { + extractWhereClause, + hasFilter, + parseWhereEqualityParams, +} from "./parse.js"; +import type { + FakeAgentAuthSessionRow, + FakeAgentRegistrationChallengeRow, + FakeAgentRow, + FakeAgentSelectRow, + FakeApiKeyRow, + FakeApiKeySelectRow, + FakeCrlSelectRow, + FakeD1Row, + FakeHumanRow, + FakeInviteRow, + FakeRevocationRow, +} from "./types.js"; + +export function createFakePublicKey(agentId: string): string { + const seed = agentId.length > 0 ? agentId : "agent"; + const bytes = new Uint8Array(32); + + for (let index = 0; index < bytes.length; index += 1) { + bytes[index] = seed.charCodeAt(index % seed.length) & 0xff; + } + + return encodeBase64url(bytes); +} + +export function getAgentSelectColumnValue( + row: FakeAgentSelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "did") { + return row.did; + } + if (column === "owner_id") { + return row.owner_id; + } + if (column === "owner_did") { + return row.owner_did; + } + if (column === "name") { + return row.name; + } + if (column === "framework") { + return row.framework; + } + if (column === "public_key") { + return row.public_key; + } + if (column === "status") { + return row.status; + } + if (column === "expires_at") { + return row.expires_at; + } + if (column === "current_jti") { + return row.current_jti; + } + if (column === "created_at") { + return row.created_at; + } + if (column === "updated_at") { + return row.updated_at; + } + return undefined; +} + +export function getAgentRegistrationChallengeSelectColumnValue( + row: 
FakeAgentRegistrationChallengeRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "owner_id") { + return row.ownerId; + } + if (column === "public_key") { + return row.publicKey; + } + if (column === "nonce") { + return row.nonce; + } + if (column === "status") { + return row.status; + } + if (column === "expires_at") { + return row.expiresAt; + } + if (column === "used_at") { + return row.usedAt; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +export function getHumanSelectColumnValue( + row: FakeHumanRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "did") { + return row.did; + } + if (column === "display_name") { + return row.displayName; + } + if (column === "role") { + return row.role; + } + if (column === "status") { + return row.status; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +export function resolveHumanSelectRows(options: { + query: string; + params: unknown[]; + humanRows: FakeHumanRow[]; +}): FakeHumanRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + + const roleFilter = + typeof equalityParams.values.role?.[0] === "string" + ? String(equalityParams.values.role[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const didFilter = + typeof equalityParams.values.did?.[0] === "string" + ? 
String(equalityParams.values.did[0]) + : undefined; + + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.humanRows.length; + + return options.humanRows + .filter((row) => (roleFilter ? row.role === roleFilter : true)) + .filter((row) => (statusFilter ? row.status === statusFilter : true)) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => (didFilter ? row.did === didFilter : true)) + .slice(0, limit); +} + +export function getApiKeySelectColumnValue( + row: FakeApiKeySelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "human_id") { + return row.human_id; + } + if (column === "key_hash") { + return row.key_hash; + } + if (column === "key_prefix") { + return row.key_prefix; + } + if (column === "name") { + return row.name; + } + if (column === "status") { + return row.status; + } + if (column === "created_at") { + return row.created_at; + } + if (column === "last_used_at") { + return row.last_used_at; + } + return undefined; +} + +export function resolveApiKeySelectRows(options: { + query: string; + params: unknown[]; + apiKeyRows: FakeApiKeyRow[]; +}): FakeApiKeySelectRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasHumanIdFilter = hasFilter(whereClause, "human_id"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasPrefixFilter = hasFilter(whereClause, "key_prefix"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const orderByCreatedAtDesc = + options.query.toLowerCase().includes("order by") && + options.query.toLowerCase().includes("created_at") && + 
options.query.toLowerCase().includes("desc"); + + const humanId = + hasHumanIdFilter && typeof equalityParams.values.human_id?.[0] === "string" + ? String(equalityParams.values.human_id[0]) + : undefined; + const id = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const keyPrefix = + hasPrefixFilter && typeof equalityParams.values.key_prefix?.[0] === "string" + ? String(equalityParams.values.key_prefix[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.apiKeyRows.length; + + const rows = options.apiKeyRows + .filter((row) => (humanId ? row.humanId === humanId : true)) + .filter((row) => (id ? row.id === id : true)) + .filter((row) => (status ? row.status === status : true)) + .filter((row) => (keyPrefix ? 
row.keyPrefix === keyPrefix : true)) + .map((row) => ({ + id: row.id, + human_id: row.humanId, + key_hash: row.keyHash, + key_prefix: row.keyPrefix, + name: row.name, + status: row.status, + created_at: row.createdAt, + last_used_at: row.lastUsedAt, + })); + + if (orderByCreatedAtDesc) { + rows.sort((left, right) => { + const createdAtCompare = right.created_at.localeCompare(left.created_at); + if (createdAtCompare !== 0) { + return createdAtCompare; + } + return right.id.localeCompare(left.id); + }); + } + + return rows.slice(0, limit); +} + +export function getAgentAuthSessionSelectColumnValue( + row: FakeAgentAuthSessionRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "agent_id") { + return row.agentId; + } + if (column === "refresh_key_hash") { + return row.refreshKeyHash; + } + if (column === "refresh_key_prefix") { + return row.refreshKeyPrefix; + } + if (column === "refresh_issued_at") { + return row.refreshIssuedAt; + } + if (column === "refresh_expires_at") { + return row.refreshExpiresAt; + } + if (column === "refresh_last_used_at") { + return row.refreshLastUsedAt; + } + if (column === "access_key_hash") { + return row.accessKeyHash; + } + if (column === "access_key_prefix") { + return row.accessKeyPrefix; + } + if (column === "access_issued_at") { + return row.accessIssuedAt; + } + if (column === "access_expires_at") { + return row.accessExpiresAt; + } + if (column === "access_last_used_at") { + return row.accessLastUsedAt; + } + if (column === "status") { + return row.status; + } + if (column === "revoked_at") { + return row.revokedAt; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +export function resolveAgentAuthSessionSelectRows(options: { + query: string; + params: unknown[]; + sessionRows: FakeAgentAuthSessionRow[]; +}): FakeAgentAuthSessionRow[] { + const whereClause = 
extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasAgentIdFilter = hasFilter(whereClause, "agent_id"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasRefreshPrefixFilter = hasFilter(whereClause, "refresh_key_prefix"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const agentId = + hasAgentIdFilter && typeof equalityParams.values.agent_id?.[0] === "string" + ? String(equalityParams.values.agent_id[0]) + : undefined; + const id = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const refreshPrefix = + hasRefreshPrefixFilter && + typeof equalityParams.values.refresh_key_prefix?.[0] === "string" + ? String(equalityParams.values.refresh_key_prefix[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.sessionRows.length; + + return options.sessionRows + .filter((row) => (agentId ? row.agentId === agentId : true)) + .filter((row) => (id ? row.id === id : true)) + .filter((row) => (status ? row.status === status : true)) + .filter((row) => + refreshPrefix ? 
row.refreshKeyPrefix === refreshPrefix : true, + ) + .slice(0, limit); +} + +export function resolveAgentSelectRows(options: { + query: string; + params: unknown[]; + authRows: FakeD1Row[]; + agentRows: FakeAgentRow[]; +}): FakeAgentSelectRow[] { + const normalizedQuery = options.query.toLowerCase(); + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasOwnerFilter = hasFilter(whereClause, "owner_id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasFrameworkFilter = hasFilter(whereClause, "framework"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasDidFilter = hasFilter(whereClause, "did"); + const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); + const hasCursorFilter = hasFilter(whereClause, "id", "<"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + normalizedQuery.includes("join humans"); + + const ownerId = + hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id?.[0]) + : undefined; + const statusFilter = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status?.[0]) + : undefined; + const frameworkFilter = + hasFrameworkFilter && + typeof equalityParams.values.framework?.[0] === "string" + ? String(equalityParams.values.framework?.[0]) + : undefined; + const idFilter = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id?.[0]) + : undefined; + const didFilter = + hasDidFilter && typeof equalityParams.values.did?.[0] === "string" + ? String(equalityParams.values.did?.[0]) + : undefined; + const currentJtiFilter = hasCurrentJtiFilter + ? 
(equalityParams.values.current_jti?.[0] as string | null | undefined) + : undefined; + const cursorFilter = hasCursorFilter + ? String(options.params[equalityParams.consumedParams] ?? "") + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.agentRows.length; + + const filteredRows = options.agentRows + .filter((row) => (ownerId ? row.ownerId === ownerId : true)) + .filter((row) => (statusFilter ? row.status === statusFilter : true)) + .filter((row) => + frameworkFilter ? row.framework === frameworkFilter : true, + ) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => (didFilter ? row.did === didFilter : true)) + .filter((row) => + currentJtiFilter !== undefined + ? (row.currentJti ?? null) === currentJtiFilter + : true, + ) + .filter((row) => (cursorFilter ? row.id < cursorFilter : true)) + .sort((left, right) => right.id.localeCompare(left.id)) + .map((row) => { + const ownerDid = options.authRows.find( + (authRow) => authRow.humanId === row.ownerId, + )?.humanDid; + + return { + id: row.id, + did: row.did, + owner_id: row.ownerId, + owner_did: ownerDid ?? "", + name: row.name, + framework: row.framework, + public_key: row.publicKey ?? createFakePublicKey(row.id), + status: row.status, + expires_at: row.expiresAt, + current_jti: row.currentJti ?? null, + created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", + updated_at: row.updatedAt ?? "2026-01-01T00:00:00.000Z", + }; + }) + .filter((row) => (requiresHumanJoin ? 
row.owner_did.length > 0 : true)) + .slice(0, limit); + + return filteredRows; +} + +export function resolveAgentRegistrationChallengeSelectRows(options: { + query: string; + params: unknown[]; + challengeRows: FakeAgentRegistrationChallengeRow[]; +}): FakeAgentRegistrationChallengeRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasOwnerFilter = hasFilter(whereClause, "owner_id"); + const hasChallengeIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const ownerId = + hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id[0]) + : undefined; + const challengeId = + hasChallengeIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.challengeRows.length; + + return options.challengeRows + .filter((row) => (ownerId ? row.ownerId === ownerId : true)) + .filter((row) => (challengeId ? row.id === challengeId : true)) + .filter((row) => (status ? 
row.status === status : true)) + .slice(0, limit); +} + +export function getInviteSelectColumnValue( + row: FakeInviteRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "code") { + return row.code; + } + if (column === "created_by") { + return row.createdBy; + } + if (column === "redeemed_by") { + return row.redeemedBy; + } + if (column === "agent_id") { + return row.agentId; + } + if (column === "expires_at") { + return row.expiresAt; + } + if (column === "created_at") { + return row.createdAt; + } + return undefined; +} + +export function resolveInviteSelectRows(options: { + query: string; + params: unknown[]; + inviteRows: FakeInviteRow[]; +}): FakeInviteRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasCodeFilter = hasFilter(whereClause, "code"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasRedeemedByFilter = hasFilter(whereClause, "redeemed_by"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const codeFilter = + hasCodeFilter && typeof equalityParams.values.code?.[0] === "string" + ? String(equalityParams.values.code[0]) + : undefined; + const idFilter = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const redeemedByFilter = hasRedeemedByFilter + ? (equalityParams.values.redeemed_by?.[0] as string | null | undefined) + : undefined; + + const requiresRedeemedByNull = + whereClause.includes("redeemed_by") && whereClause.includes("is null"); + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.inviteRows.length; + + return options.inviteRows + .filter((row) => (codeFilter ? row.code === codeFilter : true)) + .filter((row) => (idFilter ? 
row.id === idFilter : true)) + .filter((row) => + redeemedByFilter !== undefined + ? row.redeemedBy === redeemedByFilter + : true, + ) + .filter((row) => (requiresRedeemedByNull ? row.redeemedBy === null : true)) + .slice(0, limit); +} + +export function getCrlSelectColumnValue( + row: FakeCrlSelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "jti") { + return row.jti; + } + if (column === "reason") { + return row.reason; + } + if (column === "revoked_at") { + return row.revoked_at; + } + if (column === "revokedat") { + return row.revoked_at; + } + if (column === "agent_did") { + return row.agent_did; + } + if (column === "agentdid" || column === "did") { + return row.did; + } + return undefined; +} + +export function resolveCrlSelectRows(options: { + agentRows: FakeAgentRow[]; + revocationRows: FakeRevocationRow[]; +}): FakeCrlSelectRow[] { + return options.revocationRows + .map((row) => { + const agent = options.agentRows.find( + (agentRow) => agentRow.id === row.agentId, + ); + if (!agent) { + return null; + } + + return { + id: row.id, + jti: row.jti, + reason: row.reason, + revoked_at: row.revokedAt, + agent_did: agent.did, + did: agent.did, + }; + }) + .filter((row): row is FakeCrlSelectRow => row !== null) + .sort((left, right) => { + const timestampCompare = right.revoked_at.localeCompare(left.revoked_at); + if (timestampCompare !== 0) { + return timestampCompare; + } + return right.id.localeCompare(left.id); + }); +} diff --git a/apps/registry/src/server.test/helpers/db/run-handlers-phase-one.ts b/apps/registry/src/server.test/helpers/db/run-handlers-phase-one.ts new file mode 100644 index 0000000..0b7a1d0 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/run-handlers-phase-one.ts @@ -0,0 +1,414 @@ +import { + extractWhereClause, + parseInsertColumns, + parseUpdateSetColumns, + parseWhereEqualityParams, +} from "./parse.js"; +import type { RunHandlerPhaseInput } from 
/**
 * First half of the fake D1 `run()` dispatcher used by the registry server
 * tests. Emulates write statements against the in-memory `api_keys`,
 * `humans`, `agent_auth_sessions`, and `agent_auth_events` tables by crude
 * substring matching on the normalized (lowercased) SQL, mutating the shared
 * FakeDbState in place and recording every statement so tests can assert on
 * what was executed.
 *
 * @param input - raw + normalized query text, bound params, the running
 *   `changes` counter, and the shared fake-DB state.
 * @returns the updated affected-row count (`changes` passes through
 *   untouched when no branch matches).
 */
export function applyRunHandlersPhaseOne(input: RunHandlerPhaseInput): number {
  const { query, normalizedQuery, params, state } = input;
  let { changes } = input;
  const {
    apiKeyRows,
    updates,
    humanInserts,
    humanRows,
    apiKeyInserts,
    agentAuthSessionInserts,
    agentAuthSessionRows,
    agentAuthEventInserts,
    agentAuthSessionUpdates,
    agentAuthSessionUpdates: _unusedAlias, // NOTE(review): destructured names below mirror the patch exactly
  } = state;
  const options = state.options;

  // --- UPDATE api_keys ------------------------------------------------
  if (
    normalizedQuery.includes('update "api_keys"') ||
    normalizedQuery.includes("update api_keys")
  ) {
    // SET columns consume the leading params; WHERE params follow.
    const setColumns = parseUpdateSetColumns(query, "api_keys");
    const nextValues = setColumns.reduce<Record<string, unknown>>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    const whereClause = extractWhereClause(query);
    const whereParams = params.slice(setColumns.length);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params: whereParams,
    });
    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id[0])
        : undefined;
    const humanIdFilter =
      typeof equalityParams.values.human_id?.[0] === "string"
        ? String(equalityParams.values.human_id[0])
        : undefined;
    const statusFilter =
      typeof equalityParams.values.status?.[0] === "string"
        ? String(equalityParams.values.status[0])
        : undefined;

    let matchedRows = 0;
    for (const row of apiKeyRows) {
      if (idFilter && row.id !== idFilter) {
        continue;
      }
      if (humanIdFilter && row.humanId !== humanIdFilter) {
        continue;
      }
      if (statusFilter && row.status !== statusFilter) {
        continue;
      }

      matchedRows += 1;
      // Only apply values that pass the column's type/enum guard.
      if (nextValues.status === "active" || nextValues.status === "revoked") {
        row.status = nextValues.status;
      }
      if (
        typeof nextValues.last_used_at === "string" ||
        nextValues.last_used_at === null
      ) {
        row.lastUsedAt = nextValues.last_used_at;
      }
    }

    // Record last-used-at touches separately (even for zero-match updates,
    // matching the real statement's intent) for test assertions.
    if (typeof nextValues.last_used_at === "string" && idFilter) {
      updates.push({
        lastUsedAt: nextValues.last_used_at,
        apiKeyId: idFilter,
      });
    }
    changes = matchedRows;
  }

  // --- INSERT humans --------------------------------------------------
  if (
    normalizedQuery.includes('insert into "humans"') ||
    normalizedQuery.includes("insert into humans")
  ) {
    const columns = parseInsertColumns(query, "humans");
    const row = columns.reduce<FakeHumanInsertRow>((acc, column, index) => {
      acc[column] = params[index];
      return acc;
    }, {});
    humanInserts.push(row);

    const nextHumanId = typeof row.id === "string" ? row.id : "";
    const nextHumanDid = typeof row.did === "string" ? row.did : "";
    // Emulate the unique constraints on id and did.
    const conflict = humanRows.some(
      (humanRow) =>
        humanRow.id === nextHumanId || humanRow.did === nextHumanDid,
    );

    if (!conflict) {
      // Materialize a queryable row only when all columns validate; the
      // insert is still "successful" (changes = 1) either way, mirroring
      // a lenient fake rather than real constraint enforcement.
      if (
        (row.role === "admin" || row.role === "user") &&
        (row.status === "active" || row.status === "suspended") &&
        typeof row.display_name === "string" &&
        typeof row.created_at === "string" &&
        typeof row.updated_at === "string"
      ) {
        humanRows.push({
          id: nextHumanId,
          did: nextHumanDid,
          displayName: row.display_name,
          role: row.role,
          status: row.status,
          createdAt: row.created_at,
          updatedAt: row.updated_at,
        });
      }

      changes = 1;
    } else {
      changes = 0;
    }
  }

  // --- INSERT api_keys ------------------------------------------------
  if (
    normalizedQuery.includes('insert into "api_keys"') ||
    normalizedQuery.includes("insert into api_keys")
  ) {
    // Fault injection: tests can force the first N inserts to throw.
    if (state.remainingApiKeyInsertFailures > 0) {
      state.remainingApiKeyInsertFailures -= 1;
      throw new Error("api key insert failed");
    }

    const columns = parseInsertColumns(query, "api_keys");
    const row = columns.reduce<FakeApiKeyInsertRow>((acc, column, index) => {
      acc[column] = params[index];
      return acc;
    }, {});
    apiKeyInserts.push(row);

    if (
      typeof row.id === "string" &&
      typeof row.human_id === "string" &&
      typeof row.key_hash === "string" &&
      typeof row.key_prefix === "string" &&
      typeof row.name === "string" &&
      (row.status === "active" || row.status === "revoked") &&
      typeof row.created_at === "string"
    ) {
      apiKeyRows.push({
        id: row.id,
        humanId: row.human_id,
        keyHash: row.key_hash,
        keyPrefix: row.key_prefix,
        name: row.name,
        status: row.status,
        createdAt: row.created_at,
        lastUsedAt:
          typeof row.last_used_at === "string" ? row.last_used_at : null,
      });
    }

    changes = 1;
  }

  // --- INSERT agent_auth_sessions (upsert keyed by agent_id) ----------
  if (
    normalizedQuery.includes('insert into "agent_auth_sessions"') ||
    normalizedQuery.includes("insert into agent_auth_sessions")
  ) {
    const columns = parseInsertColumns(query, "agent_auth_sessions");
    const row = columns.reduce<FakeAgentAuthSessionInsertRow>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    agentAuthSessionInserts.push(row);

    if (
      typeof row.id === "string" &&
      typeof row.agent_id === "string" &&
      typeof row.refresh_key_hash === "string" &&
      typeof row.refresh_key_prefix === "string" &&
      typeof row.refresh_issued_at === "string" &&
      typeof row.refresh_expires_at === "string" &&
      typeof row.access_key_hash === "string" &&
      typeof row.access_key_prefix === "string" &&
      typeof row.access_issued_at === "string" &&
      typeof row.access_expires_at === "string" &&
      (row.status === "active" || row.status === "revoked") &&
      typeof row.created_at === "string" &&
      typeof row.updated_at === "string"
    ) {
      // One session per agent: replace any existing row for the same
      // agent (mirrors an ON CONFLICT-style upsert); otherwise append.
      const existingIndex = agentAuthSessionRows.findIndex(
        (sessionRow) => sessionRow.agentId === row.agent_id,
      );
      const nextSession: FakeAgentAuthSessionRow = {
        id: row.id,
        agentId: row.agent_id,
        refreshKeyHash: row.refresh_key_hash,
        refreshKeyPrefix: row.refresh_key_prefix,
        refreshIssuedAt: row.refresh_issued_at,
        refreshExpiresAt: row.refresh_expires_at,
        refreshLastUsedAt:
          typeof row.refresh_last_used_at === "string"
            ? row.refresh_last_used_at
            : null,
        accessKeyHash: row.access_key_hash,
        accessKeyPrefix: row.access_key_prefix,
        accessIssuedAt: row.access_issued_at,
        accessExpiresAt: row.access_expires_at,
        accessLastUsedAt:
          typeof row.access_last_used_at === "string"
            ? row.access_last_used_at
            : null,
        status: row.status,
        revokedAt: typeof row.revoked_at === "string" ? row.revoked_at : null,
        createdAt: row.created_at,
        updatedAt: row.updated_at,
      };
      if (existingIndex >= 0) {
        agentAuthSessionRows.splice(existingIndex, 1, nextSession);
      } else {
        agentAuthSessionRows.push(nextSession);
      }
    }

    changes = 1;
  }

  // --- INSERT agent_auth_events (append-only event log) ----------------
  if (
    normalizedQuery.includes('insert into "agent_auth_events"') ||
    normalizedQuery.includes("insert into agent_auth_events")
  ) {
    const columns = parseInsertColumns(query, "agent_auth_events");
    const row = columns.reduce<FakeAgentAuthEventInsertRow>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    agentAuthEventInserts.push(row);
    changes = 1;
  }

  // --- UPDATE agent_auth_sessions --------------------------------------
  if (
    normalizedQuery.includes('update "agent_auth_sessions"') ||
    normalizedQuery.includes("update agent_auth_sessions")
  ) {
    // One-shot test hook fired before the first session update is applied
    // (lets tests mutate session rows to simulate races).
    if (
      !state.beforeFirstAgentAuthSessionUpdateApplied &&
      options.beforeFirstAgentAuthSessionUpdate
    ) {
      options.beforeFirstAgentAuthSessionUpdate(agentAuthSessionRows);
      state.beforeFirstAgentAuthSessionUpdateApplied = true;
    }

    const setColumns = parseUpdateSetColumns(query, "agent_auth_sessions");
    const nextValues = setColumns.reduce<Record<string, unknown>>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    const whereClause = extractWhereClause(query);
    const whereParams = params.slice(setColumns.length);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params: whereParams,
    });

    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id[0])
        : undefined;
    const agentIdFilter =
      typeof equalityParams.values.agent_id?.[0] === "string"
        ? String(equalityParams.values.agent_id[0])
        : undefined;
    const statusFilter =
      typeof equalityParams.values.status?.[0] === "string"
        ? String(equalityParams.values.status[0])
        : undefined;
    const refreshHashFilter =
      typeof equalityParams.values.refresh_key_hash?.[0] === "string"
        ? String(equalityParams.values.refresh_key_hash[0])
        : undefined;
    const accessHashFilter =
      typeof equalityParams.values.access_key_hash?.[0] === "string"
        ? String(equalityParams.values.access_key_hash[0])
        : undefined;

    let matchedRows = 0;
    for (const row of agentAuthSessionRows) {
      if (idFilter && row.id !== idFilter) {
        continue;
      }
      if (agentIdFilter && row.agentId !== agentIdFilter) {
        continue;
      }
      if (statusFilter && row.status !== statusFilter) {
        continue;
      }
      if (refreshHashFilter && row.refreshKeyHash !== refreshHashFilter) {
        continue;
      }
      if (accessHashFilter && row.accessKeyHash !== accessHashFilter) {
        continue;
      }

      matchedRows += 1;
      // Apply each SET column only when its value passes the type guard;
      // nullable columns additionally accept explicit null.
      if (typeof nextValues.refresh_key_hash === "string") {
        row.refreshKeyHash = nextValues.refresh_key_hash;
      }
      if (typeof nextValues.refresh_key_prefix === "string") {
        row.refreshKeyPrefix = nextValues.refresh_key_prefix;
      }
      if (typeof nextValues.refresh_issued_at === "string") {
        row.refreshIssuedAt = nextValues.refresh_issued_at;
      }
      if (typeof nextValues.refresh_expires_at === "string") {
        row.refreshExpiresAt = nextValues.refresh_expires_at;
      }
      if (
        typeof nextValues.refresh_last_used_at === "string" ||
        nextValues.refresh_last_used_at === null
      ) {
        row.refreshLastUsedAt = nextValues.refresh_last_used_at;
      }
      if (typeof nextValues.access_key_hash === "string") {
        row.accessKeyHash = nextValues.access_key_hash;
      }
      if (typeof nextValues.access_key_prefix === "string") {
        row.accessKeyPrefix = nextValues.access_key_prefix;
      }
      if (typeof nextValues.access_issued_at === "string") {
        row.accessIssuedAt = nextValues.access_issued_at;
      }
      if (typeof nextValues.access_expires_at === "string") {
        row.accessExpiresAt = nextValues.access_expires_at;
      }
      if (
        typeof nextValues.access_last_used_at === "string" ||
        nextValues.access_last_used_at === null
      ) {
        row.accessLastUsedAt = nextValues.access_last_used_at;
      }
      if (nextValues.status === "active" || nextValues.status === "revoked") {
        row.status = nextValues.status;
      }
      if (
        typeof nextValues.revoked_at === "string" ||
        nextValues.revoked_at === null
      ) {
        row.revokedAt = nextValues.revoked_at;
      }
      if (typeof nextValues.updated_at === "string") {
        row.updatedAt = nextValues.updated_at;
      }
    }

    // Keep an audit record of the update (SET values + WHERE filters +
    // match count) for test assertions.
    agentAuthSessionUpdates.push({
      ...nextValues,
      id: idFilter,
      agent_id: agentIdFilter,
      status_where: statusFilter,
      refresh_key_hash_where: refreshHashFilter,
      access_key_hash_where: accessHashFilter,
      matched_rows: matchedRows,
    });
    changes = matchedRows;
  }

  // --- DELETE agent_auth_sessions (by id only) --------------------------
  if (
    normalizedQuery.includes('delete from "agent_auth_sessions"') ||
    normalizedQuery.includes("delete from agent_auth_sessions")
  ) {
    const whereClause = extractWhereClause(query);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params,
    });
    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id[0])
        : undefined;

    if (idFilter) {
      // Iterate backwards so splice() does not skip elements.
      for (
        let index = agentAuthSessionRows.length - 1;
        index >= 0;
        index -= 1
      ) {
        if (agentAuthSessionRows[index]?.id === idFilter) {
          agentAuthSessionRows.splice(index, 1);
          changes += 1;
        }
      }
    }
  }

  return changes;
}
/**
 * Second half of the fake D1 `run()` dispatcher. Emulates writes against the
 * in-memory `invites`, `humans` (delete + cascade), `agents`,
 * `agent_registration_challenges`, and `revocations` tables via substring
 * matching on the normalized SQL, mutating the shared FakeDbState and
 * recording every statement for assertions.
 *
 * @param input - raw + normalized query text, bound params, the `changes`
 *   counter carried over from phase one, and the shared fake-DB state.
 * @returns the updated affected-row count.
 */
export function applyRunHandlersPhaseTwo(input: RunHandlerPhaseInput): number {
  const { query, normalizedQuery, params, state } = input;
  let { changes } = input;
  const {
    inviteInserts,
    inviteRows,
    inviteUpdates,
    humanRows,
    apiKeyRows,
    agentInserts,
    agentRegistrationChallengeInserts,
    registrationChallengeRows,
    agentRegistrationChallengeUpdates,
    agentRows,
    agentUpdates,
    revocationInserts,
    revocationRows,
  } = state;
  const options = state.options;

  // --- INSERT invites ---------------------------------------------------
  if (
    normalizedQuery.includes('insert into "invites"') ||
    normalizedQuery.includes("insert into invites")
  ) {
    const columns = parseInsertColumns(query, "invites");
    const row = columns.reduce<FakeInviteInsertRow>((acc, column, index) => {
      acc[column] = params[index];
      return acc;
    }, {});
    inviteInserts.push(row);

    // Materialize a queryable invite row only when required columns validate.
    if (
      typeof row.id === "string" &&
      typeof row.code === "string" &&
      typeof row.created_by === "string" &&
      typeof row.created_at === "string"
    ) {
      inviteRows.push({
        id: row.id,
        code: row.code,
        createdBy: row.created_by,
        redeemedBy:
          typeof row.redeemed_by === "string" ? row.redeemed_by : null,
        agentId: typeof row.agent_id === "string" ? row.agent_id : null,
        expiresAt: typeof row.expires_at === "string" ? row.expires_at : null,
        createdAt: row.created_at,
      });
    }

    changes = 1;
  }

  // --- UPDATE invites ---------------------------------------------------
  if (
    normalizedQuery.includes('update "invites"') ||
    normalizedQuery.includes("update invites")
  ) {
    const setColumns = parseUpdateSetColumns(query, "invites");
    const nextValues = setColumns.reduce<Record<string, unknown>>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    const whereClause = extractWhereClause(query);
    const whereParams = params.slice(setColumns.length);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params: whereParams,
    });

    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id[0])
        : undefined;
    const redeemedByFilter = hasFilter(whereClause, "redeemed_by")
      ? (equalityParams.values.redeemed_by?.[0] as string | null | undefined)
      : undefined;
    // Crude emulation of "redeemed_by IS NULL" guards used by atomic
    // first-redeemer-wins updates. NOTE(review): a WHERE clause that
    // null-checks a *different* column would also trip this — acceptable
    // for the statements this fake is exercised with.
    const requiresRedeemedByNull =
      whereClause.includes("redeemed_by") && whereClause.includes("is null");

    let matchedRows = 0;
    for (const row of inviteRows) {
      if (idFilter && row.id !== idFilter) {
        continue;
      }
      if (requiresRedeemedByNull && row.redeemedBy !== null) {
        continue;
      }
      if (
        redeemedByFilter !== undefined &&
        row.redeemedBy !== redeemedByFilter
      ) {
        continue;
      }

      matchedRows += 1;
      if (
        typeof nextValues.redeemed_by === "string" ||
        nextValues.redeemed_by === null
      ) {
        row.redeemedBy = nextValues.redeemed_by;
      }
    }

    inviteUpdates.push({
      ...nextValues,
      id: idFilter,
      redeemed_by_where: redeemedByFilter,
      redeemed_by_is_null_where: requiresRedeemedByNull,
      matched_rows: matchedRows,
    });
    changes = matchedRows;
  }

  // --- DELETE humans (cascades to api_keys) -----------------------------
  if (
    normalizedQuery.includes('delete from "humans"') ||
    normalizedQuery.includes("delete from humans")
  ) {
    const whereClause = extractWhereClause(query);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params,
    });
    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id[0])
        : "";

    if (idFilter.length > 0) {
      // Iterate backwards so splice() does not skip elements.
      for (let index = humanRows.length - 1; index >= 0; index -= 1) {
        if (humanRows[index]?.id === idFilter) {
          humanRows.splice(index, 1);
          changes += 1;
        }
      }

      // Cascade delete of the human's API keys; cascaded rows are not
      // counted in `changes` (mirrors FK cascade semantics).
      for (let index = apiKeyRows.length - 1; index >= 0; index -= 1) {
        if (apiKeyRows[index]?.humanId === idFilter) {
          apiKeyRows.splice(index, 1);
        }
      }
    }
  }

  // --- INSERT agents (recorded only, not materialized) ------------------
  if (
    normalizedQuery.includes('insert into "agents"') ||
    normalizedQuery.includes("insert into agents")
  ) {
    const columns = parseInsertColumns(query, "agents");
    const row = columns.reduce<FakeAgentInsertRow>((acc, column, index) => {
      acc[column] = params[index];
      return acc;
    }, {});
    agentInserts.push(row);
    changes = 1;
  }

  // --- INSERT agent_registration_challenges -----------------------------
  if (
    normalizedQuery.includes('insert into "agent_registration_challenges"') ||
    normalizedQuery.includes("insert into agent_registration_challenges")
  ) {
    const columns = parseInsertColumns(query, "agent_registration_challenges");
    const row = columns.reduce<FakeAgentRegistrationChallengeInsertRow>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    agentRegistrationChallengeInserts.push(row);

    if (
      typeof row.id === "string" &&
      typeof row.owner_id === "string" &&
      typeof row.public_key === "string" &&
      typeof row.nonce === "string" &&
      (row.status === "pending" || row.status === "used") &&
      typeof row.expires_at === "string" &&
      typeof row.created_at === "string" &&
      typeof row.updated_at === "string"
    ) {
      registrationChallengeRows.push({
        id: row.id,
        ownerId: row.owner_id,
        publicKey: row.public_key,
        nonce: row.nonce,
        status: row.status,
        expiresAt: row.expires_at,
        usedAt: typeof row.used_at === "string" ? String(row.used_at) : null,
        createdAt: row.created_at,
        updatedAt: row.updated_at,
      });
    }

    changes = 1;
  }

  // --- UPDATE agent_registration_challenges -----------------------------
  if (
    normalizedQuery.includes('update "agent_registration_challenges"') ||
    normalizedQuery.includes("update agent_registration_challenges")
  ) {
    const setColumns = parseUpdateSetColumns(
      query,
      "agent_registration_challenges",
    );
    const nextValues = setColumns.reduce<Record<string, unknown>>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    const whereClause = extractWhereClause(query);
    const whereParams = params.slice(setColumns.length);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params: whereParams,
    });
    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id[0])
        : undefined;
    const ownerFilter =
      typeof equalityParams.values.owner_id?.[0] === "string"
        ? String(equalityParams.values.owner_id[0])
        : undefined;
    const statusFilter =
      typeof equalityParams.values.status?.[0] === "string"
        ? String(equalityParams.values.status[0])
        : undefined;

    let matchedRows = 0;
    for (const row of registrationChallengeRows) {
      if (idFilter && row.id !== idFilter) {
        continue;
      }
      if (ownerFilter && row.ownerId !== ownerFilter) {
        continue;
      }
      if (statusFilter && row.status !== statusFilter) {
        continue;
      }

      matchedRows += 1;
      if (nextValues.status === "pending" || nextValues.status === "used") {
        row.status = nextValues.status;
      }
      if (
        typeof nextValues.used_at === "string" ||
        nextValues.used_at === null
      ) {
        row.usedAt = nextValues.used_at;
      }
      if (typeof nextValues.updated_at === "string") {
        row.updatedAt = nextValues.updated_at;
      }
    }

    agentRegistrationChallengeUpdates.push({
      ...nextValues,
      id: idFilter,
      owner_id: ownerFilter,
      status_where: statusFilter,
      matched_rows: matchedRows,
    });
    changes = matchedRows;
  }

  // --- UPDATE agents ----------------------------------------------------
  if (
    normalizedQuery.includes('update "agents"') ||
    normalizedQuery.includes("update agents")
  ) {
    // One-shot test hook fired before the first agent update is applied.
    if (
      !state.beforeFirstAgentUpdateApplied &&
      options.beforeFirstAgentUpdate
    ) {
      options.beforeFirstAgentUpdate(agentRows);
      state.beforeFirstAgentUpdateApplied = true;
    }

    const setColumns = parseUpdateSetColumns(query, "agents");
    const nextValues = setColumns.reduce<Record<string, unknown>>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    const whereClause = extractWhereClause(query);
    const whereParams = params.slice(setColumns.length);
    const equalityParams = parseWhereEqualityParams({
      whereClause,
      params: whereParams,
    });
    const ownerFilter =
      typeof equalityParams.values.owner_id?.[0] === "string"
        ? String(equalityParams.values.owner_id?.[0])
        : undefined;
    const idFilter =
      typeof equalityParams.values.id?.[0] === "string"
        ? String(equalityParams.values.id?.[0])
        : undefined;
    const statusFilter =
      typeof equalityParams.values.status?.[0] === "string"
        ? String(equalityParams.values.status?.[0])
        : undefined;
    // current_jti may legitimately be bound to null (token rotation), so
    // null is a meaningful filter value here — only undefined means "no
    // filter".
    const currentJtiFilter = equalityParams.values.current_jti?.[0] as
      | string
      | null
      | undefined;

    let matchedRows = 0;

    for (const row of agentRows) {
      if (ownerFilter && row.ownerId !== ownerFilter) {
        continue;
      }
      if (idFilter && row.id !== idFilter) {
        continue;
      }
      if (
        statusFilter &&
        row.status !== (statusFilter as "active" | "revoked")
      ) {
        continue;
      }
      if (
        currentJtiFilter !== undefined &&
        (row.currentJti ?? null) !== currentJtiFilter
      ) {
        continue;
      }

      matchedRows += 1;

      if (nextValues.status === "active" || nextValues.status === "revoked") {
        row.status = nextValues.status;
      }
      if (typeof nextValues.updated_at === "string") {
        row.updatedAt = nextValues.updated_at;
      }
      if (
        typeof nextValues.current_jti === "string" ||
        nextValues.current_jti === null
      ) {
        row.currentJti = nextValues.current_jti;
      }
      if (
        typeof nextValues.expires_at === "string" ||
        nextValues.expires_at === null
      ) {
        row.expiresAt = nextValues.expires_at;
      }
    }

    agentUpdates.push({
      ...nextValues,
      owner_id: ownerFilter,
      id: idFilter,
      status_where: statusFilter,
      current_jti_where: currentJtiFilter,
      matched_rows: matchedRows,
    });
    changes = matchedRows;
  }

  // --- INSERT revocations -----------------------------------------------
  if (
    normalizedQuery.includes('insert into "revocations"') ||
    normalizedQuery.includes("insert into revocations")
  ) {
    const columns = parseInsertColumns(query, "revocations");
    const row = columns.reduce<FakeRevocationInsertRow>(
      (acc, column, index) => {
        acc[column] = params[index];
        return acc;
      },
      {},
    );
    revocationInserts.push(row);
    if (
      typeof row.id === "string" &&
      typeof row.jti === "string" &&
      typeof row.agent_id === "string" &&
      typeof row.revoked_at === "string"
    ) {
      revocationRows.push({
        id: row.id,
        jti: row.jti,
        agentId: row.agent_id,
        reason: typeof row.reason === "string" ? row.reason : null,
        revokedAt: row.revoked_at,
      });
    }
    changes = 1;
  }

  return changes;
}
row.reason : null, + revokedAt: row.revoked_at, + }); + } + changes = 1; + } + + return changes; +} diff --git a/apps/registry/src/server.test/helpers/db/run-handlers-types.ts b/apps/registry/src/server.test/helpers/db/run-handlers-types.ts new file mode 100644 index 0000000..5844359 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/run-handlers-types.ts @@ -0,0 +1,9 @@ +import type { FakeDbState } from "./types.js"; + +export type RunHandlerPhaseInput = { + query: string; + normalizedQuery: string; + params: unknown[]; + changes: number; + state: FakeDbState; +}; diff --git a/apps/registry/src/server.test/helpers/db/run-handlers.ts b/apps/registry/src/server.test/helpers/db/run-handlers.ts new file mode 100644 index 0000000..4650e89 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/run-handlers.ts @@ -0,0 +1,38 @@ +import { applyRunHandlersPhaseOne } from "./run-handlers-phase-one.js"; +import { applyRunHandlersPhaseTwo } from "./run-handlers-phase-two.js"; +import type { FakeDbState } from "./types.js"; + +export function handleRunQuery(input: { + query: string; + normalizedQuery: string; + params: unknown[]; + state: FakeDbState; +}): D1Result { + const { query, normalizedQuery, params, state } = input; + + if ( + state.options.failBeginTransaction && + normalizedQuery.trim() === "begin" + ) { + throw new Error("Failed query: begin"); + } + + let changes = 0; + + changes = applyRunHandlersPhaseOne({ + query, + normalizedQuery, + params, + changes, + state, + }); + changes = applyRunHandlersPhaseTwo({ + query, + normalizedQuery, + params, + changes, + state, + }); + + return { success: true, meta: { changes } } as D1Result; +} diff --git a/apps/registry/src/server.test/helpers/db/types.ts b/apps/registry/src/server.test/helpers/db/types.ts new file mode 100644 index 0000000..ae90266 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/types.ts @@ -0,0 +1,182 @@ +// Shared fake DB types used by server test helpers. 
// Shared fake DB types used by server test helpers.

/**
 * Flattened api_keys ⋈ humans join row served by the fake PAT-auth lookup
 * query (one API key plus its owning human).
 */
export type FakeD1Row = {
  apiKeyId: string;
  keyPrefix: string;
  keyHash: string;
  apiKeyStatus: "active" | "revoked";
  apiKeyName: string;
  humanId: string;
  humanDid: string;
  humanDisplayName: string;
  humanRole: "admin" | "user";
  humanStatus: "active" | "suspended";
};

/** In-memory `humans` table row (camelCase, post-mapping). */
export type FakeHumanRow = {
  id: string;
  did: string;
  displayName: string;
  role: "admin" | "user";
  status: "active" | "suspended";
  createdAt: string;
  updatedAt: string;
};

/** In-memory `api_keys` table row. */
export type FakeApiKeyRow = {
  id: string;
  humanId: string;
  keyHash: string;
  keyPrefix: string;
  name: string;
  status: "active" | "revoked";
  createdAt: string;
  lastUsedAt: string | null;
};

/** In-memory `agent_auth_sessions` row (one per agent, refresh + access halves). */
export type FakeAgentAuthSessionRow = {
  id: string;
  agentId: string;
  refreshKeyHash: string;
  refreshKeyPrefix: string;
  refreshIssuedAt: string;
  refreshExpiresAt: string;
  refreshLastUsedAt: string | null;
  accessKeyHash: string;
  accessKeyPrefix: string;
  accessIssuedAt: string;
  accessExpiresAt: string;
  accessLastUsedAt: string | null;
  status: "active" | "revoked";
  revokedAt: string | null;
  createdAt: string;
  updatedAt: string;
};

// Raw statement captures: SQL column name → bound value, recorded verbatim
// by the run handlers so tests can assert on exactly what was written.
export type FakeAgentAuthEventInsertRow = Record<string, unknown>;
export type FakeAgentAuthSessionInsertRow = Record<string, unknown>;
export type FakeAgentAuthSessionUpdateRow = Record<string, unknown>;
/** snake_case api_keys row as SELECT statements return it. */
export type FakeApiKeySelectRow = {
  id: string;
  human_id: string;
  key_hash: string;
  key_prefix: string;
  name: string;
  status: "active" | "revoked";
  created_at: string;
  last_used_at: string | null;
};

export type FakeAgentInsertRow = Record<string, unknown>;
export type FakeHumanInsertRow = Record<string, unknown>;
export type FakeApiKeyInsertRow = Record<string, unknown>;
export type FakeAgentUpdateRow = Record<string, unknown>;
export type FakeRevocationInsertRow = Record<string, unknown>;
export type FakeAgentRegistrationChallengeInsertRow = Record<string, unknown>;
export type FakeAgentRegistrationChallengeUpdateRow = Record<string, unknown>;
export type FakeInviteInsertRow = Record<string, unknown>;
export type FakeInviteUpdateRow = Record<string, unknown>;
/** In-memory `revocations` row (CRL entry for a revoked agent token). */
export type FakeRevocationRow = {
  id: string;
  jti: string;
  agentId: string;
  reason: string | null;
  revokedAt: string;
};
/** In-memory `agents` row; optional fields are populated lazily by tests. */
export type FakeAgentRow = {
  id: string;
  did: string;
  ownerId: string;
  name: string;
  framework: string | null;
  publicKey?: string;
  status: "active" | "revoked";
  expiresAt: string | null;
  currentJti?: string | null;
  createdAt?: string;
  updatedAt?: string;
};
/** In-memory `agent_registration_challenges` row. */
export type FakeAgentRegistrationChallengeRow = {
  id: string;
  ownerId: string;
  publicKey: string;
  nonce: string;
  status: "pending" | "used";
  expiresAt: string;
  usedAt: string | null;
  createdAt: string;
  updatedAt: string;
};
/** In-memory `invites` row; redeemedBy null ⇒ not yet redeemed. */
export type FakeInviteRow = {
  id: string;
  code: string;
  createdBy: string;
  redeemedBy: string | null;
  agentId: string | null;
  expiresAt: string | null;
  createdAt: string;
};

/** snake_case agents ⋈ humans row as agent SELECT statements return it. */
export type FakeAgentSelectRow = {
  id: string;
  did: string;
  owner_id: string;
  owner_did: string;
  name: string;
  framework: string | null;
  public_key: string;
  status: "active" | "revoked";
  expires_at: string | null;
  current_jti: string | null;
  created_at: string;
  updated_at: string;
};

/**
 * Per-test configuration for createFakeDb: fault injection (BEGIN failure,
 * first-N api_key insert failures), race-simulation hooks fired before the
 * first agents / agent_auth_sessions UPDATE, and seed rows for select
 * tables.
 */
export type FakeDbOptions = {
  beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void;
  beforeFirstAgentAuthSessionUpdate?: (
    sessionRows: FakeAgentAuthSessionRow[],
  ) => void;
  failApiKeyInsertCount?: number;
  failBeginTransaction?: boolean;
  inviteRows?: FakeInviteRow[];
  revocationRows?: FakeRevocationRow[];
  registrationChallengeRows?: FakeAgentRegistrationChallengeRow[];
  agentAuthSessionRows?: FakeAgentAuthSessionRow[];
};

/** snake_case revocations ⋈ agents row as CRL SELECT statements return it. */
export type FakeCrlSelectRow = {
  id: string;
  jti: string;
  reason: string | null;
  revoked_at: string;
  agent_did: string;
  did: string;
};

/**
 * The whole mutable world of the fake database: queryable table rows
 * (`*Rows`), verbatim statement captures (`*Inserts` / `*Updates`), and
 * one-shot bookkeeping flags for the test hooks and fault injection.
 */
export type FakeDbState = {
  authRows: FakeD1Row[];
  agentRows: FakeAgentRow[];
  options: FakeDbOptions;
  // api_keys last-used-at touches recorded by the UPDATE handler.
  updates: Array<{ lastUsedAt: string; apiKeyId: string }>;
  humanInserts: FakeHumanInsertRow[];
  apiKeyInserts: FakeApiKeyInsertRow[];
  agentInserts: FakeAgentInsertRow[];
  agentUpdates: FakeAgentUpdateRow[];
  revocationInserts: FakeRevocationInsertRow[];
  agentRegistrationChallengeInserts: FakeAgentRegistrationChallengeInsertRow[];
  agentRegistrationChallengeUpdates: FakeAgentRegistrationChallengeUpdateRow[];
  agentAuthSessionInserts: FakeAgentAuthSessionInsertRow[];
  agentAuthSessionUpdates: FakeAgentAuthSessionUpdateRow[];
  agentAuthEventInserts: FakeAgentAuthEventInsertRow[];
  inviteInserts: FakeInviteInsertRow[];
  inviteUpdates: FakeInviteUpdateRow[];
  revocationRows: FakeRevocationRow[];
  registrationChallengeRows: FakeAgentRegistrationChallengeRow[];
  agentAuthSessionRows: FakeAgentAuthSessionRow[];
  inviteRows: FakeInviteRow[];
  humanRows: FakeHumanRow[];
  apiKeyRows: FakeApiKeyRow[];
  // One-shot guards so the before-first-update hooks fire at most once.
  beforeFirstAgentUpdateApplied: boolean;
  beforeFirstAgentAuthSessionUpdateApplied: boolean;
  // Countdown for injected api_key INSERT failures.
  remainingApiKeyInsertFailures: number;
};
// Tests for the admin-only invite-creation endpoint: auth failures,
// payload validation, and the happy path including persistence.
describe(`POST ${INVITES_PATH}`, () => {
  // No Authorization header → rejected before any DB access (empty DB binding).
  it("returns 401 when PAT is missing", async () => {
    const response = await createRegistryApp().request(
      INVITES_PATH,
      {
        method: "POST",
        headers: {
          "content-type": "application/json",
        },
        body: JSON.stringify({}),
      },
      { DB: {} as D1Database, ENVIRONMENT: "test" },
    );

    expect(response.status).toBe(401);
    const body = (await response.json()) as { error: { code: string } };
    expect(body.error.code).toBe("API_KEY_MISSING");
  });

  // A valid PAT owned by a non-admin human cannot create invites.
  it("returns 403 when PAT owner is not an admin", async () => {
    const { token, authRow } = await makeValidPatContext();
    const { database } = createFakeDb([
      {
        ...authRow,
        humanRole: "user",
      },
    ]);

    const response = await createRegistryApp().request(
      INVITES_PATH,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${token}`,
          "content-type": "application/json",
        },
        body: JSON.stringify({}),
      },
      { DB: database, ENVIRONMENT: "test" },
    );

    expect(response.status).toBe(403);
    const body = (await response.json()) as { error: { code: string } };
    expect(body.error.code).toBe("INVITE_CREATE_FORBIDDEN");
  });

  // Malformed expiresAt is rejected with a field-level validation error.
  it("returns 400 when payload is invalid", async () => {
    const { token, authRow } = await makeValidPatContext();
    const { database } = createFakeDb([authRow]);

    const response = await createRegistryApp().request(
      INVITES_PATH,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${token}`,
          "content-type": "application/json",
        },
        body: JSON.stringify({
          expiresAt: "not-an-iso-date",
        }),
      },
      { DB: database, ENVIRONMENT: "test" },
    );

    expect(response.status).toBe(400);
    const body = (await response.json()) as {
      error: {
        code: string;
        details?: { fieldErrors?: Record<string, string[]> };
      };
    };
    expect(body.error.code).toBe("INVITE_CREATE_INVALID");
    expect(body.error.details?.fieldErrors?.expiresAt).toEqual([
      "expiresAt must be a valid ISO-8601 datetime",
    ]);
  });

  // Happy path: invite is returned to the caller AND persisted via the
  // fake DB's recorded insert.
  it("creates invite code and persists invite row", async () => {
    const { token, authRow } = await makeValidPatContext();
    const { database, inviteInserts } = createFakeDb([authRow]);
    const expiresAt = new Date(Date.now() + 60 * 60 * 1000).toISOString();

    const response = await createRegistryApp().request(
      INVITES_PATH,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${token}`,
          "content-type": "application/json",
        },
        body: JSON.stringify({
          expiresAt,
        }),
      },
      { DB: database, ENVIRONMENT: "test" },
    );

    expect(response.status).toBe(201);
    const body = (await response.json()) as {
      invite: {
        id: string;
        code: string;
        createdBy: string;
        expiresAt: string | null;
        createdAt: string;
      };
    };
    expect(body.invite.code.startsWith("clw_inv_")).toBe(true);
    expect(body.invite.createdBy).toBe("human-1");
    expect(body.invite.expiresAt).toBe(expiresAt);
    expect(body.invite.createdAt).toEqual(expect.any(String));

    // The response and the persisted row must agree.
    expect(inviteInserts).toHaveLength(1);
    expect(inviteInserts[0]?.id).toBe(body.invite.id);
    expect(inviteInserts[0]?.code).toBe(body.invite.code);
    expect(inviteInserts[0]?.created_by).toBe("human-1");
    expect(inviteInserts[0]?.expires_at).toBe(expiresAt);
  });
});
const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("INVITE_REDEEM_EXPIRED"); + }); + + it("returns 409 when invite is already redeemed", async () => { + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow], [], { + inviteRows: [ + { + id: generateUlid(1700700001000), + code: "clw_inv_redeemed", + createdBy: "human-1", + redeemedBy: "human-2", + agentId: null, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + + const response = await createRegistryApp().request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: "clw_inv_redeemed", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(409); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("INVITE_REDEEM_ALREADY_USED"); + }); + + it("redeems invite and returns PAT that authenticates /v1/me", async () => { + const { authRow } = await makeValidPatContext(); + const inviteCode = "clw_inv_redeem_success"; + const { database, humanInserts, apiKeyInserts, inviteRows, inviteUpdates } = + createFakeDb([authRow], [], { + inviteRows: [ + { + id: generateUlid(1700700002000), + code: inviteCode, + createdBy: "human-1", + redeemedBy: null, + agentId: null, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const appInstance = createRegistryApp(); + + const redeemResponse = await appInstance.request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: inviteCode, + displayName: "Invitee Alpha", + apiKeyName: "primary-invite-key", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(redeemResponse.status).toBe(201); + const 
redeemBody = (await redeemResponse.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: "admin" | "user"; + status: "active" | "suspended"; + }; + apiKey: { + id: string; + name: string; + token: string; + }; + proxyUrl: string; + }; + expect(redeemBody.human.displayName).toBe("Invitee Alpha"); + expect(redeemBody.human.role).toBe("user"); + expect(redeemBody.apiKey.name).toBe("primary-invite-key"); + expect(redeemBody.apiKey.token.startsWith("clw_pat_")).toBe(true); + expect(redeemBody.proxyUrl).toBe("https://dev.proxy.clawdentity.com"); + + expect(humanInserts).toHaveLength(1); + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.human_id).toBe(redeemBody.human.id); + expect(inviteUpdates).toHaveLength(1); + expect(inviteRows[0]?.redeemedBy).toBe(redeemBody.human.id); + + const meResponse = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${redeemBody.apiKey.token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(meResponse.status).toBe(200); + const meBody = (await meResponse.json()) as { + human: { + id: string; + displayName: string; + role: "admin" | "user"; + }; + }; + expect(meBody.human.id).toBe(redeemBody.human.id); + expect(meBody.human.displayName).toBe("Invitee Alpha"); + expect(meBody.human.role).toBe("user"); + }); + + it("rolls back fallback mutations when api key insert fails", async () => { + const { authRow } = await makeValidPatContext(); + const inviteCode = "clw_inv_fallback_rollback"; + const { database, humanRows, inviteRows } = createFakeDb([authRow], [], { + failBeginTransaction: true, + failApiKeyInsertCount: 1, + inviteRows: [ + { + id: generateUlid(1700700003000), + code: inviteCode, + createdBy: "human-1", + redeemedBy: null, + agentId: null, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const appInstance = createRegistryApp(); + + const firstResponse = await 
appInstance.request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: inviteCode, + displayName: "Fallback Invitee", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(firstResponse.status).toBe(500); + expect(humanRows).toHaveLength(1); + expect(inviteRows[0]?.redeemedBy).toBeNull(); + + const secondResponse = await appInstance.request( + INVITES_REDEEM_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + code: inviteCode, + displayName: "Fallback Invitee", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(secondResponse.status).toBe(201); + expect(humanRows).toHaveLength(2); + expect(inviteRows[0]?.redeemedBy).toEqual(expect.any(String)); + }); +}); diff --git a/apps/registry/src/server.test/keys-crl.test.ts b/apps/registry/src/server.test/keys-crl.test.ts new file mode 100644 index 0000000..113faed --- /dev/null +++ b/apps/registry/src/server.test/keys-crl.test.ts @@ -0,0 +1,409 @@ +import { + encodeBase64url, + generateUlid, + makeAgentDid, +} from "@clawdentity/protocol"; +import { + generateEd25519Keypair, + signAIT, + verifyAIT, + verifyCRL, +} from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeAitClaims } from "./helpers.js"; + +describe("GET /.well-known/claw-keys.json", () => { + it("returns configured registry signing keys with cache headers", async () => { + const res = await createRegistryApp().request( + "/.well-known/claw-keys.json", + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }, + ); + + expect(res.status).toBe(200); + expect(res.headers.get("Cache-Control")).toBe( + "public, 
max-age=300, s-maxage=300, stale-while-revalidate=60", + ); + + const body = (await res.json()) as { + keys: Array<{ + kid: string; + alg: string; + crv: string; + x: string; + status: string; + }>; + }; + expect(body.keys).toEqual([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]); + }); + + it("supports fetch-and-verify AIT flow using published keys", async () => { + const signer = await generateEd25519Keypair(); + const claims = makeAitClaims(signer.publicKey); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: signer, + }); + + const keysResponse = await createRegistryApp().request( + "/.well-known/claw-keys.json", + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]), + }, + ); + + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const verifiedClaims = await verifyAIT({ + token, + expectedIssuer: claims.iss, + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(verifiedClaims).toEqual(claims); + }); + + it("does not verify AIT when published key status is revoked", async () => { + const signer = await generateEd25519Keypair(); + const claims = makeAitClaims(signer.publicKey); + const token = await signAIT({ + claims, + signerKid: "reg-key-1", + signerKeypair: signer, + }); + + const keysResponse = await createRegistryApp().request( + "/.well-known/claw-keys.json", + {}, + { + DB: {} as D1Database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + 
crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "revoked", + }, + ]), + }, + ); + + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + await expect( + verifyAIT({ + token, + expectedIssuer: claims.iss, + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }), + ).rejects.toThrow(/kid/i); + }); +}); + +describe("GET /v1/crl", () => { + it("returns signed CRL snapshot with cache headers", async () => { + const signer = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + const signingKeyset = JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(signer.publicKey), + status: "active", + }, + ]); + const agentIdOne = generateUlid(1700400000000); + const agentIdTwo = generateUlid(1700400000100); + const revocationJtiOne = generateUlid(1700400000200); + const revocationJtiTwo = generateUlid(1700400000300); + const { database } = createFakeDb( + [], + [ + { + id: agentIdOne, + did: makeAgentDid(agentIdOne), + ownerId: "human-1", + name: "revoked-one", + framework: "openclaw", + status: "revoked", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + { + id: agentIdTwo, + did: makeAgentDid(agentIdTwo), + ownerId: "human-2", + name: "revoked-two", + framework: "langchain", + status: "revoked", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + { + revocationRows: [ + { + id: generateUlid(1700400000400), + jti: revocationJtiOne, + agentId: agentIdOne, + reason: null, + revokedAt: "2026-02-11T10:00:00.000Z", + }, + { + id: generateUlid(1700400000500), + jti: revocationJtiTwo, + agentId: agentIdTwo, + reason: "manual revoke", + revokedAt: "2026-02-11T11:00:00.000Z", + }, + ], + }, + ); + + const response = await appInstance.request( + "/v1/crl", + 
{}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + + expect(response.status).toBe(200); + expect(response.headers.get("Cache-Control")).toBe( + "public, max-age=300, s-maxage=300, stale-while-revalidate=60", + ); + const body = (await response.json()) as { crl: string }; + expect(body.crl).toEqual(expect.any(String)); + + const keysResponse = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + { + DB: database, + ENVIRONMENT: "test", + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingKeyset, + }, + ); + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyCRL({ + token: body.crl, + expectedIssuer: "https://dev.registry.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(claims.revocations).toHaveLength(2); + expect(claims.revocations).toEqual( + expect.arrayContaining([ + { + jti: revocationJtiOne, + agentDid: makeAgentDid(agentIdOne), + revokedAt: Math.floor(Date.parse("2026-02-11T10:00:00.000Z") / 1000), + }, + { + jti: revocationJtiTwo, + agentDid: makeAgentDid(agentIdTwo), + reason: "manual revoke", + revokedAt: Math.floor(Date.parse("2026-02-11T11:00:00.000Z") / 1000), + }, + ]), + ); + expect(claims.exp).toBeGreaterThan(claims.iat); + expect(claims.exp - claims.iat).toBe(390); + }); + + it("returns 404 when no revocations are available", async () => { + const { database } = createFakeDb([]); + const response = await createRegistryApp().request( + "/v1/crl", + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(404); + const body = (await response.json()) as { + 
error: { + code: string; + message: string; + }; + }; + expect(body.error.code).toBe("CRL_NOT_FOUND"); + expect(body.error.message).toBe("CRL snapshot is not available"); + }); + + it("returns 429 when rate limit is exceeded for the same client", async () => { + const { database } = createFakeDb([]); + const appInstance = createRegistryApp({ + rateLimit: { + crlMaxRequests: 2, + crlWindowMs: 60_000, + }, + }); + + for (let index = 0; index < 2; index += 1) { + const response = await appInstance.request( + "/v1/crl", + { + headers: { + "CF-Connecting-IP": "203.0.113.77", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(404); + } + + const rateLimited = await appInstance.request( + "/v1/crl", + { + headers: { + "CF-Connecting-IP": "203.0.113.77", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); + + it("returns 500 when CRL signing configuration is missing", async () => { + const agentId = generateUlid(1700400000600); + const { database } = createFakeDb( + [], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "revoked-agent", + framework: "openclaw", + status: "revoked", + expiresAt: "2026-03-01T00:00:00.000Z", + }, + ], + { + revocationRows: [ + { + id: generateUlid(1700400000700), + jti: generateUlid(1700400000800), + agentId, + reason: null, + revokedAt: "2026-02-11T12:00:00.000Z", + }, + ], + }, + ); + + const response = await createRegistryApp().request( + "/v1/crl", + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(500); + const body = (await response.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is 
invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + REGISTRY_SIGNING_KEYS: expect.any(Array), + }); + }); +}); diff --git a/apps/registry/src/server.test/me-api-keys.test.ts b/apps/registry/src/server.test/me-api-keys.test.ts new file mode 100644 index 0000000..7a5338f --- /dev/null +++ b/apps/registry/src/server.test/me-api-keys.test.ts @@ -0,0 +1,359 @@ +import { generateUlid, ME_API_KEYS_PATH } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { + deriveApiKeyLookupPrefix, + hashApiKeyToken, +} from "../auth/api-key-auth.js"; +import { createRegistryApp } from "../server.js"; +import { + createFakeDb, + type FakeD1Row, + makeValidPatContext, +} from "./helpers.js"; + +describe(`POST ${ME_API_KEYS_PATH}`, () => { + it("returns 401 when PAT is missing", async () => { + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ name: "workstation" }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(401); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("creates key and returns plaintext token once", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, apiKeyInserts } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "workstation", + }), + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(201); + const body = (await response.json()) as { + apiKey: { + id: string; + name: string; + status: "active" | "revoked"; + createdAt: string; + lastUsedAt: string | null; + token: string; + }; + }; + 
expect(body.apiKey.name).toBe("workstation"); + expect(body.apiKey.status).toBe("active"); + expect(body.apiKey.token).toMatch(/^clw_pat_/); + expect(body.apiKey.lastUsedAt).toBeNull(); + + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.name).toBe("workstation"); + expect(apiKeyInserts[0]?.key_hash).not.toBe(body.apiKey.token); + expect(apiKeyInserts[0]?.key_prefix).toBe( + deriveApiKeyLookupPrefix(body.apiKey.token), + ); + }); + + it("accepts empty body and uses default key name", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, apiKeyInserts } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(201); + const body = (await response.json()) as { + apiKey: { + name: string; + token: string; + }; + }; + expect(body.apiKey.name).toBe("api-key"); + expect(body.apiKey.token).toMatch(/^clw_pat_/); + expect(apiKeyInserts).toHaveLength(1); + expect(apiKeyInserts[0]?.name).toBe("api-key"); + }); +}); + +describe(`GET ${ME_API_KEYS_PATH}`, () => { + it("returns metadata for caller-owned keys only", async () => { + const authToken = "clw_pat_valid-token-value"; + const authTokenHash = await hashApiKeyToken(authToken); + const revokedToken = "clw_pat_revoked-token-value"; + const revokedTokenHash = await hashApiKeyToken(revokedToken); + const foreignToken = "clw_pat_foreign-token-value"; + const foreignTokenHash = await hashApiKeyToken(foreignToken); + + const authRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000000001", + keyPrefix: deriveApiKeyLookupPrefix(authToken), + keyHash: authTokenHash, + apiKeyStatus: "active", + apiKeyName: "primary", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + 
const revokedOwnedRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000000002", + keyPrefix: deriveApiKeyLookupPrefix(revokedToken), + keyHash: revokedTokenHash, + apiKeyStatus: "revoked", + apiKeyName: "old-laptop", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const foreignRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000000003", + keyPrefix: deriveApiKeyLookupPrefix(foreignToken), + keyHash: foreignTokenHash, + apiKeyStatus: "active", + apiKeyName: "foreign", + humanId: "human-2", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB8", + humanDisplayName: "Ira", + humanRole: "user", + humanStatus: "active", + }; + const { database } = createFakeDb([authRow, revokedOwnedRow, foreignRow]); + + const response = await createRegistryApp().request( + ME_API_KEYS_PATH, + { + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(200); + const body = (await response.json()) as { + apiKeys: Array<{ + id: string; + name: string; + status: "active" | "revoked"; + createdAt: string; + lastUsedAt: string | null; + token?: string; + keyHash?: string; + keyPrefix?: string; + }>; + }; + expect(body.apiKeys).toEqual([ + { + id: "01KJ0000000000000000000002", + name: "old-laptop", + status: "revoked", + createdAt: "2026-01-01T00:00:00.000Z", + lastUsedAt: null, + }, + { + id: "01KJ0000000000000000000001", + name: "primary", + status: "active", + createdAt: "2026-01-01T00:00:00.000Z", + lastUsedAt: expect.any(String), + }, + ]); + for (const apiKey of body.apiKeys) { + expect(apiKey).not.toHaveProperty("token"); + expect(apiKey).not.toHaveProperty("keyHash"); + expect(apiKey).not.toHaveProperty("keyPrefix"); + } + }); +}); + +describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { + it("returns 400 for invalid id path", async () => { + const { token, authRow } = await 
makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + `${ME_API_KEYS_PATH}/invalid-id`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(400); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_REVOKE_INVALID_PATH"); + }); + + it("returns 404 when key is not found for owner", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const response = await createRegistryApp().request( + `${ME_API_KEYS_PATH}/${generateUlid(1700300000000)}`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${token}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(404); + const body = (await response.json()) as { error: { code: string } }; + expect(body.error.code).toBe("API_KEY_NOT_FOUND"); + }); + + it("revokes target key but keeps unrelated key active", async () => { + const authToken = "clw_pat_valid-token-value"; + const authTokenHash = await hashApiKeyToken(authToken); + const rotateToken = "clw_pat_rotation-token-value"; + const rotateTokenHash = await hashApiKeyToken(rotateToken); + const targetApiKeyId = generateUlid(1700300000000); + + const authRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000001001", + keyPrefix: deriveApiKeyLookupPrefix(authToken), + keyHash: authTokenHash, + apiKeyStatus: "active", + apiKeyName: "primary", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const revokableRow: FakeD1Row = { + apiKeyId: targetApiKeyId, + keyPrefix: deriveApiKeyLookupPrefix(rotateToken), + keyHash: rotateTokenHash, + apiKeyStatus: "active", + apiKeyName: "rotate-me", + humanId: "human-1", + humanDid: 
"did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const { database } = createFakeDb([authRow, revokableRow]); + const appInstance = createRegistryApp(); + + const revokeResponse = await appInstance.request( + `${ME_API_KEYS_PATH}/${targetApiKeyId}`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(revokeResponse.status).toBe(204); + + const revokedAuth = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${rotateToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(revokedAuth.status).toBe(401); + const revokedBody = (await revokedAuth.json()) as { + error: { code: string }; + }; + expect(revokedBody.error.code).toBe("API_KEY_REVOKED"); + + const activeAuth = await appInstance.request( + "/v1/me", + { + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + expect(activeAuth.status).toBe(200); + }); + + it("returns 204 when key is already revoked", async () => { + const authToken = "clw_pat_valid-token-value"; + const authTokenHash = await hashApiKeyToken(authToken); + const revokedToken = "clw_pat_already-revoked-token-value"; + const revokedTokenHash = await hashApiKeyToken(revokedToken); + const targetApiKeyId = generateUlid(1700300000100); + + const authRow: FakeD1Row = { + apiKeyId: "01KJ0000000000000000002001", + keyPrefix: deriveApiKeyLookupPrefix(authToken), + keyHash: authTokenHash, + apiKeyStatus: "active", + apiKeyName: "primary", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const alreadyRevokedRow: FakeD1Row = { + apiKeyId: targetApiKeyId, + keyPrefix: deriveApiKeyLookupPrefix(revokedToken), + keyHash: revokedTokenHash, + apiKeyStatus: "revoked", + apiKeyName: 
"already-revoked", + humanId: "human-1", + humanDid: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + humanDisplayName: "Ravi", + humanRole: "admin", + humanStatus: "active", + }; + const { database } = createFakeDb([authRow, alreadyRevokedRow]); + + const response = await createRegistryApp().request( + `${ME_API_KEYS_PATH}/${targetApiKeyId}`, + { + method: "DELETE", + headers: { + Authorization: `Bearer ${authToken}`, + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(204); + }); +}); diff --git a/apps/registry/src/server.test/resolve-me.test.ts b/apps/registry/src/server.test/resolve-me.test.ts new file mode 100644 index 0000000..52fdbcf --- /dev/null +++ b/apps/registry/src/server.test/resolve-me.test.ts @@ -0,0 +1,252 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { RESOLVE_RATE_LIMIT_MAX_REQUESTS } from "../rate-limit.js"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe("GET /v1/resolve/:id", () => { + it("returns public profile fields without requiring auth", async () => { + const { authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700500000000); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "resolve-me", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/resolve/${agentId}`, + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + did: string; + name: string; + framework: string; + status: "active" | "revoked"; + ownerDid: string; + email?: string; + displayName?: string; + }; + expect(body).toEqual({ + did: makeAgentDid(agentId), + name: "resolve-me", + framework: "openclaw", + 
status: "active", + ownerDid: authRow.humanDid, + }); + expect(body).not.toHaveProperty("email"); + expect(body).not.toHaveProperty("displayName"); + }); + + it("falls back framework to openclaw when stored framework is null", async () => { + const { authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700500000100); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "legacy-framework-null", + framework: null, + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/resolve/${agentId}`, + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { framework: string }; + expect(body.framework).toBe("openclaw"); + }); + + it("returns 400 for invalid id path", async () => { + const res = await createRegistryApp().request( + "/v1/resolve/not-a-ulid", + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_RESOLVE_INVALID_PATH"); + expect(body.error.details?.fieldErrors?.id).toEqual([ + "id must be a valid ULID", + ]); + }); + + it("returns 404 when agent does not exist", async () => { + const { authRow } = await makeValidPatContext(); + const missingAgentId = generateUlid(1700500000200); + const { database } = createFakeDb([authRow], []); + + const res = await createRegistryApp().request( + `/v1/resolve/${missingAgentId}`, + {}, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + }); + + it("returns 429 when rate limit is exceeded for the same client", async () => { + const { authRow } = await 
makeValidPatContext(); + const agentId = generateUlid(1700500000300); + const { database } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "rate-limited-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + }, + ], + ); + const appInstance = createRegistryApp(); + + for (let index = 0; index < RESOLVE_RATE_LIMIT_MAX_REQUESTS; index += 1) { + const response = await appInstance.request( + `/v1/resolve/${agentId}`, + { + headers: { + "CF-Connecting-IP": "203.0.113.10", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(response.status).toBe(200); + } + + const rateLimited = await appInstance.request( + `/v1/resolve/${agentId}`, + { + headers: { + "CF-Connecting-IP": "203.0.113.10", + }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(rateLimited.status).toBe(429); + const body = (await rateLimited.json()) as { error: { code: string } }; + expect(body.error.code).toBe("RATE_LIMIT_EXCEEDED"); + }); +}); + +describe("GET /v1/me", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/me", + {}, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 401 for invalid PAT", async () => { + const { authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/me", + { + headers: { Authorization: "Bearer clw_pat_invalid-token-value" }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_INVALID"); + }); + + it("returns 401 when PAT contains only marker", async () => { + const res = 
await createRegistryApp().request( + "/v1/me", + { + headers: { Authorization: "Bearer clw_pat_" }, + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_INVALID"); + }); + + it("authenticates valid PAT and injects ctx.human", async () => { + const { token: validToken, authRow } = await makeValidPatContext(); + const { database, updates } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/me", + { + headers: { Authorization: `Bearer ${validToken}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(200); + const body = (await res.json()) as { + human: { + id: string; + did: string; + displayName: string; + role: string; + apiKey: { id: string; name: string }; + }; + }; + expect(body.human).toEqual({ + id: "human-1", + did: "did:claw:human:01HF7YAT31JZHSMW1CG6Q6MHB7", + displayName: "Ravi", + role: "admin", + apiKey: { + id: "key-1", + name: "ci", + }, + }); + expect(updates).toHaveLength(1); + expect(updates[0]?.apiKeyId).toBe("key-1"); + }); +}); From 2c4c3cf651cb772c19d91985f5894f5b2bafd721 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 17:47:13 +0530 Subject: [PATCH 134/190] Review proxy pairing bootstrap --- apps/cli/src/commands/AGENTS.md | 5 +- apps/cli/src/commands/admin.test.ts | 7 + apps/cli/src/commands/admin.ts | 29 +++- apps/openclaw-skill/skill/AGENTS.md | 17 +++ apps/openclaw-skill/skill/SKILL.md | 1 + .../skill/references/clawdentity-protocol.md | 2 +- apps/registry/src/AGENTS.md | 7 +- apps/registry/src/server.test.ts | 141 +++++++++++++++++- apps/registry/src/server.ts | 54 ++++++- 9 files changed, 246 insertions(+), 17 deletions(-) create mode 100644 apps/openclaw-skill/skill/AGENTS.md diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index fdd9349..8e92f84 100644 --- 
a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -59,10 +59,11 @@ ## Admin Command Rules - `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. - `admin bootstrap` must import `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` instead of duplicating endpoint literals in command code/tests. -- Treat bootstrap API key token as write-once secret: print once, persist via config manager, and never log token contents. +- Treat bootstrap API key token and internal service secret as write-once secrets: print once and never log secret contents. - Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. -- Config persistence failures after successful bootstrap must not hide the returned PAT token; print token first, then surface recovery instructions. +- Bootstrap response parsing must require `{ human, apiKey, internalService }` to prevent partially-valid onboarding state. +- Config persistence failures after successful bootstrap must not hide the returned PAT token; print secrets first, then surface recovery instructions. ## API Key Command Rules - `api-key create` must call registry `POST /v1/me/api-keys` and print the plaintext PAT token once without persisting it into local config automatically. 
diff --git a/apps/cli/src/commands/admin.test.ts b/apps/cli/src/commands/admin.test.ts index f99cbbb..fae92b6 100644 --- a/apps/cli/src/commands/admin.test.ts +++ b/apps/cli/src/commands/admin.test.ts @@ -26,6 +26,11 @@ describe("admin bootstrap helper", () => { name: "prod-admin", token: "clw_pat_testtoken", }, + internalService: { + id: "01KHH000000000000000000002", + name: "proxy-pairing", + secret: "clw_srv_testsecret", + }, }), { status: 201, headers: { "content-type": "application/json" } }, ); @@ -47,6 +52,8 @@ describe("admin bootstrap helper", () => { expect(result.human.did).toBe("did:claw:human:00000000000000000000000000"); expect(result.apiKey.token).toBe("clw_pat_testtoken"); + expect(result.internalService.id).toBe("01KHH000000000000000000002"); + expect(result.internalService.secret).toBe("clw_srv_testsecret"); expect(result.registryUrl).toBe("https://api.example.com/"); expect(fetchMock).toHaveBeenCalledTimes(1); const [calledInput, calledInit] = fetchMock.mock.calls[0] as [ diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index 84da21f..e2d4dce 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -31,6 +31,11 @@ type AdminBootstrapResponse = { name: string; token: string; }; + internalService: { + id: string; + name: string; + secret: string; + }; }; export type AdminBootstrapResult = AdminBootstrapResponse & { @@ -96,7 +101,10 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { const body = payload as Record; const human = body.human as Record | undefined; const apiKey = body.apiKey as Record | undefined; - if (!human || !apiKey) { + const internalService = body.internalService as + | Record + | undefined; + if (!human || !apiKey || !internalService) { throw createCliError( "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", "Bootstrap response is invalid", @@ -109,6 +117,9 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { const apiKeyId = 
parseNonEmptyString(apiKey.id); const apiKeyName = parseNonEmptyString(apiKey.name); const apiKeyToken = parseNonEmptyString(apiKey.token); + const internalServiceId = parseNonEmptyString(internalService.id); + const internalServiceName = parseNonEmptyString(internalService.name); + const internalServiceSecret = parseNonEmptyString(internalService.secret); if ( humanId.length === 0 || @@ -116,7 +127,10 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { humanDisplayName.length === 0 || apiKeyId.length === 0 || apiKeyName.length === 0 || - apiKeyToken.length === 0 + apiKeyToken.length === 0 || + internalServiceId.length === 0 || + internalServiceName.length === 0 || + internalServiceSecret.length === 0 ) { throw createCliError( "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", @@ -137,6 +151,11 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { name: apiKeyName, token: apiKeyToken, }, + internalService: { + id: internalServiceId, + name: internalServiceName, + secret: internalServiceSecret, + }, }; } @@ -274,6 +293,12 @@ export const createAdminCommand = (): Command => { writeStdoutLine(`API key name: ${result.apiKey.name}`); writeStdoutLine("API key token (shown once):"); writeStdoutLine(result.apiKey.token); + writeStdoutLine(`Internal service ID: ${result.internalService.id}`); + writeStdoutLine( + `Internal service name: ${result.internalService.name}`, + ); + writeStdoutLine("Internal service secret (shown once):"); + writeStdoutLine(result.internalService.secret); await persistBootstrapConfig(result.registryUrl, result.apiKey.token); writeStdoutLine("API key saved to local config"); diff --git a/apps/openclaw-skill/skill/AGENTS.md b/apps/openclaw-skill/skill/AGENTS.md new file mode 100644 index 0000000..6d0cd1d --- /dev/null +++ b/apps/openclaw-skill/skill/AGENTS.md @@ -0,0 +1,17 @@ +# AGENTS.md (apps/openclaw-skill/skill) + +## Purpose +- Keep user-facing skill guidance aligned with current CLI/proxy/registry 
behavior. + +## Documentation Rules +- `SKILL.md` and `references/*.md` must use command-first remediation with executable `clawdentity` commands. +- Pairing error guidance must include `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` and explain proxy internal-service credential recovery. +- Keep invite/ticket prefixes explicit: + - `clw_inv_...` for onboarding invite redeem + - `clwpair1_...` for pairing tickets +- Do not document manual registry/proxy host changes unless explicitly needed for a recovery scenario. + +## Sync Rules +- When `skill/SKILL.md` or `skill/references/*` changes, regenerate and sync CLI bundle: + - `pnpm -F @clawdentity/openclaw-skill build` + - `pnpm -F clawdentity run sync:skill-bundle` diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index 9affb0c..a87d093 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -406,6 +406,7 @@ Do not suggest switching endpoints unless user explicitly asks for endpoint chan - `CLI_PAIR_STATUS_WAIT_CANCELLED`: wait interrupted (SIGINT). Run `pair recover`. - `CLI_PAIR_CONFIRM_INPUT_CONFLICT`: cannot provide both `--ticket` and `--qr-file`. Use one path only. - `CLI_PAIR_PROXY_URL_MISMATCH`: local `proxyUrl` does not match registry metadata. Rerun `clawdentity invite redeem `. +- `PROXY_PAIR_OWNERSHIP_UNAVAILABLE`: proxy cannot authenticate to registry ownership endpoint. Bootstrap should create `proxy-pairing` internal service automatically on fresh environments. For already-bootstrapped environments, create or rotate the internal service via admin API and update proxy secrets (`REGISTRY_INTERNAL_SERVICE_ID`, `REGISTRY_INTERNAL_SERVICE_SECRET`). - Responder shows peer but initiator does not: - Cause: initiator started pairing without `--wait`. - Fix: run `clawdentity pair status --ticket --wait` on initiator. 
diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index 86a4117..f947839 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -226,7 +226,7 @@ The connector `deliver` frame includes `fromAgentDid` as a top-level field. Inbo | HTTP Status | Error Code | Meaning | Recovery | |---|---|---|---| | 403 | `PROXY_PAIR_OWNERSHIP_FORBIDDEN` | Initiator ownership check failed | Recreate/refresh the local agent identity | -| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | Check proxy/registry service auth configuration | +| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | Ensure bootstrap created `proxy-pairing` internal service on registry; for existing envs create/rotate service credentials and update proxy `REGISTRY_INTERNAL_SERVICE_ID` + `REGISTRY_INTERNAL_SERVICE_SECRET` | | — | `CLI_PAIR_AGENT_NOT_FOUND` | Agent ait.jwt or secret.key missing/empty | Run `agent create` or `agent auth refresh` | | — | `CLI_PAIR_HUMAN_NAME_MISSING` | Local config is missing `humanName` | Set via `invite redeem` or config | | — | `CLI_PAIR_PROXY_URL_REQUIRED` | Proxy URL could not be resolved | Run `invite redeem` or set `CLAWDENTITY_PROXY_URL` | diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index e0b61d9..45369f8 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -18,9 +18,10 @@ - Require `x-bootstrap-secret` header and compare with constant-time semantics; invalid/missing secret must return `401 ADMIN_BOOTSTRAP_UNAUTHORIZED`. - If `BOOTSTRAP_SECRET` is not configured, return `503 ADMIN_BOOTSTRAP_DISABLED`. - If any admin human already exists, return `409 ADMIN_BOOTSTRAP_ALREADY_COMPLETED`. -- Success response must include `{ human, apiKey }` and return the PAT token only in bootstrap response. 
-- Persist admin bootstrap atomically where supported (transaction). When falling back because transactions are unavailable, run the manual mutation with rollback-on-api-key-failure so that no admin human exists without the new API key even if part of the bootstrap fails. -- Fallback path must be compensation-safe: if API key insert fails after admin insert, delete the inserted admin row before returning failure so retry remains possible. +- Success response must include `{ human, apiKey, internalService }`; return plaintext PAT and internal service secret only in bootstrap response. +- Bootstrap must create a default internal service named `proxy-pairing` with scope `identity.read` in the same mutation unit as admin + PAT creation. +- Persist admin bootstrap atomically where supported (transaction). When falling back because transactions are unavailable, run manual compensation rollback so no partial bootstrap state survives. +- Fallback path must be compensation-safe: if API key/internal-service insert fails after admin insert, delete inserted `internal_services` + `api_keys` rows before deleting the admin human so retry remains possible under FK constraints. ## Registry Keyset Contract - `/.well-known/claw-keys.json` is a public endpoint and must remain unauthenticated. 
diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index b7d4ed8..39164dd 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -41,6 +41,10 @@ import { deriveApiKeyLookupPrefix, hashApiKeyToken, } from "./auth/api-key-auth.js"; +import { + deriveInternalServiceSecretPrefix, + hashInternalServiceSecret, +} from "./auth/service-auth.js"; import { RESOLVE_RATE_LIMIT_MAX_REQUESTS } from "./rate-limit.js"; import app, { createRegistryApp } from "./server.js"; @@ -128,6 +132,7 @@ type FakeApiKeySelectRow = { type FakeAgentInsertRow = Record; type FakeHumanInsertRow = Record; type FakeApiKeyInsertRow = Record; +type FakeInternalServiceInsertRow = Record; type FakeAgentUpdateRow = Record; type FakeRevocationInsertRow = Record; type FakeAgentRegistrationChallengeInsertRow = Record; @@ -196,6 +201,7 @@ type FakeDbOptions = { sessionRows: FakeAgentAuthSessionRow[], ) => void; failApiKeyInsertCount?: number; + failInternalServiceInsertCount?: number; failBeginTransaction?: boolean; inviteRows?: FakeInviteRow[]; revocationRows?: FakeRevocationRow[]; @@ -1006,6 +1012,7 @@ function createFakeDb( const updates: Array<{ lastUsedAt: string; apiKeyId: string }> = []; const humanInserts: FakeHumanInsertRow[] = []; const apiKeyInserts: FakeApiKeyInsertRow[] = []; + const internalServiceInserts: FakeInternalServiceInsertRow[] = []; const agentInserts: FakeAgentInsertRow[] = []; const agentUpdates: FakeAgentUpdateRow[] = []; const revocationInserts: FakeRevocationInsertRow[] = []; @@ -1053,6 +1060,8 @@ function createFakeDb( let beforeFirstAgentUpdateApplied = false; let beforeFirstAgentAuthSessionUpdateApplied = false; let remainingApiKeyInsertFailures = options.failApiKeyInsertCount ?? 0; + let remainingInternalServiceInsertFailures = + options.failInternalServiceInsertCount ?? 
0; const database: D1Database = { prepare(query: string) { @@ -1610,6 +1619,26 @@ function createFakeDb( changes = 1; } + if ( + normalizedQuery.includes('insert into "internal_services"') || + normalizedQuery.includes("insert into internal_services") + ) { + if (remainingInternalServiceInsertFailures > 0) { + remainingInternalServiceInsertFailures -= 1; + throw new Error("internal service insert failed"); + } + + const columns = parseInsertColumns(query, "internal_services"); + const row = columns.reduce( + (acc, column, index) => { + acc[column] = params[index]; + return acc; + }, + {}, + ); + internalServiceInserts.push(row); + changes = 1; + } if ( normalizedQuery.includes('insert into "agent_auth_sessions"') || normalizedQuery.includes("insert into agent_auth_sessions") @@ -1985,6 +2014,35 @@ function createFakeDb( } } } + if ( + normalizedQuery.includes('delete from "api_keys"') || + normalizedQuery.includes("delete from api_keys") + ) { + const whereClause = extractWhereClause(query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params, + }); + const humanIdFilter = + typeof equalityParams.values.human_id?.[0] === "string" + ? 
String(equalityParams.values.human_id[0]) + : undefined; + + if (humanIdFilter) { + for (let index = apiKeyRows.length - 1; index >= 0; index -= 1) { + if (apiKeyRows[index]?.humanId === humanIdFilter) { + apiKeyRows.splice(index, 1); + changes += 1; + } + } + } + } + if ( + normalizedQuery.includes('delete from "internal_services"') || + normalizedQuery.includes("delete from internal_services") + ) { + changes = 1; + } if ( normalizedQuery.includes('insert into "agents"') || normalizedQuery.includes("insert into agents") @@ -2263,6 +2321,7 @@ function createFakeDb( humanRows, humanInserts, apiKeyInserts, + internalServiceInserts, agentAuthSessionRows, agentAuthSessionInserts, agentAuthSessionUpdates, @@ -2604,7 +2663,8 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }); it("creates admin human and PAT token once", async () => { - const { database, humanInserts, apiKeyInserts } = createFakeDb([]); + const { database, humanInserts, apiKeyInserts, internalServiceInserts } = + createFakeDb([]); const response = await createRegistryApp().request( ADMIN_BOOTSTRAP_PATH, @@ -2641,6 +2701,11 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { name: string; token: string; }; + internalService: { + id: string; + name: string; + secret: string; + }; }; expect(body.human.id).toBe("00000000000000000000000000"); @@ -2650,15 +2715,28 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { expect(body.human.status).toBe("active"); expect(body.apiKey.name).toBe("prod-admin-key"); expect(body.apiKey.token.startsWith("clw_pat_")).toBe(true); + expect(body.internalService.name).toBe("proxy-pairing"); + expect(body.internalService.secret.startsWith("clw_srv_")).toBe(true); expect(humanInserts).toHaveLength(1); expect(apiKeyInserts).toHaveLength(1); + expect(internalServiceInserts).toHaveLength(1); expect(apiKeyInserts[0]?.key_prefix).toBe( deriveApiKeyLookupPrefix(body.apiKey.token), ); expect(apiKeyInserts[0]?.key_hash).toBe( await hashApiKeyToken(body.apiKey.token), ); + 
expect(internalServiceInserts[0]?.name).toBe("proxy-pairing"); + expect(internalServiceInserts[0]?.scopes_json).toBe( + JSON.stringify(["identity.read"]), + ); + expect(internalServiceInserts[0]?.secret_prefix).toBe( + deriveInternalServiceSecretPrefix(body.internalService.secret), + ); + expect(internalServiceInserts[0]?.secret_hash).toBe( + await hashInternalServiceSecret(body.internalService.secret), + ); }); it("returns PAT that authenticates GET /v1/me on same app and database", async () => { @@ -2739,9 +2817,10 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }); it("falls back to manual mutation when transactions are unavailable", async () => { - const { database, humanInserts, apiKeyInserts } = createFakeDb([], [], { - failBeginTransaction: true, - }); + const { database, humanInserts, apiKeyInserts, internalServiceInserts } = + createFakeDb([], [], { + failBeginTransaction: true, + }); const response = await createRegistryApp().request( ADMIN_BOOTSTRAP_PATH, @@ -2766,6 +2845,7 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { expect(response.status).toBe(201); expect(humanInserts).toHaveLength(1); expect(apiKeyInserts).toHaveLength(1); + expect(internalServiceInserts).toHaveLength(1); }); it("rolls back admin insert when fallback api key insert fails", async () => { @@ -2820,6 +2900,59 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { expect(secondResponse.status).toBe(201); expect(humanRows).toHaveLength(1); }); + + it("rolls back admin insert when fallback internal service insert fails", async () => { + const { database, humanRows } = createFakeDb([], [], { + failBeginTransaction: true, + failInternalServiceInsertCount: 1, + }); + + const firstResponse = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: 
database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(firstResponse.status).toBe(500); + expect(humanRows).toHaveLength(0); + + const secondResponse = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(secondResponse.status).toBe(201); + expect(humanRows).toHaveLength(1); + }); }); describe("GET /.well-known/claw-keys.json", () => { diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index 0cc04de..bb25326 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -159,6 +159,8 @@ const PROXY_URL_BY_ENVIRONMENT: Record = }; // Deterministic bootstrap identity guarantees one-time admin creation under races. 
const BOOTSTRAP_ADMIN_HUMAN_ID = "00000000000000000000000000"; +const BOOTSTRAP_INTERNAL_SERVICE_NAME = "proxy-pairing"; +const BOOTSTRAP_INTERNAL_SERVICE_SCOPES = ["identity.read"] as const; const REGISTRY_SERVICE_EVENT_VERSION = "v1"; const AGENT_AUTH_EVENT_NAME_BY_TYPE: Record< @@ -1116,11 +1118,34 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const apiKeyHash = await hashApiKeyToken(apiKeyToken); const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); const apiKeyId = generateUlid(nowUtcMs() + 1); + const internalServiceSecret = generateInternalServiceSecret(); + const internalServiceSecretHash = await hashInternalServiceSecret( + internalServiceSecret, + ); + const internalServiceSecretPrefix = deriveInternalServiceSecretPrefix( + internalServiceSecret, + ); + const internalServiceId = generateUlid(nowUtcMs() + 2); const createdAt = nowIso(); + const rollbackBootstrapMutation = async ( + executor: typeof db, + ): Promise => { + await executor + .delete(internal_services) + .where( + and( + eq(internal_services.created_by, humanId), + eq(internal_services.name, BOOTSTRAP_INTERNAL_SERVICE_NAME), + ), + ); + await executor.delete(api_keys).where(eq(api_keys.human_id, humanId)); + await executor.delete(humans).where(eq(humans.id, humanId)); + }; + const applyBootstrapMutation = async ( executor: typeof db, - options: { rollbackOnApiKeyFailure: boolean }, + options: { rollbackOnFailure: boolean }, ): Promise => { const insertAdminResult = await executor .insert(humans) @@ -1153,10 +1178,24 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { created_at: createdAt, last_used_at: null, }); + + await executor.insert(internal_services).values({ + id: internalServiceId, + name: BOOTSTRAP_INTERNAL_SERVICE_NAME, + secret_hash: internalServiceSecretHash, + secret_prefix: internalServiceSecretPrefix, + scopes_json: JSON.stringify(BOOTSTRAP_INTERNAL_SERVICE_SCOPES), + status: "active", + created_by: humanId, + rotated_at: 
null, + last_used_at: null, + created_at: createdAt, + updated_at: createdAt, + }); } catch (error) { - if (options.rollbackOnApiKeyFailure) { + if (options.rollbackOnFailure) { try { - await executor.delete(humans).where(eq(humans.id, humanId)); + await rollbackBootstrapMutation(executor); } catch (rollbackError) { logger.error("registry.admin_bootstrap_rollback_failed", { rollbackErrorName: @@ -1172,7 +1211,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { try { await db.transaction(async (tx) => { await applyBootstrapMutation(tx as unknown as typeof db, { - rollbackOnApiKeyFailure: false, + rollbackOnFailure: false, }); }); } catch (error) { @@ -1181,7 +1220,7 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { } await applyBootstrapMutation(db, { - rollbackOnApiKeyFailure: true, + rollbackOnFailure: true, }); } @@ -1199,6 +1238,11 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { name: bootstrapPayload.apiKeyName, token: apiKeyToken, }, + internalService: { + id: internalServiceId, + name: BOOTSTRAP_INTERNAL_SERVICE_NAME, + secret: internalServiceSecret, + }, }, 201, ); From 2943017701a95ce465c8220c21886cd3512d148c Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 17:55:49 +0530 Subject: [PATCH 135/190] test(registry): improve fake D1 rollback fidelity for internal services --- apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/admin.ts | 3 + apps/registry/src/server.test.ts | 209 ++++++++++++++++++++++++++++++- 3 files changed, 212 insertions(+), 1 deletion(-) diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 8e92f84..e37b745 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -62,6 +62,7 @@ - Treat bootstrap API key token and internal service secret as write-once secrets: print once and never log secret contents. 
- Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. +- Bootstrap command output should explicitly remind operators to set `REGISTRY_INTERNAL_SERVICE_ID` and `REGISTRY_INTERNAL_SERVICE_SECRET` on proxy environment before deploy. - Bootstrap response parsing must require `{ human, apiKey, internalService }` to prevent partially-valid onboarding state. - Config persistence failures after successful bootstrap must not hide the returned PAT token; print secrets first, then surface recovery instructions. diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index e2d4dce..0ad8372 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -299,6 +299,9 @@ export const createAdminCommand = (): Command => { ); writeStdoutLine("Internal service secret (shown once):"); writeStdoutLine(result.internalService.secret); + writeStdoutLine( + "Set proxy secrets REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET with the values above before proxy deploy.", + ); await persistBootstrapConfig(result.registryUrl, result.apiKey.token); writeStdoutLine("API key saved to local config"); diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 39164dd..3978110 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -195,6 +195,20 @@ type FakeAgentSelectRow = { updated_at: string; }; +type FakeInternalServiceRow = { + id: string; + name: string; + secretHash: string; + secretPrefix: string; + scopesJson: string; + status: "active" | "revoked"; + createdBy: string; + rotatedAt: string | null; + lastUsedAt: string | null; + createdAt: string; + updatedAt: string; +}; + type FakeDbOptions = { beforeFirstAgentUpdate?: (agentRows: FakeAgentRow[]) => void; beforeFirstAgentAuthSessionUpdate?: ( @@ 
-284,6 +298,10 @@ function isDefined(value: T | undefined): value is T { return value !== undefined; } +function cloneRows>(rows: T[]): T[] { + return rows.map((row) => ({ ...row })); +} + function parseWhereEqualityParams(options: { whereClause: string; params: unknown[]; @@ -1057,11 +1075,62 @@ function createFakeDb( createdAt: "2026-01-01T00:00:00.000Z", lastUsedAt: null, })); + const internalServiceRows: FakeInternalServiceRow[] = []; let beforeFirstAgentUpdateApplied = false; let beforeFirstAgentAuthSessionUpdateApplied = false; let remainingApiKeyInsertFailures = options.failApiKeyInsertCount ?? 0; let remainingInternalServiceInsertFailures = options.failInternalServiceInsertCount ?? 0; + let transactionSnapshot: { + humanRows: FakeHumanRow[]; + apiKeyRows: FakeApiKeyRow[]; + internalServiceRows: FakeInternalServiceRow[]; + agentRows: FakeAgentRow[]; + revocationRows: FakeRevocationRow[]; + registrationChallengeRows: FakeAgentRegistrationChallengeRow[]; + agentAuthSessionRows: FakeAgentAuthSessionRow[]; + inviteRows: FakeInviteRow[]; + } | null = null; + + const createTransactionSnapshot = () => ({ + humanRows: cloneRows(humanRows), + apiKeyRows: cloneRows(apiKeyRows), + internalServiceRows: cloneRows(internalServiceRows), + agentRows: cloneRows(agentRows), + revocationRows: cloneRows(revocationRows), + registrationChallengeRows: cloneRows(registrationChallengeRows), + agentAuthSessionRows: cloneRows(agentAuthSessionRows), + inviteRows: cloneRows(inviteRows), + }); + + const restoreTransactionSnapshot = ( + snapshot: NonNullable, + ) => { + humanRows.splice(0, humanRows.length, ...cloneRows(snapshot.humanRows)); + apiKeyRows.splice(0, apiKeyRows.length, ...cloneRows(snapshot.apiKeyRows)); + internalServiceRows.splice( + 0, + internalServiceRows.length, + ...cloneRows(snapshot.internalServiceRows), + ); + agentRows.splice(0, agentRows.length, ...cloneRows(snapshot.agentRows)); + revocationRows.splice( + 0, + revocationRows.length, + 
...cloneRows(snapshot.revocationRows), + ); + registrationChallengeRows.splice( + 0, + registrationChallengeRows.length, + ...cloneRows(snapshot.registrationChallengeRows), + ); + agentAuthSessionRows.splice( + 0, + agentAuthSessionRows.length, + ...cloneRows(snapshot.agentAuthSessionRows), + ); + inviteRows.splice(0, inviteRows.length, ...cloneRows(snapshot.inviteRows)); + }; const database: D1Database = { prepare(query: string) { @@ -1461,6 +1530,24 @@ function createFakeDb( throw new Error("Failed query: begin"); } + if (normalizedQuery.trim() === "begin") { + transactionSnapshot = createTransactionSnapshot(); + return { success: true, meta: { changes: 0 } } as D1Result; + } + + if (normalizedQuery.trim() === "commit") { + transactionSnapshot = null; + return { success: true, meta: { changes: 0 } } as D1Result; + } + + if (normalizedQuery.trim() === "rollback") { + if (transactionSnapshot) { + restoreTransactionSnapshot(transactionSnapshot); + transactionSnapshot = null; + } + return { success: true, meta: { changes: 0 } } as D1Result; + } + let changes = 0; if ( @@ -1637,6 +1724,36 @@ function createFakeDb( {}, ); internalServiceInserts.push(row); + + if ( + typeof row.id === "string" && + typeof row.name === "string" && + typeof row.secret_hash === "string" && + typeof row.secret_prefix === "string" && + typeof row.scopes_json === "string" && + (row.status === "active" || row.status === "revoked") && + typeof row.created_by === "string" && + typeof row.created_at === "string" && + typeof row.updated_at === "string" + ) { + internalServiceRows.push({ + id: row.id, + name: row.name, + secretHash: row.secret_hash, + secretPrefix: row.secret_prefix, + scopesJson: row.scopes_json, + status: row.status, + createdBy: row.created_by, + rotatedAt: + typeof row.rotated_at === "string" ? row.rotated_at : null, + lastUsedAt: + typeof row.last_used_at === "string" + ? 
row.last_used_at + : null, + createdAt: row.created_at, + updatedAt: row.updated_at, + }); + } changes = 1; } if ( @@ -2041,7 +2158,45 @@ function createFakeDb( normalizedQuery.includes('delete from "internal_services"') || normalizedQuery.includes("delete from internal_services") ) { - changes = 1; + const whereClause = extractWhereClause(query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params, + }); + const createdByFilter = + typeof equalityParams.values.created_by?.[0] === "string" + ? String(equalityParams.values.created_by[0]) + : undefined; + const nameFilter = + typeof equalityParams.values.name?.[0] === "string" + ? String(equalityParams.values.name[0]) + : undefined; + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + + for ( + let index = internalServiceRows.length - 1; + index >= 0; + index -= 1 + ) { + const row = internalServiceRows[index]; + if (!row) { + continue; + } + if (createdByFilter && row.createdBy !== createdByFilter) { + continue; + } + if (nameFilter && row.name !== nameFilter) { + continue; + } + if (idFilter && row.id !== idFilter) { + continue; + } + internalServiceRows.splice(index, 1); + changes += 1; + } } if ( normalizedQuery.includes('insert into "agents"') || @@ -2953,6 +3108,58 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { expect(secondResponse.status).toBe(201); expect(humanRows).toHaveLength(1); }); + + it("rolls back transaction when internal service insert fails", async () => { + const { database, humanRows } = createFakeDb([], [], { + failInternalServiceInsertCount: 1, + }); + + const firstResponse = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: 
"test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(firstResponse.status).toBe(500); + expect(humanRows).toHaveLength(0); + + const secondResponse = await createRegistryApp().request( + ADMIN_BOOTSTRAP_PATH, + { + method: "POST", + headers: { + "content-type": "application/json", + "x-bootstrap-secret": "bootstrap-secret", + }, + body: JSON.stringify({ + displayName: "Primary Admin", + apiKeyName: "prod-admin-key", + }), + }, + { + DB: database, + ENVIRONMENT: "test", + BOOTSTRAP_SECRET: "bootstrap-secret", + }, + ); + + expect(secondResponse.status).toBe(201); + expect(humanRows).toHaveLength(1); + }); }); describe("GET /.well-known/claw-keys.json", () => { From 71482ef4fffd51e8b1624ccfc5d75eb2e70b66b4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:00:17 +0530 Subject: [PATCH 136/190] refactor(common): unify safe JSON response parsing across packages and apps --- apps/cli/package.json | 1 + apps/cli/src/AGENTS.md | 1 + apps/cli/src/commands/admin.ts | 7 +++---- apps/cli/src/commands/agent/validation.ts | 11 ++--------- apps/cli/src/commands/api-key.ts | 9 +-------- apps/cli/src/commands/invite.ts | 9 +-------- apps/cli/src/commands/openclaw/connector.ts | 7 +++---- apps/cli/src/commands/pair/proxy.ts | 9 ++------- apps/cli/src/commands/verify.ts | 9 +-------- apps/cli/src/config/registry-metadata.ts | 9 +-------- apps/cli/tsup.config.ts | 1 + apps/proxy/package.json | 1 + apps/proxy/src/AGENTS.md | 1 + apps/proxy/src/auth-middleware.ts | 9 +-------- apps/proxy/src/proxy-trust-store.ts | 9 +-------- packages/common/AGENTS.md | 1 + packages/common/src/index.ts | 10 ++++++++++ packages/sdk/package.json | 1 + packages/sdk/src/AGENTS.md | 1 + packages/sdk/src/agent-auth-client.ts | 9 +-------- packages/sdk/src/registry-identity-client.ts | 9 +-------- pnpm-lock.yaml | 9 +++++++++ 22 files changed, 45 insertions(+), 88 deletions(-) diff --git a/apps/cli/package.json b/apps/cli/package.json index f6ec40e..fe813a5 100644 --- 
a/apps/cli/package.json +++ b/apps/cli/package.json @@ -40,6 +40,7 @@ "ws": "^8.19.0" }, "devDependencies": { + "@clawdentity/common": "workspace:*", "@clawdentity/connector": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", diff --git a/apps/cli/src/AGENTS.md b/apps/cli/src/AGENTS.md index 2ba7706..84c600a 100644 --- a/apps/cli/src/AGENTS.md +++ b/apps/cli/src/AGENTS.md @@ -6,6 +6,7 @@ ## Command Modules - Keep each command implementation in `commands/.ts` with one exported factory (`createCommand`). - Reuse shared command wrappers (`withErrorHandling`) and IO helpers (`writeStdoutLine`, `writeStderrLine`) instead of inline process writes. +- Reuse `@clawdentity/common` transport utilities (for example safe JSON response parsing) across commands/config modules instead of redefining local try/catch wrappers. - Prefer explicit error-to-reason mapping for operator-facing failures rather than generic stack traces. - Prefer SDK shared primitives (`AppError`) for new command error logic instead of ad-hoc equivalents. - Admin bootstrap command logic should stay in `commands/admin.ts` and remain side-effect-safe: only mutate config after a validated successful registry response. 
diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index 84da21f..0ddd988 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe } from "@clawdentity/common"; import { ADMIN_BOOTSTRAP_PATH } from "@clawdentity/protocol"; import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; @@ -203,10 +204,8 @@ export async function bootstrapAdmin( ); } - let payload: unknown; - try { - payload = await response.json(); - } catch { + const payload = await parseJsonResponseSafe(response); + if (payload === undefined) { throw createCliError( "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", "Bootstrap response is invalid", diff --git a/apps/cli/src/commands/agent/validation.ts b/apps/cli/src/commands/agent/validation.ts index 646aed9..e8095af 100644 --- a/apps/cli/src/commands/agent/validation.ts +++ b/apps/cli/src/commands/agent/validation.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe } from "@clawdentity/common"; import { parseDid } from "@clawdentity/protocol"; import { toIso } from "@clawdentity/sdk"; import { IDENTITY_FILE_NAME } from "./paths.js"; @@ -85,15 +86,7 @@ export const extractRegistryErrorMessage = ( return trimmed.length > 0 ? 
trimmed : undefined; }; -export const parseJsonResponse = async ( - response: Response, -): Promise => { - try { - return await response.json(); - } catch { - return undefined; - } -}; +export const parseJsonResponse = parseJsonResponseSafe; const parseAgentAuthBundle = (value: unknown): AgentAuthBundle => { if (!isRecord(value)) { diff --git a/apps/cli/src/commands/api-key.ts b/apps/cli/src/commands/api-key.ts index 52b0f92..9eaacbe 100644 --- a/apps/cli/src/commands/api-key.ts +++ b/apps/cli/src/commands/api-key.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { ME_API_KEYS_PATH, parseUlid } from "@clawdentity/protocol"; import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; @@ -130,14 +131,6 @@ function extractRegistryErrorMessage(payload: unknown): string | undefined { return trimmed.length > 0 ? trimmed : undefined; } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - function toHttpErrorMessage(options: { status: number; responseBody: unknown; diff --git a/apps/cli/src/commands/invite.ts b/apps/cli/src/commands/invite.ts index a8c4d5c..6ab1ea8 100644 --- a/apps/cli/src/commands/invite.ts +++ b/apps/cli/src/commands/invite.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { INVITES_PATH, INVITES_REDEEM_PATH } from "@clawdentity/protocol"; import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; @@ -157,14 +158,6 @@ function extractRegistryErrorMessage(payload: unknown): string | undefined { return trimmed.length > 0 ? 
trimmed : undefined; } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - async function executeInviteRequest(input: { fetchImpl: typeof fetch; url: string; diff --git a/apps/cli/src/commands/openclaw/connector.ts b/apps/cli/src/commands/openclaw/connector.ts index d87be54..48277f4 100644 --- a/apps/cli/src/commands/openclaw/connector.ts +++ b/apps/cli/src/commands/openclaw/connector.ts @@ -3,6 +3,7 @@ import { closeSync, existsSync, openSync } from "node:fs"; import { mkdir, readFile } from "node:fs/promises"; import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; +import { parseJsonResponseSafe } from "@clawdentity/common"; import { nowUtcMs } from "@clawdentity/sdk"; import { getConfigDir } from "../../config/manager.js"; import { assertValidAgentName } from "../agent-name.js"; @@ -240,10 +241,8 @@ export async function fetchConnectorHealthStatus(input: { }; } - let payload: unknown; - try { - payload = await response.json(); - } catch { + const payload = await parseJsonResponseSafe(response); + if (payload === undefined) { return { connected: false, reachable: false, diff --git a/apps/cli/src/commands/pair/proxy.ts b/apps/cli/src/commands/pair/proxy.ts index 26a9f5a..1c99876 100644 --- a/apps/cli/src/commands/pair/proxy.ts +++ b/apps/cli/src/commands/pair/proxy.ts @@ -1,5 +1,6 @@ import { readFile } from "node:fs/promises"; import { join } from "node:path"; +import { parseJsonResponseSafe } from "@clawdentity/common"; import { decodeBase64url } from "@clawdentity/protocol"; import { signHttpRequest } from "@clawdentity/sdk"; import { @@ -102,13 +103,7 @@ function extractErrorMessage(payload: unknown): string | undefined { return message.length > 0 ? 
message : undefined; } -export async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} +export const parseJsonResponse = parseJsonResponseSafe; export async function executePairRequest(input: { fetchImpl: typeof fetch; diff --git a/apps/cli/src/commands/verify.ts b/apps/cli/src/commands/verify.ts index 9d270e0..04036dc 100644 --- a/apps/cli/src/commands/verify.ts +++ b/apps/cli/src/commands/verify.ts @@ -1,4 +1,5 @@ import { readFile } from "node:fs/promises"; +import { parseJsonResponseSafe as parseResponseJson } from "@clawdentity/common"; import { parseCrlClaims } from "@clawdentity/protocol"; import { createLogger, @@ -133,14 +134,6 @@ const parseJson = (raw: string): unknown => { } }; -const parseResponseJson = async (response: Response): Promise => { - try { - return await response.json(); - } catch { - return undefined; - } -}; - const parseSigningKeys = (payload: unknown): RegistrySigningKey[] => { if (!isRecord(payload) || !Array.isArray(payload.keys)) { throw new VerifyCommandError( diff --git a/apps/cli/src/config/registry-metadata.ts b/apps/cli/src/config/registry-metadata.ts index fe939d1..01b2953 100644 --- a/apps/cli/src/config/registry-metadata.ts +++ b/apps/cli/src/config/registry-metadata.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { REGISTRY_METADATA_PATH } from "@clawdentity/protocol"; import { AppError } from "@clawdentity/sdk"; @@ -84,14 +85,6 @@ function extractRegistryErrorMessage(payload: unknown): string | undefined { return trimmed.length > 0 ? 
trimmed : undefined; } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - function parseMetadataPayload( payload: unknown, fallbackRegistryUrl: string, diff --git a/apps/cli/tsup.config.ts b/apps/cli/tsup.config.ts index f51fab9..67b5c73 100644 --- a/apps/cli/tsup.config.ts +++ b/apps/cli/tsup.config.ts @@ -7,6 +7,7 @@ export default defineConfig({ splitting: false, external: ["ws"], noExternal: [ + "@clawdentity/common", "@clawdentity/connector", "@clawdentity/protocol", "@clawdentity/sdk", diff --git a/apps/proxy/package.json b/apps/proxy/package.json index f7e3ac0..565aa85 100644 --- a/apps/proxy/package.json +++ b/apps/proxy/package.json @@ -40,6 +40,7 @@ }, "dependencies": { "@hono/node-server": "^1.19.6", + "@clawdentity/common": "workspace:*", "@clawdentity/connector": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": "workspace:*", diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 6a449da..46436c6 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -30,6 +30,7 @@ ## Maintainability - Prefer schema-driven parsing with small pure helpers for coercion/overrides. +- Reuse `@clawdentity/common` for generic transport helpers (for example safe JSON response parsing) instead of duplicating identical utility functions in multiple proxy modules. - Keep CRL defaults centralized as exported constants in `config.ts`; do not duplicate timing literals across modules. - Keep trust/pairing state centralized in `proxy-trust-store.ts` and `proxy-trust-state/` (Durable Object backed; `proxy-trust-state.ts` remains the facade export). - Keep shared trust key/expiry helpers in `proxy-trust-keys.ts`; do not duplicate pair-key or expiry-normalization logic across store/state runtimes. 
diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index fdf1290..0b6586b 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { AGENT_AUTH_VALIDATE_PATH, decodeBase64url, @@ -274,14 +275,6 @@ function assertTimestampWithinSkew(options: { } } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - function toProofVerificationInput(input: { method: string; pathWithQuery: string; diff --git a/apps/proxy/src/proxy-trust-store.ts b/apps/proxy/src/proxy-trust-store.ts index 02d5ec2..84afabe 100644 --- a/apps/proxy/src/proxy-trust-store.ts +++ b/apps/proxy/src/proxy-trust-store.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { nowUtcMs } from "@clawdentity/sdk"; import { PROXY_TRUST_DO_NAME } from "./pairing-constants.js"; import { @@ -150,14 +151,6 @@ function parseErrorPayload(payload: unknown): { return { code, message }; } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - function createDurableObjectRequest(path: string, payload: unknown): Request { return new Request(`https://proxy-trust-state${path}`, { method: "POST", diff --git a/packages/common/AGENTS.md b/packages/common/AGENTS.md index fc90519..9780731 100644 --- a/packages/common/AGENTS.md +++ b/packages/common/AGENTS.md @@ -8,3 +8,4 @@ - Keep API surface minimal and stable. - Avoid domain-specific logic that belongs in feature packages. - Prefer composable utility functions over class-heavy abstractions. +- Keep shared HTTP response parsing helpers (for example safe JSON parsing) in this package so apps/packages do not duplicate try/catch wrappers. 
diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts index 8d15905..8e9619b 100644 --- a/packages/common/src/index.ts +++ b/packages/common/src/index.ts @@ -27,3 +27,13 @@ export function toOpenclawHookUrl(baseUrl: string, hookPath: string): string { : hookPath; return new URL(normalizedHookPath, normalizedBase).toString(); } + +export async function parseJsonResponseSafe( + response: Response, +): Promise { + try { + return await response.json(); + } catch { + return undefined; + } +} diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 0764d1a..30f8203 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -23,6 +23,7 @@ "typecheck": "tsc --noEmit" }, "dependencies": { + "@clawdentity/common": "workspace:*", "@clawdentity/protocol": "workspace:*", "@noble/ed25519": "^3.0.0", "hono": "^4.11.9", diff --git a/packages/sdk/src/AGENTS.md b/packages/sdk/src/AGENTS.md index 9fdb94d..aeb5123 100644 --- a/packages/sdk/src/AGENTS.md +++ b/packages/sdk/src/AGENTS.md @@ -4,3 +4,4 @@ - Follow `packages/sdk/AGENTS.md` as the canonical SDK guidance. - Keep datetime primitives centralized in `datetime.ts` and exported through `index.ts` (`nowUtcMs`, `toIso`, `nowIso`, `addSeconds`, `isExpired`). - Keep helper tests focused and deterministic in `datetime.test.ts`. +- Reuse `@clawdentity/common` primitives (for example safe JSON response parsing) instead of duplicating generic transport helpers in SDK clients. 
diff --git a/packages/sdk/src/agent-auth-client.ts b/packages/sdk/src/agent-auth-client.ts index 079df33..e84da4b 100644 --- a/packages/sdk/src/agent-auth-client.ts +++ b/packages/sdk/src/agent-auth-client.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { AGENT_AUTH_REFRESH_PATH, encodeBase64url, @@ -40,14 +41,6 @@ const parseNonEmptyString = (value: unknown): string => { return value.trim(); }; -const parseJsonResponse = async (response: Response): Promise => { - try { - return await response.json(); - } catch { - return undefined; - } -}; - const toPathWithQuery = (requestUrl: string): string => { const parsed = new URL(requestUrl); return `${parsed.pathname}${parsed.search}`; diff --git a/packages/sdk/src/registry-identity-client.ts b/packages/sdk/src/registry-identity-client.ts index a138ef4..c3ea955 100644 --- a/packages/sdk/src/registry-identity-client.ts +++ b/packages/sdk/src/registry-identity-client.ts @@ -1,3 +1,4 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; import { INTERNAL_IDENTITY_AGENT_OWNERSHIP_PATH, parseDid, @@ -64,14 +65,6 @@ function parseRegistryErrorEnvelope(payload: unknown): RegistryErrorEnvelope { }; } -async function parseJsonResponse(response: Response): Promise { - try { - return await response.json(); - } catch { - return undefined; - } -} - function parseOwnershipResponse(payload: unknown): AgentOwnershipResult { if (typeof payload !== "object" || payload === null) { throw new AppError({ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a3972a0..0aa997e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -51,6 +51,9 @@ importers: specifier: ^8.19.0 version: 8.19.0 devDependencies: + '@clawdentity/common': + specifier: workspace:* + version: link:../../packages/common '@clawdentity/connector': specifier: workspace:* version: link:../../packages/connector @@ -85,6 +88,9 @@ importers: apps/proxy: dependencies: + '@clawdentity/common': + 
specifier: workspace:* + version: link:../../packages/common '@clawdentity/connector': specifier: workspace:* version: link:../../packages/connector @@ -184,6 +190,9 @@ importers: packages/sdk: dependencies: + '@clawdentity/common': + specifier: workspace:* + version: link:../common '@clawdentity/protocol': specifier: workspace:* version: link:../protocol From 99a3de444e1cee50d21066bb5ec155c242bcfece Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:05:43 +0530 Subject: [PATCH 137/190] chore: keep proxy secrets manual and hide bootstrap output --- .github/workflows/deploy-develop.yml | 11 ----------- apps/cli/src/commands/AGENTS.md | 3 ++- apps/cli/src/commands/admin.ts | 11 +++++++++-- 3 files changed, 11 insertions(+), 14 deletions(-) diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml index 962669f..70c6909 100644 --- a/.github/workflows/deploy-develop.yml +++ b/.github/workflows/deploy-develop.yml @@ -24,8 +24,6 @@ jobs: APP_VERSION: ${{ github.sha }} REGISTRY_HEALTH_URL_OVERRIDE: ${{ secrets.REGISTRY_HEALTH_URL }} PROXY_HEALTH_URL_OVERRIDE: ${{ secrets.PROXY_HEALTH_URL }} - REGISTRY_INTERNAL_SERVICE_ID: ${{ secrets.REGISTRY_INTERNAL_SERVICE_ID }} - REGISTRY_INTERNAL_SERVICE_SECRET: ${{ secrets.REGISTRY_INTERNAL_SERVICE_SECRET }} steps: - name: Checkout uses: actions/checkout@v4 @@ -46,8 +44,6 @@ jobs: run: | test -n "${CLOUDFLARE_API_TOKEN}" test -n "${CLOUDFLARE_ACCOUNT_ID}" - test -n "${REGISTRY_INTERNAL_SERVICE_ID}" - test -n "${REGISTRY_INTERNAL_SERVICE_SECRET}" - name: Install dependencies run: pnpm install --frozen-lockfile @@ -160,13 +156,6 @@ jobs: time.sleep(delay_seconds) PY - - name: Sync proxy internal service credentials - run: | - echo "::add-mask::${REGISTRY_INTERNAL_SERVICE_ID}" - echo "::add-mask::${REGISTRY_INTERNAL_SERVICE_SECRET}" - printf "%s" "${REGISTRY_INTERNAL_SERVICE_ID}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_ID --env dev - printf "%s" 
"${REGISTRY_INTERNAL_SERVICE_SECRET}" | pnpm exec wrangler --cwd apps/proxy secret put REGISTRY_INTERNAL_SERVICE_SECRET --env dev - - name: Deploy proxy to dev environment run: | mkdir -p artifacts diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index e37b745..9146c2e 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -59,7 +59,8 @@ ## Admin Command Rules - `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. - `admin bootstrap` must import `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` instead of duplicating endpoint literals in command code/tests. -- Treat bootstrap API key token and internal service secret as write-once secrets: print once and never log secret contents. +- Treat bootstrap API key token as a write-once secret (print once) and never log secret contents. +- Keep internal service secret hidden by default; only print when operator explicitly passes `--print-internal-service-secret` in a secure terminal. - Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. - Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. - Bootstrap command output should explicitly remind operators to set `REGISTRY_INTERNAL_SERVICE_ID` and `REGISTRY_INTERNAL_SERVICE_SECRET` on proxy environment before deploy. 
diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index 0ad8372..a02e99a 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -16,6 +16,7 @@ type AdminBootstrapOptions = { displayName?: string; apiKeyName?: string; registryUrl?: string; + printInternalServiceSecret?: boolean; }; type AdminBootstrapResponse = { @@ -283,6 +284,10 @@ export const createAdminCommand = (): Command => { .option("--display-name ", "Admin display name") .option("--api-key-name ", "Admin API key label") .option("--registry-url ", "Override registry URL") + .option( + "--print-internal-service-secret", + "Print internal service secret in stdout (not recommended)", + ) .action( withErrorHandling( "admin bootstrap", @@ -297,8 +302,10 @@ export const createAdminCommand = (): Command => { writeStdoutLine( `Internal service name: ${result.internalService.name}`, ); - writeStdoutLine("Internal service secret (shown once):"); - writeStdoutLine(result.internalService.secret); + if (options.printInternalServiceSecret) { + writeStdoutLine("Internal service secret (shown once):"); + writeStdoutLine(result.internalService.secret); + } writeStdoutLine( "Set proxy secrets REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET with the values above before proxy deploy.", ); From 0b0c63b65de30e0b8a5f9a3c53e40e47299609df Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:06:05 +0530 Subject: [PATCH 138/190] test(connector): split client tests into focused modules --- packages/connector/src/AGENTS.md | 5 +- packages/connector/src/client.test.ts | 750 ------------------ .../src/client.test/delivery.test.ts | 315 ++++++++ packages/connector/src/client.test/helpers.ts | 103 +++ .../src/client.test/outbound-queue.test.ts | 47 ++ .../src/client.test/reconnect.test.ts | 284 +++++++ 6 files changed, 752 insertions(+), 752 deletions(-) delete mode 100644 packages/connector/src/client.test.ts create mode 100644 
packages/connector/src/client.test/delivery.test.ts create mode 100644 packages/connector/src/client.test/helpers.ts create mode 100644 packages/connector/src/client.test/outbound-queue.test.ts create mode 100644 packages/connector/src/client.test/reconnect.test.ts diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index f6089bc..08ecff5 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -70,7 +70,8 @@ ## Testing Rules - `inbound-inbox.test.ts` must cover persistence, dedupe, cap enforcement, replay bookkeeping, dead-letter thresholding, dead-letter replay, and dead-letter purge transitions. -- `client.test.ts` must cover both delivery modes: +- `client.test/*.test.ts` must stay split by concern (for example delivery/heartbeat, reconnect lifecycle, outbound queue) to keep each test file focused and easy to maintain. +- `client.test/*.test.ts` must cover both delivery modes: - direct local OpenClaw delivery fallback - injected inbound persistence handler ack path -- `client.test.ts` must keep websocket lifecycle expectations compatible with non-persistent and persistent queue modes. +- `client.test/*.test.ts` must keep websocket lifecycle expectations compatible with non-persistent and persistent queue modes. 
diff --git a/packages/connector/src/client.test.ts b/packages/connector/src/client.test.ts deleted file mode 100644 index 7d89b91..0000000 --- a/packages/connector/src/client.test.ts +++ /dev/null @@ -1,750 +0,0 @@ -import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; -import { afterEach, describe, expect, it, vi } from "vitest"; -import { ConnectorClient } from "./client.js"; -import { parseFrame, serializeFrame } from "./frames.js"; - -class MockWebSocket { - readonly url: string; - readyState = 0; - readonly sent: string[] = []; - - private readonly listeners: Record void>> = { - open: new Set(), - message: new Set(), - close: new Set(), - error: new Set(), - "unexpected-response": new Set(), - }; - - constructor(url: string) { - this.url = url; - } - - addEventListener(type: string, listener: (event: unknown) => void): void { - this.listeners[type]?.add(listener); - } - - send(data: string): void { - if (this.readyState !== 1) { - throw new Error("socket is not open"); - } - - this.sent.push(data); - } - - close(code?: number, reason?: string): void { - if (this.readyState === 3) { - return; - } - - this.readyState = 3; - this.emit("close", { - code, - reason, - wasClean: true, - }); - } - - open(): void { - this.readyState = 1; - this.emit("open", {}); - } - - message(data: unknown): void { - this.emit("message", { data }); - } - - failClose(code = 1006, reason = ""): void { - this.readyState = 3; - this.emit("close", { - code, - reason, - wasClean: false, - }); - } - - error(error: unknown): void { - this.emit("error", { error }); - } - - unexpectedResponse(status: number): void { - this.emit("unexpected-response", { status }); - } - - private emit(type: string, event: unknown): void { - for (const listener of this.listeners[type] ?? 
[]) { - listener(event); - } - } -} - -function createAgentDid(seedMs: number): string { - return makeAgentDid(generateUlid(seedMs)); -} - -afterEach(() => { - vi.useRealTimers(); - vi.restoreAllMocks(); -}); - -describe("ConnectorClient", () => { - it("acks inbound heartbeat frames", async () => { - const sockets: MockWebSocket[] = []; - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - expect(sockets).toHaveLength(1); - - sockets[0].open(); - - const heartbeatId = generateUlid(1700000000000); - sockets[0].message( - serializeFrame({ - v: 1, - type: "heartbeat", - id: heartbeatId, - ts: "2026-01-01T00:00:00.000Z", - }), - ); - - await vi.waitFor(() => { - expect(sockets[0].sent).toHaveLength(1); - }); - - const outbound = parseFrame(sockets[0].sent[0]); - expect(outbound.type).toBe("heartbeat_ack"); - if (outbound.type !== "heartbeat_ack") { - throw new Error("expected heartbeat_ack frame"); - } - expect(outbound.ackId).toBe(heartbeatId); - - client.disconnect(); - }); - - it("forwards deliver frames to local openclaw and acks success", async () => { - const sockets: MockWebSocket[] = []; - const fetchMock = vi - .fn() - .mockResolvedValue(new Response("ok", { status: 200 })); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - openclawHookToken: "hook-secret", - heartbeatIntervalMs: 0, - fetchImpl: fetchMock, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - const deliverId = generateUlid(1700000000000); - sockets[0].message( - serializeFrame({ - v: 1, - type: "deliver", - id: deliverId, - ts: 
"2026-01-01T00:00:00.000Z", - fromAgentDid: createAgentDid(1700000000100), - toAgentDid: createAgentDid(1700000000200), - payload: { - message: "hello from connector", - }, - }), - ); - - await vi.waitFor(() => { - expect(fetchMock).toHaveBeenCalledTimes(1); - expect(sockets[0].sent.length).toBeGreaterThan(0); - }); - - const [url, requestInit] = fetchMock.mock.calls[0]; - expect(url).toBe("http://127.0.0.1:18789/hooks/agent"); - expect(requestInit?.method).toBe("POST"); - expect(requestInit?.headers).toMatchObject({ - "content-type": "application/json", - "x-clawdentity-agent-did": expect.stringMatching(/^did:claw:agent:/), - "x-clawdentity-to-agent-did": expect.stringMatching(/^did:claw:agent:/), - "x-clawdentity-verified": "true", - "x-openclaw-token": "hook-secret", - "x-request-id": deliverId, - }); - - const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); - expect(ack.type).toBe("deliver_ack"); - if (ack.type !== "deliver_ack") { - throw new Error("expected deliver_ack frame"); - } - expect(ack.ackId).toBe(deliverId); - expect(ack.accepted).toBe(true); - - client.disconnect(); - }); - - it("acks delivery failure when local openclaw rejects", async () => { - const sockets: MockWebSocket[] = []; - const fetchMock = vi - .fn() - .mockResolvedValue(new Response("bad", { status: 400 })); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - fetchImpl: fetchMock, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - const deliverId = generateUlid(1700000000000); - sockets[0].message( - serializeFrame({ - v: 1, - type: "deliver", - id: deliverId, - ts: "2026-01-01T00:00:00.000Z", - fromAgentDid: createAgentDid(1700000000100), - toAgentDid: createAgentDid(1700000000200), - payload: { - message: "hello from connector", - }, - 
}), - ); - - await vi.waitFor(() => { - expect(sockets[0].sent.length).toBeGreaterThan(0); - }); - - const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); - expect(ack.type).toBe("deliver_ack"); - if (ack.type !== "deliver_ack") { - throw new Error("expected deliver_ack frame"); - } - expect(ack.ackId).toBe(deliverId); - expect(ack.accepted).toBe(false); - expect(ack.reason).toContain("status 400"); - - client.disconnect(); - }); - - it("acks success when inbound delivery handler persists payload", async () => { - const sockets: MockWebSocket[] = []; - const fetchMock = vi.fn(); - const inboundDeliverHandler = vi.fn(async () => ({ accepted: true })); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - fetchImpl: fetchMock, - inboundDeliverHandler, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - const deliverId = generateUlid(1700000000000); - sockets[0].message( - serializeFrame({ - v: 1, - type: "deliver", - id: deliverId, - ts: "2026-01-01T00:00:00.000Z", - fromAgentDid: createAgentDid(1700000000100), - toAgentDid: createAgentDid(1700000000200), - payload: { message: "persist me" }, - }), - ); - - await vi.waitFor(() => { - expect(inboundDeliverHandler).toHaveBeenCalledTimes(1); - expect(sockets[0].sent.length).toBeGreaterThan(0); - }); - - expect(fetchMock).not.toHaveBeenCalled(); - const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); - expect(ack.type).toBe("deliver_ack"); - if (ack.type !== "deliver_ack") { - throw new Error("expected deliver_ack frame"); - } - expect(ack.ackId).toBe(deliverId); - expect(ack.accepted).toBe(true); - - client.disconnect(); - }); - - it("retries transient local openclaw failures and eventually acks success", async () => { - const sockets: MockWebSocket[] = []; - 
const fetchMock = vi - .fn() - .mockRejectedValueOnce(new Error("connect ECONNREFUSED 127.0.0.1:18789")) - .mockRejectedValueOnce(new Error("connect ECONNREFUSED 127.0.0.1:18789")) - .mockResolvedValue(new Response("ok", { status: 200 })); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - fetchImpl: fetchMock, - openclawDeliverTimeoutMs: 100, - openclawDeliverRetryInitialDelayMs: 1, - openclawDeliverRetryMaxDelayMs: 2, - openclawDeliverRetryBudgetMs: 500, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - const deliverId = generateUlid(1700000000000); - sockets[0].message( - serializeFrame({ - v: 1, - type: "deliver", - id: deliverId, - ts: "2026-01-01T00:00:00.000Z", - fromAgentDid: createAgentDid(1700000000100), - toAgentDid: createAgentDid(1700000000200), - payload: { - message: "hello from connector", - }, - }), - ); - - await vi.waitFor(() => { - expect(fetchMock).toHaveBeenCalledTimes(3); - expect(sockets[0].sent.length).toBeGreaterThan(0); - }); - - const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); - expect(ack.type).toBe("deliver_ack"); - if (ack.type !== "deliver_ack") { - throw new Error("expected deliver_ack frame"); - } - expect(ack.ackId).toBe(deliverId); - expect(ack.accepted).toBe(true); - - client.disconnect(); - }); - - it("retries when local openclaw hook auth rejects with 401", async () => { - const sockets: MockWebSocket[] = []; - const fetchMock = vi - .fn() - .mockResolvedValueOnce(new Response("unauthorized", { status: 401 })) - .mockResolvedValueOnce(new Response("ok", { status: 200 })); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - fetchImpl: fetchMock, - 
openclawDeliverTimeoutMs: 100, - openclawDeliverRetryInitialDelayMs: 1, - openclawDeliverRetryMaxDelayMs: 2, - openclawDeliverRetryBudgetMs: 500, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - const deliverId = generateUlid(1700000000000); - sockets[0].message( - serializeFrame({ - v: 1, - type: "deliver", - id: deliverId, - ts: "2026-01-01T00:00:00.000Z", - fromAgentDid: createAgentDid(1700000000100), - toAgentDid: createAgentDid(1700000000200), - payload: { - message: "hello from connector", - }, - }), - ); - - await vi.waitFor(() => { - expect(fetchMock).toHaveBeenCalledTimes(2); - expect(sockets[0].sent.length).toBeGreaterThan(0); - }); - - const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); - expect(ack.type).toBe("deliver_ack"); - if (ack.type !== "deliver_ack") { - throw new Error("expected deliver_ack frame"); - } - expect(ack.ackId).toBe(deliverId); - expect(ack.accepted).toBe(true); - - client.disconnect(); - }); - - it("reconnects when heartbeat acknowledgement times out", async () => { - vi.useFakeTimers(); - - const sockets: MockWebSocket[] = []; - const disconnectedEvents: { code: number; reason: string }[] = []; - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - connectTimeoutMs: 0, - heartbeatIntervalMs: 10, - heartbeatAckTimeoutMs: 25, - reconnectMinDelayMs: 50, - reconnectMaxDelayMs: 50, - reconnectJitterRatio: 0, - hooks: { - onDisconnected: (event) => { - disconnectedEvents.push({ code: event.code, reason: event.reason }); - }, - }, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - await vi.advanceTimersByTimeAsync(35); - expect(sockets).toHaveLength(1); - expect(disconnectedEvents).toHaveLength(1); 
- expect(disconnectedEvents[0]?.reason).toContain( - "Heartbeat acknowledgement", - ); - - await vi.advanceTimersByTimeAsync(50); - expect(sockets).toHaveLength(2); - - client.disconnect(); - }); - - it("does not reconnect when heartbeat acknowledgement arrives before timeout", async () => { - vi.useFakeTimers(); - - const sockets: MockWebSocket[] = []; - const disconnected = vi.fn(); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - connectTimeoutMs: 0, - heartbeatIntervalMs: 100, - heartbeatAckTimeoutMs: 40, - reconnectMinDelayMs: 20, - reconnectMaxDelayMs: 20, - reconnectJitterRatio: 0, - hooks: { - onDisconnected: disconnected, - }, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - - await vi.advanceTimersByTimeAsync(100); - const outboundHeartbeat = parseFrame(sockets[0].sent[0]); - expect(outboundHeartbeat.type).toBe("heartbeat"); - if (outboundHeartbeat.type !== "heartbeat") { - throw new Error("expected heartbeat frame"); - } - - sockets[0].message( - serializeFrame({ - v: 1, - type: "heartbeat_ack", - id: generateUlid(1700000000010), - ts: "2026-01-01T00:00:00.010Z", - ackId: outboundHeartbeat.id, - }), - ); - - await vi.advanceTimersByTimeAsync(80); - expect(disconnected).not.toHaveBeenCalled(); - expect(sockets).toHaveLength(1); - - client.disconnect(); - }); - - it("reconnects when websocket connection does not open before timeout", async () => { - vi.useFakeTimers(); - - const sockets: MockWebSocket[] = []; - const disconnected = vi.fn(); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - connectTimeoutMs: 30, - heartbeatIntervalMs: 0, - reconnectMinDelayMs: 20, - reconnectMaxDelayMs: 20, - reconnectJitterRatio: 0, - hooks: { - onDisconnected: disconnected, 
- }, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - expect(sockets).toHaveLength(1); - - await vi.advanceTimersByTimeAsync(29); - expect(sockets).toHaveLength(1); - await vi.advanceTimersByTimeAsync(1); - expect(disconnected).toHaveBeenCalledTimes(1); - await vi.advanceTimersByTimeAsync(20); - expect(sockets).toHaveLength(2); - - client.disconnect(); - }); - - it("reconnects after websocket error even when close event is missing", async () => { - vi.useFakeTimers(); - - const sockets: MockWebSocket[] = []; - const disconnected = vi.fn(); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - connectTimeoutMs: 0, - heartbeatIntervalMs: 0, - reconnectMinDelayMs: 40, - reconnectMaxDelayMs: 40, - reconnectJitterRatio: 0, - hooks: { - onDisconnected: disconnected, - }, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - sockets[0].open(); - sockets[0].readyState = 3; - sockets[0].error(new Error("boom")); - - expect(disconnected).toHaveBeenCalledTimes(1); - await vi.advanceTimersByTimeAsync(39); - expect(sockets).toHaveLength(1); - await vi.advanceTimersByTimeAsync(1); - expect(sockets).toHaveLength(2); - - client.disconnect(); - }); - - it("retries websocket upgrade rejection with one immediate retry on 401", async () => { - vi.useFakeTimers(); - - const sockets: MockWebSocket[] = []; - const onAuthUpgradeRejected = - vi.fn<(event: { status: number; immediateRetry: boolean }) => void>(); - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - connectTimeoutMs: 0, - heartbeatIntervalMs: 0, - reconnectMinDelayMs: 100, - reconnectMaxDelayMs: 100, - reconnectJitterRatio: 0, - hooks: { - onAuthUpgradeRejected, - 
}, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - expect(sockets).toHaveLength(1); - - sockets[0].unexpectedResponse(401); - await vi.runOnlyPendingTimersAsync(); - expect(sockets).toHaveLength(2); - expect(onAuthUpgradeRejected).toHaveBeenCalledTimes(1); - expect(onAuthUpgradeRejected).toHaveBeenNthCalledWith(1, { - status: 401, - immediateRetry: true, - }); - - sockets[1].unexpectedResponse(401); - await vi.advanceTimersByTimeAsync(99); - expect(sockets).toHaveLength(2); - await vi.advanceTimersByTimeAsync(1); - expect(sockets).toHaveLength(3); - expect(onAuthUpgradeRejected).toHaveBeenCalledTimes(2); - expect(onAuthUpgradeRejected).toHaveBeenNthCalledWith(2, { - status: 401, - immediateRetry: false, - }); - - client.disconnect(); - }); - - it("reconnects after websocket closes", () => { - vi.useFakeTimers(); - - const sockets: MockWebSocket[] = []; - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - reconnectMinDelayMs: 100, - reconnectMaxDelayMs: 100, - reconnectJitterRatio: 0, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - expect(sockets).toHaveLength(1); - - sockets[0].open(); - sockets[0].failClose(1006, "network down"); - - vi.advanceTimersByTime(99); - expect(sockets).toHaveLength(1); - - vi.advanceTimersByTime(1); - expect(sockets).toHaveLength(2); - - client.disconnect(); - }); - - it("refreshes connection headers on reconnect attempts", async () => { - const sockets: MockWebSocket[] = []; - const dialHeaders: Record[] = []; - let nonceCounter = 0; - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - reconnectMinDelayMs: 0, - 
reconnectMaxDelayMs: 0, - reconnectJitterRatio: 0, - connectionHeadersProvider: () => ({ - "x-claw-nonce": `nonce-${++nonceCounter}`, - }), - webSocketFactory: (url, headers) => { - dialHeaders.push(headers); - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - await vi.waitFor(() => { - expect(sockets).toHaveLength(1); - }); - expect(dialHeaders[0]["x-claw-nonce"]).toBe("nonce-1"); - - sockets[0].open(); - sockets[0].failClose(1006, "network down"); - - await vi.waitFor(() => { - expect(sockets).toHaveLength(2); - }); - expect(dialHeaders[1]["x-claw-nonce"]).toBe("nonce-2"); - - client.disconnect(); - }); - - it("queues outbound enqueue frames until connected", async () => { - const sockets: MockWebSocket[] = []; - - const client = new ConnectorClient({ - connectorUrl: "wss://connector.example.com/agent", - openclawBaseUrl: "http://127.0.0.1:18789", - heartbeatIntervalMs: 0, - webSocketFactory: (url) => { - const socket = new MockWebSocket(url); - sockets.push(socket); - return socket; - }, - }); - - client.connect(); - expect(client.getQueuedOutboundCount()).toBe(0); - - const enqueueFrame = client.enqueueOutbound({ - toAgentDid: createAgentDid(1700000000000), - payload: { message: "queued message" }, - }); - - expect(client.getQueuedOutboundCount()).toBe(1); - expect(sockets[0].sent).toHaveLength(0); - - sockets[0].open(); - - await vi.waitFor(() => { - expect(client.getQueuedOutboundCount()).toBe(0); - expect(sockets[0].sent).toHaveLength(1); - }); - - const outbound = parseFrame(sockets[0].sent[0]); - expect(outbound.type).toBe("enqueue"); - expect(outbound.id).toBe(enqueueFrame.id); - - client.disconnect(); - }); -}); diff --git a/packages/connector/src/client.test/delivery.test.ts b/packages/connector/src/client.test/delivery.test.ts new file mode 100644 index 0000000..8b1acd8 --- /dev/null +++ b/packages/connector/src/client.test/delivery.test.ts @@ -0,0 +1,315 @@ +import { generateUlid } from 
"@clawdentity/protocol"; +import { describe, expect, it, vi } from "vitest"; +import { ConnectorClient } from "../client.js"; +import { parseFrame, serializeFrame } from "../frames.js"; +import { + createAgentDid, + createMockWebSocketFactory, + registerConnectorClientTestHooks, +} from "./helpers.js"; + +registerConnectorClientTestHooks(); + +describe("ConnectorClient delivery and heartbeat frames", () => { + it("acks inbound heartbeat frames", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + webSocketFactory, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + sockets[0].open(); + + const heartbeatId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "heartbeat", + id: heartbeatId, + ts: "2026-01-01T00:00:00.000Z", + }), + ); + + await vi.waitFor(() => { + expect(sockets[0].sent).toHaveLength(1); + }); + + const outbound = parseFrame(sockets[0].sent[0]); + expect(outbound.type).toBe("heartbeat_ack"); + if (outbound.type !== "heartbeat_ack") { + throw new Error("expected heartbeat_ack frame"); + } + expect(outbound.ackId).toBe(heartbeatId); + + client.disconnect(); + }); + + it("forwards deliver frames to local openclaw and acks success", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const fetchMock = vi + .fn() + .mockResolvedValue(new Response("ok", { status: 200 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + openclawHookToken: "hook-secret", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", 
+ id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const [url, requestInit] = fetchMock.mock.calls[0]; + expect(url).toBe("http://127.0.0.1:18789/hooks/agent"); + expect(requestInit?.method).toBe("POST"); + expect(requestInit?.headers).toMatchObject({ + "content-type": "application/json", + "x-clawdentity-agent-did": expect.stringMatching(/^did:claw:agent:/), + "x-clawdentity-to-agent-did": expect.stringMatching(/^did:claw:agent:/), + "x-clawdentity-verified": "true", + "x-openclaw-token": "hook-secret", + "x-request-id": deliverId, + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + + it("acks delivery failure when local openclaw rejects", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const fetchMock = vi + .fn() + .mockResolvedValue(new Response("bad", { status: 400 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + 
expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(false); + expect(ack.reason).toContain("status 400"); + + client.disconnect(); + }); + + it("acks success when inbound delivery handler persists payload", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const fetchMock = vi.fn(); + const inboundDeliverHandler = vi.fn(async () => ({ accepted: true })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + inboundDeliverHandler, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { message: "persist me" }, + }), + ); + + await vi.waitFor(() => { + expect(inboundDeliverHandler).toHaveBeenCalledTimes(1); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + expect(fetchMock).not.toHaveBeenCalled(); + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + + it("retries transient local openclaw failures and eventually acks success", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const fetchMock = vi + .fn() + .mockRejectedValueOnce(new Error("connect 
ECONNREFUSED 127.0.0.1:18789")) + .mockRejectedValueOnce(new Error("connect ECONNREFUSED 127.0.0.1:18789")) + .mockResolvedValue(new Response("ok", { status: 200 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + openclawDeliverTimeoutMs: 100, + openclawDeliverRetryInitialDelayMs: 1, + openclawDeliverRetryMaxDelayMs: 2, + openclawDeliverRetryBudgetMs: 500, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(fetchMock).toHaveBeenCalledTimes(3); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); + + it("retries when local openclaw hook auth rejects with 401", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const fetchMock = vi + .fn() + .mockResolvedValueOnce(new Response("unauthorized", { status: 401 })) + .mockResolvedValueOnce(new Response("ok", { status: 200 })); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + fetchImpl: fetchMock, + openclawDeliverTimeoutMs: 100, + openclawDeliverRetryInitialDelayMs: 1, + openclawDeliverRetryMaxDelayMs: 2, + openclawDeliverRetryBudgetMs: 500, + 
webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + const deliverId = generateUlid(1700000000000); + sockets[0].message( + serializeFrame({ + v: 1, + type: "deliver", + id: deliverId, + ts: "2026-01-01T00:00:00.000Z", + fromAgentDid: createAgentDid(1700000000100), + toAgentDid: createAgentDid(1700000000200), + payload: { + message: "hello from connector", + }, + }), + ); + + await vi.waitFor(() => { + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(sockets[0].sent.length).toBeGreaterThan(0); + }); + + const ack = parseFrame(sockets[0].sent[sockets[0].sent.length - 1]); + expect(ack.type).toBe("deliver_ack"); + if (ack.type !== "deliver_ack") { + throw new Error("expected deliver_ack frame"); + } + expect(ack.ackId).toBe(deliverId); + expect(ack.accepted).toBe(true); + + client.disconnect(); + }); +}); diff --git a/packages/connector/src/client.test/helpers.ts b/packages/connector/src/client.test/helpers.ts new file mode 100644 index 0000000..811d17d --- /dev/null +++ b/packages/connector/src/client.test/helpers.ts @@ -0,0 +1,103 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { afterEach, vi } from "vitest"; + +export class MockWebSocket { + readonly url: string; + readyState = 0; + readonly sent: string[] = []; + + private readonly listeners: Record void>> = { + open: new Set(), + message: new Set(), + close: new Set(), + error: new Set(), + "unexpected-response": new Set(), + }; + + constructor(url: string) { + this.url = url; + } + + addEventListener(type: string, listener: (event: unknown) => void): void { + this.listeners[type]?.add(listener); + } + + send(data: string): void { + if (this.readyState !== 1) { + throw new Error("socket is not open"); + } + + this.sent.push(data); + } + + close(code?: number, reason?: string): void { + if (this.readyState === 3) { + return; + } + + this.readyState = 3; + this.emit("close", { + code, + reason, + wasClean: true, + }); + } + + open(): void { + this.readyState = 
1; + this.emit("open", {}); + } + + message(data: unknown): void { + this.emit("message", { data }); + } + + failClose(code = 1006, reason = ""): void { + this.readyState = 3; + this.emit("close", { + code, + reason, + wasClean: false, + }); + } + + error(error: unknown): void { + this.emit("error", { error }); + } + + unexpectedResponse(status: number): void { + this.emit("unexpected-response", { status }); + } + + private emit(type: string, event: unknown): void { + for (const listener of this.listeners[type] ?? []) { + listener(event); + } + } +} + +export function createAgentDid(seedMs: number): string { + return makeAgentDid(generateUlid(seedMs)); +} + +export function createMockWebSocketFactory(): { + sockets: MockWebSocket[]; + webSocketFactory: (url: string) => MockWebSocket; +} { + const sockets: MockWebSocket[] = []; + return { + sockets, + webSocketFactory: (url: string) => { + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }; +} + +export function registerConnectorClientTestHooks(): void { + afterEach(() => { + vi.useRealTimers(); + vi.restoreAllMocks(); + }); +} diff --git a/packages/connector/src/client.test/outbound-queue.test.ts b/packages/connector/src/client.test/outbound-queue.test.ts new file mode 100644 index 0000000..d262d03 --- /dev/null +++ b/packages/connector/src/client.test/outbound-queue.test.ts @@ -0,0 +1,47 @@ +import { describe, expect, it, vi } from "vitest"; +import { ConnectorClient } from "../client.js"; +import { parseFrame } from "../frames.js"; +import { + createAgentDid, + createMockWebSocketFactory, + registerConnectorClientTestHooks, +} from "./helpers.js"; + +registerConnectorClientTestHooks(); + +describe("ConnectorClient outbound queue", () => { + it("queues outbound enqueue frames until connected", async () => { + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + 
openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + webSocketFactory, + }); + + client.connect(); + expect(client.getQueuedOutboundCount()).toBe(0); + + const enqueueFrame = client.enqueueOutbound({ + toAgentDid: createAgentDid(1700000000000), + payload: { message: "queued message" }, + }); + + expect(client.getQueuedOutboundCount()).toBe(1); + expect(sockets[0].sent).toHaveLength(0); + + sockets[0].open(); + + await vi.waitFor(() => { + expect(client.getQueuedOutboundCount()).toBe(0); + expect(sockets[0].sent).toHaveLength(1); + }); + + const outbound = parseFrame(sockets[0].sent[0]); + expect(outbound.type).toBe("enqueue"); + expect(outbound.id).toBe(enqueueFrame.id); + + client.disconnect(); + }); +}); diff --git a/packages/connector/src/client.test/reconnect.test.ts b/packages/connector/src/client.test/reconnect.test.ts new file mode 100644 index 0000000..55b1db3 --- /dev/null +++ b/packages/connector/src/client.test/reconnect.test.ts @@ -0,0 +1,284 @@ +import { generateUlid } from "@clawdentity/protocol"; +import { describe, expect, it, vi } from "vitest"; +import { ConnectorClient } from "../client.js"; +import { parseFrame, serializeFrame } from "../frames.js"; +import { + createMockWebSocketFactory, + MockWebSocket, + registerConnectorClientTestHooks, +} from "./helpers.js"; + +registerConnectorClientTestHooks(); + +describe("ConnectorClient reconnect behavior", () => { + it("reconnects when heartbeat acknowledgement times out", async () => { + vi.useFakeTimers(); + + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const disconnectedEvents: { code: number; reason: string }[] = []; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 10, + heartbeatAckTimeoutMs: 25, + reconnectMinDelayMs: 50, + reconnectMaxDelayMs: 50, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: (event) => { + 
disconnectedEvents.push({ code: event.code, reason: event.reason }); + }, + }, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + await vi.advanceTimersByTimeAsync(35); + expect(sockets).toHaveLength(1); + expect(disconnectedEvents).toHaveLength(1); + expect(disconnectedEvents[0]?.reason).toContain( + "Heartbeat acknowledgement", + ); + + await vi.advanceTimersByTimeAsync(50); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("does not reconnect when heartbeat acknowledgement arrives before timeout", async () => { + vi.useFakeTimers(); + + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const disconnected = vi.fn(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 100, + heartbeatAckTimeoutMs: 40, + reconnectMinDelayMs: 20, + reconnectMaxDelayMs: 20, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: disconnected, + }, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + + await vi.advanceTimersByTimeAsync(100); + const outboundHeartbeat = parseFrame(sockets[0].sent[0]); + expect(outboundHeartbeat.type).toBe("heartbeat"); + if (outboundHeartbeat.type !== "heartbeat") { + throw new Error("expected heartbeat frame"); + } + + sockets[0].message( + serializeFrame({ + v: 1, + type: "heartbeat_ack", + id: generateUlid(1700000000010), + ts: "2026-01-01T00:00:00.010Z", + ackId: outboundHeartbeat.id, + }), + ); + + await vi.advanceTimersByTimeAsync(80); + expect(disconnected).not.toHaveBeenCalled(); + expect(sockets).toHaveLength(1); + + client.disconnect(); + }); + + it("reconnects when websocket connection does not open before timeout", async () => { + vi.useFakeTimers(); + + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const disconnected = vi.fn(); + + const client = new ConnectorClient({ + connectorUrl: 
"wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 30, + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 20, + reconnectMaxDelayMs: 20, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: disconnected, + }, + webSocketFactory, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + await vi.advanceTimersByTimeAsync(29); + expect(sockets).toHaveLength(1); + await vi.advanceTimersByTimeAsync(1); + expect(disconnected).toHaveBeenCalledTimes(1); + await vi.advanceTimersByTimeAsync(20); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("reconnects after websocket error even when close event is missing", async () => { + vi.useFakeTimers(); + + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const disconnected = vi.fn(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 40, + reconnectMaxDelayMs: 40, + reconnectJitterRatio: 0, + hooks: { + onDisconnected: disconnected, + }, + webSocketFactory, + }); + + client.connect(); + sockets[0].open(); + sockets[0].readyState = 3; + sockets[0].error(new Error("boom")); + + expect(disconnected).toHaveBeenCalledTimes(1); + await vi.advanceTimersByTimeAsync(39); + expect(sockets).toHaveLength(1); + await vi.advanceTimersByTimeAsync(1); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("retries websocket upgrade rejection with one immediate retry on 401", async () => { + vi.useFakeTimers(); + + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + const onAuthUpgradeRejected = + vi.fn<(event: { status: number; immediateRetry: boolean }) => void>(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + connectTimeoutMs: 0, + heartbeatIntervalMs: 0, 
+ reconnectMinDelayMs: 100, + reconnectMaxDelayMs: 100, + reconnectJitterRatio: 0, + hooks: { + onAuthUpgradeRejected, + }, + webSocketFactory, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + sockets[0].unexpectedResponse(401); + await vi.runOnlyPendingTimersAsync(); + expect(sockets).toHaveLength(2); + expect(onAuthUpgradeRejected).toHaveBeenCalledTimes(1); + expect(onAuthUpgradeRejected).toHaveBeenNthCalledWith(1, { + status: 401, + immediateRetry: true, + }); + + sockets[1].unexpectedResponse(401); + await vi.advanceTimersByTimeAsync(99); + expect(sockets).toHaveLength(2); + await vi.advanceTimersByTimeAsync(1); + expect(sockets).toHaveLength(3); + expect(onAuthUpgradeRejected).toHaveBeenCalledTimes(2); + expect(onAuthUpgradeRejected).toHaveBeenNthCalledWith(2, { + status: 401, + immediateRetry: false, + }); + + client.disconnect(); + }); + + it("reconnects after websocket closes", () => { + vi.useFakeTimers(); + + const { sockets, webSocketFactory } = createMockWebSocketFactory(); + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 100, + reconnectMaxDelayMs: 100, + reconnectJitterRatio: 0, + webSocketFactory, + }); + + client.connect(); + expect(sockets).toHaveLength(1); + + sockets[0].open(); + sockets[0].failClose(1006, "network down"); + + vi.advanceTimersByTime(99); + expect(sockets).toHaveLength(1); + + vi.advanceTimersByTime(1); + expect(sockets).toHaveLength(2); + + client.disconnect(); + }); + + it("refreshes connection headers on reconnect attempts", async () => { + const sockets: MockWebSocket[] = []; + const dialHeaders: Record[] = []; + let nonceCounter = 0; + + const client = new ConnectorClient({ + connectorUrl: "wss://connector.example.com/agent", + openclawBaseUrl: "http://127.0.0.1:18789", + heartbeatIntervalMs: 0, + reconnectMinDelayMs: 0, + reconnectMaxDelayMs: 0, + reconnectJitterRatio: 
0, + connectionHeadersProvider: () => ({ + "x-claw-nonce": `nonce-${++nonceCounter}`, + }), + webSocketFactory: (url, headers) => { + dialHeaders.push(headers); + const socket = new MockWebSocket(url); + sockets.push(socket); + return socket; + }, + }); + + client.connect(); + await vi.waitFor(() => { + expect(sockets).toHaveLength(1); + }); + expect(dialHeaders[0]["x-claw-nonce"]).toBe("nonce-1"); + + sockets[0].open(); + sockets[0].failClose(1006, "network down"); + + await vi.waitFor(() => { + expect(sockets).toHaveLength(2); + }); + expect(dialHeaders[1]["x-claw-nonce"]).toBe("nonce-2"); + + client.disconnect(); + }); +}); From cf592bfef46fe87bf2c2cc69625b4b7945ba8b12 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:07:43 +0530 Subject: [PATCH 139/190] security: remove internal service secret stdout path --- apps/cli/src/commands/AGENTS.md | 2 +- apps/cli/src/commands/admin.ts | 11 +---------- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 9146c2e..3832f70 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -60,7 +60,7 @@ - `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. - `admin bootstrap` must import `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` instead of duplicating endpoint literals in command code/tests. - Treat bootstrap API key token as a write-once secret (print once) and never log secret contents. -- Keep internal service secret hidden by default; only print when operator explicitly passes `--print-internal-service-secret` in a secure terminal. +- Never print internal service secret from `admin bootstrap`; operators must provision/rotate `REGISTRY_INTERNAL_SERVICE_SECRET` manually in Cloudflare. - Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. 
- Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. - Bootstrap command output should explicitly remind operators to set `REGISTRY_INTERNAL_SERVICE_ID` and `REGISTRY_INTERNAL_SERVICE_SECRET` on proxy environment before deploy. diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index a02e99a..000a40b 100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -16,7 +16,6 @@ type AdminBootstrapOptions = { displayName?: string; apiKeyName?: string; registryUrl?: string; - printInternalServiceSecret?: boolean; }; type AdminBootstrapResponse = { @@ -284,10 +283,6 @@ export const createAdminCommand = (): Command => { .option("--display-name ", "Admin display name") .option("--api-key-name ", "Admin API key label") .option("--registry-url ", "Override registry URL") - .option( - "--print-internal-service-secret", - "Print internal service secret in stdout (not recommended)", - ) .action( withErrorHandling( "admin bootstrap", @@ -302,12 +297,8 @@ export const createAdminCommand = (): Command => { writeStdoutLine( `Internal service name: ${result.internalService.name}`, ); - if (options.printInternalServiceSecret) { - writeStdoutLine("Internal service secret (shown once):"); - writeStdoutLine(result.internalService.secret); - } writeStdoutLine( - "Set proxy secrets REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET with the values above before proxy deploy.", + "Set proxy secrets REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET manually in Cloudflare before proxy deploy.", ); await persistBootstrapConfig(result.registryUrl, result.apiKey.token); From 69edbc37899298b8b3cdc22ec213c391c2690080 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:32:03 +0530 Subject: [PATCH 140/190] refactor(connector): modularize runtime orchestration --- packages/connector/src/AGENTS.md | 4 + packages/connector/src/runtime.ts 
| 514 +++--------------- packages/connector/src/runtime/AGENTS.md | 4 + .../connector/src/runtime/auth-lifecycle.ts | 92 ++++ .../src/runtime/openclaw-hook-token.ts | 47 ++ .../connector/src/runtime/openclaw-probe.ts | 63 +++ packages/connector/src/runtime/replay.ts | 329 +++++++++++ 7 files changed, 624 insertions(+), 429 deletions(-) create mode 100644 packages/connector/src/runtime/auth-lifecycle.ts create mode 100644 packages/connector/src/runtime/openclaw-hook-token.ts create mode 100644 packages/connector/src/runtime/openclaw-probe.ts create mode 100644 packages/connector/src/runtime/replay.ts diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 08ecff5..7712be2 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -10,9 +10,13 @@ - `client/queue.ts` for outbound queue + persistence orchestration. - `client/delivery.ts` for local OpenClaw delivery + retry behavior. - Keep `runtime.ts` as the runtime entrypoint and wire internal concerns through `runtime/` modules: + - `runtime/auth-lifecycle.ts` for in-memory auth state + refresh/sync orchestration. - `runtime/auth-storage.ts` for registry auth disk sync + atomic persistence. + - `runtime/openclaw-hook-token.ts` for explicit-vs-runtime hook token precedence and sync. + - `runtime/openclaw-probe.ts` for OpenClaw gateway liveness probing state transitions. - `runtime/openclaw.ts` for hook token discovery and abort-aware local hook delivery. - `runtime/policy.ts` for replay/probe configuration loading and retry-delay calculation. + - `runtime/replay.ts` for inbound replay orchestration, lane scheduling, retry/dead-letter transitions, and delivery receipts. - `runtime/relay-service.ts` for outbound relay and signed delivery-receipt callbacks. - `runtime/server.ts` for HTTP route handling (`/v1/status`, dead-letter ops, `/v1/outbound`). 
- `runtime/trusted-receipts.ts`, `runtime/url.ts`, `runtime/ws.ts`, and `runtime/parse.ts` for focused helper concerns. diff --git a/packages/connector/src/runtime.ts b/packages/connector/src/runtime.ts index a3c86e3..6fcd033 100644 --- a/packages/connector/src/runtime.ts +++ b/packages/connector/src/runtime.ts @@ -3,50 +3,26 @@ import { decodeBase64url, RELAY_DELIVERY_RECEIPTS_PATH, } from "@clawdentity/protocol"; -import { - createLogger, - nowIso, - nowUtcMs, - refreshAgentAuthWithClawProof, - toIso, -} from "@clawdentity/sdk"; +import { createLogger } from "@clawdentity/sdk"; import { ConnectorClient } from "./client.js"; -import { - type ConnectorInboundInboxSnapshot, - createConnectorInboundInbox, -} from "./inbound-inbox.js"; -import { - readRegistryAuthFromDisk, - toInitialAuthBundle, - writeRegistryAuthAtomic, -} from "./runtime/auth-storage.js"; -import { - LocalOpenclawDeliveryError, - sanitizeErrorReason, -} from "./runtime/errors.js"; -import { - deliverToOpenclawHook, - readOpenclawHookTokenFromRelayRuntimeConfig, - waitWithAbort, -} from "./runtime/openclaw.js"; +import { createConnectorInboundInbox } from "./inbound-inbox.js"; +import { createRuntimeAuthController } from "./runtime/auth-lifecycle.js"; +import { toInitialAuthBundle } from "./runtime/auth-storage.js"; +import { sanitizeErrorReason } from "./runtime/errors.js"; +import { createOpenclawHookTokenController } from "./runtime/openclaw-hook-token.js"; +import { createOpenclawGatewayProbeController } from "./runtime/openclaw-probe.js"; import { createOutboundQueuePersistence } from "./runtime/outbound-queue.js"; +import { parseRequiredString } from "./runtime/parse.js"; import { - parseRequiredString, - shouldRefreshAccessToken, -} from "./runtime/parse.js"; -import { - computeReplayDelayMs, - computeRuntimeReplayRetryDelayMs, loadInboundReplayPolicy, loadOpenclawProbePolicy, } from "./runtime/policy.js"; import { createRelayService } from "./runtime/relay-service.js"; +import { 
createInboundReplayController } from "./runtime/replay.js"; import { createRuntimeRequestHandler } from "./runtime/server.js"; import { loadTrustedReceiptTargets } from "./runtime/trusted-receipts.js"; import type { ConnectorRuntimeHandle, - InboundReplayStatus, - InboundReplayView, OpenclawGatewayProbeStatus, StartConnectorRuntimeInput, } from "./runtime/types.js"; @@ -77,65 +53,18 @@ export async function startConnectorRuntime( const secretKey = decodeBase64url( parseRequiredString(input.credentials.secretKey, "secretKey"), ); - let currentAuth = toInitialAuthBundle(input.credentials); - - const syncAuthFromDisk = async (): Promise => { - const diskAuth = await readRegistryAuthFromDisk({ - configDir: input.configDir, - agentName: input.agentName, - logger, - }); - if (!diskAuth) { - return; - } - - if ( - diskAuth.accessToken === currentAuth.accessToken && - diskAuth.accessExpiresAt === currentAuth.accessExpiresAt && - diskAuth.refreshToken === currentAuth.refreshToken && - diskAuth.refreshExpiresAt === currentAuth.refreshExpiresAt - ) { - return; - } - - currentAuth = diskAuth; - logger.info("connector.runtime.registry_auth_synced", { - agentName: input.agentName, - }); - }; - - const persistCurrentAuth = async ( - nextAuth: typeof currentAuth, - ): Promise => { - currentAuth = nextAuth; - await writeRegistryAuthAtomic({ - configDir: input.configDir, - agentName: input.agentName, - auth: nextAuth, - }); - }; - - const refreshCurrentAuth = async (): Promise => { - const refreshed = await refreshAgentAuthWithClawProof({ - registryUrl: input.registryUrl, - ait: input.credentials.ait, - secretKey, - refreshToken: currentAuth.refreshToken, - fetchImpl, - }); - await persistCurrentAuth(refreshed); - }; - - const refreshCurrentAuthIfNeeded = async (): Promise => { - await syncAuthFromDisk(); - if (!shouldRefreshAccessToken(currentAuth, nowUtcMs())) { - return; - } - - await refreshCurrentAuth(); - }; - await refreshCurrentAuthIfNeeded(); + const authController = 
createRuntimeAuthController({ + agentName: input.agentName, + ait: input.credentials.ait, + configDir: input.configDir, + fetchImpl, + initialAuth: toInitialAuthBundle(input.credentials), + logger, + registryUrl: input.registryUrl, + secretKey, + }); + await authController.refreshCurrentAuthIfNeeded(); const wsUrl = normalizeWebSocketUrl(input.proxyWebsocketUrl); const wsParsed = new URL(wsUrl); @@ -145,17 +74,18 @@ export async function startConnectorRuntime( ).toString(); const defaultReceiptCallbackOrigin = new URL(defaultReceiptCallbackUrl) .origin; + const openclawBaseUrl = resolveOpenclawBaseUrl(input.openclawBaseUrl); const openclawProbeUrl = openclawBaseUrl; const openclawHookPath = resolveOpenclawHookPath(input.openclawHookPath); const explicitOpenclawHookToken = resolveOpenclawHookToken( input.openclawHookToken, ); - const hasExplicitOpenclawHookToken = explicitOpenclawHookToken !== undefined; - let currentOpenclawHookToken = explicitOpenclawHookToken; const openclawHookUrl = toOpenclawHookUrl(openclawBaseUrl, openclawHookPath); + const inboundReplayPolicy = loadInboundReplayPolicy(); const openclawProbePolicy = loadOpenclawProbePolicy(); + const trustedReceiptTargets = await loadTrustedReceiptTargets({ configDir: input.configDir, logger, @@ -171,333 +101,74 @@ export async function startConnectorRuntime( maxPendingBytes: inboundReplayPolicy.inboxMaxBytes, }); - const inboundReplayStatus: InboundReplayStatus = { - replayerActive: false, - }; const openclawGatewayProbeStatus: OpenclawGatewayProbeStatus = { reachable: true, }; - let openclawProbeInFlight = false; let runtimeStopping = false; - let replayInFlight = false; let replayIntervalHandle: ReturnType | undefined; let openclawProbeIntervalHandle: ReturnType | undefined; const runtimeShutdownController = new AbortController(); + const openclawHookTokenController = createOpenclawHookTokenController({ + configDir: input.configDir, + explicitOpenclawHookToken, + logger, + }); + const resolveUpgradeHeaders 
= async (): Promise> => { - await refreshCurrentAuthIfNeeded(); + await authController.refreshCurrentAuthIfNeeded(); return buildUpgradeHeaders({ wsUrl: wsParsed, ait: input.credentials.ait, - accessToken: currentAuth.accessToken, + accessToken: authController.getCurrentAuth().accessToken, secretKey, }); }; - const syncOpenclawHookToken = async (reason: "auth_rejected" | "batch") => { - if (hasExplicitOpenclawHookToken) { - return; - } - - const diskToken = await readOpenclawHookTokenFromRelayRuntimeConfig({ - configDir: input.configDir, - logger, - }); - if (diskToken === currentOpenclawHookToken) { - return; - } - - currentOpenclawHookToken = diskToken; - logger.info("connector.runtime.openclaw_hook_token_synced", { - reason, - source: diskToken !== undefined ? "openclaw-relay.json" : "unset", - hasToken: currentOpenclawHookToken !== undefined, - }); - }; - - const probeOpenclawGateway = async (): Promise => { - if (runtimeStopping || openclawProbeInFlight) { - return; - } - openclawProbeInFlight = true; - - const checkedAt = nowIso(); - try { - const timeoutSignal = AbortSignal.timeout(openclawProbePolicy.timeoutMs); - const signal = AbortSignal.any([ - runtimeShutdownController.signal, - timeoutSignal, - ]); - await fetchImpl(openclawProbeUrl, { - method: "GET", - signal, - }); - openclawGatewayProbeStatus.reachable = true; - openclawGatewayProbeStatus.lastCheckedAt = checkedAt; - openclawGatewayProbeStatus.lastSuccessAt = checkedAt; - openclawGatewayProbeStatus.lastFailureReason = undefined; - } catch (error) { - if (runtimeShutdownController.signal.aborted) { - return; - } - openclawGatewayProbeStatus.reachable = false; - openclawGatewayProbeStatus.lastCheckedAt = checkedAt; - openclawGatewayProbeStatus.lastFailureReason = sanitizeErrorReason(error); - } finally { - openclawProbeInFlight = false; - } - }; - - const deliverToOpenclawHookWithRetry = async (inputReplay: { - fromAgentDid: string; - payload: unknown; - requestId: string; - toAgentDid: string; - 
}): Promise => { - let attempt = 1; - - while (true) { - try { - await deliverToOpenclawHook({ - fetchImpl, - fromAgentDid: inputReplay.fromAgentDid, - openclawHookUrl, - openclawHookToken: currentOpenclawHookToken, - payload: inputReplay.payload, - requestId: inputReplay.requestId, - shutdownSignal: runtimeShutdownController.signal, - toAgentDid: inputReplay.toAgentDid, - }); - return; - } catch (error) { - if ( - error instanceof LocalOpenclawDeliveryError && - error.code === "RUNTIME_STOPPING" - ) { - throw error; - } - - const retryable = - error instanceof LocalOpenclawDeliveryError ? error.retryable : true; - const authRejected = - error instanceof LocalOpenclawDeliveryError && - error.code === "HOOK_AUTH_REJECTED"; - - if (authRejected) { - const previousToken = currentOpenclawHookToken; - await syncOpenclawHookToken("auth_rejected"); - const tokenChanged = currentOpenclawHookToken !== previousToken; - const attemptsRemaining = - attempt < inboundReplayPolicy.runtimeReplayMaxAttempts; - if (tokenChanged && !runtimeStopping && attemptsRemaining) { - logger.warn( - "connector.inbound.replay_hook_auth_rejected_retrying", - { - requestId: inputReplay.requestId, - attempt, - }, - ); - attempt += 1; - continue; - } - } - - const attemptsRemaining = - attempt < inboundReplayPolicy.runtimeReplayMaxAttempts; - if (!retryable || !attemptsRemaining || runtimeStopping) { - throw error; - } - - const retryDelayMs = computeRuntimeReplayRetryDelayMs({ - attemptCount: attempt, - policy: inboundReplayPolicy, - }); - logger.warn("connector.inbound.replay_retry_scheduled", { - requestId: inputReplay.requestId, - attempt, - retryDelayMs, - reason: sanitizeErrorReason(error), - }); - await waitWithAbort({ - delayMs: retryDelayMs, - signal: runtimeShutdownController.signal, - }); - attempt += 1; - } - } - }; - - const readInboundReplayView = async (): Promise => { - const snapshot: ConnectorInboundInboxSnapshot = - await inboundInbox.getSnapshot(); - return { - snapshot, - 
replayerActive: inboundReplayStatus.replayerActive || replayInFlight, - lastReplayAt: inboundReplayStatus.lastReplayAt, - lastReplayError: inboundReplayStatus.lastReplayError, - openclawGateway: { - url: openclawProbeUrl, - reachable: openclawGatewayProbeStatus.reachable, - lastCheckedAt: openclawGatewayProbeStatus.lastCheckedAt, - lastSuccessAt: openclawGatewayProbeStatus.lastSuccessAt, - lastFailureReason: openclawGatewayProbeStatus.lastFailureReason, - }, - openclawHook: { - url: openclawHookUrl, - lastAttemptAt: inboundReplayStatus.lastAttemptAt, - lastAttemptStatus: inboundReplayStatus.lastAttemptStatus, - }, - }; - }; - - const replayPendingInboundMessages = async (): Promise => { - if (runtimeStopping || replayInFlight) { - return; - } - - replayInFlight = true; - inboundReplayStatus.replayerActive = true; - - try { - const dueItems = await inboundInbox.listDuePending({ - nowMs: nowUtcMs(), - limit: inboundReplayPolicy.batchSize, - }); - if (dueItems.length === 0) { - return; - } - await syncOpenclawHookToken("batch"); - if (!openclawGatewayProbeStatus.reachable) { - logger.info("connector.inbound.replay_skipped_gateway_unreachable", { - pendingCount: dueItems.length, - openclawBaseUrl: openclawProbeUrl, - lastFailureReason: openclawGatewayProbeStatus.lastFailureReason, - }); - return; - } - - const laneByKey = new Map(); - for (const pending of dueItems) { - const laneKey = - pending.conversationId !== undefined - ? 
`conversation:${pending.conversationId}` - : "legacy-best-effort"; - const lane = laneByKey.get(laneKey); - if (lane) { - lane.push(pending); - } else { - laneByKey.set(laneKey, [pending]); - } - } + const outboundBaseUrl = normalizeOutboundBaseUrl(input.outboundBaseUrl); + const outboundPath = normalizeOutboundPath(input.outboundPath); + const outboundUrl = new URL(outboundPath, outboundBaseUrl).toString(); - await Promise.all( - Array.from(laneByKey.values()).map(async (laneItems) => { - for (const pending of laneItems) { - inboundReplayStatus.lastAttemptAt = nowIso(); - try { - await deliverToOpenclawHookWithRetry({ - fromAgentDid: pending.fromAgentDid, - requestId: pending.requestId, - payload: pending.payload, - toAgentDid: pending.toAgentDid, - }); - await inboundInbox.markDelivered(pending.requestId); - inboundReplayStatus.lastReplayAt = nowIso(); - inboundReplayStatus.lastReplayError = undefined; - inboundReplayStatus.lastAttemptStatus = "ok"; - logger.info("connector.inbound.replay_succeeded", { - requestId: pending.requestId, - attemptCount: pending.attemptCount + 1, - conversationId: pending.conversationId, - }); + const relayService = createRelayService({ + configDir: input.configDir, + agentName: input.agentName, + registryUrl: input.registryUrl, + fetchImpl, + secretKey, + ait: input.credentials.ait, + defaultReceiptCallbackUrl, + trustedReceiptTargets, + getCurrentAuth: authController.getCurrentAuth, + setCurrentAuth: authController.persistCurrentAuth, + syncAuthFromDisk: authController.syncAuthFromDisk, + }); - if (pending.replyTo) { - try { - await relayService.postDeliveryReceipt({ - requestId: pending.requestId, - senderAgentDid: pending.fromAgentDid, - recipientAgentDid: pending.toAgentDid, - replyTo: pending.replyTo, - status: "processed_by_openclaw", - }); - } catch (error) { - logger.warn("connector.inbound.delivery_receipt_failed", { - requestId: pending.requestId, - reason: sanitizeErrorReason(error), - status: "processed_by_openclaw", - 
}); - } - } - } catch (error) { - if ( - error instanceof LocalOpenclawDeliveryError && - error.code === "RUNTIME_STOPPING" - ) { - logger.info("connector.inbound.replay_stopped", { - requestId: pending.requestId, - }); - return; - } - const reason = sanitizeErrorReason(error); - const retryable = - error instanceof LocalOpenclawDeliveryError - ? error.retryable - : true; - const nextAttemptAt = toIso( - nowUtcMs() + - computeReplayDelayMs({ - attemptCount: pending.attemptCount + 1, - policy: inboundReplayPolicy, - }) * - (retryable ? 1 : 10), - ); - const markResult = await inboundInbox.markReplayFailure({ - requestId: pending.requestId, - errorMessage: reason, - nextAttemptAt, - retryable, - maxNonRetryableAttempts: - inboundReplayPolicy.deadLetterNonRetryableMaxAttempts, - }); - inboundReplayStatus.lastReplayError = reason; - inboundReplayStatus.lastAttemptStatus = "failed"; - logger.warn("connector.inbound.replay_failed", { - requestId: pending.requestId, - attemptCount: pending.attemptCount + 1, - retryable, - nextAttemptAt, - movedToDeadLetter: markResult.movedToDeadLetter, - reason, - }); + const replayController = createInboundReplayController({ + fetchImpl, + getCurrentOpenclawHookToken: + openclawHookTokenController.getCurrentOpenclawHookToken, + inboundInbox, + inboundReplayPolicy, + isRuntimeStopping: () => runtimeStopping, + logger, + openclawGatewayProbeStatus, + openclawHookUrl, + openclawProbeUrl, + postDeliveryReceipt: relayService.postDeliveryReceipt, + runtimeShutdownSignal: runtimeShutdownController.signal, + syncOpenclawHookToken: openclawHookTokenController.syncOpenclawHookToken, + }); - if (markResult.movedToDeadLetter && pending.replyTo) { - try { - await relayService.postDeliveryReceipt({ - requestId: pending.requestId, - senderAgentDid: pending.fromAgentDid, - recipientAgentDid: pending.toAgentDid, - replyTo: pending.replyTo, - status: "dead_lettered", - reason, - }); - } catch (receiptError) { - 
logger.warn("connector.inbound.delivery_receipt_failed", { - requestId: pending.requestId, - reason: sanitizeErrorReason(receiptError), - status: "dead_lettered", - }); - } - } - } - } - }), - ); - } finally { - replayInFlight = false; - inboundReplayStatus.replayerActive = false; - } - }; + const openclawProbeController = createOpenclawGatewayProbeController({ + fetchImpl, + isRuntimeStopping: () => runtimeStopping, + logger, + openclawGatewayProbeStatus, + openclawProbePolicy, + openclawProbeUrl, + runtimeShutdownSignal: runtimeShutdownController.signal, + }); const outboundQueuePersistence = createOutboundQueuePersistence({ configDir: input.configDir, @@ -510,7 +181,8 @@ export async function startConnectorRuntime( connectionHeadersProvider: resolveUpgradeHeaders, openclawBaseUrl, openclawHookPath, - openclawHookToken: currentOpenclawHookToken, + openclawHookToken: + openclawHookTokenController.getCurrentOpenclawHookToken(), fetchImpl, logger, hooks: { @@ -519,9 +191,9 @@ export async function startConnectorRuntime( status, immediateRetry, }); - await syncAuthFromDisk(); + await authController.syncAuthFromDisk(); try { - await refreshCurrentAuth(); + await authController.refreshCurrentAuth(); } catch (error) { logger.warn( "connector.runtime.registry_auth_refresh_on_ws_upgrade_reject_failed", @@ -552,30 +224,12 @@ export async function startConnectorRuntime( duplicate: persisted.duplicate, pendingCount: persisted.pendingCount, }); - void replayPendingInboundMessages(); + void replayController.replayPendingInboundMessages(); return { accepted: true }; }, webSocketFactory: createWebSocketFactory(), }); - const outboundBaseUrl = normalizeOutboundBaseUrl(input.outboundBaseUrl); - const outboundPath = normalizeOutboundPath(input.outboundPath); - const outboundUrl = new URL(outboundPath, outboundBaseUrl).toString(); - - const relayService = createRelayService({ - configDir: input.configDir, - agentName: input.agentName, - registryUrl: input.registryUrl, - fetchImpl, - 
secretKey, - ait: input.credentials.ait, - defaultReceiptCallbackUrl, - trustedReceiptTargets, - getCurrentAuth: () => currentAuth, - setCurrentAuth: persistCurrentAuth, - syncAuthFromDisk, - }); - const server = createServer( createRuntimeRequestHandler({ connectorClient, @@ -584,10 +238,10 @@ export async function startConnectorRuntime( outboundBaseUrl, outboundPath, outboundUrl, - readInboundReplayView, + readInboundReplayView: replayController.readInboundReplayView, relayToPeer: relayService.relayToPeer, replayPendingInboundMessages: () => { - void replayPendingInboundMessages(); + void replayController.replayPendingInboundMessages(); }, wsUrl, }), @@ -634,16 +288,18 @@ export async function startConnectorRuntime( ); }); - await syncOpenclawHookToken("batch"); - await probeOpenclawGateway(); + await openclawHookTokenController.syncOpenclawHookToken("batch"); + await openclawProbeController.probeOpenclawGateway(); connectorClient.connect(); await inboundInbox.pruneDelivered(); - void replayPendingInboundMessages(); + void replayController.replayPendingInboundMessages(); + replayIntervalHandle = setInterval(() => { - void replayPendingInboundMessages(); + void replayController.replayPendingInboundMessages(); }, inboundReplayPolicy.replayIntervalMs); + openclawProbeIntervalHandle = setInterval(() => { - void probeOpenclawGateway(); + void openclawProbeController.probeOpenclawGateway(); }, openclawProbePolicy.intervalMs); logger.info("connector.runtime.started", { diff --git a/packages/connector/src/runtime/AGENTS.md b/packages/connector/src/runtime/AGENTS.md index a96b326..b5e1608 100644 --- a/packages/connector/src/runtime/AGENTS.md +++ b/packages/connector/src/runtime/AGENTS.md @@ -4,9 +4,13 @@ - Keep connector runtime orchestration readable by separating auth, transport, relay, and server concerns. ## Rules +- Keep runtime auth refresh/sync orchestration in `auth-lifecycle.ts`; treat `auth-storage.ts` as persistence/shape helpers only. 
- Keep auth disk sync/persistence in `auth-storage.ts`; avoid ad-hoc credential writes. +- Keep OpenClaw hook-token sync precedence in `openclaw-hook-token.ts` so explicit token overrides remain centralized. - Keep hook-delivery retry and abort behavior in `openclaw.ts`. +- Keep gateway probe in-flight/health transitions in `openclaw-probe.ts`; avoid duplicate probe loops in `runtime.ts`. - Keep replay/probe policy loading and retry-delay calculations in `policy.ts`. +- Keep replay orchestration and receipt callbacks in `replay.ts`; avoid re-embedding lane scheduling and dead-letter transitions in `runtime.ts`. - Keep outbound relay and receipt callbacks in `relay-service.ts`. - Keep HTTP route handling in `server.ts` and avoid embedding route logic in helpers. - Keep URL/header/parse helpers focused in `url.ts`, `ws.ts`, and `parse.ts`. diff --git a/packages/connector/src/runtime/auth-lifecycle.ts b/packages/connector/src/runtime/auth-lifecycle.ts new file mode 100644 index 0000000..3db1bd8 --- /dev/null +++ b/packages/connector/src/runtime/auth-lifecycle.ts @@ -0,0 +1,92 @@ +import type { AgentAuthBundle, Logger } from "@clawdentity/sdk"; +import { nowUtcMs, refreshAgentAuthWithClawProof } from "@clawdentity/sdk"; +import { + readRegistryAuthFromDisk, + writeRegistryAuthAtomic, +} from "./auth-storage.js"; +import { shouldRefreshAccessToken } from "./parse.js"; + +export type RuntimeAuthController = { + getCurrentAuth: () => AgentAuthBundle; + persistCurrentAuth: (nextAuth: AgentAuthBundle) => Promise; + refreshCurrentAuth: () => Promise; + refreshCurrentAuthIfNeeded: () => Promise; + syncAuthFromDisk: () => Promise; +}; + +export function createRuntimeAuthController(input: { + agentName: string; + ait: string; + configDir: string; + fetchImpl: typeof fetch; + initialAuth: AgentAuthBundle; + logger: Logger; + registryUrl: string; + secretKey: Uint8Array; +}): RuntimeAuthController { + let currentAuth = input.initialAuth; + + const syncAuthFromDisk = async (): 
Promise => { + const diskAuth = await readRegistryAuthFromDisk({ + configDir: input.configDir, + agentName: input.agentName, + logger: input.logger, + }); + if (!diskAuth) { + return; + } + + if ( + diskAuth.accessToken === currentAuth.accessToken && + diskAuth.accessExpiresAt === currentAuth.accessExpiresAt && + diskAuth.refreshToken === currentAuth.refreshToken && + diskAuth.refreshExpiresAt === currentAuth.refreshExpiresAt + ) { + return; + } + + currentAuth = diskAuth; + input.logger.info("connector.runtime.registry_auth_synced", { + agentName: input.agentName, + }); + }; + + const persistCurrentAuth = async ( + nextAuth: AgentAuthBundle, + ): Promise => { + currentAuth = nextAuth; + await writeRegistryAuthAtomic({ + configDir: input.configDir, + agentName: input.agentName, + auth: nextAuth, + }); + }; + + const refreshCurrentAuth = async (): Promise => { + const refreshed = await refreshAgentAuthWithClawProof({ + registryUrl: input.registryUrl, + ait: input.ait, + secretKey: input.secretKey, + refreshToken: currentAuth.refreshToken, + fetchImpl: input.fetchImpl, + }); + await persistCurrentAuth(refreshed); + }; + + const refreshCurrentAuthIfNeeded = async (): Promise => { + await syncAuthFromDisk(); + if (!shouldRefreshAccessToken(currentAuth, nowUtcMs())) { + return; + } + + await refreshCurrentAuth(); + }; + + return { + getCurrentAuth: () => currentAuth, + persistCurrentAuth, + refreshCurrentAuth, + refreshCurrentAuthIfNeeded, + syncAuthFromDisk, + }; +} diff --git a/packages/connector/src/runtime/openclaw-hook-token.ts b/packages/connector/src/runtime/openclaw-hook-token.ts new file mode 100644 index 0000000..7eab2f7 --- /dev/null +++ b/packages/connector/src/runtime/openclaw-hook-token.ts @@ -0,0 +1,47 @@ +import type { Logger } from "@clawdentity/sdk"; +import { readOpenclawHookTokenFromRelayRuntimeConfig } from "./openclaw.js"; + +export type OpenclawHookTokenSyncReason = "auth_rejected" | "batch"; + +export type OpenclawHookTokenController = { + 
getCurrentOpenclawHookToken: () => string | undefined; + syncOpenclawHookToken: (reason: OpenclawHookTokenSyncReason) => Promise; +}; + +export function createOpenclawHookTokenController(input: { + configDir: string; + explicitOpenclawHookToken: string | undefined; + logger: Logger; +}): OpenclawHookTokenController { + const hasExplicitOpenclawHookToken = + input.explicitOpenclawHookToken !== undefined; + let currentOpenclawHookToken = input.explicitOpenclawHookToken; + + const syncOpenclawHookToken = async ( + reason: OpenclawHookTokenSyncReason, + ): Promise => { + if (hasExplicitOpenclawHookToken) { + return; + } + + const diskToken = await readOpenclawHookTokenFromRelayRuntimeConfig({ + configDir: input.configDir, + logger: input.logger, + }); + if (diskToken === currentOpenclawHookToken) { + return; + } + + currentOpenclawHookToken = diskToken; + input.logger.info("connector.runtime.openclaw_hook_token_synced", { + reason, + source: diskToken !== undefined ? "openclaw-relay.json" : "unset", + hasToken: currentOpenclawHookToken !== undefined, + }); + }; + + return { + getCurrentOpenclawHookToken: () => currentOpenclawHookToken, + syncOpenclawHookToken, + }; +} diff --git a/packages/connector/src/runtime/openclaw-probe.ts b/packages/connector/src/runtime/openclaw-probe.ts new file mode 100644 index 0000000..b5bf2f8 --- /dev/null +++ b/packages/connector/src/runtime/openclaw-probe.ts @@ -0,0 +1,63 @@ +import type { Logger } from "@clawdentity/sdk"; +import { nowIso } from "@clawdentity/sdk"; +import { sanitizeErrorReason } from "./errors.js"; +import type { + OpenclawGatewayProbeStatus, + OpenclawProbePolicy, +} from "./types.js"; + +export function createOpenclawGatewayProbeController(input: { + fetchImpl: typeof fetch; + isRuntimeStopping: () => boolean; + logger: Logger; + openclawGatewayProbeStatus: OpenclawGatewayProbeStatus; + openclawProbePolicy: OpenclawProbePolicy; + openclawProbeUrl: string; + runtimeShutdownSignal: AbortSignal; +}): { + 
probeOpenclawGateway: () => Promise; +} { + let openclawProbeInFlight = false; + + const probeOpenclawGateway = async (): Promise => { + if (input.isRuntimeStopping() || openclawProbeInFlight) { + return; + } + + openclawProbeInFlight = true; + + const checkedAt = nowIso(); + try { + const timeoutSignal = AbortSignal.timeout( + input.openclawProbePolicy.timeoutMs, + ); + const signal = AbortSignal.any([ + input.runtimeShutdownSignal, + timeoutSignal, + ]); + await input.fetchImpl(input.openclawProbeUrl, { + method: "GET", + signal, + }); + input.openclawGatewayProbeStatus.reachable = true; + input.openclawGatewayProbeStatus.lastCheckedAt = checkedAt; + input.openclawGatewayProbeStatus.lastSuccessAt = checkedAt; + input.openclawGatewayProbeStatus.lastFailureReason = undefined; + } catch (error) { + if (input.runtimeShutdownSignal.aborted) { + return; + } + + input.openclawGatewayProbeStatus.reachable = false; + input.openclawGatewayProbeStatus.lastCheckedAt = checkedAt; + input.openclawGatewayProbeStatus.lastFailureReason = + sanitizeErrorReason(error); + } finally { + openclawProbeInFlight = false; + } + }; + + return { + probeOpenclawGateway, + }; +} diff --git a/packages/connector/src/runtime/replay.ts b/packages/connector/src/runtime/replay.ts new file mode 100644 index 0000000..b451f40 --- /dev/null +++ b/packages/connector/src/runtime/replay.ts @@ -0,0 +1,329 @@ +import type { Logger } from "@clawdentity/sdk"; +import { nowIso, nowUtcMs, toIso } from "@clawdentity/sdk"; +import type { + ConnectorInboundInbox, + ConnectorInboundInboxItem, + ConnectorInboundInboxSnapshot, +} from "../inbound-inbox.js"; +import { LocalOpenclawDeliveryError, sanitizeErrorReason } from "./errors.js"; +import { deliverToOpenclawHook, waitWithAbort } from "./openclaw.js"; +import { + computeReplayDelayMs, + computeRuntimeReplayRetryDelayMs, +} from "./policy.js"; +import type { + InboundReplayPolicy, + InboundReplayStatus, + InboundReplayView, + OpenclawGatewayProbeStatus, +} from 
"./types.js"; + +type DeliveryReceiptInput = { + reason?: string; + recipientAgentDid: string; + replyTo: string; + requestId: string; + senderAgentDid: string; + status: "processed_by_openclaw" | "dead_lettered"; +}; + +function groupDueItemsByLane( + dueItems: ConnectorInboundInboxItem[], +): ConnectorInboundInboxItem[][] { + const laneByKey = new Map(); + + for (const pending of dueItems) { + const laneKey = + pending.conversationId !== undefined + ? `conversation:${pending.conversationId}` + : "legacy-best-effort"; + const lane = laneByKey.get(laneKey); + if (lane) { + lane.push(pending); + } else { + laneByKey.set(laneKey, [pending]); + } + } + + return Array.from(laneByKey.values()); +} + +export function createInboundReplayController(input: { + fetchImpl: typeof fetch; + getCurrentOpenclawHookToken: () => string | undefined; + inboundInbox: ConnectorInboundInbox; + inboundReplayPolicy: InboundReplayPolicy; + isRuntimeStopping: () => boolean; + logger: Logger; + openclawGatewayProbeStatus: OpenclawGatewayProbeStatus; + openclawHookUrl: string; + openclawProbeUrl: string; + postDeliveryReceipt: (inputReceipt: DeliveryReceiptInput) => Promise; + runtimeShutdownSignal: AbortSignal; + syncOpenclawHookToken: (reason: "auth_rejected" | "batch") => Promise; +}): { + readInboundReplayView: () => Promise; + replayPendingInboundMessages: () => Promise; +} { + const inboundReplayStatus: InboundReplayStatus = { + replayerActive: false, + }; + + let replayInFlight = false; + + const deliverToOpenclawHookWithRetry = async (inputReplay: { + fromAgentDid: string; + payload: unknown; + requestId: string; + toAgentDid: string; + }): Promise => { + let attempt = 1; + + while (true) { + try { + await deliverToOpenclawHook({ + fetchImpl: input.fetchImpl, + fromAgentDid: inputReplay.fromAgentDid, + openclawHookUrl: input.openclawHookUrl, + openclawHookToken: input.getCurrentOpenclawHookToken(), + payload: inputReplay.payload, + requestId: inputReplay.requestId, + shutdownSignal: 
input.runtimeShutdownSignal, + toAgentDid: inputReplay.toAgentDid, + }); + return; + } catch (error) { + if ( + error instanceof LocalOpenclawDeliveryError && + error.code === "RUNTIME_STOPPING" + ) { + throw error; + } + + const retryable = + error instanceof LocalOpenclawDeliveryError ? error.retryable : true; + const authRejected = + error instanceof LocalOpenclawDeliveryError && + error.code === "HOOK_AUTH_REJECTED"; + + if (authRejected) { + const previousToken = input.getCurrentOpenclawHookToken(); + await input.syncOpenclawHookToken("auth_rejected"); + const tokenChanged = + input.getCurrentOpenclawHookToken() !== previousToken; + const attemptsRemaining = + attempt < input.inboundReplayPolicy.runtimeReplayMaxAttempts; + if (tokenChanged && !input.isRuntimeStopping() && attemptsRemaining) { + input.logger.warn( + "connector.inbound.replay_hook_auth_rejected_retrying", + { + requestId: inputReplay.requestId, + attempt, + }, + ); + attempt += 1; + continue; + } + } + + const attemptsRemaining = + attempt < input.inboundReplayPolicy.runtimeReplayMaxAttempts; + if (!retryable || !attemptsRemaining || input.isRuntimeStopping()) { + throw error; + } + + const retryDelayMs = computeRuntimeReplayRetryDelayMs({ + attemptCount: attempt, + policy: input.inboundReplayPolicy, + }); + input.logger.warn("connector.inbound.replay_retry_scheduled", { + requestId: inputReplay.requestId, + attempt, + retryDelayMs, + reason: sanitizeErrorReason(error), + }); + await waitWithAbort({ + delayMs: retryDelayMs, + signal: input.runtimeShutdownSignal, + }); + attempt += 1; + } + } + }; + + const readInboundReplayView = async (): Promise => { + const snapshot: ConnectorInboundInboxSnapshot = + await input.inboundInbox.getSnapshot(); + return { + snapshot, + replayerActive: inboundReplayStatus.replayerActive || replayInFlight, + lastReplayAt: inboundReplayStatus.lastReplayAt, + lastReplayError: inboundReplayStatus.lastReplayError, + openclawGateway: { + url: input.openclawProbeUrl, + 
reachable: input.openclawGatewayProbeStatus.reachable, + lastCheckedAt: input.openclawGatewayProbeStatus.lastCheckedAt, + lastSuccessAt: input.openclawGatewayProbeStatus.lastSuccessAt, + lastFailureReason: input.openclawGatewayProbeStatus.lastFailureReason, + }, + openclawHook: { + url: input.openclawHookUrl, + lastAttemptAt: inboundReplayStatus.lastAttemptAt, + lastAttemptStatus: inboundReplayStatus.lastAttemptStatus, + }, + }; + }; + + const replayPendingInboundMessages = async (): Promise => { + if (input.isRuntimeStopping() || replayInFlight) { + return; + } + + replayInFlight = true; + inboundReplayStatus.replayerActive = true; + + try { + const dueItems = await input.inboundInbox.listDuePending({ + nowMs: nowUtcMs(), + limit: input.inboundReplayPolicy.batchSize, + }); + if (dueItems.length === 0) { + return; + } + + await input.syncOpenclawHookToken("batch"); + if (!input.openclawGatewayProbeStatus.reachable) { + input.logger.info( + "connector.inbound.replay_skipped_gateway_unreachable", + { + pendingCount: dueItems.length, + openclawBaseUrl: input.openclawProbeUrl, + lastFailureReason: + input.openclawGatewayProbeStatus.lastFailureReason, + }, + ); + return; + } + + const laneItems = groupDueItemsByLane(dueItems); + await Promise.all( + laneItems.map(async (lane) => { + for (const pending of lane) { + inboundReplayStatus.lastAttemptAt = nowIso(); + try { + await deliverToOpenclawHookWithRetry({ + fromAgentDid: pending.fromAgentDid, + requestId: pending.requestId, + payload: pending.payload, + toAgentDid: pending.toAgentDid, + }); + await input.inboundInbox.markDelivered(pending.requestId); + inboundReplayStatus.lastReplayAt = nowIso(); + inboundReplayStatus.lastReplayError = undefined; + inboundReplayStatus.lastAttemptStatus = "ok"; + input.logger.info("connector.inbound.replay_succeeded", { + requestId: pending.requestId, + attemptCount: pending.attemptCount + 1, + conversationId: pending.conversationId, + }); + + if (pending.replyTo) { + try { + await 
input.postDeliveryReceipt({ + requestId: pending.requestId, + senderAgentDid: pending.fromAgentDid, + recipientAgentDid: pending.toAgentDid, + replyTo: pending.replyTo, + status: "processed_by_openclaw", + }); + } catch (error) { + input.logger.warn( + "connector.inbound.delivery_receipt_failed", + { + requestId: pending.requestId, + reason: sanitizeErrorReason(error), + status: "processed_by_openclaw", + }, + ); + } + } + } catch (error) { + if ( + error instanceof LocalOpenclawDeliveryError && + error.code === "RUNTIME_STOPPING" + ) { + input.logger.info("connector.inbound.replay_stopped", { + requestId: pending.requestId, + }); + return; + } + + const reason = sanitizeErrorReason(error); + const retryable = + error instanceof LocalOpenclawDeliveryError + ? error.retryable + : true; + const nextAttemptAt = toIso( + nowUtcMs() + + computeReplayDelayMs({ + attemptCount: pending.attemptCount + 1, + policy: input.inboundReplayPolicy, + }) * + (retryable ? 1 : 10), + ); + + const markResult = await input.inboundInbox.markReplayFailure({ + requestId: pending.requestId, + errorMessage: reason, + nextAttemptAt, + retryable, + maxNonRetryableAttempts: + input.inboundReplayPolicy.deadLetterNonRetryableMaxAttempts, + }); + inboundReplayStatus.lastReplayError = reason; + inboundReplayStatus.lastAttemptStatus = "failed"; + input.logger.warn("connector.inbound.replay_failed", { + requestId: pending.requestId, + attemptCount: pending.attemptCount + 1, + retryable, + nextAttemptAt, + movedToDeadLetter: markResult.movedToDeadLetter, + reason, + }); + + if (markResult.movedToDeadLetter && pending.replyTo) { + try { + await input.postDeliveryReceipt({ + requestId: pending.requestId, + senderAgentDid: pending.fromAgentDid, + recipientAgentDid: pending.toAgentDid, + replyTo: pending.replyTo, + status: "dead_lettered", + reason, + }); + } catch (receiptError) { + input.logger.warn( + "connector.inbound.delivery_receipt_failed", + { + requestId: pending.requestId, + reason: 
sanitizeErrorReason(receiptError), + status: "dead_lettered", + }, + ); + } + } + } + } + }), + ); + } finally { + replayInFlight = false; + inboundReplayStatus.replayerActive = false; + } + }; + + return { + readInboundReplayView, + replayPendingInboundMessages, + }; +} From e86f1c90f3a89cc391c1bece98f5e9e3c4426042 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:37:45 +0530 Subject: [PATCH 141/190] refactor(connector): split client inbound and metrics modules --- packages/connector/src/AGENTS.md | 2 + packages/connector/src/client.ts | 118 ++++++----------------- packages/connector/src/client/AGENTS.md | 2 + packages/connector/src/client/inbound.ts | 53 ++++++++++ packages/connector/src/client/metrics.ts | 98 +++++++++++++++++++ 5 files changed, 183 insertions(+), 90 deletions(-) create mode 100644 packages/connector/src/client/inbound.ts create mode 100644 packages/connector/src/client/metrics.ts diff --git a/packages/connector/src/AGENTS.md b/packages/connector/src/AGENTS.md index 7712be2..295ebe5 100644 --- a/packages/connector/src/AGENTS.md +++ b/packages/connector/src/AGENTS.md @@ -5,6 +5,8 @@ - Keep `client.ts` as the stable public surface (`ConnectorClient` + exported client types) and route internal concerns through `client/` modules: - `client/types.ts` for externally consumed client types. - `client/helpers.ts` for shared pure helpers (event parsing, sanitization, normalization). + - `client/inbound.ts` for parsed frame dispatch orchestration (`heartbeat`, `heartbeat_ack`, `deliver`). + - `client/metrics.ts` for websocket uptime/reconnect and inbound ack-latency tracking. - `client/retry.ts` for reusable backoff math. - `client/heartbeat.ts` for heartbeat scheduling, ack tracking, and RTT metrics. - `client/queue.ts` for outbound queue + persistence orchestration. 
diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index 6e2a35a..c96fbdd 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -16,7 +16,9 @@ import { toOpenclawHookUrl, WS_READY_STATE_CONNECTING, } from "./client/helpers.js"; +import { handleIncomingConnectorMessage } from "./client/inbound.js"; import { handleInboundDeliverFrame } from "./client/inbound-delivery.js"; +import { ConnectorClientMetricsTracker } from "./client/metrics.js"; import { ConnectorOutboundQueueManager } from "./client/queue.js"; import { computeJitteredBackoffDelayMs } from "./client/retry.js"; import type { @@ -52,7 +54,6 @@ import { enqueueFrameSchema, type HeartbeatAckFrame, type HeartbeatFrame, - parseFrame, serializeFrame, } from "./frames.js"; @@ -95,21 +96,12 @@ export class ConnectorClient { private readonly heartbeatManager: ConnectorHeartbeatManager; private readonly outboundQueue: ConnectorOutboundQueueManager; private readonly localOpenclawDelivery: LocalOpenclawDeliveryClient; + private readonly metricsTracker: ConnectorClientMetricsTracker; private socket: ConnectorWebSocket | undefined; private reconnectTimeout: ReturnType | undefined; private connectTimeout: ReturnType | undefined; private reconnectAttempt = 0; - private reconnectCount = 0; - private connectAttempts = 0; - private connectedSinceMs: number | undefined; - private accumulatedConnectedMs = 0; - private lastConnectedAtIso: string | undefined; - - private inboundAckLatencySampleCount = 0; - private inboundAckLatencyTotalMs = 0; - private inboundAckLatencyMaxMs = 0; - private inboundAckLatencyLastMs: number | undefined; private authUpgradeImmediateRetryUsed = false; private started = false; @@ -203,6 +195,7 @@ export class ConnectorClient { persistence: this.outboundQueuePersistence, logger: this.logger, }); + this.metricsTracker = new ConnectorClientMetricsTracker(this.now); const openclawHookUrl = toOpenclawHookUrl( options.openclawBaseUrl, @@ 
-257,37 +250,11 @@ export class ConnectorClient { } getMetricsSnapshot(): ConnectorClientMetricsSnapshot { - const nowMs = this.now(); - const uptimeMs = - this.accumulatedConnectedMs + - (this.connectedSinceMs === undefined ? 0 : nowMs - this.connectedSinceMs); - - return { - connection: { - connectAttempts: this.connectAttempts, - connected: this.isConnected(), - reconnectCount: this.reconnectCount, - uptimeMs: Math.max(0, uptimeMs), - lastConnectedAt: this.lastConnectedAtIso, - }, + return this.metricsTracker.getSnapshot({ + connected: this.isConnected(), heartbeat: this.heartbeatManager.getMetricsSnapshot(), - inboundDelivery: { - sampleCount: this.inboundAckLatencySampleCount, - lastAckLatencyMs: this.inboundAckLatencyLastMs, - maxAckLatencyMs: - this.inboundAckLatencySampleCount > 0 - ? this.inboundAckLatencyMaxMs - : undefined, - avgAckLatencyMs: - this.inboundAckLatencySampleCount > 0 - ? Math.floor( - this.inboundAckLatencyTotalMs / - this.inboundAckLatencySampleCount, - ) - : undefined, - }, outboundQueue: this.outboundQueue.getMetricsSnapshot(), - }; + }); } enqueueOutbound(input: ConnectorOutboundEnqueueInput): EnqueueFrame { @@ -309,7 +276,7 @@ export class ConnectorClient { private async connectSocket(): Promise { this.clearReconnectTimeout(); - this.connectAttempts += 1; + this.metricsTracker.onConnectAttempt(); if (this.outboundQueuePersistence !== undefined) { await this.ensureOutboundQueueLoaded(); @@ -355,8 +322,7 @@ export class ConnectorClient { this.clearConnectTimeout(); this.reconnectAttempt = 0; this.authUpgradeImmediateRetryUsed = false; - this.connectedSinceMs = this.now(); - this.lastConnectedAtIso = this.makeTimestamp(); + this.metricsTracker.onSocketConnected(this.makeTimestamp()); this.logger.info("connector.websocket.connected", { url: this.connectorUrl, }); @@ -469,7 +435,7 @@ export class ConnectorClient { if (options?.incrementAttempt ?? 
true) { this.reconnectAttempt += 1; } - this.reconnectCount += 1; + this.metricsTracker.onReconnectScheduled(); this.reconnectTimeout = setTimeout(() => { void this.connectSocket(); @@ -530,13 +496,7 @@ export class ConnectorClient { } this.socket = undefined; - if (this.connectedSinceMs !== undefined) { - this.accumulatedConnectedMs += Math.max( - 0, - this.now() - this.connectedSinceMs, - ); - this.connectedSinceMs = undefined; - } + this.metricsTracker.onSocketDetached(); this.clearSocketState(); return true; } @@ -688,33 +648,22 @@ export class ConnectorClient { } private async handleIncomingMessage(rawFrame: unknown): Promise { - let frame: ConnectorFrame; - - try { - frame = parseFrame(rawFrame); - } catch (error) { - this.logger.warn("connector.frame.parse_failed", { - reason: sanitizeErrorReason(error), - }); - return; - } - - this.hooks.onFrame?.(frame); - - if (frame.type === "heartbeat") { - this.handleHeartbeatFrame(frame); - return; - } - - if (frame.type === "heartbeat_ack") { - this.heartbeatManager.handleHeartbeatAck(frame); - return; - } - - if (frame.type === "deliver") { - await this.handleDeliverFrame(frame); - return; - } + await handleIncomingConnectorMessage({ + rawFrame, + logger: this.logger, + handlers: { + onFrame: this.hooks.onFrame, + onHeartbeatFrame: (frame) => { + this.handleHeartbeatFrame(frame); + }, + onHeartbeatAckFrame: (frame) => { + this.heartbeatManager.handleHeartbeatAck(frame); + }, + onDeliverFrame: async (frame) => { + await this.handleDeliverFrame(frame); + }, + }, + }); } private handleHeartbeatFrame(frame: HeartbeatFrame): void { @@ -743,22 +692,11 @@ export class ConnectorClient { this.sendFrame(ackFrame); }, recordAckLatency: (durationMs) => { - this.recordInboundDeliveryAckLatency(durationMs); + this.metricsTracker.recordInboundDeliveryAckLatency(durationMs); }, }); } - private recordInboundDeliveryAckLatency(durationMs: number): void { - const latencyMs = Math.max(0, Math.floor(durationMs)); - 
this.inboundAckLatencySampleCount += 1; - this.inboundAckLatencyTotalMs += latencyMs; - this.inboundAckLatencyMaxMs = Math.max( - this.inboundAckLatencyMaxMs, - latencyMs, - ); - this.inboundAckLatencyLastMs = latencyMs; - } - private makeFrameId(): string { return this.ulidFactory(this.now()); } diff --git a/packages/connector/src/client/AGENTS.md b/packages/connector/src/client/AGENTS.md index 2d21287..bb76c98 100644 --- a/packages/connector/src/client/AGENTS.md +++ b/packages/connector/src/client/AGENTS.md @@ -5,6 +5,8 @@ ## Rules - Keep frame/event parsing and sanitization in `helpers.ts` as pure functions. +- Keep inbound frame parsing + frame-type dispatch in `inbound.ts` so `client.ts` only wires handlers. +- Keep connector transport/inbound delivery metrics state in `metrics.ts` to avoid duplicating counters in `client.ts`. - Keep reconnect delay math in `retry.ts` and avoid inline backoff duplication. - Keep heartbeat tracking and metrics centralized in `heartbeat.ts`. - Keep outbound queue persistence and load/flush semantics centralized in `queue.ts`. 
diff --git a/packages/connector/src/client/inbound.ts b/packages/connector/src/client/inbound.ts new file mode 100644 index 0000000..b5b0f8f --- /dev/null +++ b/packages/connector/src/client/inbound.ts @@ -0,0 +1,53 @@ +import type { Logger } from "@clawdentity/sdk"; +import { + type ConnectorFrame, + type DeliverFrame, + type HeartbeatAckFrame, + type HeartbeatFrame, + parseFrame, +} from "../frames.js"; +import { sanitizeErrorReason } from "./helpers.js"; + +type ConnectorInboundMessageHandlers = { + onFrame?: (frame: ConnectorFrame) => void; + onHeartbeatFrame: (frame: HeartbeatFrame) => void; + onHeartbeatAckFrame: (frame: HeartbeatAckFrame) => void; + onDeliverFrame: (frame: DeliverFrame) => Promise; +}; + +type HandleIncomingConnectorMessageInput = { + rawFrame: unknown; + logger: Logger; + handlers: ConnectorInboundMessageHandlers; +}; + +export async function handleIncomingConnectorMessage( + input: HandleIncomingConnectorMessageInput, +): Promise { + let frame: ConnectorFrame; + + try { + frame = parseFrame(input.rawFrame); + } catch (error) { + input.logger.warn("connector.frame.parse_failed", { + reason: sanitizeErrorReason(error), + }); + return; + } + + input.handlers.onFrame?.(frame); + + if (frame.type === "heartbeat") { + input.handlers.onHeartbeatFrame(frame); + return; + } + + if (frame.type === "heartbeat_ack") { + input.handlers.onHeartbeatAckFrame(frame); + return; + } + + if (frame.type === "deliver") { + await input.handlers.onDeliverFrame(frame); + } +} diff --git a/packages/connector/src/client/metrics.ts b/packages/connector/src/client/metrics.ts new file mode 100644 index 0000000..48b18c6 --- /dev/null +++ b/packages/connector/src/client/metrics.ts @@ -0,0 +1,98 @@ +import type { ConnectorClientMetricsSnapshot } from "./types.js"; + +type HeartbeatMetricsSnapshot = ConnectorClientMetricsSnapshot["heartbeat"]; +type OutboundQueueMetricsSnapshot = + ConnectorClientMetricsSnapshot["outboundQueue"]; + +type ConnectorMetricsSnapshotInput = { + 
connected: boolean; + heartbeat: HeartbeatMetricsSnapshot; + outboundQueue: OutboundQueueMetricsSnapshot; +}; + +export class ConnectorClientMetricsTracker { + private connectAttempts = 0; + private reconnectCount = 0; + private connectedSinceMs: number | undefined; + private accumulatedConnectedMs = 0; + private lastConnectedAtIso: string | undefined; + + private inboundAckLatencySampleCount = 0; + private inboundAckLatencyTotalMs = 0; + private inboundAckLatencyMaxMs = 0; + private inboundAckLatencyLastMs: number | undefined; + + constructor(private readonly now: () => number) {} + + onConnectAttempt(): void { + this.connectAttempts += 1; + } + + onReconnectScheduled(): void { + this.reconnectCount += 1; + } + + onSocketConnected(connectedAtIso: string): void { + this.connectedSinceMs = this.now(); + this.lastConnectedAtIso = connectedAtIso; + } + + onSocketDetached(): void { + if (this.connectedSinceMs === undefined) { + return; + } + + this.accumulatedConnectedMs += Math.max( + 0, + this.now() - this.connectedSinceMs, + ); + this.connectedSinceMs = undefined; + } + + recordInboundDeliveryAckLatency(durationMs: number): void { + const latencyMs = Math.max(0, Math.floor(durationMs)); + this.inboundAckLatencySampleCount += 1; + this.inboundAckLatencyTotalMs += latencyMs; + this.inboundAckLatencyMaxMs = Math.max( + this.inboundAckLatencyMaxMs, + latencyMs, + ); + this.inboundAckLatencyLastMs = latencyMs; + } + + getSnapshot( + input: ConnectorMetricsSnapshotInput, + ): ConnectorClientMetricsSnapshot { + const nowMs = this.now(); + const uptimeMs = + this.accumulatedConnectedMs + + (this.connectedSinceMs === undefined ? 
0 : nowMs - this.connectedSinceMs); + + return { + connection: { + connectAttempts: this.connectAttempts, + connected: input.connected, + reconnectCount: this.reconnectCount, + uptimeMs: Math.max(0, uptimeMs), + lastConnectedAt: this.lastConnectedAtIso, + }, + heartbeat: input.heartbeat, + inboundDelivery: { + sampleCount: this.inboundAckLatencySampleCount, + lastAckLatencyMs: this.inboundAckLatencyLastMs, + maxAckLatencyMs: + this.inboundAckLatencySampleCount > 0 + ? this.inboundAckLatencyMaxMs + : undefined, + avgAckLatencyMs: + this.inboundAckLatencySampleCount > 0 + ? Math.floor( + this.inboundAckLatencyTotalMs / + this.inboundAckLatencySampleCount, + ) + : undefined, + }, + outboundQueue: input.outboundQueue, + }; + } +} From 6a75ade0db55fbcc0e78f174ddaa64855492e6e1 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:42:14 +0530 Subject: [PATCH 142/190] refactor(proxy): extract relay socket tracking module --- apps/proxy/src/AGENTS.md | 2 +- apps/proxy/src/agent-relay-session/AGENTS.md | 13 ++ apps/proxy/src/agent-relay-session/core.ts | 125 +++------------- .../src/agent-relay-session/socket-tracker.ts | 134 ++++++++++++++++++ 4 files changed, 165 insertions(+), 109 deletions(-) create mode 100644 apps/proxy/src/agent-relay-session/AGENTS.md create mode 100644 apps/proxy/src/agent-relay-session/socket-tracker.ts diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index 46436c6..e39a415 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -97,6 +97,6 @@ ## Agent Relay Session Modularization - Keep `agent-relay-session.ts` focused on Durable Object state machine orchestration; move helpers, parsers, and queue helpers into `apps/proxy/src/agent-relay-session/` so the entry file stays below 800 lines. -- Name helper modules by concern (`types`, `errors`, `frames`, `parsers`, `queue`, `policy`, `rpc`) and avoid importing back from `agent-relay-session.ts` to prevent cycles. 
+- Name helper modules by concern (`types`, `errors`, `frames`, `parsers`, `queue`, `policy`, `rpc`, `socket-tracker`) and avoid importing back from `agent-relay-session.ts` to prevent cycles. - Re-export the public API (`Relay*` types, `AgentRelaySession`, and RPC helpers) from `agent-relay-session.ts` so existing imports in routes/tests stay untouched. - When introducing a new helper, document it here so future splits keep the Durable Object surface lean and test coverage aware. diff --git a/apps/proxy/src/agent-relay-session/AGENTS.md b/apps/proxy/src/agent-relay-session/AGENTS.md new file mode 100644 index 0000000..6f5eb0e --- /dev/null +++ b/apps/proxy/src/agent-relay-session/AGENTS.md @@ -0,0 +1,13 @@ +# AGENTS.md (apps/proxy/src/agent-relay-session) + +## Purpose +- Keep relay Durable Object code modular, deterministic, and below 800 lines per file. + +## Rules +- Keep `core.ts` as orchestration only (fetch/alarm lifecycle, queue + delivery flow), not as a utility dump. +- Keep socket liveness/heartbeat/pending-close tracking in `socket-tracker.ts`. +- Keep frame construction/parsing helpers in `frames.ts`; do not duplicate frame payload logic in `core.ts`. +- Keep queue receipt normalization/pruning/upsert/delete behavior in `queue-state.ts`. +- Keep retry delay math in `policy.ts` and alarm scheduling in `scheduler.ts`. +- Keep request payload validation in `parsers.ts` and RPC error envelopes in `rpc.ts`. +- Keep shared relay constants in `constants.ts`; avoid repeating close codes and route paths inline. 
diff --git a/apps/proxy/src/agent-relay-session/core.ts b/apps/proxy/src/agent-relay-session/core.ts index ea1f8f0..9926b9c 100644 --- a/apps/proxy/src/agent-relay-session/core.ts +++ b/apps/proxy/src/agent-relay-session/core.ts @@ -21,7 +21,6 @@ import { getWebSocketMessageBytes, toDeliverFrame, toHeartbeatAckFrame, - toHeartbeatFrame, toRelayDeliveryResult, } from "./frames.js"; import { @@ -40,6 +39,7 @@ import { } from "./queue-state.js"; import { toErrorResponse } from "./rpc.js"; import { scheduleNextRelayAlarm } from "./scheduler.js"; +import { RelaySocketTracker } from "./socket-tracker.js"; import type { DurableObjectStateLike, PendingDelivery, @@ -55,10 +55,8 @@ import type { export class AgentRelaySession { private readonly deliveryPolicy: RelayDeliveryPolicy; - private readonly heartbeatAckSockets = new Map(); private readonly pendingDeliveries = new Map(); - private readonly socketLastAckAtMs = new Map(); - private readonly socketsPendingClose = new Set(); + private readonly socketTracker: RelaySocketTracker; private readonly state: DurableObjectStateLike; private inMemoryQueueState: RelayQueueState = { deliveries: [], @@ -78,6 +76,10 @@ export class AgentRelaySession { retryMaxAttempts: config.relayRetryMaxAttempts, retryMaxMs: config.relayRetryMaxMs, }; + this.socketTracker = new RelaySocketTracker({ + heartbeatAckTimeoutMs: RELAY_HEARTBEAT_ACK_TIMEOUT_MS, + staleCloseCode: RELAY_SOCKET_STALE_CLOSE_CODE, + }); } async fetch(request: Request): Promise { @@ -341,14 +343,14 @@ export class AgentRelaySession { const frame = frameResult; if (frame.type === "heartbeat") { - this.touchSocketAck(ws, nowMs); + this.socketTracker.touchSocketAck(ws, nowMs); ws.send(toHeartbeatAckFrame(frame.id)); await this.scheduleFromStorage(); return; } if (frame.type === "deliver_ack") { - this.touchSocketAck(ws, nowMs); + this.socketTracker.touchSocketAck(ws, nowMs); const pending = this.pendingDeliveries.get(frame.ackId); if (pending) { 
clearTimeout(pending.timeoutHandle); @@ -360,9 +362,7 @@ export class AgentRelaySession { } if (frame.type === "heartbeat_ack") { - const ackedSocket = this.heartbeatAckSockets.get(frame.ackId); - this.heartbeatAckSockets.delete(frame.ackId); - this.touchSocketAck(ackedSocket ?? ws, nowMs); + this.socketTracker.handleHeartbeatAck(frame.ackId, ws, nowMs); await this.scheduleFromStorage(); return; } @@ -377,8 +377,7 @@ export class AgentRelaySession { wasClean?: boolean, ): Promise { if (ws !== undefined) { - this.removeSocketTracking(ws); - this.socketsPendingClose.delete(ws); + this.socketTracker.onSocketClosed(ws); } const gracefulClose = code === 1000 && (wasClean ?? true); @@ -423,7 +422,7 @@ export class AgentRelaySession { const server = pair[1]; this.state.acceptWebSocket(server, [connectorAgentDid]); - this.touchSocketAck(server, nowMs); + this.socketTracker.touchSocketAck(server, nowMs); void this.drainQueueOnReconnect(); return new Response(null, { @@ -649,108 +648,18 @@ export class AgentRelaySession { } private getActiveSockets(nowMs: number): WebSocket[] { - const sockets = this.state.getWebSockets(); - this.pruneSocketTracking(sockets); - const activeSockets: WebSocket[] = []; - - for (const socket of sockets) { - if (this.socketsPendingClose.has(socket)) { - continue; - } - - const lastAckAtMs = this.resolveSocketLastAckAtMs(socket, nowMs); - if (nowMs - lastAckAtMs > RELAY_HEARTBEAT_ACK_TIMEOUT_MS) { - this.closeSocket( - socket, - RELAY_SOCKET_STALE_CLOSE_CODE, - "heartbeat_ack_timeout", - ); - continue; - } - - activeSockets.push(socket); - } - - return activeSockets; - } - - private resolveSocketLastAckAtMs(socket: WebSocket, nowMs: number): number { - const existing = this.socketLastAckAtMs.get(socket); - if (existing !== undefined) { - return existing; - } - - this.socketLastAckAtMs.set(socket, nowMs); - return nowMs; - } - - private touchSocketAck(socket: WebSocket, nowMs: number): void { - if (this.socketsPendingClose.has(socket)) { - return; 
- } - this.socketLastAckAtMs.set(socket, nowMs); + return this.socketTracker.getActiveSockets( + this.state.getWebSockets(), + nowMs, + ); } private sendHeartbeatFrame(socket: WebSocket, nowMs: number): void { - const heartbeatFrame = toHeartbeatFrame(nowMs); - this.clearSocketHeartbeatAcks(socket); - this.heartbeatAckSockets.set(heartbeatFrame.id, socket); - - try { - socket.send(heartbeatFrame.payload); - } catch { - this.heartbeatAckSockets.delete(heartbeatFrame.id); - this.closeSocket( - socket, - RELAY_SOCKET_STALE_CLOSE_CODE, - "heartbeat_send_failed", - ); - } - } - - private clearSocketHeartbeatAcks(socket: WebSocket): void { - for (const [ackId, ackSocket] of this.heartbeatAckSockets) { - if (ackSocket === socket) { - this.heartbeatAckSockets.delete(ackId); - } - } + this.socketTracker.sendHeartbeatFrame(socket, nowMs); } private closeSocket(socket: WebSocket, code: number, reason: string): void { - this.socketsPendingClose.add(socket); - this.removeSocketTracking(socket); - try { - socket.close(code, reason); - } catch { - // Ignore close errors for already-closed sockets. 
- } - } - - private removeSocketTracking(socket: WebSocket): void { - this.socketLastAckAtMs.delete(socket); - this.clearSocketHeartbeatAcks(socket); - } - - private pruneSocketTracking(activeSockets: WebSocket[]): void { - const activeSocketSet = new Set(activeSockets); - - for (const socket of this.socketLastAckAtMs.keys()) { - if (!activeSocketSet.has(socket)) { - this.socketLastAckAtMs.delete(socket); - } - } - - for (const socket of this.socketsPendingClose) { - if (!activeSocketSet.has(socket)) { - this.socketsPendingClose.delete(socket); - } - } - - for (const [ackId, socket] of this.heartbeatAckSockets.entries()) { - if (!activeSocketSet.has(socket)) { - this.heartbeatAckSockets.delete(ackId); - } - } + this.socketTracker.closeSocket(socket, code, reason); } private async drainQueueOnReconnect(): Promise { diff --git a/apps/proxy/src/agent-relay-session/socket-tracker.ts b/apps/proxy/src/agent-relay-session/socket-tracker.ts new file mode 100644 index 0000000..23c73e4 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/socket-tracker.ts @@ -0,0 +1,134 @@ +import { toHeartbeatFrame } from "./frames.js"; + +type RelaySocketTrackerOptions = { + heartbeatAckTimeoutMs: number; + staleCloseCode: number; +}; + +export class RelaySocketTracker { + private readonly heartbeatAckSockets = new Map(); + private readonly socketLastAckAtMs = new Map(); + private readonly socketsPendingClose = new Set(); + + constructor(private readonly options: RelaySocketTrackerOptions) {} + + getActiveSockets(sockets: WebSocket[], nowMs: number): WebSocket[] { + this.pruneSocketTracking(sockets); + const activeSockets: WebSocket[] = []; + + for (const socket of sockets) { + if (this.socketsPendingClose.has(socket)) { + continue; + } + + const lastAckAtMs = this.resolveSocketLastAckAtMs(socket, nowMs); + if (nowMs - lastAckAtMs > this.options.heartbeatAckTimeoutMs) { + this.closeSocket( + socket, + this.options.staleCloseCode, + "heartbeat_ack_timeout", + ); + continue; + } + + 
activeSockets.push(socket); + } + + return activeSockets; + } + + touchSocketAck(socket: WebSocket, nowMs: number): void { + if (this.socketsPendingClose.has(socket)) { + return; + } + + this.socketLastAckAtMs.set(socket, nowMs); + } + + sendHeartbeatFrame(socket: WebSocket, nowMs: number): void { + const heartbeatFrame = toHeartbeatFrame(nowMs); + this.clearSocketHeartbeatAcks(socket); + this.heartbeatAckSockets.set(heartbeatFrame.id, socket); + + try { + socket.send(heartbeatFrame.payload); + } catch { + this.heartbeatAckSockets.delete(heartbeatFrame.id); + this.closeSocket( + socket, + this.options.staleCloseCode, + "heartbeat_send_failed", + ); + } + } + + handleHeartbeatAck( + ackId: string, + fallbackSocket: WebSocket, + nowMs: number, + ): void { + const ackedSocket = this.heartbeatAckSockets.get(ackId); + this.heartbeatAckSockets.delete(ackId); + this.touchSocketAck(ackedSocket ?? fallbackSocket, nowMs); + } + + closeSocket(socket: WebSocket, code: number, reason: string): void { + this.socketsPendingClose.add(socket); + this.removeSocketTracking(socket); + try { + socket.close(code, reason); + } catch { + // Ignore close errors for already-closed sockets. 
+ } + } + + onSocketClosed(socket: WebSocket): void { + this.removeSocketTracking(socket); + this.socketsPendingClose.delete(socket); + } + + private resolveSocketLastAckAtMs(socket: WebSocket, nowMs: number): number { + const existing = this.socketLastAckAtMs.get(socket); + if (existing !== undefined) { + return existing; + } + + this.socketLastAckAtMs.set(socket, nowMs); + return nowMs; + } + + private clearSocketHeartbeatAcks(socket: WebSocket): void { + for (const [ackId, ackSocket] of this.heartbeatAckSockets) { + if (ackSocket === socket) { + this.heartbeatAckSockets.delete(ackId); + } + } + } + + private removeSocketTracking(socket: WebSocket): void { + this.socketLastAckAtMs.delete(socket); + this.clearSocketHeartbeatAcks(socket); + } + + private pruneSocketTracking(activeSockets: WebSocket[]): void { + const activeSocketSet = new Set(activeSockets); + + for (const socket of this.socketLastAckAtMs.keys()) { + if (!activeSocketSet.has(socket)) { + this.socketLastAckAtMs.delete(socket); + } + } + + for (const socket of this.socketsPendingClose) { + if (!activeSocketSet.has(socket)) { + this.socketsPendingClose.delete(socket); + } + } + + for (const [ackId, socket] of this.heartbeatAckSockets.entries()) { + if (!activeSocketSet.has(socket)) { + this.heartbeatAckSockets.delete(ackId); + } + } + } +} From 4b116eaae2c6a68b7e6d4f284adc12183f3dc754 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 18:56:17 +0530 Subject: [PATCH 143/190] chore(quality): enforce file-size guard and reuse common isRecord --- .github/AGENTS.md | 3 +- .github/workflows/ci.yml | 1 + AGENTS.md | 1 + apps/cli/src/commands/AGENTS.md | 1 + apps/cli/src/commands/invite.ts | 9 +- apps/cli/src/commands/verify.ts | 9 +- apps/openclaw-skill/package.json | 1 + apps/openclaw-skill/src/AGENTS.md | 1 + .../src/transforms/peers-config.ts | 5 +- .../src/transforms/registry-auth.ts | 5 +- .../src/transforms/relay-to-peer.ts | 5 +- package.json | 1 + packages/sdk/src/AGENTS.md | 2 +- 
packages/sdk/src/agent-auth-client.ts | 9 +- pnpm-lock.yaml | 3 + scripts/AGENTS.md | 11 ++ scripts/quality/check-file-size.mjs | 119 ++++++++++++++++++ 17 files changed, 157 insertions(+), 29 deletions(-) create mode 100644 scripts/AGENTS.md create mode 100644 scripts/quality/check-file-size.mjs diff --git a/.github/AGENTS.md b/.github/AGENTS.md index f8bf3df..2a9707c 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -12,8 +12,9 @@ - Avoid duplicate CI runs for PR updates by limiting `push` triggers to long-lived branches (`main`, `develop`) and using `pull_request` for feature branches. ## Quality Gates -- CI command order: install -> base/head setup -> lint -> affected checks. +- CI command order: install -> base/head setup -> file-size guard (`pnpm check:file-size`) -> lint -> affected checks. - Affected checks in CI must include `lint`, `format`, `typecheck`, `test`, and `build`. +- File-size guard scope: tracked source files under `apps/**` and `packages/**`, hard limit `800` lines, excluding `dist`, `.wrangler`, `worker-configuration.d.ts`, `drizzle/meta`, and `node_modules`. ## Deployment Rules (Develop) - `deploy-develop.yml` runs on pushes to `develop`. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 879a5fc..4cf3ccc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,5 +41,6 @@ jobs: echo "NX_BASE=$BASE_SHA" >> "$GITHUB_ENV" echo "NX_HEAD=${{ github.sha }}" >> "$GITHUB_ENV" fi + - run: pnpm check:file-size - run: pnpm lint - run: pnpm affected:ci diff --git a/AGENTS.md b/AGENTS.md index 0f57ee3..14ba071 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -42,6 +42,7 @@ ## Validation Baseline - Run and pass: `pnpm lint`, `pnpm -r typecheck`, `pnpm -r test`, `pnpm -r build` for implementation changes. - Lint runs at root (`pnpm lint` via `biome check .`), not per-package. 
+- File-size guard must pass via `pnpm check:file-size` (fails when tracked source files under `apps/**` or `packages/**` exceed 800 lines; excludes `dist`, `.wrangler`, `worker-configuration.d.ts`, `drizzle/meta`, `node_modules`). - For planning/doc changes, verify dependency/order consistency against the active GitHub issue tracker. ## Cloudflare Worker & Wrangler Conventions diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 849bb21..bb1993c 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -9,6 +9,7 @@ - Use `withErrorHandling` for command actions unless a command has a documented reason not to. - Route all user-facing messages through `writeStdoutLine`/`writeStderrLine`. - For new command-domain errors, use SDK `AppError` with stable `code` values. +- Reuse `@clawdentity/common` guards (for example `isRecord`) instead of redefining local record/type guard helpers in command modules. - Keep command timestamps UTC and standardized through SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`) instead of direct `Date` calls. - Normalize Commander option keys at the command boundary when helper/runtime option names differ (for example `--peer` -> `peerAlias`) so flags are never silently ignored. 
diff --git a/apps/cli/src/commands/invite.ts b/apps/cli/src/commands/invite.ts index 6ab1ea8..d63e443 100644 --- a/apps/cli/src/commands/invite.ts +++ b/apps/cli/src/commands/invite.ts @@ -1,4 +1,7 @@ -import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; +import { + isRecord, + parseJsonResponseSafe as parseJsonResponse, +} from "@clawdentity/common"; import { INVITES_PATH, INVITES_REDEEM_PATH } from "@clawdentity/protocol"; import { AppError, createLogger } from "@clawdentity/sdk"; import { Command } from "commander"; @@ -74,10 +77,6 @@ type InviteRuntime = { config: CliConfig; }; -const isRecord = (value: unknown): value is Record => { - return typeof value === "object" && value !== null; -}; - function parseNonEmptyString(value: unknown): string { if (typeof value !== "string") { return ""; diff --git a/apps/cli/src/commands/verify.ts b/apps/cli/src/commands/verify.ts index 04036dc..a0f133c 100644 --- a/apps/cli/src/commands/verify.ts +++ b/apps/cli/src/commands/verify.ts @@ -1,5 +1,8 @@ import { readFile } from "node:fs/promises"; -import { parseJsonResponseSafe as parseResponseJson } from "@clawdentity/common"; +import { + isRecord, + parseJsonResponseSafe as parseResponseJson, +} from "@clawdentity/common"; import { parseCrlClaims } from "@clawdentity/protocol"; import { createLogger, @@ -59,10 +62,6 @@ class VerifyCommandError extends Error { } } -const isRecord = (value: unknown): value is Record => { - return typeof value === "object" && value !== null; -}; - const normalizeRegistryUrl = (registryUrl: string): string => { try { return new URL(registryUrl).toString(); diff --git a/apps/openclaw-skill/package.json b/apps/openclaw-skill/package.json index e88b99e..1be4261 100644 --- a/apps/openclaw-skill/package.json +++ b/apps/openclaw-skill/package.json @@ -11,6 +11,7 @@ "typecheck": "tsc --noEmit" }, "dependencies": { + "@clawdentity/common": "workspace:*", "@clawdentity/protocol": "workspace:*", "@clawdentity/sdk": 
"workspace:*" }, diff --git a/apps/openclaw-skill/src/AGENTS.md b/apps/openclaw-skill/src/AGENTS.md index a9c521d..9ff99e1 100644 --- a/apps/openclaw-skill/src/AGENTS.md +++ b/apps/openclaw-skill/src/AGENTS.md @@ -8,6 +8,7 @@ ## Safety Rules - Validate external input (`payload`, peer config JSON) before use. +- Reuse `@clawdentity/common` guards (for example `isRecord`) instead of redefining local record/type guard helpers in transform modules. - Do not log relay payload contents or local connector credential material. - Keep local auth/lock timestamps UTC and standardized via SDK datetime helpers (`nowUtcMs`, `toIso`, `nowIso`) instead of direct `Date` calls. - Keep transform relay path as local connector handoff only, not direct peer HTTP calls. diff --git a/apps/openclaw-skill/src/transforms/peers-config.ts b/apps/openclaw-skill/src/transforms/peers-config.ts index 90ad2c3..c6df9e9 100644 --- a/apps/openclaw-skill/src/transforms/peers-config.ts +++ b/apps/openclaw-skill/src/transforms/peers-config.ts @@ -1,6 +1,7 @@ import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; import { homedir } from "node:os"; import { dirname, join } from "node:path"; +import { isRecord } from "@clawdentity/common"; const CLAWDENTITY_DIR = ".clawdentity"; const PEERS_FILENAME = "peers.json"; @@ -24,10 +25,6 @@ export type PeersConfigPathOptions = { homeDir?: string; }; -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - function getErrorCode(error: unknown): string | undefined { if (!isRecord(error)) { return undefined; diff --git a/apps/openclaw-skill/src/transforms/registry-auth.ts b/apps/openclaw-skill/src/transforms/registry-auth.ts index f537cd2..a420ebe 100644 --- a/apps/openclaw-skill/src/transforms/registry-auth.ts +++ b/apps/openclaw-skill/src/transforms/registry-auth.ts @@ -8,6 +8,7 @@ import { writeFile, } from "node:fs/promises"; import { join } from "node:path"; +import { isRecord } from 
"@clawdentity/common"; import { type AgentAuthBundle, nowUtcMs } from "@clawdentity/sdk"; const CLAWDENTITY_DIR = ".clawdentity"; @@ -18,10 +19,6 @@ const LOCK_RETRY_DELAY_MS = 50; const LOCK_MAX_ATTEMPTS = 200; const STALE_LOCK_AGE_MS = 30_000; -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - function getErrorCode(error: unknown): string | undefined { if (!isRecord(error)) { return undefined; diff --git a/apps/openclaw-skill/src/transforms/relay-to-peer.ts b/apps/openclaw-skill/src/transforms/relay-to-peer.ts index 537196f..3896b52 100644 --- a/apps/openclaw-skill/src/transforms/relay-to-peer.ts +++ b/apps/openclaw-skill/src/transforms/relay-to-peer.ts @@ -1,6 +1,7 @@ import { readFile } from "node:fs/promises"; import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; +import { isRecord } from "@clawdentity/common"; import { loadPeersConfig, type PeersConfigPathOptions, @@ -35,10 +36,6 @@ type ConnectorRelayRequest = { peerProxyUrl: string; }; -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== null; -} - function getErrorCode(error: unknown): string | undefined { if (!isRecord(error)) { return undefined; diff --git a/package.json b/package.json index 7ecbb65..6fd7e31 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,7 @@ }, "scripts": { "prepare": "husky", + "check:file-size": "node ./scripts/quality/check-file-size.mjs", "format": "biome format .", "lint": "biome check .", "lint:staged": "lint-staged --concurrent false", diff --git a/packages/sdk/src/AGENTS.md b/packages/sdk/src/AGENTS.md index aeb5123..d4a0928 100644 --- a/packages/sdk/src/AGENTS.md +++ b/packages/sdk/src/AGENTS.md @@ -4,4 +4,4 @@ - Follow `packages/sdk/AGENTS.md` as the canonical SDK guidance. - Keep datetime primitives centralized in `datetime.ts` and exported through `index.ts` (`nowUtcMs`, `toIso`, `nowIso`, `addSeconds`, `isExpired`). 
- Keep helper tests focused and deterministic in `datetime.test.ts`. -- Reuse `@clawdentity/common` primitives (for example safe JSON response parsing) instead of duplicating generic transport helpers in SDK clients. +- Reuse `@clawdentity/common` primitives (for example `isRecord` and safe JSON response parsing) instead of duplicating generic transport helpers in SDK clients. diff --git a/packages/sdk/src/agent-auth-client.ts b/packages/sdk/src/agent-auth-client.ts index e84da4b..c75d3f2 100644 --- a/packages/sdk/src/agent-auth-client.ts +++ b/packages/sdk/src/agent-auth-client.ts @@ -1,4 +1,7 @@ -import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; +import { + isRecord, + parseJsonResponseSafe as parseJsonResponse, +} from "@clawdentity/common"; import { AGENT_AUTH_REFRESH_PATH, encodeBase64url, @@ -29,10 +32,6 @@ type RefreshSingleFlightOptions = { const refreshSingleFlights = new Map>(); -const isRecord = (value: unknown): value is Record => { - return typeof value === "object" && value !== null; -}; - const parseNonEmptyString = (value: unknown): string => { if (typeof value !== "string") { return ""; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0aa997e..aef98c8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -75,6 +75,9 @@ importers: apps/openclaw-skill: dependencies: + '@clawdentity/common': + specifier: workspace:* + version: link:../../packages/common '@clawdentity/protocol': specifier: workspace:* version: link:../../packages/protocol diff --git a/scripts/AGENTS.md b/scripts/AGENTS.md new file mode 100644 index 0000000..2cfa85f --- /dev/null +++ b/scripts/AGENTS.md @@ -0,0 +1,11 @@ +# AGENTS.md (scripts) + +## Purpose +- Keep repository utility scripts deterministic, fast, and CI-safe. +- Centralize reusable quality checks that are invoked from root `package.json` scripts. + +## Rules +- Prefer Node-based scripts for cross-platform behavior in local and CI environments. 
+- Keep script output deterministic: sorted traversal, stable formatting, and explicit non-zero exits on guard failures. +- File-size guard entrypoint is `scripts/quality/check-file-size.mjs`, exposed at root as `pnpm check:file-size`. +- The file-size guard enforces an 800-line limit for tracked source files under `apps/**` and `packages/**`, excluding `dist`, `.wrangler`, `worker-configuration.d.ts`, `drizzle/meta`, and `node_modules`. diff --git a/scripts/quality/check-file-size.mjs b/scripts/quality/check-file-size.mjs new file mode 100644 index 0000000..f3bc413 --- /dev/null +++ b/scripts/quality/check-file-size.mjs @@ -0,0 +1,119 @@ +#!/usr/bin/env node + +import { execFileSync } from "node:child_process"; +import { readFileSync } from "node:fs"; + +const MAX_LINES = 800; +const EXCLUDED_DIR_SEGMENTS = new Set([ + "dist", + ".wrangler", + "node_modules", +]); +const EXCLUDED_PATH_SNIPPETS = ["/drizzle/meta/"]; +const EXCLUDED_BASENAMES = new Set(["worker-configuration.d.ts"]); +const SOURCE_EXTENSIONS = new Set([ + ".ts", + ".tsx", + ".js", + ".jsx", + ".mjs", + ".cjs", + ".mts", + ".cts", + ".d.ts", + ".json", + ".jsonc", + ".sql", + ".sh", +]); + +function compareStrings(left, right) { + if (left === right) { + return 0; + } + + return left < right ? 
-1 : 1; +} + +const trackedFilesOutput = execFileSync("git", ["ls-files", "--", "apps", "packages"], { + encoding: "utf8", +}); + +const trackedFiles = trackedFilesOutput + .split("\n") + .map((filePath) => filePath.trim()) + .filter(Boolean) + .map((filePath) => filePath.replaceAll("\\", "/")) + .sort(compareStrings); + +function isExcluded(filePath) { + if (EXCLUDED_BASENAMES.has(filePath.split("/").at(-1))) { + return true; + } + + if (EXCLUDED_PATH_SNIPPETS.some((snippet) => filePath.includes(snippet))) { + return true; + } + + const segments = filePath.split("/"); + return segments.some((segment) => EXCLUDED_DIR_SEGMENTS.has(segment)); +} + +function getExtension(filePath) { + if (filePath.endsWith(".d.ts")) { + return ".d.ts"; + } + + const extensionStart = filePath.lastIndexOf("."); + if (extensionStart === -1) { + return ""; + } + + return filePath.slice(extensionStart); +} + +function countLines(filePath) { + const contents = readFileSync(filePath, "utf8"); + + if (contents.length === 0) { + return 0; + } + + const newlineMatches = contents.match(/\n/g); + const newlineCount = newlineMatches === null ? 0 : newlineMatches.length; + return contents.endsWith("\n") ? 
newlineCount : newlineCount + 1; +} + +const sourceFiles = trackedFiles.filter((filePath) => { + if (isExcluded(filePath)) { + return false; + } + + return SOURCE_EXTENSIONS.has(getExtension(filePath)); +}); + +const violations = sourceFiles + .map((filePath) => ({ filePath, lineCount: countLines(filePath) })) + .filter(({ lineCount }) => lineCount > MAX_LINES) + .sort((left, right) => { + if (right.lineCount !== left.lineCount) { + return right.lineCount - left.lineCount; + } + return compareStrings(left.filePath, right.filePath); + }); + +if (violations.length > 0) { + console.error( + `Found ${violations.length} source file(s) exceeding ${MAX_LINES} lines under apps/ and packages/:`, + ); + + for (const violation of violations) { + console.error(`- ${violation.filePath}: ${violation.lineCount} lines`); + } + + process.exit(1); +} + +console.log( + `File-size guard passed: ${sourceFiles.length} source file(s) checked (max ${MAX_LINES} lines).`, +); From a7af9a85ab2e37b6c3da8be0ed5272731a511523 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 19:08:52 +0530 Subject: [PATCH 144/190] feat: align deterministic bootstrap service env across registry and proxy --- .env.example | 4 +- .github/AGENTS.md | 8 +- apps/cli/src/commands/AGENTS.md | 6 +- apps/cli/src/commands/admin.test.ts | 2 - apps/cli/src/commands/admin.ts | 8 +- apps/cli/src/commands/verify.test.ts | 4 +- apps/cli/src/commands/verify.ts | 4 +- apps/openclaw-skill/skill/SKILL.md | 2 +- .../skill/references/clawdentity-protocol.md | 2 +- .../skill/references/clawdentity-registry.md | 8 + apps/proxy/.env.example | 6 +- apps/proxy/AGENTS.md | 2 +- apps/proxy/src/auth-middleware.ts | 4 +- apps/proxy/src/config.test.ts | 14 +- apps/proxy/src/config.ts | 41 +- apps/proxy/src/pairing-route.test.ts | 8 +- apps/proxy/src/worker.test.ts | 12 +- apps/proxy/src/worker.ts | 8 +- apps/registry/.env.example | 4 + apps/registry/src/AGENTS.md | 4 +- apps/registry/src/agent-registration.ts | 2 +- 
apps/registry/src/server.test.ts | 714 +++++++++++++++--- apps/registry/src/server.ts | 76 +- packages/sdk/src/config.test.ts | 46 +- packages/sdk/src/config.ts | 31 +- packages/sdk/src/index.test.ts | 12 +- packages/sdk/src/runtime-environment.test.ts | 4 +- packages/sdk/src/runtime-environment.ts | 2 +- scripts/env/sync-worktree-env.sh | 12 +- 29 files changed, 835 insertions(+), 215 deletions(-) diff --git a/.env.example b/.env.example index 198e4fa..46d46c1 100644 --- a/.env.example +++ b/.env.example @@ -10,10 +10,10 @@ CLAWDENTITY_REGISTRY_URL=http://127.0.0.1:8788 CLAWDENTITY_PROXY_URL=http://127.0.0.1:8787 BOOTSTRAP_SECRET=replace-with-random-secret +BOOTSTRAP_INTERNAL_SERVICE_ID=replace-with-internal-service-id +BOOTSTRAP_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret REGISTRY_SIGNING_KEY=replace-with-base64url-ed25519-private-key REGISTRY_SIGNING_KEYS=[{"kid":"reg-dev-key-1","alg":"EdDSA","crv":"Ed25519","x":"replace-with-base64url-ed25519-public-key","status":"active"}] -REGISTRY_INTERNAL_SERVICE_ID=replace-with-internal-service-id -REGISTRY_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret # Optional keys APP_VERSION=local-dev diff --git a/.github/AGENTS.md b/.github/AGENTS.md index d8d7373..9648407 100644 --- a/.github/AGENTS.md +++ b/.github/AGENTS.md @@ -24,8 +24,8 @@ - Install dependencies before any `pnpm exec wrangler ...` command so Wrangler is available on clean runners. - Regenerate Worker type bindings in CI with dotenv overlays disabled (`pnpm -F @clawdentity/registry run types:dev` and `pnpm -F @clawdentity/proxy run types:dev`) and fail on git diff drift for `worker-configuration.d.ts` to prevent stale runtime binding types from shipping. 
- Sync proxy internal-service credentials from GitHub secrets on every deploy: - - `REGISTRY_INTERNAL_SERVICE_ID` - - `REGISTRY_INTERNAL_SERVICE_SECRET` + - `BOOTSTRAP_INTERNAL_SERVICE_ID` + - `BOOTSTRAP_INTERNAL_SERVICE_SECRET` - Push both values into proxy Worker secrets before proxy deploy. - Add a Wrangler preflight dry-run for both workers before mutating remote state (migrations/deploy): - `wrangler deploy --env dev --dry-run --var APP_VERSION:` @@ -55,8 +55,8 @@ - Required deploy secrets: - `CLOUDFLARE_API_TOKEN` - `CLOUDFLARE_ACCOUNT_ID` - - `REGISTRY_INTERNAL_SERVICE_ID` - - `REGISTRY_INTERNAL_SERVICE_SECRET` + - `BOOTSTRAP_INTERNAL_SERVICE_ID` + - `BOOTSTRAP_INTERNAL_SERVICE_SECRET` - Mirror to `CF_API_TOKEN` and `CF_ACCOUNT_ID` for tooling compatibility. - Optional deploy secrets: - `REGISTRY_HEALTH_URL` (only needed when dev registry health endpoint is not `https://dev.registry.clawdentity.com`; CI falls back to that URL by default). diff --git a/apps/cli/src/commands/AGENTS.md b/apps/cli/src/commands/AGENTS.md index 3832f70..ee366c8 100644 --- a/apps/cli/src/commands/AGENTS.md +++ b/apps/cli/src/commands/AGENTS.md @@ -60,11 +60,11 @@ - `admin bootstrap` must call registry `/v1/admin/bootstrap` with `x-bootstrap-secret` and fail with stable CLI error codes/messages. - `admin bootstrap` must import `ADMIN_BOOTSTRAP_PATH` from `@clawdentity/protocol` instead of duplicating endpoint literals in command code/tests. - Treat bootstrap API key token as a write-once secret (print once) and never log secret contents. -- Never print internal service secret from `admin bootstrap`; operators must provision/rotate `REGISTRY_INTERNAL_SERVICE_SECRET` manually in Cloudflare. +- Never print internal service secret from `admin bootstrap`; operators must provision/rotate `BOOTSTRAP_INTERNAL_SERVICE_SECRET` manually in Cloudflare. - Normalize registry URL through URL parsing before requests; reject invalid URLs before network calls. 
- Persist bootstrap output in deterministic order: `registryUrl` then `apiKey`, so CLI state is predictable after onboarding. -- Bootstrap command output should explicitly remind operators to set `REGISTRY_INTERNAL_SERVICE_ID` and `REGISTRY_INTERNAL_SERVICE_SECRET` on proxy environment before deploy. -- Bootstrap response parsing must require `{ human, apiKey, internalService }` to prevent partially-valid onboarding state. +- Bootstrap command output should explicitly remind operators to set `BOOTSTRAP_INTERNAL_SERVICE_ID` and `BOOTSTRAP_INTERNAL_SERVICE_SECRET` on proxy environment before deploy. +- Bootstrap response parsing must require `{ human, apiKey, internalService }` with internal service metadata (`id`, `name`) to prevent partially-valid onboarding state. - Config persistence failures after successful bootstrap must not hide the returned PAT token; print secrets first, then surface recovery instructions. ## API Key Command Rules diff --git a/apps/cli/src/commands/admin.test.ts b/apps/cli/src/commands/admin.test.ts index fae92b6..64351e8 100644 --- a/apps/cli/src/commands/admin.test.ts +++ b/apps/cli/src/commands/admin.test.ts @@ -29,7 +29,6 @@ describe("admin bootstrap helper", () => { internalService: { id: "01KHH000000000000000000002", name: "proxy-pairing", - secret: "clw_srv_testsecret", }, }), { status: 201, headers: { "content-type": "application/json" } }, @@ -53,7 +52,6 @@ describe("admin bootstrap helper", () => { expect(result.human.did).toBe("did:claw:human:00000000000000000000000000"); expect(result.apiKey.token).toBe("clw_pat_testtoken"); expect(result.internalService.id).toBe("01KHH000000000000000000002"); - expect(result.internalService.secret).toBe("clw_srv_testsecret"); expect(result.registryUrl).toBe("https://api.example.com/"); expect(fetchMock).toHaveBeenCalledTimes(1); const [calledInput, calledInit] = fetchMock.mock.calls[0] as [ diff --git a/apps/cli/src/commands/admin.ts b/apps/cli/src/commands/admin.ts index 000a40b..024bd34 
100644 --- a/apps/cli/src/commands/admin.ts +++ b/apps/cli/src/commands/admin.ts @@ -34,7 +34,6 @@ type AdminBootstrapResponse = { internalService: { id: string; name: string; - secret: string; }; }; @@ -119,7 +118,6 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { const apiKeyToken = parseNonEmptyString(apiKey.token); const internalServiceId = parseNonEmptyString(internalService.id); const internalServiceName = parseNonEmptyString(internalService.name); - const internalServiceSecret = parseNonEmptyString(internalService.secret); if ( humanId.length === 0 || @@ -129,8 +127,7 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { apiKeyName.length === 0 || apiKeyToken.length === 0 || internalServiceId.length === 0 || - internalServiceName.length === 0 || - internalServiceSecret.length === 0 + internalServiceName.length === 0 ) { throw createCliError( "CLI_ADMIN_BOOTSTRAP_INVALID_RESPONSE", @@ -154,7 +151,6 @@ function parseBootstrapResponse(payload: unknown): AdminBootstrapResponse { internalService: { id: internalServiceId, name: internalServiceName, - secret: internalServiceSecret, }, }; } @@ -298,7 +294,7 @@ export const createAdminCommand = (): Command => { `Internal service name: ${result.internalService.name}`, ); writeStdoutLine( - "Set proxy secrets REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET manually in Cloudflare before proxy deploy.", + "Set proxy secrets BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET manually in Cloudflare before proxy deploy.", ); await persistBootstrapConfig(result.registryUrl, result.apiKey.token); diff --git a/apps/cli/src/commands/verify.test.ts b/apps/cli/src/commands/verify.test.ts index 833d0c4..1d5e032 100644 --- a/apps/cli/src/commands/verify.test.ts +++ b/apps/cli/src/commands/verify.test.ts @@ -167,7 +167,9 @@ describe("verify command", () => { mockedNowUtcMs.mockImplementation(() => Date.now()); 
mockedParseRegistryConfig.mockReturnValue({ - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEYS: [activeSigningKey], }); diff --git a/apps/cli/src/commands/verify.ts b/apps/cli/src/commands/verify.ts index 9d270e0..17f4b62 100644 --- a/apps/cli/src/commands/verify.ts +++ b/apps/cli/src/commands/verify.ts @@ -149,7 +149,9 @@ const parseSigningKeys = (payload: unknown): RegistrySigningKey[] => { } const parsedConfig = parseRegistryConfig({ - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEYS: JSON.stringify(payload.keys), }); diff --git a/apps/openclaw-skill/skill/SKILL.md b/apps/openclaw-skill/skill/SKILL.md index a87d093..df94b82 100644 --- a/apps/openclaw-skill/skill/SKILL.md +++ b/apps/openclaw-skill/skill/SKILL.md @@ -406,7 +406,7 @@ Do not suggest switching endpoints unless user explicitly asks for endpoint chan - `CLI_PAIR_STATUS_WAIT_CANCELLED`: wait interrupted (SIGINT). Run `pair recover`. - `CLI_PAIR_CONFIRM_INPUT_CONFLICT`: cannot provide both `--ticket` and `--qr-file`. Use one path only. - `CLI_PAIR_PROXY_URL_MISMATCH`: local `proxyUrl` does not match registry metadata. Rerun `clawdentity invite redeem `. -- `PROXY_PAIR_OWNERSHIP_UNAVAILABLE`: proxy cannot authenticate to registry ownership endpoint. Bootstrap should create `proxy-pairing` internal service automatically on fresh environments. For already-bootstrapped environments, create or rotate the internal service via admin API and update proxy secrets (`REGISTRY_INTERNAL_SERVICE_ID`, `REGISTRY_INTERNAL_SERVICE_SECRET`). +- `PROXY_PAIR_OWNERSHIP_UNAVAILABLE`: proxy cannot authenticate to registry ownership endpoint. 
Ensure registry deterministic bootstrap credentials are configured (`BOOTSTRAP_INTERNAL_SERVICE_ID`, `BOOTSTRAP_INTERNAL_SERVICE_SECRET`) and proxy secrets match (`BOOTSTRAP_INTERNAL_SERVICE_ID`, `BOOTSTRAP_INTERNAL_SERVICE_SECRET`). On already-bootstrapped environments, rotate internal service via admin API and update proxy secrets together. - Responder shows peer but initiator does not: - Cause: initiator started pairing without `--wait`. - Fix: run `clawdentity pair status --ticket --wait` on initiator. diff --git a/apps/openclaw-skill/skill/references/clawdentity-protocol.md b/apps/openclaw-skill/skill/references/clawdentity-protocol.md index f947839..3a7ffa2 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-protocol.md +++ b/apps/openclaw-skill/skill/references/clawdentity-protocol.md @@ -226,7 +226,7 @@ The connector `deliver` frame includes `fromAgentDid` as a top-level field. Inbo | HTTP Status | Error Code | Meaning | Recovery | |---|---|---|---| | 403 | `PROXY_PAIR_OWNERSHIP_FORBIDDEN` | Initiator ownership check failed | Recreate/refresh the local agent identity | -| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | Ensure bootstrap created `proxy-pairing` internal service on registry; for existing envs create/rotate service credentials and update proxy `REGISTRY_INTERNAL_SERVICE_ID` + `REGISTRY_INTERNAL_SERVICE_SECRET` | +| 503 | `PROXY_PAIR_OWNERSHIP_UNAVAILABLE` | Registry ownership lookup unavailable | Ensure registry deterministic bootstrap credentials are configured (`BOOTSTRAP_INTERNAL_SERVICE_ID`, `BOOTSTRAP_INTERNAL_SERVICE_SECRET`) and proxy credentials match (`BOOTSTRAP_INTERNAL_SERVICE_ID`, `BOOTSTRAP_INTERNAL_SERVICE_SECRET`); for existing envs rotate credentials together | | — | `CLI_PAIR_AGENT_NOT_FOUND` | Agent ait.jwt or secret.key missing/empty | Run `agent create` or `agent auth refresh` | | — | `CLI_PAIR_HUMAN_NAME_MISSING` | Local config is missing `humanName` | Set via `invite redeem` or 
config | | — | `CLI_PAIR_PROXY_URL_REQUIRED` | Proxy URL could not be resolved | Run `invite redeem` or set `CLAWDENTITY_PROXY_URL` | diff --git a/apps/openclaw-skill/skill/references/clawdentity-registry.md b/apps/openclaw-skill/skill/references/clawdentity-registry.md index e084385..8c0b0f5 100644 --- a/apps/openclaw-skill/skill/references/clawdentity-registry.md +++ b/apps/openclaw-skill/skill/references/clawdentity-registry.md @@ -31,6 +31,9 @@ Human DID: did:claw:human:01H... API key name: API key token (shown once): +Internal service ID: +Internal service name: proxy-pairing +Set proxy secrets BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET manually in Cloudflare before proxy deploy. API key saved to local config ``` @@ -52,6 +55,11 @@ API key saved to local config - One-time operation: succeeds only on first call per registry. - Automatically persists `registryUrl` and `apiKey` to local config. - Registry must have `BOOTSTRAP_SECRET` environment variable set. +- Registry must also have deterministic service credentials configured: + - `BOOTSTRAP_INTERNAL_SERVICE_ID` + - `BOOTSTRAP_INTERNAL_SERVICE_SECRET` +- `BOOTSTRAP_INTERNAL_SERVICE_ID` must match proxy `BOOTSTRAP_INTERNAL_SERVICE_ID`. +- `BOOTSTRAP_INTERNAL_SERVICE_SECRET` must match proxy `BOOTSTRAP_INTERNAL_SERVICE_SECRET`. - After bootstrap, admin can create invites with `clawdentity invite create`. 
## API Key Lifecycle diff --git a/apps/proxy/.env.example b/apps/proxy/.env.example index b5595ff..9058aa5 100644 --- a/apps/proxy/.env.example +++ b/apps/proxy/.env.example @@ -7,8 +7,10 @@ ENVIRONMENT=development APP_VERSION=local-dev REGISTRY_URL=https://dev.registry.clawdentity.com -REGISTRY_INTERNAL_SERVICE_ID=replace-with-internal-service-id -REGISTRY_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret +BOOTSTRAP_INTERNAL_SERVICE_ID=replace-with-internal-service-id +BOOTSTRAP_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret +# Keep proxy credentials exactly aligned with registry bootstrap credentials: +# BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET. INJECT_IDENTITY_INTO_MESSAGE=true # Trust backend policy: # - local: in-memory trust fallback is allowed when PROXY_TRUST_STATE is unavailable. diff --git a/apps/proxy/AGENTS.md b/apps/proxy/AGENTS.md index 29bcc32..bdfd427 100644 --- a/apps/proxy/AGENTS.md +++ b/apps/proxy/AGENTS.md @@ -41,7 +41,7 @@ - `LISTEN_PORT` or `PORT` - `OPENCLAW_BASE_URL` - `REGISTRY_URL` or `CLAWDENTITY_REGISTRY_URL` - - `REGISTRY_INTERNAL_SERVICE_ID` + `REGISTRY_INTERNAL_SERVICE_SECRET` (required together for proxy-to-registry identity ownership checks) + - `BOOTSTRAP_INTERNAL_SERVICE_ID` + `BOOTSTRAP_INTERNAL_SERVICE_SECRET` (required together for proxy-to-registry identity ownership checks) - `OPENCLAW_STATE_DIR` - `RELAY_QUEUE_MAX_MESSAGES_PER_AGENT`, `RELAY_QUEUE_TTL_SECONDS`, `RELAY_RETRY_INITIAL_MS`, `RELAY_RETRY_MAX_MS`, `RELAY_RETRY_MAX_ATTEMPTS`, `RELAY_RETRY_JITTER_RATIO` diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index fdf1290..65c856a 100644 --- a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -201,7 +201,9 @@ function parseRegistrySigningKeys(payload: unknown): RegistrySigningKey[] { const parsed = (() => { try { return parseRegistryConfig({ - ENVIRONMENT: "test", + ENVIRONMENT: "local", + 
BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEYS: JSON.stringify(payload.keys), }); } catch (error) { diff --git a/apps/proxy/src/config.test.ts b/apps/proxy/src/config.test.ts index 8980de0..379ae32 100644 --- a/apps/proxy/src/config.test.ts +++ b/apps/proxy/src/config.test.ts @@ -56,8 +56,8 @@ describe("proxy config", () => { const config = parseProxyConfig({ PORT: "4100", CLAWDENTITY_REGISTRY_URL: "https://registry.example.com", - REGISTRY_INTERNAL_SERVICE_ID: "01KHSVCABCDEFGHJKMNOPQRST", - REGISTRY_INTERNAL_SERVICE_SECRET: + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", ENVIRONMENT: "local", CRL_STALE_BEHAVIOR: "fail-closed", @@ -76,7 +76,7 @@ describe("proxy config", () => { expect(config.listenPort).toBe(4100); expect(config.registryUrl).toBe("https://registry.example.com"); - expect(config.registryInternalServiceId).toBe("01KHSVCABCDEFGHJKMNOPQRST"); + expect(config.registryInternalServiceId).toBe("01HF7YAT00W6W7CM7N3W5FDXT4"); expect(config.registryInternalServiceSecret).toBe( "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", ); @@ -172,12 +172,12 @@ describe("proxy config", () => { it("throws when only one internal service credential is provided", () => { expect(() => parseProxyConfig({ - REGISTRY_INTERNAL_SERVICE_ID: "svc-id-only", + BOOTSTRAP_INTERNAL_SERVICE_ID: "svc-id-only", }), ).toThrow(ProxyConfigError); expect(() => parseProxyConfig({ - REGISTRY_INTERNAL_SERVICE_SECRET: "clw_srv_secret-only", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_secret-only", }), ).toThrow(ProxyConfigError); }); @@ -200,8 +200,8 @@ describe("proxy config", () => { { ENVIRONMENT: "local", REGISTRY_URL: "https://registry.example.test", - REGISTRY_INTERNAL_SERVICE_ID: "svc-proxy-registry", - REGISTRY_INTERNAL_SERVICE_SECRET: "secret-proxy-registry", + 
BOOTSTRAP_INTERNAL_SERVICE_ID: "svc-proxy-registry", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "secret-proxy-registry", }, { requireRuntimeKeys: true, diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 25f9d1e..8efef89 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -9,7 +9,6 @@ export const proxyEnvironmentValues = [ "local", "development", "production", - "test", ] as const; export type ProxyEnvironment = (typeof proxyEnvironmentValues)[number]; @@ -87,8 +86,8 @@ const proxyRuntimeEnvSchema = z.object({ .default(DEFAULT_PROXY_LISTEN_PORT), OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), - REGISTRY_INTERNAL_SERVICE_ID: z.string().trim().min(1).optional(), - REGISTRY_INTERNAL_SERVICE_SECRET: z.string().trim().min(1).optional(), + BOOTSTRAP_INTERNAL_SERVICE_ID: z.string().trim().min(1).optional(), + BOOTSTRAP_INTERNAL_SERVICE_SECRET: z.string().trim().min(1).optional(), ENVIRONMENT: z .enum(proxyEnvironmentValues) .default(DEFAULT_PROXY_ENVIRONMENT), @@ -194,8 +193,8 @@ type RuntimeEnvInput = { OPENCLAW_BASE_URL?: unknown; REGISTRY_URL?: unknown; CLAWDENTITY_REGISTRY_URL?: unknown; - REGISTRY_INTERNAL_SERVICE_ID?: unknown; - REGISTRY_INTERNAL_SERVICE_SECRET?: unknown; + BOOTSTRAP_INTERNAL_SERVICE_ID?: unknown; + BOOTSTRAP_INTERNAL_SERVICE_SECRET?: unknown; ENVIRONMENT?: unknown; ALLOW_ALL_VERIFIED?: unknown; CRL_REFRESH_INTERVAL_MS?: unknown; @@ -485,11 +484,11 @@ function normalizeRuntimeEnv(input: unknown): Record { "REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL", ]), - REGISTRY_INTERNAL_SERVICE_ID: firstNonEmpty(env, [ - "REGISTRY_INTERNAL_SERVICE_ID", + BOOTSTRAP_INTERNAL_SERVICE_ID: firstNonEmpty(env, [ + "BOOTSTRAP_INTERNAL_SERVICE_ID", ]), - REGISTRY_INTERNAL_SERVICE_SECRET: firstNonEmpty(env, [ - "REGISTRY_INTERNAL_SERVICE_SECRET", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: firstNonEmpty(env, [ + "BOOTSTRAP_INTERNAL_SERVICE_SECRET", 
]), ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), @@ -569,12 +568,12 @@ const REQUIRED_PROXY_RUNTIME_ENV_KEYS: readonly { aliases: ["REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL"], }, { - key: "REGISTRY_INTERNAL_SERVICE_ID", - aliases: ["REGISTRY_INTERNAL_SERVICE_ID"], + key: "BOOTSTRAP_INTERNAL_SERVICE_ID", + aliases: ["BOOTSTRAP_INTERNAL_SERVICE_ID"], }, { - key: "REGISTRY_INTERNAL_SERVICE_SECRET", - aliases: ["REGISTRY_INTERNAL_SERVICE_SECRET"], + key: "BOOTSTRAP_INTERNAL_SERVICE_SECRET", + aliases: ["BOOTSTRAP_INTERNAL_SERVICE_SECRET"], }, ]; @@ -641,13 +640,13 @@ export function parseProxyConfig( parsedRuntimeEnv.data.RELAY_MAX_IN_FLIGHT_DELIVERIES, relayMaxFrameBytes: parsedRuntimeEnv.data.RELAY_MAX_FRAME_BYTES, }; - if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID !== undefined) { + if (parsedRuntimeEnv.data.BOOTSTRAP_INTERNAL_SERVICE_ID !== undefined) { candidateConfig.registryInternalServiceId = - parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID; + parsedRuntimeEnv.data.BOOTSTRAP_INTERNAL_SERVICE_ID; } - if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET !== undefined) { + if (parsedRuntimeEnv.data.BOOTSTRAP_INTERNAL_SERVICE_SECRET !== undefined) { candidateConfig.registryInternalServiceSecret = - parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET; + parsedRuntimeEnv.data.BOOTSTRAP_INTERNAL_SERVICE_SECRET; } const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); @@ -659,11 +658,11 @@ export function parseProxyConfig( if (hasServiceId !== hasServiceSecret) { throw toConfigValidationError({ fieldErrors: { - REGISTRY_INTERNAL_SERVICE_ID: [ - "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", + BOOTSTRAP_INTERNAL_SERVICE_ID: [ + "BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET must be set together.", ], - REGISTRY_INTERNAL_SERVICE_SECRET: [ - "REGISTRY_INTERNAL_SERVICE_ID and 
REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: [ + "BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET must be set together.", ], }, formErrors: [], diff --git a/apps/proxy/src/pairing-route.test.ts b/apps/proxy/src/pairing-route.test.ts index cfc374b..8a7ecdd 100644 --- a/apps/proxy/src/pairing-route.test.ts +++ b/apps/proxy/src/pairing-route.test.ts @@ -75,7 +75,7 @@ async function createSignedTicketFixture(input: { } function createPairingApp(input?: { - environment?: "local" | "development" | "production" | "test"; + environment?: "local" | "development" | "production"; startFetchImpl?: typeof fetch; confirmFetchImpl?: typeof fetch; nowMs?: () => number; @@ -84,8 +84,8 @@ function createPairingApp(input?: { const app = createProxyApp({ config: parseProxyConfig({ REGISTRY_URL: "https://registry.example.com", - REGISTRY_INTERNAL_SERVICE_ID: "01KHSVCABCDEFGHJKMNOPQRST", - REGISTRY_INTERNAL_SERVICE_SECRET: + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", ENVIRONMENT: input?.environment, }), @@ -169,7 +169,7 @@ describe(`POST ${PAIR_START_PATH}`, () => { | undefined; const ownershipHeaders = new Headers(ownershipCallInit?.headers); expect(ownershipHeaders.get("x-claw-service-id")).toBe( - "01KHSVCABCDEFGHJKMNOPQRST", + "01HF7YAT00W6W7CM7N3W5FDXT4", ); expect(ownershipHeaders.get("x-claw-service-secret")).toBe( "clw_srv_kx2qkQhJ9j9d2l2fF6uH3m6l9Hj7sVfW8Q2r3L4", diff --git a/apps/proxy/src/worker.test.ts b/apps/proxy/src/worker.test.ts index 8ac4089..3acaa0c 100644 --- a/apps/proxy/src/worker.test.ts +++ b/apps/proxy/src/worker.test.ts @@ -30,8 +30,8 @@ function createRequiredBindings( return { ENVIRONMENT: "local", REGISTRY_URL: "https://registry.example.test", - REGISTRY_INTERNAL_SERVICE_ID: "svc-proxy-registry", - REGISTRY_INTERNAL_SERVICE_SECRET: "secret-proxy-registry", + 
BOOTSTRAP_INTERNAL_SERVICE_ID: "svc-proxy-registry", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "secret-proxy-registry", ...overrides, }; } @@ -188,10 +188,10 @@ describe("proxy worker", () => { "REGISTRY_URL is required", ); expect( - payload.error.details.fieldErrors?.REGISTRY_INTERNAL_SERVICE_ID?.[0], - ).toBe("REGISTRY_INTERNAL_SERVICE_ID is required"); + payload.error.details.fieldErrors?.BOOTSTRAP_INTERNAL_SERVICE_ID?.[0], + ).toBe("BOOTSTRAP_INTERNAL_SERVICE_ID is required"); expect( - payload.error.details.fieldErrors?.REGISTRY_INTERNAL_SERVICE_SECRET?.[0], - ).toBe("REGISTRY_INTERNAL_SERVICE_SECRET is required"); + payload.error.details.fieldErrors?.BOOTSTRAP_INTERNAL_SERVICE_SECRET?.[0], + ).toBe("BOOTSTRAP_INTERNAL_SERVICE_SECRET is required"); }); }); diff --git a/apps/proxy/src/worker.ts b/apps/proxy/src/worker.ts index 2c74849..99b8a2e 100644 --- a/apps/proxy/src/worker.ts +++ b/apps/proxy/src/worker.ts @@ -22,8 +22,8 @@ export type ProxyWorkerBindings = { PROXY_TRUST_STATE?: ProxyTrustStateNamespace; REGISTRY_URL?: string; CLAWDENTITY_REGISTRY_URL?: string; - REGISTRY_INTERNAL_SERVICE_ID?: string; - REGISTRY_INTERNAL_SERVICE_SECRET?: string; + BOOTSTRAP_INTERNAL_SERVICE_ID?: string; + BOOTSTRAP_INTERNAL_SERVICE_SECRET?: string; ENVIRONMENT?: string; ALLOW_ALL_VERIFIED?: string; CRL_REFRESH_INTERVAL_MS?: string; @@ -60,8 +60,8 @@ function toCacheKey(env: ProxyWorkerBindings): string { env.PROXY_TRUST_STATE === undefined ? 
"no-trust-do" : "has-trust-do", env.REGISTRY_URL, env.CLAWDENTITY_REGISTRY_URL, - env.REGISTRY_INTERNAL_SERVICE_ID, - env.REGISTRY_INTERNAL_SERVICE_SECRET, + env.BOOTSTRAP_INTERNAL_SERVICE_ID, + env.BOOTSTRAP_INTERNAL_SERVICE_SECRET, env.ENVIRONMENT, env.ALLOW_ALL_VERIFIED, env.CRL_REFRESH_INTERVAL_MS, diff --git a/apps/registry/.env.example b/apps/registry/.env.example index 4c5b32a..bb477f5 100644 --- a/apps/registry/.env.example +++ b/apps/registry/.env.example @@ -2,6 +2,8 @@ # Generated values are written by: scripts/env/sync-worktree-env.sh # For cloud deploys, keep secrets in Wrangler: # wrangler secret put BOOTSTRAP_SECRET --env +# wrangler secret put BOOTSTRAP_INTERNAL_SERVICE_ID --env +# wrangler secret put BOOTSTRAP_INTERNAL_SERVICE_SECRET --env # wrangler secret put REGISTRY_SIGNING_KEY --env # wrangler secret put REGISTRY_SIGNING_KEYS --env @@ -14,5 +16,7 @@ REGISTRY_ISSUER_URL=https://dev.registry.clawdentity.com # Secrets (required at startup for non-test environments) BOOTSTRAP_SECRET=replace-with-random-secret +BOOTSTRAP_INTERNAL_SERVICE_ID=replace-with-internal-service-id +BOOTSTRAP_INTERNAL_SERVICE_SECRET=replace-with-internal-service-secret REGISTRY_SIGNING_KEY=replace-with-base64url-ed25519-private-key REGISTRY_SIGNING_KEYS=[{"kid":"reg-key-1","alg":"EdDSA","crv":"Ed25519","x":"replace-with-base64url-ed25519-public-key","status":"active"}] diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index 45369f8..cb251e0 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -11,6 +11,7 @@ - `/health` must return HTTP 200 with `{ status, version, environment }` on valid config. - Invalid runtime config must fail through the shared error handler and return `CONFIG_VALIDATION_FAILED`. - Runtime startup config must fail fast for non-test environments when required keys are missing (`PROXY_URL`, `REGISTRY_ISSUER_URL`, `EVENT_BUS_BACKEND`, `BOOTSTRAP_SECRET`, `REGISTRY_SIGNING_KEY`, `REGISTRY_SIGNING_KEYS`). 
+- `BOOTSTRAP_INTERNAL_SERVICE_ID` and `BOOTSTRAP_INTERNAL_SERVICE_SECRET` are required bootstrap credentials in every environment and must be set together. ## Admin Bootstrap Contract - `POST /v1/admin/bootstrap` is a one-time bootstrap endpoint gated by `BOOTSTRAP_SECRET`. @@ -18,8 +19,9 @@ - Require `x-bootstrap-secret` header and compare with constant-time semantics; invalid/missing secret must return `401 ADMIN_BOOTSTRAP_UNAUTHORIZED`. - If `BOOTSTRAP_SECRET` is not configured, return `503 ADMIN_BOOTSTRAP_DISABLED`. - If any admin human already exists, return `409 ADMIN_BOOTSTRAP_ALREADY_COMPLETED`. -- Success response must include `{ human, apiKey, internalService }`; return plaintext PAT and internal service secret only in bootstrap response. +- Success response must include `{ human, apiKey, internalService }` with internal service metadata only (`id`, `name`); never return plaintext internal service secret from bootstrap. - Bootstrap must create a default internal service named `proxy-pairing` with scope `identity.read` in the same mutation unit as admin + PAT creation. +- Bootstrap must seed `proxy-pairing` service credentials from `BOOTSTRAP_INTERNAL_SERVICE_ID` and `BOOTSTRAP_INTERNAL_SERVICE_SECRET` so fresh-DB recovery is deterministic. - Persist admin bootstrap atomically where supported (transaction). When falling back because transactions are unavailable, run manual compensation rollback so no partial bootstrap state survives. - Fallback path must be compensation-safe: if API key/internal-service insert fails after admin insert, delete inserted `internal_services` + `api_keys` rows before deleting the admin human so retry remains possible under FK constraints. 
diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index 821adcb..0868f32 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -36,7 +36,7 @@ const REGISTRY_ISSUER_BY_ENVIRONMENT: Record< > = { development: "https://dev.registry.clawdentity.com", production: "https://registry.clawdentity.com", - test: "https://dev.registry.clawdentity.com", + local: "https://dev.registry.clawdentity.com", }; type AgentRegistrationBody = { diff --git a/apps/registry/src/server.test.ts b/apps/registry/src/server.test.ts index 3978110..a90d908 100644 --- a/apps/registry/src/server.test.ts +++ b/apps/registry/src/server.test.ts @@ -2577,14 +2577,19 @@ describe("GET /health", () => { const res = await app.request( "/health", {}, - { DB: {}, ENVIRONMENT: "test" }, + { + DB: {}, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(200); const body = await res.json(); expect(body).toEqual({ status: "ok", version: "0.0.0", - environment: "test", + environment: "local", }); expect(res.headers.get(REQUEST_ID_HEADER)).toBeTruthy(); }); @@ -2593,7 +2598,13 @@ describe("GET /health", () => { const res = await createRegistryApp().request( "/health", {}, - { DB: {}, ENVIRONMENT: "test", APP_VERSION: "sha-1234567890" }, + { + DB: {}, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + APP_VERSION: "sha-1234567890", + }, ); expect(res.status).toBe(200); @@ -2601,7 +2612,7 @@ describe("GET /health", () => { expect(body).toEqual({ status: "ok", version: "sha-1234567890", - environment: "test", + environment: "local", }); }); @@ -2629,6 +2640,8 @@ describe(`GET ${REGISTRY_METADATA_PATH}`, () => { { DB: {} as D1Database, ENVIRONMENT: "development", + BOOTSTRAP_INTERNAL_SERVICE_ID: 
"01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", APP_VERSION: "sha-meta-123", PROXY_URL: "https://dev.proxy.clawdentity.com", REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com", @@ -2680,7 +2693,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -2708,7 +2723,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2732,7 +2749,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2756,7 +2775,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2782,7 +2803,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2807,7 +2830,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", 
BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2836,7 +2861,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2859,7 +2886,6 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { internalService: { id: string; name: string; - secret: string; }; }; @@ -2870,8 +2896,8 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { expect(body.human.status).toBe("active"); expect(body.apiKey.name).toBe("prod-admin-key"); expect(body.apiKey.token.startsWith("clw_pat_")).toBe(true); + expect(body.internalService.id).toBe("01HF7YAT00W6W7CM7N3W5FDXT4"); expect(body.internalService.name).toBe("proxy-pairing"); - expect(body.internalService.secret.startsWith("clw_srv_")).toBe(true); expect(humanInserts).toHaveLength(1); expect(apiKeyInserts).toHaveLength(1); @@ -2887,10 +2913,10 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { JSON.stringify(["identity.read"]), ); expect(internalServiceInserts[0]?.secret_prefix).toBe( - deriveInternalServiceSecretPrefix(body.internalService.secret), + deriveInternalServiceSecretPrefix("clw_srv_bootstrapsecret"), ); expect(internalServiceInserts[0]?.secret_hash).toBe( - await hashInternalServiceSecret(body.internalService.secret), + await hashInternalServiceSecret("clw_srv_bootstrapsecret"), ); }); @@ -2913,7 +2939,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -2942,7 +2970,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + 
BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -2992,7 +3022,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3024,7 +3056,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3047,7 +3081,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3077,7 +3113,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3100,7 +3138,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3129,7 +3169,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3152,7 +3194,9 @@ describe(`POST ${ADMIN_BOOTSTRAP_PATH}`, () => { }, { DB: 
database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", BOOTSTRAP_SECRET: "bootstrap-secret", }, ); @@ -3169,7 +3213,9 @@ describe("GET /.well-known/claw-keys.json", () => { {}, { DB: {} as D1Database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEYS: JSON.stringify([ { kid: "reg-key-1", @@ -3221,7 +3267,9 @@ describe("GET /.well-known/claw-keys.json", () => { {}, { DB: {} as D1Database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEYS: JSON.stringify([ { kid: "reg-key-1", @@ -3276,7 +3324,9 @@ describe("GET /.well-known/claw-keys.json", () => { {}, { DB: {} as D1Database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEYS: JSON.stringify([ { kid: "reg-key-1", @@ -3382,7 +3432,9 @@ describe("GET /v1/crl", () => { {}, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -3400,7 +3452,9 @@ describe("GET /v1/crl", () => { {}, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -3455,7 +3509,12 @@ describe("GET /v1/crl", () => { const response = 
await createRegistryApp().request( "/v1/crl", {}, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(404); @@ -3486,7 +3545,12 @@ describe("GET /v1/crl", () => { "CF-Connecting-IP": "203.0.113.77", }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(404); @@ -3499,7 +3563,12 @@ describe("GET /v1/crl", () => { "CF-Connecting-IP": "203.0.113.77", }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(rateLimited.status).toBe(429); @@ -3538,7 +3607,12 @@ describe("GET /v1/crl", () => { const response = await createRegistryApp().request( "/v1/crl", {}, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(500); @@ -3579,7 +3653,12 @@ describe("GET /v1/resolve/:id", () => { const res = await createRegistryApp().request( `/v1/resolve/${agentId}`, {}, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(200); @@ -3624,7 +3703,12 @@ describe("GET /v1/resolve/:id", () => { const res = await createRegistryApp().request( `/v1/resolve/${agentId}`, {}, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: 
"local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(200); @@ -3636,7 +3720,12 @@ describe("GET /v1/resolve/:id", () => { const res = await createRegistryApp().request( "/v1/resolve/not-a-ulid", {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(400); @@ -3660,7 +3749,12 @@ describe("GET /v1/resolve/:id", () => { const res = await createRegistryApp().request( `/v1/resolve/${missingAgentId}`, {}, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(404); @@ -3695,7 +3789,12 @@ describe("GET /v1/resolve/:id", () => { "CF-Connecting-IP": "203.0.113.10", }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(200); @@ -3708,7 +3807,12 @@ describe("GET /v1/resolve/:id", () => { "CF-Connecting-IP": "203.0.113.10", }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(rateLimited.status).toBe(429); @@ -3722,7 +3826,12 @@ describe("GET /v1/me", () => { const res = await createRegistryApp().request( "/v1/me", {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + 
BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -3741,7 +3850,12 @@ describe("GET /v1/me", () => { { headers: { Authorization: "Bearer clw_pat_invalid-token-value" }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -3757,7 +3871,12 @@ describe("GET /v1/me", () => { { headers: { Authorization: "Bearer clw_pat_" }, }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -3776,7 +3895,12 @@ describe("GET /v1/me", () => { { headers: { Authorization: `Bearer ${validToken}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(200); @@ -3815,7 +3939,12 @@ describe(`POST ${INVITES_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(401); @@ -3842,7 +3971,12 @@ describe(`POST ${INVITES_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(403); @@ -3866,7 +4000,12 @@ describe(`POST ${INVITES_PATH}`, () => { 
expiresAt: "not-an-iso-date", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -3899,7 +4038,12 @@ describe(`POST ${INVITES_PATH}`, () => { expiresAt, }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(201); @@ -3936,7 +4080,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -3964,7 +4113,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { code: "clw_inv_missing", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -3999,7 +4153,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { code: "clw_inv_expired", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -4034,7 +4193,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { code: "clw_inv_redeemed", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + 
BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(409); @@ -4074,7 +4238,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { apiKeyName: "primary-invite-key", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(redeemResponse.status).toBe(201); @@ -4112,7 +4281,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { Authorization: `Bearer ${redeemBody.apiKey.token}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(meResponse.status).toBe(200); @@ -4160,7 +4334,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { displayName: "Fallback Invitee", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(firstResponse.status).toBe(500); @@ -4179,7 +4358,12 @@ describe(`POST ${INVITES_REDEEM_PATH}`, () => { displayName: "Fallback Invitee", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(secondResponse.status).toBe(201); @@ -4199,7 +4383,12 @@ describe(`POST ${ME_API_KEYS_PATH}`, () => { }, body: JSON.stringify({ name: "workstation" }), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(401); 
@@ -4223,7 +4412,12 @@ describe(`POST ${ME_API_KEYS_PATH}`, () => { name: "workstation", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(201); @@ -4262,7 +4456,12 @@ describe(`POST ${ME_API_KEYS_PATH}`, () => { Authorization: `Bearer ${token}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(201); @@ -4333,7 +4532,12 @@ describe(`GET ${ME_API_KEYS_PATH}`, () => { Authorization: `Bearer ${authToken}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(200); @@ -4386,7 +4590,12 @@ describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { Authorization: `Bearer ${token}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -4406,7 +4615,12 @@ describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { Authorization: `Bearer ${token}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(404); @@ -4456,7 +4670,12 @@ describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { Authorization: `Bearer ${authToken}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + 
ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(revokeResponse.status).toBe(204); @@ -4467,7 +4686,12 @@ describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { Authorization: `Bearer ${rotateToken}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(revokedAuth.status).toBe(401); const revokedBody = (await revokedAuth.json()) as { @@ -4482,7 +4706,12 @@ describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { Authorization: `Bearer ${authToken}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(activeAuth.status).toBe(200); }); @@ -4528,7 +4757,12 @@ describe(`DELETE ${ME_API_KEYS_PATH}/:id`, () => { Authorization: `Bearer ${authToken}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(204); @@ -4540,7 +4774,12 @@ describe("GET /v1/agents", () => { const res = await createRegistryApp().request( "/v1/agents", {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -4593,7 +4832,12 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: 
"01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(200); @@ -4668,7 +4912,12 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(statusRes.status).toBe(200); const statusBody = (await statusRes.json()) as { @@ -4695,7 +4944,12 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(frameworkRes.status).toBe(200); const frameworkBody = (await frameworkRes.json()) as { @@ -4761,7 +5015,12 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(firstPage.status).toBe(200); @@ -4794,7 +5053,12 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(secondPage.status).toBe(200); @@ -4832,7 +5096,12 @@ describe("GET /v1/agents", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: 
"clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(400); @@ -4862,6 +5131,8 @@ describe("GET /v1/agents", () => { { DB: database, ENVIRONMENT: "production", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", PROXY_URL: "https://proxy.clawdentity.com", REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", EVENT_BUS_BACKEND: "memory", @@ -4899,7 +5170,12 @@ describe("GET /v1/agents/:id/ownership", () => { const res = await createRegistryApp().request( `/v1/agents/${agentId}/ownership`, {}, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -4932,7 +5208,12 @@ describe("GET /v1/agents/:id/ownership", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(200); @@ -4964,7 +5245,12 @@ describe("GET /v1/agents/:id/ownership", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(foreignRes.status).toBe(200); expect((await foreignRes.json()) as { ownsAgent: boolean }).toEqual({ @@ -4976,7 +5262,12 @@ describe("GET /v1/agents/:id/ownership", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: 
"clw_srv_bootstrapsecret", + }, ); expect(missingRes.status).toBe(200); expect((await missingRes.json()) as { ownsAgent: boolean }).toEqual({ @@ -4993,7 +5284,12 @@ describe("GET /v1/agents/:id/ownership", () => { { headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(400); @@ -5023,7 +5319,12 @@ describe("internal service-auth routes", () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -5041,7 +5342,12 @@ describe("internal service-auth routes", () => { { method: "GET", }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); }); @@ -5055,7 +5361,12 @@ describe("DELETE /v1/agents/:id", () => { { method: "DELETE", }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -5078,7 +5389,12 @@ describe("DELETE /v1/agents/:id", () => { method: "DELETE", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(404); @@ -5115,7 
+5431,12 @@ describe("DELETE /v1/agents/:id", () => { method: "DELETE", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(404); @@ -5153,7 +5474,12 @@ describe("DELETE /v1/agents/:id", () => { method: "DELETE", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(204); @@ -5198,7 +5524,12 @@ describe("DELETE /v1/agents/:id", () => { method: "DELETE", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); const second = await createRegistryApp().request( `/v1/agents/${agentId}`, @@ -5206,7 +5537,12 @@ describe("DELETE /v1/agents/:id", () => { method: "DELETE", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(first.status).toBe(204); @@ -5239,7 +5575,12 @@ describe("DELETE /v1/agents/:id", () => { method: "DELETE", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(409); @@ -5266,7 +5607,12 @@ describe("POST 
/v1/agents/:id/reissue", () => { { method: "POST", }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -5289,7 +5635,12 @@ describe("POST /v1/agents/:id/reissue", () => { method: "POST", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(404); @@ -5327,7 +5678,12 @@ describe("POST /v1/agents/:id/reissue", () => { method: "POST", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(404); @@ -5365,7 +5721,12 @@ describe("POST /v1/agents/:id/reissue", () => { method: "POST", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(409); @@ -5409,7 +5770,12 @@ describe("POST /v1/agents/:id/reissue", () => { method: "POST", headers: { Authorization: `Bearer ${token}` }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(409); @@ -5468,7 +5834,9 @@ describe("POST /v1/agents/:id/reissue", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: 
"local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -5524,7 +5892,9 @@ describe("POST /v1/agents/:id/reissue", () => { {}, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -5610,7 +5980,9 @@ describe("POST /v1/agents/:id/reissue", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -5681,7 +6053,9 @@ describe("POST /v1/agents/:id/reissue", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -5732,7 +6106,12 @@ describe(`POST ${AGENT_REGISTRATION_CHALLENGE_PATH}`, () => { publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", }), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -5758,7 +6137,12 @@ describe(`POST ${AGENT_REGISTRATION_CHALLENGE_PATH}`, () => { publicKey: "not-base64url", }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: 
"01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(400); @@ -5793,7 +6177,12 @@ describe(`POST ${AGENT_REGISTRATION_CHALLENGE_PATH}`, () => { publicKey: encodeBase64url(agentKeypair.publicKey), }), }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(201); @@ -5836,7 +6225,12 @@ describe("POST /v1/agents", () => { publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", }), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(res.status).toBe(401); @@ -5868,7 +6262,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -5918,7 +6314,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -5952,6 +6350,8 @@ describe("POST /v1/agents", () => { { DB: database, ENVIRONMENT: "production", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", PROXY_URL: "https://proxy.clawdentity.com", REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", EVENT_BUS_BACKEND: "memory", @@ -6003,6 +6403,8 @@ describe("POST /v1/agents", () => { { DB: database, ENVIRONMENT: "production", + 
BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", PROXY_URL: "https://proxy.clawdentity.com", REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", EVENT_BUS_BACKEND: "memory", @@ -6059,7 +6461,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6127,7 +6531,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6195,7 +6601,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6240,7 +6648,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6285,7 +6695,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: 
JSON.stringify([ { @@ -6391,7 +6803,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -6432,7 +6846,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -6456,7 +6872,9 @@ describe("POST /v1/agents", () => { {}, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: signingKeyset, }, @@ -6517,7 +6935,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); expect(challengeResponse.status).toBe(201); @@ -6552,7 +6972,9 @@ describe("POST /v1/agents", () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6692,7 +7114,9 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", 
REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6789,7 +7213,9 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6871,7 +7297,9 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", REGISTRY_SIGNING_KEY: encodeBase64url(fixture.signer.secretKey), REGISTRY_SIGNING_KEYS: JSON.stringify([ { @@ -6916,7 +7344,12 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -6932,7 +7365,12 @@ describe(`POST ${AGENT_AUTH_REFRESH_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(rateLimited.status).toBe(429); @@ -7004,7 +7442,9 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -7028,7 +7468,9 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { }, { DB: {}, - ENVIRONMENT: "test", 
+ ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -7099,7 +7541,9 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -7175,7 +7619,9 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { }, { DB: database, - ENVIRONMENT: "test", + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", }, ); @@ -7206,7 +7652,12 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(response.status).toBe(400); @@ -7222,7 +7673,12 @@ describe(`POST ${AGENT_AUTH_VALIDATE_PATH}`, () => { }, body: JSON.stringify({}), }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, + { + DB: {} as D1Database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(rateLimited.status).toBe(429); @@ -7285,7 +7741,12 @@ describe("DELETE /v1/agents/:id/auth/revoke", () => { Authorization: `Bearer ${token}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(firstResponse.status).toBe(204); expect(agentAuthSessionRows[0]?.status).toBe("revoked"); @@ -7306,7 +7767,12 @@ describe("DELETE /v1/agents/:id/auth/revoke", () => { Authorization: `Bearer 
${token}`, }, }, - { DB: database, ENVIRONMENT: "test" }, + { + DB: database, + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }, ); expect(secondResponse.status).toBe(204); }); diff --git a/apps/registry/src/server.ts b/apps/registry/src/server.ts index bb25326..8c0d06f 100644 --- a/apps/registry/src/server.ts +++ b/apps/registry/src/server.ts @@ -94,6 +94,7 @@ import { deriveInternalServiceSecretPrefix, generateInternalServiceSecret, hashInternalServiceSecret, + INTERNAL_SERVICE_SECRET_MARKER, } from "./auth/service-auth.js"; import { createDb } from "./db/client.js"; import { @@ -138,6 +139,8 @@ type Bindings = { EVENT_BUS_BACKEND?: "memory" | "queue"; EVENT_BUS_QUEUE?: QueuePublisher; BOOTSTRAP_SECRET?: string; + BOOTSTRAP_INTERNAL_SERVICE_ID?: string; + BOOTSTRAP_INTERNAL_SERVICE_SECRET?: string; REGISTRY_SIGNING_KEY?: string; REGISTRY_SIGNING_KEYS?: string; }; @@ -155,7 +158,7 @@ const PROXY_URL_BY_ENVIRONMENT: Record = { development: "https://dev.proxy.clawdentity.com", production: "https://proxy.clawdentity.com", - test: "https://dev.proxy.clawdentity.com", + local: "https://dev.proxy.clawdentity.com", }; // Deterministic bootstrap identity guarantees one-time admin creation under races. const BOOTSTRAP_ADMIN_HUMAN_ID = "00000000000000000000000000"; @@ -924,6 +927,67 @@ function requireBootstrapSecret(bootstrapSecret: string | undefined): string { }); } +function requireBootstrapInternalServiceCredentials(config: RegistryConfig): { + id: string; + secret: string; +} { + const serviceId = + typeof config.BOOTSTRAP_INTERNAL_SERVICE_ID === "string" + ? config.BOOTSTRAP_INTERNAL_SERVICE_ID.trim() + : ""; + const serviceSecret = + typeof config.BOOTSTRAP_INTERNAL_SERVICE_SECRET === "string" + ? 
config.BOOTSTRAP_INTERNAL_SERVICE_SECRET.trim() + : ""; + + const fieldErrors: Record = {}; + + if (serviceId.length === 0) { + fieldErrors.BOOTSTRAP_INTERNAL_SERVICE_ID = [ + "BOOTSTRAP_INTERNAL_SERVICE_ID is required", + ]; + } else { + try { + parseUlid(serviceId); + } catch { + fieldErrors.BOOTSTRAP_INTERNAL_SERVICE_ID = [ + "BOOTSTRAP_INTERNAL_SERVICE_ID must be a valid ULID", + ]; + } + } + + if (serviceSecret.length === 0) { + fieldErrors.BOOTSTRAP_INTERNAL_SERVICE_SECRET = [ + "BOOTSTRAP_INTERNAL_SERVICE_SECRET is required", + ]; + } else if ( + !serviceSecret.startsWith(INTERNAL_SERVICE_SECRET_MARKER) || + serviceSecret.length <= INTERNAL_SERVICE_SECRET_MARKER.length + ) { + fieldErrors.BOOTSTRAP_INTERNAL_SERVICE_SECRET = [ + `BOOTSTRAP_INTERNAL_SERVICE_SECRET must start with ${INTERNAL_SERVICE_SECRET_MARKER}`, + ]; + } + + if (Object.keys(fieldErrors).length > 0) { + throw new AppError({ + code: "CONFIG_VALIDATION_FAILED", + message: "Registry configuration is invalid", + status: 500, + expose: true, + details: { + fieldErrors, + formErrors: [], + }, + }); + } + + return { + id: serviceId, + secret: serviceSecret, + }; +} + function parseBootstrapSecretHeader(headerValue: string | undefined): string { if (typeof headerValue !== "string" || headerValue.trim().length === 0) { throw new AppError({ @@ -1077,6 +1141,8 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const expectedBootstrapSecret = requireBootstrapSecret( config.BOOTSTRAP_SECRET, ); + const bootstrapInternalService = + requireBootstrapInternalServiceCredentials(config); const providedBootstrapSecret = parseBootstrapSecretHeader( c.req.header("x-bootstrap-secret"), ); @@ -1118,14 +1184,13 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { const apiKeyHash = await hashApiKeyToken(apiKeyToken); const apiKeyPrefix = deriveApiKeyLookupPrefix(apiKeyToken); const apiKeyId = generateUlid(nowUtcMs() + 1); - const internalServiceSecret = 
generateInternalServiceSecret(); const internalServiceSecretHash = await hashInternalServiceSecret( - internalServiceSecret, + bootstrapInternalService.secret, ); const internalServiceSecretPrefix = deriveInternalServiceSecretPrefix( - internalServiceSecret, + bootstrapInternalService.secret, ); - const internalServiceId = generateUlid(nowUtcMs() + 2); + const internalServiceId = bootstrapInternalService.id; const createdAt = nowIso(); const rollbackBootstrapMutation = async ( @@ -1241,7 +1306,6 @@ function createRegistryApp(options: CreateRegistryAppOptions = {}) { internalService: { id: internalServiceId, name: BOOTSTRAP_INTERNAL_SERVICE_NAME, - secret: internalServiceSecret, }, }, 201, diff --git a/packages/sdk/src/config.test.ts b/packages/sdk/src/config.test.ts index b62bc67..85ac640 100644 --- a/packages/sdk/src/config.test.ts +++ b/packages/sdk/src/config.test.ts @@ -2,10 +2,21 @@ import { describe, expect, it } from "vitest"; import { parseRegistryConfig } from "./config.js"; import { AppError } from "./exceptions.js"; +const bootstrapInternalServiceConfig = { + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", +} as const; + describe("config helpers", () => { it("parses a valid registry config", () => { - expect(parseRegistryConfig({ ENVIRONMENT: "development" })).toEqual({ + expect( + parseRegistryConfig({ + ENVIRONMENT: "development", + ...bootstrapInternalServiceConfig, + }), + ).toEqual({ ENVIRONMENT: "development", + ...bootstrapInternalServiceConfig, }); }); @@ -14,16 +25,19 @@ describe("config helpers", () => { parseRegistryConfig({ ENVIRONMENT: "development", EVENT_BUS_BACKEND: "queue", + ...bootstrapInternalServiceConfig, }), ).toEqual({ ENVIRONMENT: "development", EVENT_BUS_BACKEND: "queue", + ...bootstrapInternalServiceConfig, }); }); it("parses REGISTRY_SIGNING_KEYS into validated key entries", () => { const config = parseRegistryConfig({ ENVIRONMENT: "development", + 
...bootstrapInternalServiceConfig, REGISTRY_SIGNING_KEYS: JSON.stringify([ { kid: "reg-key-1", @@ -51,10 +65,12 @@ describe("config helpers", () => { parseRegistryConfig({ ENVIRONMENT: "development", APP_VERSION: "sha-abcdef123456", + ...bootstrapInternalServiceConfig, }), ).toEqual({ ENVIRONMENT: "development", APP_VERSION: "sha-abcdef123456", + ...bootstrapInternalServiceConfig, }); }); @@ -63,10 +79,12 @@ describe("config helpers", () => { parseRegistryConfig({ ENVIRONMENT: "development", PROXY_URL: "https://dev.proxy.clawdentity.com", + ...bootstrapInternalServiceConfig, }), ).toEqual({ ENVIRONMENT: "development", PROXY_URL: "https://dev.proxy.clawdentity.com", + ...bootstrapInternalServiceConfig, }); }); @@ -75,10 +93,12 @@ describe("config helpers", () => { parseRegistryConfig({ ENVIRONMENT: "development", REGISTRY_ISSUER_URL: "http://host.docker.internal:8788", + ...bootstrapInternalServiceConfig, }), ).toEqual({ ENVIRONMENT: "development", REGISTRY_ISSUER_URL: "http://host.docker.internal:8788", + ...bootstrapInternalServiceConfig, }); }); @@ -254,6 +274,7 @@ describe("config helpers", () => { REGISTRY_ISSUER_URL: "https://dev.registry.clawdentity.com", EVENT_BUS_BACKEND: "memory", BOOTSTRAP_SECRET: "bootstrap-secret", + ...bootstrapInternalServiceConfig, REGISTRY_SIGNING_KEY: "VGVzdFNpZ25pbmdLZXlGb3JEZXZlbG9wbWVudF9PcGVyYXRpb25zMTIz", REGISTRY_SIGNING_KEYS: JSON.stringify([ @@ -273,14 +294,31 @@ describe("config helpers", () => { expect(config.PROXY_URL).toBe("https://dev.proxy.clawdentity.com"); }); - it("skips requireRuntimeKeys validation in test environment", () => { + it("skips non-bootstrap runtime key validation in local environment", () => { const config = parseRegistryConfig( { - ENVIRONMENT: "test", + ENVIRONMENT: "local", + ...bootstrapInternalServiceConfig, }, { requireRuntimeKeys: true }, ); - expect(config.ENVIRONMENT).toBe("test"); + expect(config.ENVIRONMENT).toBe("local"); + }); + + it("throws when only one bootstrap internal service key 
is provided", () => { + expect(() => + parseRegistryConfig({ + ENVIRONMENT: "development", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + }), + ).toThrow(AppError); + + expect(() => + parseRegistryConfig({ + ENVIRONMENT: "development", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }), + ).toThrow(AppError); }); }); diff --git a/packages/sdk/src/config.ts b/packages/sdk/src/config.ts index fc8caa6..6bcae2b 100644 --- a/packages/sdk/src/config.ts +++ b/packages/sdk/src/config.ts @@ -94,6 +94,8 @@ export const registryConfigSchema = z.object({ REGISTRY_ISSUER_URL: z.string().url().optional(), EVENT_BUS_BACKEND: registryEventBusBackendSchema.optional(), BOOTSTRAP_SECRET: z.string().min(1).optional(), + BOOTSTRAP_INTERNAL_SERVICE_ID: z.string().min(1), + BOOTSTRAP_INTERNAL_SERVICE_SECRET: z.string().min(1), REGISTRY_SIGNING_KEY: z.string().min(1).optional(), REGISTRY_SIGNING_KEYS: registrySigningKeysEnvSchema.optional(), }); @@ -109,6 +111,8 @@ const REQUIRED_REGISTRY_RUNTIME_KEYS = [ "REGISTRY_ISSUER_URL", "EVENT_BUS_BACKEND", "BOOTSTRAP_SECRET", + "BOOTSTRAP_INTERNAL_SERVICE_ID", + "BOOTSTRAP_INTERNAL_SERVICE_SECRET", "REGISTRY_SIGNING_KEY", "REGISTRY_SIGNING_KEYS", ] as const; @@ -127,7 +131,7 @@ function throwRegistryConfigValidationError(details: { } function assertRequiredRegistryRuntimeKeys(input: RegistryConfig): void { - if (input.ENVIRONMENT === "test") { + if (input.ENVIRONMENT === "local") { return; } @@ -157,12 +161,37 @@ function assertRequiredRegistryRuntimeKeys(input: RegistryConfig): void { } } +function assertBootstrapInternalServicePair(input: RegistryConfig): void { + const hasServiceId = + typeof input.BOOTSTRAP_INTERNAL_SERVICE_ID === "string" && + input.BOOTSTRAP_INTERNAL_SERVICE_ID.trim().length > 0; + const hasServiceSecret = + typeof input.BOOTSTRAP_INTERNAL_SERVICE_SECRET === "string" && + input.BOOTSTRAP_INTERNAL_SERVICE_SECRET.trim().length > 0; + if (hasServiceId === hasServiceSecret) { + return; + } + 
+ throwRegistryConfigValidationError({ + fieldErrors: { + BOOTSTRAP_INTERNAL_SERVICE_ID: [ + "BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET must be set together.", + ], + BOOTSTRAP_INTERNAL_SERVICE_SECRET: [ + "BOOTSTRAP_INTERNAL_SERVICE_ID and BOOTSTRAP_INTERNAL_SERVICE_SECRET must be set together.", + ], + }, + formErrors: [], + }); +} + export function parseRegistryConfig( env: unknown, options: ParseRegistryConfigOptions = {}, ): RegistryConfig { const parsed = registryConfigSchema.safeParse(env); if (parsed.success) { + assertBootstrapInternalServicePair(parsed.data); if (options.requireRuntimeKeys === true) { assertRequiredRegistryRuntimeKeys(parsed.data); } diff --git a/packages/sdk/src/index.test.ts b/packages/sdk/src/index.test.ts index 29e534a..6202c60 100644 --- a/packages/sdk/src/index.test.ts +++ b/packages/sdk/src/index.test.ts @@ -46,10 +46,14 @@ describe("sdk", () => { "2026-01-01T00:00:10.000Z", ); expect(resolveRequestId("valid-id-123")).toBe("valid-id-123"); - expect(parseRegistryConfig({ ENVIRONMENT: "test" }).ENVIRONMENT).toBe( - "test", - ); - expect(shouldExposeVerboseErrors("test")).toBe(true); + expect( + parseRegistryConfig({ + ENVIRONMENT: "local", + BOOTSTRAP_INTERNAL_SERVICE_ID: "01HF7YAT00W6W7CM7N3W5FDXT4", + BOOTSTRAP_INTERNAL_SERVICE_SECRET: "clw_srv_bootstrapsecret", + }).ENVIRONMENT, + ).toBe("local"); + expect(shouldExposeVerboseErrors("local")).toBe(true); expect(REQUEST_ID_HEADER).toBe("x-request-id"); expect(AppError).toBeTypeOf("function"); const eventBus = createInMemoryEventBus(); diff --git a/packages/sdk/src/runtime-environment.test.ts b/packages/sdk/src/runtime-environment.test.ts index 29b9648..ab2eff5 100644 --- a/packages/sdk/src/runtime-environment.test.ts +++ b/packages/sdk/src/runtime-environment.test.ts @@ -7,15 +7,15 @@ import { describe("runtime environment helpers", () => { it("declares the supported runtime environments", () => { expect(runtimeEnvironmentValues).toEqual([ + "local", 
"development", "production", - "test", ]); }); it("exposes verbose errors for non-production environments", () => { expect(shouldExposeVerboseErrors("development")).toBe(true); - expect(shouldExposeVerboseErrors("test")).toBe(true); + expect(shouldExposeVerboseErrors("local")).toBe(true); }); it("hides verbose errors in production", () => { diff --git a/packages/sdk/src/runtime-environment.ts b/packages/sdk/src/runtime-environment.ts index 4952a34..052b89f 100644 --- a/packages/sdk/src/runtime-environment.ts +++ b/packages/sdk/src/runtime-environment.ts @@ -1,7 +1,7 @@ export const runtimeEnvironmentValues = [ + "local", "development", "production", - "test", ] as const; export type RuntimeEnvironment = (typeof runtimeEnvironmentValues)[number]; diff --git a/scripts/env/sync-worktree-env.sh b/scripts/env/sync-worktree-env.sh index fb38813..b3fc57d 100755 --- a/scripts/env/sync-worktree-env.sh +++ b/scripts/env/sync-worktree-env.sh @@ -53,10 +53,10 @@ required_keys=( "CLAWDENTITY_REGISTRY_URL" "CLAWDENTITY_PROXY_URL" "BOOTSTRAP_SECRET" + "BOOTSTRAP_INTERNAL_SERVICE_ID" + "BOOTSTRAP_INTERNAL_SERVICE_SECRET" "REGISTRY_SIGNING_KEY" "REGISTRY_SIGNING_KEYS" - "REGISTRY_INTERNAL_SERVICE_ID" - "REGISTRY_INTERNAL_SERVICE_SECRET" ) missing_keys=() @@ -85,6 +85,8 @@ write_header "$ROOT_ENV_PATH" root_keys=( "CLAWDENTITY_REGISTRY_URL" "CLAWDENTITY_PROXY_URL" + "BOOTSTRAP_INTERNAL_SERVICE_ID" + "BOOTSTRAP_INTERNAL_SERVICE_SECRET" "CLAWDENTITY_API_KEY" "CLAWDENTITY_HUMAN_NAME" "CLAWDENTITY_PROXY_WS_URL" @@ -112,6 +114,8 @@ append_if_set "$REGISTRY_ENV_PATH" "EVENT_BUS_BACKEND" "$EVENT_BUS_BACKEND" append_if_set "$REGISTRY_ENV_PATH" "PROXY_URL" "$CLAWDENTITY_PROXY_URL" append_if_set "$REGISTRY_ENV_PATH" "REGISTRY_ISSUER_URL" "$CLAWDENTITY_REGISTRY_URL" append_if_set "$REGISTRY_ENV_PATH" "BOOTSTRAP_SECRET" "$BOOTSTRAP_SECRET" +append_if_set "$REGISTRY_ENV_PATH" "BOOTSTRAP_INTERNAL_SERVICE_ID" "$BOOTSTRAP_INTERNAL_SERVICE_ID" +append_if_set "$REGISTRY_ENV_PATH" 
"BOOTSTRAP_INTERNAL_SERVICE_SECRET" "$BOOTSTRAP_INTERNAL_SERVICE_SECRET" append_if_set "$REGISTRY_ENV_PATH" "REGISTRY_SIGNING_KEY" "$REGISTRY_SIGNING_KEY" append_if_set "$REGISTRY_ENV_PATH" "REGISTRY_SIGNING_KEYS" "$REGISTRY_SIGNING_KEYS" @@ -120,8 +124,8 @@ append_if_set "$PROXY_ENV_PATH" "ENVIRONMENT" "development" append_if_set "$PROXY_ENV_PATH" "APP_VERSION" "$APP_VERSION" append_if_set "$PROXY_ENV_PATH" "REGISTRY_URL" "$CLAWDENTITY_REGISTRY_URL" append_if_set "$PROXY_ENV_PATH" "OPENCLAW_BASE_URL" "$OPENCLAW_BASE_URL" -append_if_set "$PROXY_ENV_PATH" "REGISTRY_INTERNAL_SERVICE_ID" "$REGISTRY_INTERNAL_SERVICE_ID" -append_if_set "$PROXY_ENV_PATH" "REGISTRY_INTERNAL_SERVICE_SECRET" "$REGISTRY_INTERNAL_SERVICE_SECRET" +append_if_set "$PROXY_ENV_PATH" "BOOTSTRAP_INTERNAL_SERVICE_ID" "$BOOTSTRAP_INTERNAL_SERVICE_ID" +append_if_set "$PROXY_ENV_PATH" "BOOTSTRAP_INTERNAL_SERVICE_SECRET" "$BOOTSTRAP_INTERNAL_SERVICE_SECRET" append_if_set "$PROXY_ENV_PATH" "INJECT_IDENTITY_INTO_MESSAGE" "$INJECT_IDENTITY_INTO_MESSAGE" proxy_optional_keys=( From 3b3eeb7e63cf656589b7e1fb8b222baa22abe666 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 19:19:40 +0530 Subject: [PATCH 145/190] refactor(registry): split agent registration into focused modules --- apps/registry/src/AGENTS.md | 12 + apps/registry/src/agent-registration.ts | 788 +----------------- .../registry/src/agent-registration/AGENTS.md | 17 + .../src/agent-registration/challenge.ts | 63 ++ .../src/agent-registration/constants.ts | 32 + .../src/agent-registration/creation.ts | 200 +++++ .../registry/src/agent-registration/errors.ts | 60 ++ apps/registry/src/agent-registration/index.ts | 27 + .../src/agent-registration/parsing.ts | 310 +++++++ apps/registry/src/agent-registration/proof.ts | 79 ++ apps/registry/src/agent-registration/types.ts | 83 ++ 11 files changed, 884 insertions(+), 787 deletions(-) create mode 100644 apps/registry/src/agent-registration/AGENTS.md create mode 100644 
apps/registry/src/agent-registration/challenge.ts create mode 100644 apps/registry/src/agent-registration/constants.ts create mode 100644 apps/registry/src/agent-registration/creation.ts create mode 100644 apps/registry/src/agent-registration/errors.ts create mode 100644 apps/registry/src/agent-registration/index.ts create mode 100644 apps/registry/src/agent-registration/parsing.ts create mode 100644 apps/registry/src/agent-registration/proof.ts create mode 100644 apps/registry/src/agent-registration/types.ts diff --git a/apps/registry/src/AGENTS.md b/apps/registry/src/AGENTS.md index e0b61d9..362e992 100644 --- a/apps/registry/src/AGENTS.md +++ b/apps/registry/src/AGENTS.md @@ -140,6 +140,12 @@ - `challengeId`: ULID from `/v1/agents/challenge`. - `challengeSignature`: base64url Ed25519 signature over the canonical proof message. - Keep request parsing and validation in a reusable helper module (`agent-registration.ts`) so future routes can share the same constraints without duplicating schema logic. +- Keep `agent-registration.ts` as the stable facade import path and keep implementation split under `agent-registration/` by concern: + - `constants.ts` for defaults/limits/issuer resolution + - `parsing.ts` for payload validation + - `challenge.ts` for challenge construction + - `proof.ts` for ownership-proof verification + - `creation.ts` for registration/reissue claim builders - Keep error detail exposure environment-aware via `shouldExposeVerboseErrors` (shared SDK helper path): return generic messages without internals in `production`, but include validation/config details in `development`/`test` for debugging. - Persist `agents.current_jti` and `agents.expires_at` on insert; generated AIT claims (`jti`, `exp`) must stay in sync with those persisted values. - Verify challenge ownership before signing AIT: challenge must exist for the caller, be unexpired, remain `pending`, and match the request public key + signature. 
@@ -150,6 +156,12 @@ - Bootstrap agent auth refresh material in the same mutation unit as agent creation by inserting an active `agent_auth_sessions` row. - Response shape is `{ agent, ait, agentAuth }` where `agentAuth` returns short-lived access credentials and rotating refresh credentials. +## Agent Registration Helpers +- Keep `agent-registration.ts` responsibilities grouped by validation/parsing, challenge lifecycle, proof verification, and agent/token builders so each module can be split without changing behavior. +- Share the parsing helpers with any other routes that must reuse the same error exposure (name, framework, key, TTL, challenge fields) and keep environment-aware detail toggles centralized near `shouldExposeVerboseErrors`. +- Any refactor that splits this file should still surface `buildAgentRegistrationChallenge`, `verifyAgentRegistrationOwnershipProof`, `buildAgentRegistrationFromParsed`, `buildAgentReissue`, and `resolveRegistryIssuer` from a single barrel so callers need not change. +- Tests `apps/registry/src/server.test/agent-registration-challenge.test.ts` (challenge creation + persistence) and `apps/registry/src/server.test/agent-registration-create.test.ts` (payload validation, proof verification, error exposure, and issue response) are the canonical guards for this module—keep them green when moving logic into discrete modules. + ## POST /v1/agents/auth/refresh Contract - Public endpoint (no PAT): auth is agent-scoped via `Authorization: Claw ` + PoP headers + refresh token payload. - Apply per-client-IP throttling and return `429 RATE_LIMIT_EXCEEDED` before auth parsing when over budget. 
diff --git a/apps/registry/src/agent-registration.ts b/apps/registry/src/agent-registration.ts index 821adcb..d118c7e 100644 --- a/apps/registry/src/agent-registration.ts +++ b/apps/registry/src/agent-registration.ts @@ -1,787 +1 @@ -import { - AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, - type AitClaims, - canonicalizeAgentRegistrationProof, - decodeBase64url, - encodeBase64url, - generateUlid, - makeAgentDid, - parseUlid, - validateAgentName, -} from "@clawdentity/protocol"; -import { - AppError, - addSeconds, - nowIso, - nowUtcMs, - type RegistryConfig, - shouldExposeVerboseErrors, - toIso, - verifyEd25519, -} from "@clawdentity/sdk"; - -const DEFAULT_AGENT_FRAMEWORK = "openclaw"; -const DEFAULT_AGENT_TTL_DAYS = 30; -const MAX_FRAMEWORK_LENGTH = 32; -const MIN_AGENT_TTL_DAYS = 1; -const MAX_AGENT_TTL_DAYS = 90; -const DAY_IN_SECONDS = 24 * 60 * 60; -const ED25519_PUBLIC_KEY_LENGTH = 32; -const ED25519_SIGNATURE_LENGTH = 64; -const AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS = 5 * 60; -const AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH = 24; -const REGISTRY_ISSUER_BY_ENVIRONMENT: Record< - RegistryConfig["ENVIRONMENT"], - string -> = { - development: "https://dev.registry.clawdentity.com", - production: "https://registry.clawdentity.com", - test: "https://dev.registry.clawdentity.com", -}; - -type AgentRegistrationBody = { - name: string; - framework?: string; - publicKey: string; - ttlDays?: number; - challengeId: string; - challengeSignature: string; -}; - -type AgentRegistrationChallengeBody = { - publicKey: string; -}; - -export type AgentRegistrationChallenge = { - id: string; - ownerId: string; - publicKey: string; - nonce: string; - status: "pending"; - expiresAt: string; - usedAt: null; - createdAt: string; - updatedAt: string; -}; - -export type AgentRegistrationChallengeResult = { - challenge: AgentRegistrationChallenge; - response: { - challengeId: string; - nonce: string; - ownerDid: string; - expiresAt: string; - algorithm: "Ed25519"; - messageTemplate: 
string; - }; -}; - -export type PersistedAgentRegistrationChallenge = { - id: string; - ownerId: string; - publicKey: string; - nonce: string; - status: "pending" | "used"; - expiresAt: string; - usedAt: string | null; -}; - -export type AgentRegistrationResult = { - agent: { - id: string; - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - ttlDays: number; - status: "active"; - expiresAt: string; - createdAt: string; - updatedAt: string; - }; - claims: AitClaims; -}; - -export type AgentReissueResult = { - agent: { - id: string; - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - ttlDays: number; - status: "active"; - expiresAt: string; - updatedAt: string; - }; - claims: AitClaims; -}; - -function invalidRegistration(options: { - environment: RegistryConfig["ENVIRONMENT"]; - details?: { - fieldErrors: Record; - formErrors: string[]; - }; -}): AppError { - const exposeDetails = shouldExposeVerboseErrors(options.environment); - return new AppError({ - code: "AGENT_REGISTRATION_INVALID", - message: exposeDetails - ? "Agent registration payload is invalid" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - details: exposeDetails ? options.details : undefined, - }); -} - -function invalidRegistrationChallenge(options: { - environment: RegistryConfig["ENVIRONMENT"]; - details?: { - fieldErrors: Record; - formErrors: string[]; - }; -}): AppError { - const exposeDetails = shouldExposeVerboseErrors(options.environment); - return new AppError({ - code: "AGENT_REGISTRATION_CHALLENGE_INVALID", - message: exposeDetails - ? "Agent registration challenge payload is invalid" - : "Request could not be processed", - status: 400, - expose: exposeDetails, - details: exposeDetails ? 
options.details : undefined, - }); -} - -function registrationProofError(options: { - environment: RegistryConfig["ENVIRONMENT"]; - code: - | "AGENT_REGISTRATION_CHALLENGE_EXPIRED" - | "AGENT_REGISTRATION_CHALLENGE_REPLAYED" - | "AGENT_REGISTRATION_PROOF_MISMATCH" - | "AGENT_REGISTRATION_PROOF_INVALID"; - message: string; -}): AppError { - const exposeDetails = shouldExposeVerboseErrors(options.environment); - return new AppError({ - code: options.code, - message: exposeDetails ? options.message : "Request could not be processed", - status: 400, - expose: true, - }); -} - -function addFieldError( - fieldErrors: Record, - field: string, - message: string, -): void { - const errors = fieldErrors[field] ?? []; - errors.push(message); - fieldErrors[field] = errors; -} - -function hasControlChars(value: string): boolean { - for (let index = 0; index < value.length; index += 1) { - const code = value.charCodeAt(index); - if (code <= 31 || code === 127) { - return true; - } - } - - return false; -} - -function parseName( - input: unknown, - fieldErrors: Record, -): string { - if (typeof input !== "string") { - addFieldError(fieldErrors, "name", "name is required"); - return ""; - } - - const value = input.trim(); - if (!validateAgentName(value)) { - addFieldError( - fieldErrors, - "name", - "name contains invalid characters or length", - ); - } - - return value; -} - -function parseFramework( - input: unknown, - fieldErrors: Record, -): string | undefined { - if (input === undefined) { - return undefined; - } - - if (typeof input !== "string") { - addFieldError(fieldErrors, "framework", "framework must be a string"); - return undefined; - } - - const value = input.trim(); - if (value.length === 0) { - addFieldError(fieldErrors, "framework", "framework is required"); - return undefined; - } - - if (value.length > MAX_FRAMEWORK_LENGTH) { - addFieldError( - fieldErrors, - "framework", - `framework must be at most ${MAX_FRAMEWORK_LENGTH} characters`, - ); - } - - if 
(hasControlChars(value)) { - addFieldError( - fieldErrors, - "framework", - "framework contains control characters", - ); - } - - return value; -} - -function parsePublicKey( - input: unknown, - fieldErrors: Record, -): string { - if (typeof input !== "string") { - addFieldError(fieldErrors, "publicKey", "publicKey is required"); - return ""; - } - - const value = input.trim(); - if (value.length === 0) { - addFieldError(fieldErrors, "publicKey", "publicKey is required"); - return ""; - } - - let decodedKey: Uint8Array; - try { - decodedKey = decodeBase64url(value); - } catch { - addFieldError( - fieldErrors, - "publicKey", - "publicKey must be a base64url-encoded 32-byte Ed25519 key", - ); - return value; - } - - if (decodedKey.length !== ED25519_PUBLIC_KEY_LENGTH) { - addFieldError( - fieldErrors, - "publicKey", - "publicKey must be a base64url-encoded 32-byte Ed25519 key", - ); - } - - return value; -} - -function parseTtlDays( - input: unknown, - fieldErrors: Record, -): number | undefined { - if (input === undefined) { - return undefined; - } - - if (typeof input !== "number" || !Number.isFinite(input)) { - addFieldError(fieldErrors, "ttlDays", "ttlDays must be a number"); - return undefined; - } - - if (!Number.isInteger(input)) { - addFieldError(fieldErrors, "ttlDays", "ttlDays must be an integer"); - return undefined; - } - - if (input < MIN_AGENT_TTL_DAYS || input > MAX_AGENT_TTL_DAYS) { - addFieldError( - fieldErrors, - "ttlDays", - `ttlDays must be between ${MIN_AGENT_TTL_DAYS} and ${MAX_AGENT_TTL_DAYS}`, - ); - return undefined; - } - - return input; -} - -function parseChallengeId( - input: unknown, - fieldErrors: Record, -): string { - if (typeof input !== "string") { - addFieldError(fieldErrors, "challengeId", "challengeId is required"); - return ""; - } - - const value = input.trim(); - if (value.length === 0) { - addFieldError(fieldErrors, "challengeId", "challengeId is required"); - return ""; - } - - try { - parseUlid(value); - } catch { - 
addFieldError(fieldErrors, "challengeId", "challengeId must be a ULID"); - } - - return value; -} - -function parseChallengeSignature( - input: unknown, - fieldErrors: Record, -): string { - if (typeof input !== "string") { - addFieldError( - fieldErrors, - "challengeSignature", - "challengeSignature is required", - ); - return ""; - } - - const value = input.trim(); - if (value.length === 0) { - addFieldError( - fieldErrors, - "challengeSignature", - "challengeSignature is required", - ); - return ""; - } - - let decodedSignature: Uint8Array; - try { - decodedSignature = decodeBase64url(value); - } catch { - addFieldError( - fieldErrors, - "challengeSignature", - "challengeSignature must be a base64url-encoded Ed25519 signature", - ); - return value; - } - - if (decodedSignature.length !== ED25519_SIGNATURE_LENGTH) { - addFieldError( - fieldErrors, - "challengeSignature", - "challengeSignature must be a base64url-encoded Ed25519 signature", - ); - } - - return value; -} - -export function parseAgentRegistrationChallengeBody( - payload: unknown, - environment: RegistryConfig["ENVIRONMENT"], -): AgentRegistrationChallengeBody { - const fieldErrors: Record = {}; - - if (!payload || typeof payload !== "object" || Array.isArray(payload)) { - throw invalidRegistrationChallenge({ - environment, - details: { - fieldErrors: { - body: ["body must be a JSON object"], - }, - formErrors: [], - }, - }); - } - - const objectPayload = payload as Record; - - const parsed: AgentRegistrationChallengeBody = { - publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), - }; - - if (Object.keys(fieldErrors).length > 0) { - throw invalidRegistrationChallenge({ - environment, - details: { fieldErrors, formErrors: [] }, - }); - } - - return parsed; -} - -export function buildAgentRegistrationChallenge(input: { - payload: unknown; - ownerId: string; - ownerDid: string; - environment: RegistryConfig["ENVIRONMENT"]; -}): AgentRegistrationChallengeResult { - const parsedBody = 
parseAgentRegistrationChallengeBody( - input.payload, - input.environment, - ); - - const createdAt = nowIso(); - const createdAtMs = Date.parse(createdAt); - const challengeId = generateUlid(createdAtMs); - const nonceBytes = crypto.getRandomValues( - new Uint8Array(AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH), - ); - const nonce = encodeBase64url(nonceBytes); - const expiresAt = addSeconds( - createdAt, - AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS, - ); - - const challenge: AgentRegistrationChallenge = { - id: challengeId, - ownerId: input.ownerId, - publicKey: parsedBody.publicKey, - nonce, - status: "pending", - expiresAt, - usedAt: null, - createdAt, - updatedAt: createdAt, - }; - - return { - challenge, - response: { - challengeId, - nonce, - ownerDid: input.ownerDid, - expiresAt, - algorithm: "Ed25519", - messageTemplate: AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, - }, - }; -} - -export function parseAgentRegistrationBody( - payload: unknown, - environment: RegistryConfig["ENVIRONMENT"], -): AgentRegistrationBody { - const fieldErrors: Record = {}; - - if (!payload || typeof payload !== "object" || Array.isArray(payload)) { - throw invalidRegistration({ - environment, - details: { - fieldErrors: { - body: ["body must be a JSON object"], - }, - formErrors: [], - }, - }); - } - - const objectPayload = payload as Record; - - const parsed: AgentRegistrationBody = { - name: parseName(objectPayload.name, fieldErrors), - framework: parseFramework(objectPayload.framework, fieldErrors), - publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), - ttlDays: parseTtlDays(objectPayload.ttlDays, fieldErrors), - challengeId: parseChallengeId(objectPayload.challengeId, fieldErrors), - challengeSignature: parseChallengeSignature( - objectPayload.challengeSignature, - fieldErrors, - ), - }; - - if (Object.keys(fieldErrors).length > 0) { - throw invalidRegistration({ - environment, - details: { fieldErrors, formErrors: [] }, - }); - } - - return parsed; -} - -export async 
function verifyAgentRegistrationOwnershipProof(input: { - parsedBody: AgentRegistrationBody; - challenge: PersistedAgentRegistrationChallenge; - ownerDid: string; - environment: RegistryConfig["ENVIRONMENT"]; -}): Promise { - if (input.challenge.status !== "pending") { - throw registrationProofError({ - environment: input.environment, - code: "AGENT_REGISTRATION_CHALLENGE_REPLAYED", - message: "Registration challenge has already been used", - }); - } - - const expiresAtMs = Date.parse(input.challenge.expiresAt); - if (!Number.isFinite(expiresAtMs) || expiresAtMs <= nowUtcMs()) { - throw registrationProofError({ - environment: input.environment, - code: "AGENT_REGISTRATION_CHALLENGE_EXPIRED", - message: "Registration challenge has expired", - }); - } - - if (input.challenge.publicKey !== input.parsedBody.publicKey) { - throw registrationProofError({ - environment: input.environment, - code: "AGENT_REGISTRATION_PROOF_MISMATCH", - message: "Registration challenge does not match the provided public key", - }); - } - - let signatureBytes: Uint8Array; - let publicKeyBytes: Uint8Array; - try { - signatureBytes = decodeBase64url(input.parsedBody.challengeSignature); - publicKeyBytes = decodeBase64url(input.parsedBody.publicKey); - } catch { - throw registrationProofError({ - environment: input.environment, - code: "AGENT_REGISTRATION_PROOF_INVALID", - message: "Registration challenge signature is invalid", - }); - } - - const canonical = canonicalizeAgentRegistrationProof({ - challengeId: input.challenge.id, - nonce: input.challenge.nonce, - ownerDid: input.ownerDid, - publicKey: input.parsedBody.publicKey, - name: input.parsedBody.name, - framework: input.parsedBody.framework, - ttlDays: input.parsedBody.ttlDays, - }); - - const verified = await verifyEd25519( - signatureBytes, - new TextEncoder().encode(canonical), - publicKeyBytes, - ); - - if (!verified) { - throw registrationProofError({ - environment: input.environment, - code: "AGENT_REGISTRATION_PROOF_INVALID", - 
message: "Registration challenge signature is invalid", - }); - } -} - -export function buildAgentRegistrationFromParsed(input: { - parsedBody: AgentRegistrationBody; - ownerDid: string; - issuer: string; -}): AgentRegistrationResult { - const issuedAt = nowIso(); - const issuedAtMs = Date.parse(issuedAt); - const issuedAtSeconds = Math.floor(issuedAtMs / 1000); - const ttlDays = input.parsedBody.ttlDays ?? DEFAULT_AGENT_TTL_DAYS; - const framework = input.parsedBody.framework ?? DEFAULT_AGENT_FRAMEWORK; - const ttlSeconds = ttlDays * DAY_IN_SECONDS; - const expiresAt = addSeconds(issuedAt, ttlSeconds); - - const agentId = generateUlid(issuedAtMs); - const agentDid = makeAgentDid(agentId); - const currentJti = generateUlid(issuedAtMs + 1); - const createdAt = issuedAt; - - return { - agent: { - id: agentId, - did: agentDid, - ownerDid: input.ownerDid, - name: input.parsedBody.name, - framework, - publicKey: input.parsedBody.publicKey, - currentJti, - ttlDays, - status: "active", - expiresAt, - createdAt, - updatedAt: createdAt, - }, - claims: { - iss: input.issuer, - sub: agentDid, - ownerDid: input.ownerDid, - name: input.parsedBody.name, - framework, - cnf: { - jwk: { - kty: "OKP", - crv: "Ed25519", - x: input.parsedBody.publicKey, - }, - }, - iat: issuedAtSeconds, - nbf: issuedAtSeconds, - exp: issuedAtSeconds + ttlSeconds, - jti: currentJti, - }, - }; -} - -export function buildAgentRegistration(input: { - payload: unknown; - ownerDid: string; - issuer: string; - environment: RegistryConfig["ENVIRONMENT"]; -}): AgentRegistrationResult { - const parsedBody = parseAgentRegistrationBody( - input.payload, - input.environment, - ); - - return buildAgentRegistrationFromParsed({ - parsedBody, - ownerDid: input.ownerDid, - issuer: input.issuer, - }); -} - -function resolveReissueExpiry(input: { - previousExpiresAt: string | null; - issuedAt: string; - issuedAtMs: number; - issuedAtSeconds: number; -}): { - expiresAt: string; - exp: number; - ttlDays: number; -} { - 
const defaultTtlSeconds = DEFAULT_AGENT_TTL_DAYS * DAY_IN_SECONDS; - const defaultExp = input.issuedAtSeconds + defaultTtlSeconds; - const defaultExpiry = addSeconds(input.issuedAt, defaultTtlSeconds); - - if (!input.previousExpiresAt) { - return { - expiresAt: defaultExpiry, - exp: defaultExp, - ttlDays: DEFAULT_AGENT_TTL_DAYS, - }; - } - - const previousExpiryMs = Date.parse(input.previousExpiresAt); - if ( - !Number.isFinite(previousExpiryMs) || - previousExpiryMs <= input.issuedAtMs - ) { - return { - expiresAt: defaultExpiry, - exp: defaultExp, - ttlDays: DEFAULT_AGENT_TTL_DAYS, - }; - } - - const previousExpirySeconds = Math.floor(previousExpiryMs / 1000); - const remainingSeconds = Math.max( - 1, - previousExpirySeconds - input.issuedAtSeconds, - ); - const ttlDays = Math.min( - MAX_AGENT_TTL_DAYS, - Math.max(MIN_AGENT_TTL_DAYS, Math.ceil(remainingSeconds / DAY_IN_SECONDS)), - ); - - return { - expiresAt: toIso(previousExpiryMs), - exp: previousExpirySeconds, - ttlDays, - }; -} - -export function buildAgentReissue(input: { - id: string; - did: string; - ownerDid: string; - name: string; - framework: string | null; - publicKey: string; - previousExpiresAt: string | null; - issuer: string; -}): AgentReissueResult { - const issuedAt = nowIso(); - const issuedAtMs = Date.parse(issuedAt); - const issuedAtSeconds = Math.floor(issuedAtMs / 1000); - const expiry = resolveReissueExpiry({ - previousExpiresAt: input.previousExpiresAt, - issuedAt, - issuedAtMs, - issuedAtSeconds, - }); - const currentJti = generateUlid(issuedAtMs + 1); - const framework = input.framework ?? 
DEFAULT_AGENT_FRAMEWORK; - - return { - agent: { - id: input.id, - did: input.did, - ownerDid: input.ownerDid, - name: input.name, - framework, - publicKey: input.publicKey, - currentJti, - ttlDays: expiry.ttlDays, - status: "active", - expiresAt: expiry.expiresAt, - updatedAt: issuedAt, - }, - claims: { - iss: input.issuer, - sub: input.did, - ownerDid: input.ownerDid, - name: input.name, - framework, - cnf: { - jwk: { - kty: "OKP", - crv: "Ed25519", - x: input.publicKey, - }, - }, - iat: issuedAtSeconds, - nbf: issuedAtSeconds, - exp: expiry.exp, - jti: currentJti, - }, - }; -} - -export { - DEFAULT_AGENT_FRAMEWORK, - DEFAULT_AGENT_TTL_DAYS, - MAX_AGENT_TTL_DAYS, - MIN_AGENT_TTL_DAYS, -}; - -export function resolveRegistryIssuer( - config: Pick, -): string { - const explicitIssuer = config.REGISTRY_ISSUER_URL?.trim(); - if (explicitIssuer && explicitIssuer.length > 0) { - return explicitIssuer; - } - - return REGISTRY_ISSUER_BY_ENVIRONMENT[config.ENVIRONMENT]; -} +export * from "./agent-registration/index.js"; diff --git a/apps/registry/src/agent-registration/AGENTS.md b/apps/registry/src/agent-registration/AGENTS.md new file mode 100644 index 0000000..6efd071 --- /dev/null +++ b/apps/registry/src/agent-registration/AGENTS.md @@ -0,0 +1,17 @@ +# AGENTS.md (apps/registry/src/agent-registration) + +## Purpose +- Keep agent-registration helpers modular, deterministic, and easy to test independently. + +## Module Boundaries +- Keep shared registration constants and issuer resolution in `constants.ts`. +- Keep payload parsing/validation in `parsing.ts`; do not duplicate field validators across routes. +- Keep challenge construction logic in `challenge.ts` and preserve the challenge response contract. +- Keep ownership-proof verification in `proof.ts` with consistent replay/expiry/signature errors. +- Keep claim/agent object builders in `creation.ts`, including reissue expiry rules. 
+- Keep exported public surface centralized in `index.ts`, and keep `../agent-registration.ts` as the stable facade import path. + +## Safety +- Preserve environment-aware validation error exposure rules (`shouldExposeVerboseErrors`) for parse failures. +- Preserve challenge nonce length, TTL, and proof canonicalization inputs exactly; changes here are auth-sensitive. +- Preserve reissue expiry behavior: do not extend lifetime beyond prior valid expiry when previous expiry is still active. diff --git a/apps/registry/src/agent-registration/challenge.ts b/apps/registry/src/agent-registration/challenge.ts new file mode 100644 index 0000000..469c35e --- /dev/null +++ b/apps/registry/src/agent-registration/challenge.ts @@ -0,0 +1,63 @@ +import { + AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, + encodeBase64url, + generateUlid, +} from "@clawdentity/protocol"; +import { addSeconds, nowIso, type RegistryConfig } from "@clawdentity/sdk"; +import { + AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH, + AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS, +} from "./constants.js"; +import { parseAgentRegistrationChallengeBody } from "./parsing.js"; +import type { + AgentRegistrationChallenge, + AgentRegistrationChallengeResult, +} from "./types.js"; + +export function buildAgentRegistrationChallenge(input: { + payload: unknown; + ownerId: string; + ownerDid: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): AgentRegistrationChallengeResult { + const parsedBody = parseAgentRegistrationChallengeBody( + input.payload, + input.environment, + ); + + const createdAt = nowIso(); + const createdAtMs = Date.parse(createdAt); + const challengeId = generateUlid(createdAtMs); + const nonceBytes = crypto.getRandomValues( + new Uint8Array(AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH), + ); + const nonce = encodeBase64url(nonceBytes); + const expiresAt = addSeconds( + createdAt, + AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS, + ); + + const challenge: AgentRegistrationChallenge = { + id: challengeId, + 
ownerId: input.ownerId, + publicKey: parsedBody.publicKey, + nonce, + status: "pending", + expiresAt, + usedAt: null, + createdAt, + updatedAt: createdAt, + }; + + return { + challenge, + response: { + challengeId, + nonce, + ownerDid: input.ownerDid, + expiresAt, + algorithm: "Ed25519", + messageTemplate: AGENT_REGISTRATION_PROOF_MESSAGE_TEMPLATE, + }, + }; +} diff --git a/apps/registry/src/agent-registration/constants.ts b/apps/registry/src/agent-registration/constants.ts new file mode 100644 index 0000000..c145d64 --- /dev/null +++ b/apps/registry/src/agent-registration/constants.ts @@ -0,0 +1,32 @@ +import type { RegistryConfig } from "@clawdentity/sdk"; + +export const DEFAULT_AGENT_FRAMEWORK = "openclaw"; +export const DEFAULT_AGENT_TTL_DAYS = 30; +export const MAX_FRAMEWORK_LENGTH = 32; +export const MIN_AGENT_TTL_DAYS = 1; +export const MAX_AGENT_TTL_DAYS = 90; +export const DAY_IN_SECONDS = 24 * 60 * 60; +export const ED25519_PUBLIC_KEY_LENGTH = 32; +export const ED25519_SIGNATURE_LENGTH = 64; +export const AGENT_REGISTRATION_CHALLENGE_TTL_SECONDS = 5 * 60; +export const AGENT_REGISTRATION_CHALLENGE_NONCE_LENGTH = 24; + +const REGISTRY_ISSUER_BY_ENVIRONMENT: Record< + RegistryConfig["ENVIRONMENT"], + string +> = { + development: "https://dev.registry.clawdentity.com", + production: "https://registry.clawdentity.com", + test: "https://dev.registry.clawdentity.com", +}; + +export function resolveRegistryIssuer( + config: Pick, +): string { + const explicitIssuer = config.REGISTRY_ISSUER_URL?.trim(); + if (explicitIssuer && explicitIssuer.length > 0) { + return explicitIssuer; + } + + return REGISTRY_ISSUER_BY_ENVIRONMENT[config.ENVIRONMENT]; +} diff --git a/apps/registry/src/agent-registration/creation.ts b/apps/registry/src/agent-registration/creation.ts new file mode 100644 index 0000000..274d707 --- /dev/null +++ b/apps/registry/src/agent-registration/creation.ts @@ -0,0 +1,200 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; 
+import { + addSeconds, + nowIso, + type RegistryConfig, + toIso, +} from "@clawdentity/sdk"; +import { + DAY_IN_SECONDS, + DEFAULT_AGENT_FRAMEWORK, + DEFAULT_AGENT_TTL_DAYS, + MAX_AGENT_TTL_DAYS, + MIN_AGENT_TTL_DAYS, +} from "./constants.js"; +import { parseAgentRegistrationBody } from "./parsing.js"; +import type { + AgentRegistrationBody, + AgentRegistrationResult, + AgentReissueResult, +} from "./types.js"; + +export function buildAgentRegistrationFromParsed(input: { + parsedBody: AgentRegistrationBody; + ownerDid: string; + issuer: string; +}): AgentRegistrationResult { + const issuedAt = nowIso(); + const issuedAtMs = Date.parse(issuedAt); + const issuedAtSeconds = Math.floor(issuedAtMs / 1000); + const ttlDays = input.parsedBody.ttlDays ?? DEFAULT_AGENT_TTL_DAYS; + const framework = input.parsedBody.framework ?? DEFAULT_AGENT_FRAMEWORK; + const ttlSeconds = ttlDays * DAY_IN_SECONDS; + const expiresAt = addSeconds(issuedAt, ttlSeconds); + + const agentId = generateUlid(issuedAtMs); + const agentDid = makeAgentDid(agentId); + const currentJti = generateUlid(issuedAtMs + 1); + const createdAt = issuedAt; + + return { + agent: { + id: agentId, + did: agentDid, + ownerDid: input.ownerDid, + name: input.parsedBody.name, + framework, + publicKey: input.parsedBody.publicKey, + currentJti, + ttlDays, + status: "active", + expiresAt, + createdAt, + updatedAt: createdAt, + }, + claims: { + iss: input.issuer, + sub: agentDid, + ownerDid: input.ownerDid, + name: input.parsedBody.name, + framework, + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: input.parsedBody.publicKey, + }, + }, + iat: issuedAtSeconds, + nbf: issuedAtSeconds, + exp: issuedAtSeconds + ttlSeconds, + jti: currentJti, + }, + }; +} + +export function buildAgentRegistration(input: { + payload: unknown; + ownerDid: string; + issuer: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): AgentRegistrationResult { + const parsedBody = parseAgentRegistrationBody( + input.payload, + 
input.environment, + ); + + return buildAgentRegistrationFromParsed({ + parsedBody, + ownerDid: input.ownerDid, + issuer: input.issuer, + }); +} + +function resolveReissueExpiry(input: { + previousExpiresAt: string | null; + issuedAt: string; + issuedAtMs: number; + issuedAtSeconds: number; +}): { + expiresAt: string; + exp: number; + ttlDays: number; +} { + const defaultTtlSeconds = DEFAULT_AGENT_TTL_DAYS * DAY_IN_SECONDS; + const defaultExp = input.issuedAtSeconds + defaultTtlSeconds; + const defaultExpiry = addSeconds(input.issuedAt, defaultTtlSeconds); + + if (!input.previousExpiresAt) { + return { + expiresAt: defaultExpiry, + exp: defaultExp, + ttlDays: DEFAULT_AGENT_TTL_DAYS, + }; + } + + const previousExpiryMs = Date.parse(input.previousExpiresAt); + if ( + !Number.isFinite(previousExpiryMs) || + previousExpiryMs <= input.issuedAtMs + ) { + return { + expiresAt: defaultExpiry, + exp: defaultExp, + ttlDays: DEFAULT_AGENT_TTL_DAYS, + }; + } + + const previousExpirySeconds = Math.floor(previousExpiryMs / 1000); + const remainingSeconds = Math.max( + 1, + previousExpirySeconds - input.issuedAtSeconds, + ); + const ttlDays = Math.min( + MAX_AGENT_TTL_DAYS, + Math.max(MIN_AGENT_TTL_DAYS, Math.ceil(remainingSeconds / DAY_IN_SECONDS)), + ); + + return { + expiresAt: toIso(previousExpiryMs), + exp: previousExpirySeconds, + ttlDays, + }; +} + +export function buildAgentReissue(input: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string | null; + publicKey: string; + previousExpiresAt: string | null; + issuer: string; +}): AgentReissueResult { + const issuedAt = nowIso(); + const issuedAtMs = Date.parse(issuedAt); + const issuedAtSeconds = Math.floor(issuedAtMs / 1000); + const expiry = resolveReissueExpiry({ + previousExpiresAt: input.previousExpiresAt, + issuedAt, + issuedAtMs, + issuedAtSeconds, + }); + const currentJti = generateUlid(issuedAtMs + 1); + const framework = input.framework ?? 
DEFAULT_AGENT_FRAMEWORK; + + return { + agent: { + id: input.id, + did: input.did, + ownerDid: input.ownerDid, + name: input.name, + framework, + publicKey: input.publicKey, + currentJti, + ttlDays: expiry.ttlDays, + status: "active", + expiresAt: expiry.expiresAt, + updatedAt: issuedAt, + }, + claims: { + iss: input.issuer, + sub: input.did, + ownerDid: input.ownerDid, + name: input.name, + framework, + cnf: { + jwk: { + kty: "OKP", + crv: "Ed25519", + x: input.publicKey, + }, + }, + iat: issuedAtSeconds, + nbf: issuedAtSeconds, + exp: expiry.exp, + jti: currentJti, + }, + }; +} diff --git a/apps/registry/src/agent-registration/errors.ts b/apps/registry/src/agent-registration/errors.ts new file mode 100644 index 0000000..e20ae69 --- /dev/null +++ b/apps/registry/src/agent-registration/errors.ts @@ -0,0 +1,60 @@ +import { + AppError, + type RegistryConfig, + shouldExposeVerboseErrors, +} from "@clawdentity/sdk"; + +type ValidationDetails = { + fieldErrors: Record; + formErrors: string[]; +}; + +export function invalidRegistration(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: ValidationDetails; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_REGISTRATION_INVALID", + message: exposeDetails + ? "Agent registration payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? options.details : undefined, + }); +} + +export function invalidRegistrationChallenge(options: { + environment: RegistryConfig["ENVIRONMENT"]; + details?: ValidationDetails; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: "AGENT_REGISTRATION_CHALLENGE_INVALID", + message: exposeDetails + ? "Agent registration challenge payload is invalid" + : "Request could not be processed", + status: 400, + expose: exposeDetails, + details: exposeDetails ? 
options.details : undefined, + }); +} + +export function registrationProofError(options: { + environment: RegistryConfig["ENVIRONMENT"]; + code: + | "AGENT_REGISTRATION_CHALLENGE_EXPIRED" + | "AGENT_REGISTRATION_CHALLENGE_REPLAYED" + | "AGENT_REGISTRATION_PROOF_MISMATCH" + | "AGENT_REGISTRATION_PROOF_INVALID"; + message: string; +}): AppError { + const exposeDetails = shouldExposeVerboseErrors(options.environment); + return new AppError({ + code: options.code, + message: exposeDetails ? options.message : "Request could not be processed", + status: 400, + expose: true, + }); +} diff --git a/apps/registry/src/agent-registration/index.ts b/apps/registry/src/agent-registration/index.ts new file mode 100644 index 0000000..355c949 --- /dev/null +++ b/apps/registry/src/agent-registration/index.ts @@ -0,0 +1,27 @@ +export { buildAgentRegistrationChallenge } from "./challenge.js"; +export { + DEFAULT_AGENT_FRAMEWORK, + DEFAULT_AGENT_TTL_DAYS, + MAX_AGENT_TTL_DAYS, + MIN_AGENT_TTL_DAYS, + resolveRegistryIssuer, +} from "./constants.js"; +export { + buildAgentRegistration, + buildAgentRegistrationFromParsed, + buildAgentReissue, +} from "./creation.js"; +export { + parseAgentRegistrationBody, + parseAgentRegistrationChallengeBody, +} from "./parsing.js"; +export { verifyAgentRegistrationOwnershipProof } from "./proof.js"; +export type { + AgentRegistrationBody, + AgentRegistrationChallenge, + AgentRegistrationChallengeBody, + AgentRegistrationChallengeResult, + AgentRegistrationResult, + AgentReissueResult, + PersistedAgentRegistrationChallenge, +} from "./types.js"; diff --git a/apps/registry/src/agent-registration/parsing.ts b/apps/registry/src/agent-registration/parsing.ts new file mode 100644 index 0000000..c6c993a --- /dev/null +++ b/apps/registry/src/agent-registration/parsing.ts @@ -0,0 +1,310 @@ +import { + decodeBase64url, + parseUlid, + validateAgentName, +} from "@clawdentity/protocol"; +import type { RegistryConfig } from "@clawdentity/sdk"; +import { + 
ED25519_PUBLIC_KEY_LENGTH, + ED25519_SIGNATURE_LENGTH, + MAX_AGENT_TTL_DAYS, + MAX_FRAMEWORK_LENGTH, + MIN_AGENT_TTL_DAYS, +} from "./constants.js"; +import { invalidRegistration, invalidRegistrationChallenge } from "./errors.js"; +import type { + AgentRegistrationBody, + AgentRegistrationChallengeBody, +} from "./types.js"; + +function addFieldError( + fieldErrors: Record, + field: string, + message: string, +): void { + const errors = fieldErrors[field] ?? []; + errors.push(message); + fieldErrors[field] = errors; +} + +function hasControlChars(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code <= 31 || code === 127) { + return true; + } + } + + return false; +} + +function parseName( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError(fieldErrors, "name", "name is required"); + return ""; + } + + const value = input.trim(); + if (!validateAgentName(value)) { + addFieldError( + fieldErrors, + "name", + "name contains invalid characters or length", + ); + } + + return value; +} + +function parseFramework( + input: unknown, + fieldErrors: Record, +): string | undefined { + if (input === undefined) { + return undefined; + } + + if (typeof input !== "string") { + addFieldError(fieldErrors, "framework", "framework must be a string"); + return undefined; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "framework", "framework is required"); + return undefined; + } + + if (value.length > MAX_FRAMEWORK_LENGTH) { + addFieldError( + fieldErrors, + "framework", + `framework must be at most ${MAX_FRAMEWORK_LENGTH} characters`, + ); + } + + if (hasControlChars(value)) { + addFieldError( + fieldErrors, + "framework", + "framework contains control characters", + ); + } + + return value; +} + +function parsePublicKey( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== 
"string") { + addFieldError(fieldErrors, "publicKey", "publicKey is required"); + return ""; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "publicKey", "publicKey is required"); + return ""; + } + + let decodedKey: Uint8Array; + try { + decodedKey = decodeBase64url(value); + } catch { + addFieldError( + fieldErrors, + "publicKey", + "publicKey must be a base64url-encoded 32-byte Ed25519 key", + ); + return value; + } + + if (decodedKey.length !== ED25519_PUBLIC_KEY_LENGTH) { + addFieldError( + fieldErrors, + "publicKey", + "publicKey must be a base64url-encoded 32-byte Ed25519 key", + ); + } + + return value; +} + +function parseTtlDays( + input: unknown, + fieldErrors: Record, +): number | undefined { + if (input === undefined) { + return undefined; + } + + if (typeof input !== "number" || !Number.isFinite(input)) { + addFieldError(fieldErrors, "ttlDays", "ttlDays must be a number"); + return undefined; + } + + if (!Number.isInteger(input)) { + addFieldError(fieldErrors, "ttlDays", "ttlDays must be an integer"); + return undefined; + } + + if (input < MIN_AGENT_TTL_DAYS || input > MAX_AGENT_TTL_DAYS) { + addFieldError( + fieldErrors, + "ttlDays", + `ttlDays must be between ${MIN_AGENT_TTL_DAYS} and ${MAX_AGENT_TTL_DAYS}`, + ); + return undefined; + } + + return input; +} + +function parseChallengeId( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError(fieldErrors, "challengeId", "challengeId is required"); + return ""; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError(fieldErrors, "challengeId", "challengeId is required"); + return ""; + } + + try { + parseUlid(value); + } catch { + addFieldError(fieldErrors, "challengeId", "challengeId must be a ULID"); + } + + return value; +} + +function parseChallengeSignature( + input: unknown, + fieldErrors: Record, +): string { + if (typeof input !== "string") { + addFieldError( + fieldErrors, + 
"challengeSignature", + "challengeSignature is required", + ); + return ""; + } + + const value = input.trim(); + if (value.length === 0) { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature is required", + ); + return ""; + } + + let decodedSignature: Uint8Array; + try { + decodedSignature = decodeBase64url(value); + } catch { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature must be a base64url-encoded Ed25519 signature", + ); + return value; + } + + if (decodedSignature.length !== ED25519_SIGNATURE_LENGTH) { + addFieldError( + fieldErrors, + "challengeSignature", + "challengeSignature must be a base64url-encoded Ed25519 signature", + ); + } + + return value; +} + +export function parseAgentRegistrationChallengeBody( + payload: unknown, + environment: RegistryConfig["ENVIRONMENT"], +): AgentRegistrationChallengeBody { + const fieldErrors: Record = {}; + + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw invalidRegistrationChallenge({ + environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, + formErrors: [], + }, + }); + } + + const objectPayload = payload as Record; + const parsed: AgentRegistrationChallengeBody = { + publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), + }; + + if (Object.keys(fieldErrors).length > 0) { + throw invalidRegistrationChallenge({ + environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return parsed; +} + +export function parseAgentRegistrationBody( + payload: unknown, + environment: RegistryConfig["ENVIRONMENT"], +): AgentRegistrationBody { + const fieldErrors: Record = {}; + + if (!payload || typeof payload !== "object" || Array.isArray(payload)) { + throw invalidRegistration({ + environment, + details: { + fieldErrors: { + body: ["body must be a JSON object"], + }, + formErrors: [], + }, + }); + } + + const objectPayload = payload as Record; + const parsed: AgentRegistrationBody = { + 
name: parseName(objectPayload.name, fieldErrors), + framework: parseFramework(objectPayload.framework, fieldErrors), + publicKey: parsePublicKey(objectPayload.publicKey, fieldErrors), + ttlDays: parseTtlDays(objectPayload.ttlDays, fieldErrors), + challengeId: parseChallengeId(objectPayload.challengeId, fieldErrors), + challengeSignature: parseChallengeSignature( + objectPayload.challengeSignature, + fieldErrors, + ), + }; + + if (Object.keys(fieldErrors).length > 0) { + throw invalidRegistration({ + environment, + details: { fieldErrors, formErrors: [] }, + }); + } + + return parsed; +} diff --git a/apps/registry/src/agent-registration/proof.ts b/apps/registry/src/agent-registration/proof.ts new file mode 100644 index 0000000..f8ccccd --- /dev/null +++ b/apps/registry/src/agent-registration/proof.ts @@ -0,0 +1,79 @@ +import { + canonicalizeAgentRegistrationProof, + decodeBase64url, +} from "@clawdentity/protocol"; +import { nowUtcMs, type RegistryConfig, verifyEd25519 } from "@clawdentity/sdk"; +import { registrationProofError } from "./errors.js"; +import type { + AgentRegistrationBody, + PersistedAgentRegistrationChallenge, +} from "./types.js"; + +export async function verifyAgentRegistrationOwnershipProof(input: { + parsedBody: AgentRegistrationBody; + challenge: PersistedAgentRegistrationChallenge; + ownerDid: string; + environment: RegistryConfig["ENVIRONMENT"]; +}): Promise { + if (input.challenge.status !== "pending") { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_CHALLENGE_REPLAYED", + message: "Registration challenge has already been used", + }); + } + + const expiresAtMs = Date.parse(input.challenge.expiresAt); + if (!Number.isFinite(expiresAtMs) || expiresAtMs <= nowUtcMs()) { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_CHALLENGE_EXPIRED", + message: "Registration challenge has expired", + }); + } + + if (input.challenge.publicKey !== 
input.parsedBody.publicKey) { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_PROOF_MISMATCH", + message: "Registration challenge does not match the provided public key", + }); + } + + let signatureBytes: Uint8Array; + let publicKeyBytes: Uint8Array; + try { + signatureBytes = decodeBase64url(input.parsedBody.challengeSignature); + publicKeyBytes = decodeBase64url(input.parsedBody.publicKey); + } catch { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_PROOF_INVALID", + message: "Registration challenge signature is invalid", + }); + } + + const canonical = canonicalizeAgentRegistrationProof({ + challengeId: input.challenge.id, + nonce: input.challenge.nonce, + ownerDid: input.ownerDid, + publicKey: input.parsedBody.publicKey, + name: input.parsedBody.name, + framework: input.parsedBody.framework, + ttlDays: input.parsedBody.ttlDays, + }); + + const verified = await verifyEd25519( + signatureBytes, + new TextEncoder().encode(canonical), + publicKeyBytes, + ); + + if (!verified) { + throw registrationProofError({ + environment: input.environment, + code: "AGENT_REGISTRATION_PROOF_INVALID", + message: "Registration challenge signature is invalid", + }); + } +} diff --git a/apps/registry/src/agent-registration/types.ts b/apps/registry/src/agent-registration/types.ts new file mode 100644 index 0000000..30a1c80 --- /dev/null +++ b/apps/registry/src/agent-registration/types.ts @@ -0,0 +1,83 @@ +import type { AitClaims } from "@clawdentity/protocol"; + +export type AgentRegistrationBody = { + name: string; + framework?: string; + publicKey: string; + ttlDays?: number; + challengeId: string; + challengeSignature: string; +}; + +export type AgentRegistrationChallengeBody = { + publicKey: string; +}; + +export type AgentRegistrationChallenge = { + id: string; + ownerId: string; + publicKey: string; + nonce: string; + status: "pending"; + expiresAt: string; + usedAt: null; + createdAt: 
string; + updatedAt: string; +}; + +export type AgentRegistrationChallengeResult = { + challenge: AgentRegistrationChallenge; + response: { + challengeId: string; + nonce: string; + ownerDid: string; + expiresAt: string; + algorithm: "Ed25519"; + messageTemplate: string; + }; +}; + +export type PersistedAgentRegistrationChallenge = { + id: string; + ownerId: string; + publicKey: string; + nonce: string; + status: "pending" | "used"; + expiresAt: string; + usedAt: string | null; +}; + +export type AgentRegistrationResult = { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: "active"; + expiresAt: string; + createdAt: string; + updatedAt: string; + }; + claims: AitClaims; +}; + +export type AgentReissueResult = { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: "active"; + expiresAt: string; + updatedAt: string; + }; + claims: AitClaims; +}; From 3a43ed139cf7488ac4ce527fd42b4c0c55348da4 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 19:27:10 +0530 Subject: [PATCH 146/190] refactor(cli): split openclaw doctor static checks --- apps/cli/src/commands/openclaw/AGENTS.md | 6 +- .../commands/openclaw/doctor-config-checks.ts | 134 +++ .../openclaw/doctor-openclaw-checks.ts | 410 +++++++++ .../commands/openclaw/doctor-state-checks.ts | 245 ++++++ .../commands/openclaw/doctor-static-checks.ts | 799 +----------------- 5 files changed, 806 insertions(+), 788 deletions(-) create mode 100644 apps/cli/src/commands/openclaw/doctor-config-checks.ts create mode 100644 apps/cli/src/commands/openclaw/doctor-openclaw-checks.ts create mode 100644 apps/cli/src/commands/openclaw/doctor-state-checks.ts diff --git a/apps/cli/src/commands/openclaw/AGENTS.md b/apps/cli/src/commands/openclaw/AGENTS.md index 4badd3b..18355f2 100644 --- 
a/apps/cli/src/commands/openclaw/AGENTS.md +++ b/apps/cli/src/commands/openclaw/AGENTS.md @@ -13,7 +13,11 @@ - `gateway.ts`: OpenClaw gateway pending-device approval flow. - `connector.ts`: connector runtime status/probing/runtime-start helpers. - `config.ts`: OpenClaw config patching and hook/gateway auth normalization. -- `doctor*.ts`: doctor orchestration and check groups. +- `doctor.ts`: doctor orchestration only. +- `doctor-static-checks.ts`: stable export facade only; do not put check logic here. +- `doctor-config-checks.ts`: CLI config doctor checks. +- `doctor-state-checks.ts`: local state marker/credentials/peer/transform checks. +- `doctor-openclaw-checks.ts`: OpenClaw config, base URL, and gateway pairing checks. - `relay.ts`: relay probe and websocket diagnostics. - `setup.ts`: invite encode/decode and setup orchestration. - `command.ts`: commander wiring + stdout formatting calls. diff --git a/apps/cli/src/commands/openclaw/doctor-config-checks.ts b/apps/cli/src/commands/openclaw/doctor-config-checks.ts new file mode 100644 index 0000000..41e8f51 --- /dev/null +++ b/apps/cli/src/commands/openclaw/doctor-config-checks.ts @@ -0,0 +1,134 @@ +import { resolveConfig } from "../../config/manager.js"; +import { parseProxyUrl, toDoctorCheck } from "./common.js"; +import type { + OpenclawDoctorCheckResult, + OpenclawDoctorOptions, +} from "./types.js"; + +export async function runDoctorConfigCheck(input: { + options: OpenclawDoctorOptions; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + if (input.options.includeConfigCheck === false) { + return; + } + + const resolveConfigImpl = input.options.resolveConfigImpl ?? resolveConfig; + try { + const resolvedConfig = await resolveConfigImpl(); + const envProxyUrl = + typeof process.env.CLAWDENTITY_PROXY_URL === "string" + ? 
process.env.CLAWDENTITY_PROXY_URL.trim() + : ""; + if ( + typeof resolvedConfig.registryUrl !== "string" || + resolvedConfig.registryUrl.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "registryUrl is missing", + remediationHint: + "Run: clawdentity config set registryUrl ", + }), + ); + } else if ( + typeof resolvedConfig.apiKey !== "string" || + resolvedConfig.apiKey.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "apiKey is missing", + remediationHint: "Run: clawdentity config set apiKey ", + }), + ); + } else if (envProxyUrl.length > 0) { + let hasValidEnvProxyUrl = true; + try { + parseProxyUrl(envProxyUrl); + } catch { + hasValidEnvProxyUrl = false; + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "CLAWDENTITY_PROXY_URL is invalid", + remediationHint: + "Set CLAWDENTITY_PROXY_URL to a valid http(s) URL or unset it", + }), + ); + } + + if (hasValidEnvProxyUrl) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: + "registryUrl and apiKey are configured (proxy URL override is active via CLAWDENTITY_PROXY_URL)", + }), + ); + } + } else if ( + typeof resolvedConfig.proxyUrl !== "string" || + resolvedConfig.proxyUrl.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "proxyUrl is missing", + remediationHint: + "Run: clawdentity invite redeem or clawdentity config init", + }), + ); + } else { + let hasValidConfigProxyUrl = true; + try { + parseProxyUrl(resolvedConfig.proxyUrl); + } catch { + hasValidConfigProxyUrl = false; + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "proxyUrl is invalid", + 
remediationHint: + "Run: clawdentity invite redeem or clawdentity config init", + }), + ); + } + + if (hasValidConfigProxyUrl) { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "pass", + message: "registryUrl, apiKey, and proxyUrl are configured", + }), + ); + } + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "config.registry", + label: "CLI config", + status: "fail", + message: "unable to resolve CLI config", + remediationHint: + "Run: clawdentity config init (or fix your CLI state config file)", + }), + ); + } +} diff --git a/apps/cli/src/commands/openclaw/doctor-openclaw-checks.ts b/apps/cli/src/commands/openclaw/doctor-openclaw-checks.ts new file mode 100644 index 0000000..67eec92 --- /dev/null +++ b/apps/cli/src/commands/openclaw/doctor-openclaw-checks.ts @@ -0,0 +1,410 @@ +import { + isRecord, + normalizeStringArrayWithValues, + toDoctorCheck, +} from "./common.js"; +import { + hasRelayTransformModule, + isCanonicalAgentSessionKey, + isRelayHookMapping, + parseGatewayAuthMode, +} from "./config.js"; +import { + OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + OPENCLAW_SETUP_COMMAND_HINT, + OPENCLAW_SETUP_RESTART_COMMAND_HINT, + OPENCLAW_SETUP_WITH_BASE_URL_HINT, +} from "./constants.js"; +import { readOpenclawGatewayPendingState } from "./gateway.js"; +import { + resolveOpenclawConfigPath, + resolveRelayRuntimeConfigPath, +} from "./paths.js"; +import { readJsonFile, resolveOpenclawBaseUrl } from "./state.js"; +import type { OpenclawDoctorCheckResult } from "./types.js"; + +export async function runDoctorOpenclawConfigCheck(input: { + openclawDir: string; + homeDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const openclawConfigPath = resolveOpenclawConfigPath( + input.openclawDir, + input.homeDir, + ); + try { + const openclawConfig = await readJsonFile(openclawConfigPath); + if (!isRecord(openclawConfig)) { + throw new Error("root"); + } + 
const hooks = isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; + const hooksEnabled = hooks.enabled === true; + const hookToken = + typeof hooks.token === "string" && hooks.token.trim().length > 0 + ? hooks.token.trim() + : undefined; + const defaultSessionKey = + typeof hooks.defaultSessionKey === "string" && + hooks.defaultSessionKey.trim().length > 0 + ? hooks.defaultSessionKey.trim() + : undefined; + const allowRequestSessionKey = hooks.allowRequestSessionKey === false; + const allowedSessionKeyPrefixes = normalizeStringArrayWithValues( + hooks.allowedSessionKeyPrefixes, + [], + ); + const missingRequiredSessionPrefixes = + defaultSessionKey === undefined + ? ["hook:"] + : ["hook:", defaultSessionKey].filter( + (prefix) => !allowedSessionKeyPrefixes.includes(prefix), + ); + const mappings = Array.isArray(hooks.mappings) + ? hooks.mappings.filter(isRecord) + : []; + const relayMapping = mappings.find((mapping) => + isRelayHookMapping(mapping), + ); + if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { + input.checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `missing send-to-peer mapping in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "pass", + message: "send-to-peer mapping is configured", + details: { openclawConfigPath }, + }), + ); + } + + if (!hooksEnabled) { + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `hooks.enabled is not true in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else if (hookToken === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook 
auth", + status: "fail", + message: `hooks.token is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "pass", + message: "hooks token is configured", + details: { openclawConfigPath }, + }), + ); + } + + const sessionRoutingIssues: string[] = []; + if (defaultSessionKey === undefined) { + sessionRoutingIssues.push("hooks.defaultSessionKey is missing"); + } + if (!allowRequestSessionKey) { + sessionRoutingIssues.push("hooks.allowRequestSessionKey is not false"); + } + if (missingRequiredSessionPrefixes.length > 0) { + sessionRoutingIssues.push( + `hooks.allowedSessionKeyPrefixes is missing: ${missingRequiredSessionPrefixes.join(", ")}`, + ); + } + if ( + defaultSessionKey !== undefined && + isCanonicalAgentSessionKey(defaultSessionKey) + ) { + sessionRoutingIssues.push( + "hooks.defaultSessionKey uses canonical agent format (agent::...); use OpenClaw request session keys like main, global, or subagent:*", + ); + } + + if (sessionRoutingIssues.length > 0) { + input.checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "fail", + message: sessionRoutingIssues.join("; "), + remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, + details: { openclawConfigPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "pass", + message: + "hooks default session and allowed session prefixes are configured", + details: { openclawConfigPath }, + }), + ); + } + + const gateway = isRecord(openclawConfig.gateway) + ? openclawConfig.gateway + : {}; + const gatewayAuth = isRecord(gateway.auth) ? 
gateway.auth : {}; + const gatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); + const gatewayAuthToken = + typeof gatewayAuth.token === "string" && + gatewayAuth.token.trim().length > 0 + ? gatewayAuth.token.trim() + : undefined; + const gatewayAuthPassword = + typeof gatewayAuth.password === "string" && + gatewayAuth.password.trim().length > 0 + ? gatewayAuth.password.trim() + : undefined; + + if (gatewayAuthMode === "token") { + if (gatewayAuthToken === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.token is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with token mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } + } else if (gatewayAuthMode === "password") { + if (gatewayAuthPassword === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.password is missing in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with password mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } + } else if (gatewayAuthMode === "trusted-proxy") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "pass", + message: "gateway auth is configured with trusted-proxy mode", + details: { openclawConfigPath, gatewayAuthMode }, + }), + ); + } 
else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `gateway.auth.mode is missing or unsupported in ${openclawConfigPath}`, + remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, + details: { openclawConfigPath }, + }), + ); + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.hookMapping", + label: "OpenClaw hook mapping", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.hookToken", + label: "OpenClaw hook auth", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.hookSessionRouting", + label: "OpenClaw hook session routing", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + input.checks.push( + toDoctorCheck({ + id: "state.gatewayAuth", + label: "OpenClaw gateway auth", + status: "fail", + message: `unable to read ${openclawConfigPath}`, + remediationHint: + "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", + details: { openclawConfigPath }, + }), + ); + } +} + +export async function runDoctorOpenclawBaseUrlCheck(input: { + homeDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const relayRuntimeConfigPath = 
resolveRelayRuntimeConfigPath(input.homeDir); + try { + const openclawBaseUrl = await resolveOpenclawBaseUrl({ + relayRuntimeConfigPath, + }); + input.checks.push( + toDoctorCheck({ + id: "state.openclawBaseUrl", + label: "OpenClaw base URL", + status: "pass", + message: `resolved to ${openclawBaseUrl}`, + }), + ); + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.openclawBaseUrl", + label: "OpenClaw base URL", + status: "fail", + message: `unable to resolve OpenClaw base URL from ${relayRuntimeConfigPath}`, + remediationHint: OPENCLAW_SETUP_WITH_BASE_URL_HINT, + }), + ); + } +} + +export async function runDoctorGatewayPairingCheck(input: { + openclawDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const gatewayPendingState = await readOpenclawGatewayPendingState( + input.openclawDir, + ); + if (gatewayPendingState.status === "missing") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "pass", + message: "no pending gateway device approvals file was found", + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.status === "invalid") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `invalid pending device approvals file: ${gatewayPendingState.gatewayDevicePendingPath}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.status === "unreadable") { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `unable to read pending device approvals at ${gatewayPendingState.gatewayDevicePendingPath}`, + remediationHint: 
OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else if (gatewayPendingState.pendingRequestIds.length === 0) { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "pass", + message: "no pending gateway device approvals", + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.gatewayDevicePairing", + label: "OpenClaw gateway device pairing", + status: "fail", + message: `pending gateway device approvals: ${gatewayPendingState.pendingRequestIds.length}`, + remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, + details: { + gatewayDevicePendingPath: + gatewayPendingState.gatewayDevicePendingPath, + pendingRequestIds: gatewayPendingState.pendingRequestIds, + }, + }), + ); + } +} diff --git a/apps/cli/src/commands/openclaw/doctor-state-checks.ts b/apps/cli/src/commands/openclaw/doctor-state-checks.ts new file mode 100644 index 0000000..7cb733a --- /dev/null +++ b/apps/cli/src/commands/openclaw/doctor-state-checks.ts @@ -0,0 +1,245 @@ +import { readFile } from "node:fs/promises"; +import { AppError } from "@clawdentity/sdk"; +import { assertValidAgentName } from "../agent-name.js"; +import { getErrorCode, toDoctorCheck } from "./common.js"; +import { + OPENCLAW_PAIRING_COMMAND_HINT, + OPENCLAW_SETUP_COMMAND_HINT, +} from "./constants.js"; +import { + resolveOpenclawAgentNamePath, + resolvePeersPath, + resolveTransformPeersPath, + resolveTransformRuntimePath, + resolveTransformTargetPath, +} from "./paths.js"; +import { ensureLocalAgentCredentials, loadPeersConfig } from "./state.js"; +import type { OpenclawDoctorCheckResult, PeersConfig } from "./types.js"; + +export async function runDoctorSelectedAgentCheck(input: { + homeDir: string; + checks: OpenclawDoctorCheckResult[]; +}): 
Promise { + const selectedAgentPath = resolveOpenclawAgentNamePath(input.homeDir); + let selectedAgentName: string | undefined; + try { + const selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); + selectedAgentName = assertValidAgentName(selectedAgentRaw.trim()); + input.checks.push( + toDoctorCheck({ + id: "state.selectedAgent", + label: "Selected agent marker", + status: "pass", + message: `selected agent is ${selectedAgentName}`, + }), + ); + } catch (error) { + const missing = getErrorCode(error) === "ENOENT"; + input.checks.push( + toDoctorCheck({ + id: "state.selectedAgent", + label: "Selected agent marker", + status: "fail", + message: missing + ? `missing ${selectedAgentPath}` + : "selected agent marker is invalid", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + } + + return selectedAgentName; +} + +export async function runDoctorCredentialsCheck(input: { + homeDir: string; + selectedAgentName?: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + if (input.selectedAgentName === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "fail", + message: "cannot validate credentials without selected agent marker", + remediationHint: OPENCLAW_SETUP_COMMAND_HINT, + }), + ); + return; + } + + try { + await ensureLocalAgentCredentials(input.homeDir, input.selectedAgentName); + input.checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "pass", + message: "ait.jwt and secret.key are present", + }), + ); + } catch (error) { + const details = error instanceof AppError ? error.details : undefined; + const filePath = + details && typeof details.filePath === "string" + ? details.filePath + : undefined; + input.checks.push( + toDoctorCheck({ + id: "state.credentials", + label: "Local agent credentials", + status: "fail", + message: + filePath === undefined + ? 
"agent credentials are missing or invalid" + : `credential file missing or empty: ${filePath}`, + remediationHint: + "Run: clawdentity agent create --framework openclaw", + details: + filePath === undefined + ? undefined + : { filePath, selectedAgentName: input.selectedAgentName }, + }), + ); + } +} + +export async function runDoctorPeersCheck(input: { + homeDir: string; + peerAlias?: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const peersPath = resolvePeersPath(input.homeDir); + let peersConfig: PeersConfig | undefined; + try { + peersConfig = await loadPeersConfig(peersPath); + const peerAliases = Object.keys(peersConfig.peers); + if (input.peerAlias !== undefined) { + if (peersConfig.peers[input.peerAlias] === undefined) { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: `peer alias is missing: ${input.peerAlias}`, + remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, + details: { peersPath, peerAlias: input.peerAlias }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: `peer alias exists: ${input.peerAlias}`, + details: { peersPath, peerAlias: input.peerAlias }, + }), + ); + } + } else if (peerAliases.length === 0) { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: "no peers are configured yet (optional until pairing)", + details: { peersPath }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "pass", + message: `configured peers: ${peerAliases.length}`, + details: { peersPath }, + }), + ); + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.peers", + label: "Peers map", + status: "fail", + message: `invalid peers config at ${peersPath}`, + remediationHint: `Fix JSON in ${peersPath} or rerun openclaw setup`, + details: { peersPath }, + }), + ); + } + + return 
peersConfig; +} + +export async function runDoctorTransformCheck(input: { + openclawDir: string; + checks: OpenclawDoctorCheckResult[]; +}): Promise { + const transformTargetPath = resolveTransformTargetPath(input.openclawDir); + const relayTransformRuntimePath = resolveTransformRuntimePath( + input.openclawDir, + ); + const relayTransformPeersPath = resolveTransformPeersPath(input.openclawDir); + try { + const transformContents = await readFile(transformTargetPath, "utf8"); + const runtimeContents = await readFile(relayTransformRuntimePath, "utf8"); + const peersSnapshotContents = await readFile( + relayTransformPeersPath, + "utf8", + ); + + if ( + transformContents.trim().length === 0 || + runtimeContents.trim().length === 0 || + peersSnapshotContents.trim().length === 0 + ) { + input.checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: "relay transform artifacts are missing or empty", + remediationHint: "Run: clawdentity skill install", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } else { + input.checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "pass", + message: "relay transform artifacts are present", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } + } catch { + input.checks.push( + toDoctorCheck({ + id: "state.transform", + label: "Relay transform", + status: "fail", + message: "missing relay transform artifacts", + remediationHint: "Run: clawdentity skill install", + details: { + transformTargetPath, + relayTransformRuntimePath, + relayTransformPeersPath, + }, + }), + ); + } +} diff --git a/apps/cli/src/commands/openclaw/doctor-static-checks.ts b/apps/cli/src/commands/openclaw/doctor-static-checks.ts index a74ab43..274273a 100644 --- a/apps/cli/src/commands/openclaw/doctor-static-checks.ts +++ 
b/apps/cli/src/commands/openclaw/doctor-static-checks.ts @@ -1,787 +1,12 @@ -import { readFile } from "node:fs/promises"; -import { AppError } from "@clawdentity/sdk"; -import { resolveConfig } from "../../config/manager.js"; -import { assertValidAgentName } from "../agent-name.js"; -import { - getErrorCode, - isRecord, - normalizeStringArrayWithValues, - parseProxyUrl, - toDoctorCheck, -} from "./common.js"; -import { - hasRelayTransformModule, - isCanonicalAgentSessionKey, - isRelayHookMapping, - parseGatewayAuthMode, -} from "./config.js"; -import { - OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - OPENCLAW_PAIRING_COMMAND_HINT, - OPENCLAW_SETUP_COMMAND_HINT, - OPENCLAW_SETUP_RESTART_COMMAND_HINT, - OPENCLAW_SETUP_WITH_BASE_URL_HINT, -} from "./constants.js"; -import { readOpenclawGatewayPendingState } from "./gateway.js"; -import { - resolveOpenclawAgentNamePath, - resolveOpenclawConfigPath, - resolvePeersPath, - resolveRelayRuntimeConfigPath, - resolveTransformPeersPath, - resolveTransformRuntimePath, - resolveTransformTargetPath, -} from "./paths.js"; -import { - ensureLocalAgentCredentials, - loadPeersConfig, - readJsonFile, - resolveOpenclawBaseUrl, -} from "./state.js"; -import type { - OpenclawDoctorCheckResult, - OpenclawDoctorOptions, - PeersConfig, -} from "./types.js"; - -export async function runDoctorConfigCheck(input: { - options: OpenclawDoctorOptions; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - if (input.options.includeConfigCheck === false) { - return; - } - - const resolveConfigImpl = input.options.resolveConfigImpl ?? resolveConfig; - try { - const resolvedConfig = await resolveConfigImpl(); - const envProxyUrl = - typeof process.env.CLAWDENTITY_PROXY_URL === "string" - ? 
process.env.CLAWDENTITY_PROXY_URL.trim() - : ""; - if ( - typeof resolvedConfig.registryUrl !== "string" || - resolvedConfig.registryUrl.trim().length === 0 - ) { - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "registryUrl is missing", - remediationHint: - "Run: clawdentity config set registryUrl ", - }), - ); - } else if ( - typeof resolvedConfig.apiKey !== "string" || - resolvedConfig.apiKey.trim().length === 0 - ) { - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "apiKey is missing", - remediationHint: "Run: clawdentity config set apiKey ", - }), - ); - } else if (envProxyUrl.length > 0) { - let hasValidEnvProxyUrl = true; - try { - parseProxyUrl(envProxyUrl); - } catch { - hasValidEnvProxyUrl = false; - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "CLAWDENTITY_PROXY_URL is invalid", - remediationHint: - "Set CLAWDENTITY_PROXY_URL to a valid http(s) URL or unset it", - }), - ); - } - - if (hasValidEnvProxyUrl) { - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "pass", - message: - "registryUrl and apiKey are configured (proxy URL override is active via CLAWDENTITY_PROXY_URL)", - }), - ); - } - } else if ( - typeof resolvedConfig.proxyUrl !== "string" || - resolvedConfig.proxyUrl.trim().length === 0 - ) { - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "proxyUrl is missing", - remediationHint: - "Run: clawdentity invite redeem or clawdentity config init", - }), - ); - } else { - let hasValidConfigProxyUrl = true; - try { - parseProxyUrl(resolvedConfig.proxyUrl); - } catch { - hasValidConfigProxyUrl = false; - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "proxyUrl is invalid", - 
remediationHint: - "Run: clawdentity invite redeem or clawdentity config init", - }), - ); - } - - if (hasValidConfigProxyUrl) { - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "pass", - message: "registryUrl, apiKey, and proxyUrl are configured", - }), - ); - } - } - } catch { - input.checks.push( - toDoctorCheck({ - id: "config.registry", - label: "CLI config", - status: "fail", - message: "unable to resolve CLI config", - remediationHint: - "Run: clawdentity config init (or fix your CLI state config file)", - }), - ); - } -} - -export async function runDoctorSelectedAgentCheck(input: { - homeDir: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - const selectedAgentPath = resolveOpenclawAgentNamePath(input.homeDir); - let selectedAgentName: string | undefined; - try { - const selectedAgentRaw = await readFile(selectedAgentPath, "utf8"); - selectedAgentName = assertValidAgentName(selectedAgentRaw.trim()); - input.checks.push( - toDoctorCheck({ - id: "state.selectedAgent", - label: "Selected agent marker", - status: "pass", - message: `selected agent is ${selectedAgentName}`, - }), - ); - } catch (error) { - const missing = getErrorCode(error) === "ENOENT"; - input.checks.push( - toDoctorCheck({ - id: "state.selectedAgent", - label: "Selected agent marker", - status: "fail", - message: missing - ? 
`missing ${selectedAgentPath}` - : "selected agent marker is invalid", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - } - - return selectedAgentName; -} - -export async function runDoctorCredentialsCheck(input: { - homeDir: string; - selectedAgentName?: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - if (input.selectedAgentName === undefined) { - input.checks.push( - toDoctorCheck({ - id: "state.credentials", - label: "Local agent credentials", - status: "fail", - message: "cannot validate credentials without selected agent marker", - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - }), - ); - return; - } - - try { - await ensureLocalAgentCredentials(input.homeDir, input.selectedAgentName); - input.checks.push( - toDoctorCheck({ - id: "state.credentials", - label: "Local agent credentials", - status: "pass", - message: "ait.jwt and secret.key are present", - }), - ); - } catch (error) { - const details = error instanceof AppError ? error.details : undefined; - const filePath = - details && typeof details.filePath === "string" - ? details.filePath - : undefined; - input.checks.push( - toDoctorCheck({ - id: "state.credentials", - label: "Local agent credentials", - status: "fail", - message: - filePath === undefined - ? "agent credentials are missing or invalid" - : `credential file missing or empty: ${filePath}`, - remediationHint: - "Run: clawdentity agent create --framework openclaw", - details: - filePath === undefined - ? 
undefined - : { filePath, selectedAgentName: input.selectedAgentName }, - }), - ); - } -} - -export async function runDoctorPeersCheck(input: { - homeDir: string; - peerAlias?: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - const peersPath = resolvePeersPath(input.homeDir); - let peersConfig: PeersConfig | undefined; - try { - peersConfig = await loadPeersConfig(peersPath); - const peerAliases = Object.keys(peersConfig.peers); - if (input.peerAlias !== undefined) { - if (peersConfig.peers[input.peerAlias] === undefined) { - input.checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "fail", - message: `peer alias is missing: ${input.peerAlias}`, - remediationHint: OPENCLAW_PAIRING_COMMAND_HINT, - details: { peersPath, peerAlias: input.peerAlias }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "pass", - message: `peer alias exists: ${input.peerAlias}`, - details: { peersPath, peerAlias: input.peerAlias }, - }), - ); - } - } else if (peerAliases.length === 0) { - input.checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "pass", - message: "no peers are configured yet (optional until pairing)", - details: { peersPath }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "pass", - message: `configured peers: ${peerAliases.length}`, - details: { peersPath }, - }), - ); - } - } catch { - input.checks.push( - toDoctorCheck({ - id: "state.peers", - label: "Peers map", - status: "fail", - message: `invalid peers config at ${peersPath}`, - remediationHint: `Fix JSON in ${peersPath} or rerun openclaw setup`, - details: { peersPath }, - }), - ); - } - - return peersConfig; -} - -export async function runDoctorTransformCheck(input: { - openclawDir: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - const transformTargetPath = 
resolveTransformTargetPath(input.openclawDir); - const relayTransformRuntimePath = resolveTransformRuntimePath( - input.openclawDir, - ); - const relayTransformPeersPath = resolveTransformPeersPath(input.openclawDir); - try { - const transformContents = await readFile(transformTargetPath, "utf8"); - const runtimeContents = await readFile(relayTransformRuntimePath, "utf8"); - const peersSnapshotContents = await readFile( - relayTransformPeersPath, - "utf8", - ); - - if ( - transformContents.trim().length === 0 || - runtimeContents.trim().length === 0 || - peersSnapshotContents.trim().length === 0 - ) { - input.checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "fail", - message: "relay transform artifacts are missing or empty", - remediationHint: "Run: clawdentity skill install", - details: { - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "pass", - message: "relay transform artifacts are present", - details: { - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - }, - }), - ); - } - } catch { - input.checks.push( - toDoctorCheck({ - id: "state.transform", - label: "Relay transform", - status: "fail", - message: "missing relay transform artifacts", - remediationHint: "Run: clawdentity skill install", - details: { - transformTargetPath, - relayTransformRuntimePath, - relayTransformPeersPath, - }, - }), - ); - } -} - -export async function runDoctorOpenclawConfigCheck(input: { - openclawDir: string; - homeDir: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - const openclawConfigPath = resolveOpenclawConfigPath( - input.openclawDir, - input.homeDir, - ); - try { - const openclawConfig = await readJsonFile(openclawConfigPath); - if (!isRecord(openclawConfig)) { - throw new Error("root"); - } - const hooks = 
isRecord(openclawConfig.hooks) ? openclawConfig.hooks : {}; - const hooksEnabled = hooks.enabled === true; - const hookToken = - typeof hooks.token === "string" && hooks.token.trim().length > 0 - ? hooks.token.trim() - : undefined; - const defaultSessionKey = - typeof hooks.defaultSessionKey === "string" && - hooks.defaultSessionKey.trim().length > 0 - ? hooks.defaultSessionKey.trim() - : undefined; - const allowRequestSessionKey = hooks.allowRequestSessionKey === false; - const allowedSessionKeyPrefixes = normalizeStringArrayWithValues( - hooks.allowedSessionKeyPrefixes, - [], - ); - const missingRequiredSessionPrefixes = - defaultSessionKey === undefined - ? ["hook:"] - : ["hook:", defaultSessionKey].filter( - (prefix) => !allowedSessionKeyPrefixes.includes(prefix), - ); - const mappings = Array.isArray(hooks.mappings) - ? hooks.mappings.filter(isRecord) - : []; - const relayMapping = mappings.find((mapping) => - isRelayHookMapping(mapping), - ); - if (relayMapping === undefined || !hasRelayTransformModule(relayMapping)) { - input.checks.push( - toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", - status: "fail", - message: `missing send-to-peer mapping in ${openclawConfigPath}`, - remediationHint: OPENCLAW_SETUP_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", - status: "pass", - message: "send-to-peer mapping is configured", - details: { openclawConfigPath }, - }), - ); - } - - if (!hooksEnabled) { - input.checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "fail", - message: `hooks.enabled is not true in ${openclawConfigPath}`, - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else if (hookToken === undefined) { - input.checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: 
"fail", - message: `hooks.token is missing in ${openclawConfigPath}`, - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "pass", - message: "hooks token is configured", - details: { openclawConfigPath }, - }), - ); - } - - const sessionRoutingIssues: string[] = []; - if (defaultSessionKey === undefined) { - sessionRoutingIssues.push("hooks.defaultSessionKey is missing"); - } - if (!allowRequestSessionKey) { - sessionRoutingIssues.push("hooks.allowRequestSessionKey is not false"); - } - if (missingRequiredSessionPrefixes.length > 0) { - sessionRoutingIssues.push( - `hooks.allowedSessionKeyPrefixes is missing: ${missingRequiredSessionPrefixes.join(", ")}`, - ); - } - if ( - defaultSessionKey !== undefined && - isCanonicalAgentSessionKey(defaultSessionKey) - ) { - sessionRoutingIssues.push( - "hooks.defaultSessionKey uses canonical agent format (agent::...); use OpenClaw request session keys like main, global, or subagent:*", - ); - } - - if (sessionRoutingIssues.length > 0) { - input.checks.push( - toDoctorCheck({ - id: "state.hookSessionRouting", - label: "OpenClaw hook session routing", - status: "fail", - message: sessionRoutingIssues.join("; "), - remediationHint: OPENCLAW_SETUP_RESTART_COMMAND_HINT, - details: { openclawConfigPath }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.hookSessionRouting", - label: "OpenClaw hook session routing", - status: "pass", - message: - "hooks default session and allowed session prefixes are configured", - details: { openclawConfigPath }, - }), - ); - } - - const gateway = isRecord(openclawConfig.gateway) - ? openclawConfig.gateway - : {}; - const gatewayAuth = isRecord(gateway.auth) ? 
gateway.auth : {}; - const gatewayAuthMode = parseGatewayAuthMode(gatewayAuth.mode); - const gatewayAuthToken = - typeof gatewayAuth.token === "string" && - gatewayAuth.token.trim().length > 0 - ? gatewayAuth.token.trim() - : undefined; - const gatewayAuthPassword = - typeof gatewayAuth.password === "string" && - gatewayAuth.password.trim().length > 0 - ? gatewayAuth.password.trim() - : undefined; - - if (gatewayAuthMode === "token") { - if (gatewayAuthToken === undefined) { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `gateway.auth.token is missing in ${openclawConfigPath}`, - remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "pass", - message: "gateway auth is configured with token mode", - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } - } else if (gatewayAuthMode === "password") { - if (gatewayAuthPassword === undefined) { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `gateway.auth.password is missing in ${openclawConfigPath}`, - remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "pass", - message: "gateway auth is configured with password mode", - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } - } else if (gatewayAuthMode === "trusted-proxy") { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "pass", - message: "gateway auth is configured with trusted-proxy mode", - details: { openclawConfigPath, gatewayAuthMode }, - }), - ); - } 
else { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `gateway.auth.mode is missing or unsupported in ${openclawConfigPath}`, - remediationHint: OPENCLAW_GATEWAY_AUTH_RECOVERY_HINT, - details: { openclawConfigPath }, - }), - ); - } - } catch { - input.checks.push( - toDoctorCheck({ - id: "state.hookMapping", - label: "OpenClaw hook mapping", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - input.checks.push( - toDoctorCheck({ - id: "state.hookToken", - label: "OpenClaw hook auth", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - input.checks.push( - toDoctorCheck({ - id: "state.hookSessionRouting", - label: "OpenClaw hook session routing", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - input.checks.push( - toDoctorCheck({ - id: "state.gatewayAuth", - label: "OpenClaw gateway auth", - status: "fail", - message: `unable to read ${openclawConfigPath}`, - remediationHint: - "Ensure the OpenClaw config file exists (OPENCLAW_CONFIG_PATH/CLAWDBOT_CONFIG_PATH, or state dir) and rerun openclaw setup", - details: { openclawConfigPath }, - }), - ); - } -} - -export async function runDoctorOpenclawBaseUrlCheck(input: { - homeDir: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - const relayRuntimeConfigPath = 
resolveRelayRuntimeConfigPath(input.homeDir); - try { - const openclawBaseUrl = await resolveOpenclawBaseUrl({ - relayRuntimeConfigPath, - }); - input.checks.push( - toDoctorCheck({ - id: "state.openclawBaseUrl", - label: "OpenClaw base URL", - status: "pass", - message: `resolved to ${openclawBaseUrl}`, - }), - ); - } catch { - input.checks.push( - toDoctorCheck({ - id: "state.openclawBaseUrl", - label: "OpenClaw base URL", - status: "fail", - message: `unable to resolve OpenClaw base URL from ${relayRuntimeConfigPath}`, - remediationHint: OPENCLAW_SETUP_WITH_BASE_URL_HINT, - }), - ); - } -} - -export async function runDoctorGatewayPairingCheck(input: { - openclawDir: string; - checks: OpenclawDoctorCheckResult[]; -}): Promise { - const gatewayPendingState = await readOpenclawGatewayPendingState( - input.openclawDir, - ); - if (gatewayPendingState.status === "missing") { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "pass", - message: "no pending gateway device approvals file was found", - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else if (gatewayPendingState.status === "invalid") { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "fail", - message: `invalid pending device approvals file: ${gatewayPendingState.gatewayDevicePendingPath}`, - remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else if (gatewayPendingState.status === "unreadable") { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "fail", - message: `unable to read pending device approvals at ${gatewayPendingState.gatewayDevicePendingPath}`, - remediationHint: 
OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else if (gatewayPendingState.pendingRequestIds.length === 0) { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "pass", - message: "no pending gateway device approvals", - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - }, - }), - ); - } else { - input.checks.push( - toDoctorCheck({ - id: "state.gatewayDevicePairing", - label: "OpenClaw gateway device pairing", - status: "fail", - message: `pending gateway device approvals: ${gatewayPendingState.pendingRequestIds.length}`, - remediationHint: OPENCLAW_DEVICE_APPROVAL_RECOVERY_HINT, - details: { - gatewayDevicePendingPath: - gatewayPendingState.gatewayDevicePendingPath, - pendingRequestIds: gatewayPendingState.pendingRequestIds, - }, - }), - ); - } -} +export { runDoctorConfigCheck } from "./doctor-config-checks.js"; +export { + runDoctorGatewayPairingCheck, + runDoctorOpenclawBaseUrlCheck, + runDoctorOpenclawConfigCheck, +} from "./doctor-openclaw-checks.js"; +export { + runDoctorCredentialsCheck, + runDoctorPeersCheck, + runDoctorSelectedAgentCheck, + runDoctorTransformCheck, +} from "./doctor-state-checks.js"; From 6d3f60ebc25d575dc80da75b8e9876d859a85add Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 19:34:45 +0530 Subject: [PATCH 147/190] refactor(proxy): split config into focused modules --- apps/proxy/src/AGENTS.md | 7 + apps/proxy/src/config.ts | 704 +-------------------- apps/proxy/src/config/defaults.ts | 32 + apps/proxy/src/config/env-normalization.ts | 192 ++++++ apps/proxy/src/config/errors.ts | 18 + apps/proxy/src/config/files.ts | 88 +++ apps/proxy/src/config/index.ts | 30 + apps/proxy/src/config/paths.ts | 149 +++++ apps/proxy/src/config/schema.ts | 154 +++++ apps/proxy/src/config/validation.ts | 128 ++++ 10 files 
changed, 799 insertions(+), 703 deletions(-) create mode 100644 apps/proxy/src/config/defaults.ts create mode 100644 apps/proxy/src/config/env-normalization.ts create mode 100644 apps/proxy/src/config/errors.ts create mode 100644 apps/proxy/src/config/files.ts create mode 100644 apps/proxy/src/config/index.ts create mode 100644 apps/proxy/src/config/paths.ts create mode 100644 apps/proxy/src/config/schema.ts create mode 100644 apps/proxy/src/config/validation.ts diff --git a/apps/proxy/src/AGENTS.md b/apps/proxy/src/AGENTS.md index e39a415..9df118c 100644 --- a/apps/proxy/src/AGENTS.md +++ b/apps/proxy/src/AGENTS.md @@ -4,6 +4,13 @@ - Keep `index.ts` as runtime bootstrap surface and version export. - Keep version resolution in `index.ts` deterministic: prefer `APP_VERSION`, then `PROXY_VERSION`, then fallback constant for local/dev defaults. - Keep runtime env parsing and defaults in `config.ts`; do not scatter `process.env` reads across handlers. +- Keep `config.ts` as a facade export only; place implementation in `config/` modules: + - `config/defaults.ts` for exported defaults and env/value types. + - `config/schema.ts` for zod runtime/env schemas. + - `config/env-normalization.ts` for env aliasing/required-key/deprecation checks. + - `config/paths.ts` for home/state/config path resolution and relay JSON base-url fallback parsing. + - `config/files.ts` for dotenv merge + fallback loading. + - `config/validation.ts` for `parseProxyConfig` and `loadProxyConfig` orchestration. - Keep startup fail-fast env validation in `config.ts` and enforce it from runtime boot (`startProxyServer` + worker runtime build) so missing registry/service credentials fail immediately. - Keep agent DID rate-limit env parsing in `config.ts` (`AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE`, `AGENT_RATE_LIMIT_WINDOW_MS`) and validate as positive integers. - Keep HTTP app composition in `server.ts`. 
diff --git a/apps/proxy/src/config.ts b/apps/proxy/src/config.ts index 25f9d1e..d2b3ad4 100644 --- a/apps/proxy/src/config.ts +++ b/apps/proxy/src/config.ts @@ -1,703 +1 @@ -import { existsSync, readFileSync } from "node:fs"; -import { homedir } from "node:os"; -import { isAbsolute, join, resolve } from "node:path"; -import dotenv from "dotenv"; -import { z } from "zod"; - -export type ProxyCrlStaleBehavior = "fail-open" | "fail-closed"; -export const proxyEnvironmentValues = [ - "local", - "development", - "production", - "test", -] as const; -export type ProxyEnvironment = (typeof proxyEnvironmentValues)[number]; - -export type ProxyConfigLoadOptions = { - cwd?: string; - homeDir?: string; -}; - -export const DEFAULT_PROXY_LISTEN_PORT = 4000; -export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; -export const DEFAULT_REGISTRY_URL = "https://registry.clawdentity.com"; -export const DEFAULT_PROXY_ENVIRONMENT: ProxyEnvironment = "development"; -export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; -export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; -export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; -export const DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE = 60; -export const DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS = 60 * 1000; -export const DEFAULT_INJECT_IDENTITY_INTO_MESSAGE = true; -export const DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT = 500; -export const DEFAULT_RELAY_QUEUE_TTL_SECONDS = 3600; -export const DEFAULT_RELAY_RETRY_INITIAL_MS = 1000; -export const DEFAULT_RELAY_RETRY_MAX_MS = 30_000; -export const DEFAULT_RELAY_RETRY_MAX_ATTEMPTS = 25; -export const DEFAULT_RELAY_RETRY_JITTER_RATIO = 0.2; -export const DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES = 5; -export const DEFAULT_RELAY_MAX_FRAME_BYTES = 1024 * 1024; - -export class ProxyConfigError extends Error { - readonly code = "CONFIG_VALIDATION_FAILED"; - readonly status = 500; - readonly expose = true; - readonly details: Record; - - constructor(message: 
string, details: Record) { - super(message); - this.name = "ProxyConfigError"; - this.details = details; - } -} - -const CLAWDENTITY_CONFIG_DIR = ".clawdentity"; -const OPENCLAW_RELAY_CONFIG_FILENAME = "openclaw-relay.json"; - -const envBooleanSchema = z.preprocess((value) => { - if (typeof value === "string") { - const normalized = value.trim().toLowerCase(); - if ( - normalized === "true" || - normalized === "1" || - normalized === "yes" || - normalized === "on" - ) { - return true; - } - - if ( - normalized === "false" || - normalized === "0" || - normalized === "no" || - normalized === "off" - ) { - return false; - } - } - - return value; -}, z.boolean()); - -const proxyRuntimeEnvSchema = z.object({ - LISTEN_PORT: z.coerce - .number() - .int() - .min(1) - .max(65535) - .default(DEFAULT_PROXY_LISTEN_PORT), - OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), - REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), - REGISTRY_INTERNAL_SERVICE_ID: z.string().trim().min(1).optional(), - REGISTRY_INTERNAL_SERVICE_SECRET: z.string().trim().min(1).optional(), - ENVIRONMENT: z - .enum(proxyEnvironmentValues) - .default(DEFAULT_PROXY_ENVIRONMENT), - CRL_REFRESH_INTERVAL_MS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_CRL_REFRESH_INTERVAL_MS), - CRL_MAX_AGE_MS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_CRL_MAX_AGE_MS), - CRL_STALE_BEHAVIOR: z - .enum(["fail-open", "fail-closed"]) - .default(DEFAULT_CRL_STALE_BEHAVIOR), - AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE), - AGENT_RATE_LIMIT_WINDOW_MS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS), - INJECT_IDENTITY_INTO_MESSAGE: envBooleanSchema.default( - DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, - ), - RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: z.coerce - .number() - .int() - .positive() - 
.default(DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT), - RELAY_QUEUE_TTL_SECONDS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_RELAY_QUEUE_TTL_SECONDS), - RELAY_RETRY_INITIAL_MS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_RELAY_RETRY_INITIAL_MS), - RELAY_RETRY_MAX_MS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_RELAY_RETRY_MAX_MS), - RELAY_RETRY_MAX_ATTEMPTS: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_RELAY_RETRY_MAX_ATTEMPTS), - RELAY_RETRY_JITTER_RATIO: z.coerce - .number() - .min(0) - .max(1) - .default(DEFAULT_RELAY_RETRY_JITTER_RATIO), - RELAY_MAX_IN_FLIGHT_DELIVERIES: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES), - RELAY_MAX_FRAME_BYTES: z.coerce - .number() - .int() - .positive() - .default(DEFAULT_RELAY_MAX_FRAME_BYTES), -}); - -export const proxyConfigSchema = z.object({ - listenPort: z.number().int().min(1).max(65535), - openclawBaseUrl: z.string().url(), - registryUrl: z.string().url(), - registryInternalServiceId: z.string().min(1).optional(), - registryInternalServiceSecret: z.string().min(1).optional(), - environment: z.enum(proxyEnvironmentValues), - crlRefreshIntervalMs: z.number().int().positive(), - crlMaxAgeMs: z.number().int().positive(), - crlStaleBehavior: z.enum(["fail-open", "fail-closed"]), - agentRateLimitRequestsPerMinute: z.number().int().positive(), - agentRateLimitWindowMs: z.number().int().positive(), - injectIdentityIntoMessage: z.boolean(), - relayQueueMaxMessagesPerAgent: z.number().int().positive(), - relayQueueTtlSeconds: z.number().int().positive(), - relayRetryInitialMs: z.number().int().positive(), - relayRetryMaxMs: z.number().int().positive(), - relayRetryMaxAttempts: z.number().int().positive(), - relayRetryJitterRatio: z.number().min(0).max(1), - relayMaxInFlightDeliveries: z.number().int().positive(), - relayMaxFrameBytes: z.number().int().positive(), -}); - -export type ProxyConfig = z.infer; 
-type ParseProxyConfigOptions = { - requireRuntimeKeys?: boolean; -}; - -type RuntimeEnvInput = { - LISTEN_PORT?: unknown; - PORT?: unknown; - OPENCLAW_BASE_URL?: unknown; - REGISTRY_URL?: unknown; - CLAWDENTITY_REGISTRY_URL?: unknown; - REGISTRY_INTERNAL_SERVICE_ID?: unknown; - REGISTRY_INTERNAL_SERVICE_SECRET?: unknown; - ENVIRONMENT?: unknown; - ALLOW_ALL_VERIFIED?: unknown; - CRL_REFRESH_INTERVAL_MS?: unknown; - CRL_MAX_AGE_MS?: unknown; - CRL_STALE_BEHAVIOR?: unknown; - AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: unknown; - AGENT_RATE_LIMIT_WINDOW_MS?: unknown; - INJECT_IDENTITY_INTO_MESSAGE?: unknown; - RELAY_QUEUE_MAX_MESSAGES_PER_AGENT?: unknown; - RELAY_QUEUE_TTL_SECONDS?: unknown; - RELAY_RETRY_INITIAL_MS?: unknown; - RELAY_RETRY_MAX_MS?: unknown; - RELAY_RETRY_MAX_ATTEMPTS?: unknown; - RELAY_RETRY_JITTER_RATIO?: unknown; - RELAY_MAX_IN_FLIGHT_DELIVERIES?: unknown; - RELAY_MAX_FRAME_BYTES?: unknown; - OPENCLAW_STATE_DIR?: unknown; - HOME?: unknown; - USERPROFILE?: unknown; -}; - -type MutableEnv = Record; - -function isRuntimeEnvInput(value: unknown): value is RuntimeEnvInput { - return typeof value === "object" && value !== null; -} - -function toConfigValidationError( - details: Record, -): ProxyConfigError { - return new ProxyConfigError("Proxy configuration is invalid", details); -} - -function firstNonEmpty( - env: RuntimeEnvInput, - keys: readonly (keyof RuntimeEnvInput)[], -): unknown { - for (const key of keys) { - const rawValue = env[key]; - if (rawValue === undefined || rawValue === null) { - continue; - } - - if (typeof rawValue === "string") { - const trimmed = rawValue.trim(); - if (trimmed.length === 0) { - continue; - } - - return trimmed; - } - - return rawValue; - } - - return undefined; -} - -function firstNonEmptyString( - env: RuntimeEnvInput, - keys: readonly (keyof RuntimeEnvInput)[], -): string | undefined { - const value = firstNonEmpty(env, keys); - return typeof value === "string" ? 
value : undefined; -} - -function resolveDefaultEnv(): unknown { - const nodeProcess = (globalThis as { process?: { env?: unknown } }).process; - return nodeProcess?.env ?? {}; -} - -function resolveDefaultCwd(): string { - const nodeProcess = ( - globalThis as { - process?: { - cwd?: () => string; - }; - } - ).process; - if (typeof nodeProcess?.cwd === "function") { - return nodeProcess.cwd(); - } - - return "."; -} - -function resolvePathWithHome( - inputPath: string, - cwd: string, - home: string, -): string { - const trimmed = inputPath.trim(); - if (trimmed === "~") { - return home; - } - - if (trimmed.startsWith("~/")) { - return resolve(home, trimmed.slice(2)); - } - - if (isAbsolute(trimmed)) { - return trimmed; - } - - return resolve(cwd, trimmed); -} - -function resolveHomeDir( - env: RuntimeEnvInput, - homeDirOverride?: string, -): string { - if (homeDirOverride !== undefined && homeDirOverride.trim().length > 0) { - return homeDirOverride.trim(); - } - - return firstNonEmptyString(env, ["HOME", "USERPROFILE"]) ?? homedir(); -} - -function resolveStateDir( - env: RuntimeEnvInput, - options: ProxyConfigLoadOptions, -): string { - const cwd = options.cwd ?? 
resolveDefaultCwd(); - const home = resolveHomeDir(env, options.homeDir); - const stateDirOverride = firstNonEmptyString(env, ["OPENCLAW_STATE_DIR"]); - - if (stateDirOverride !== undefined) { - return resolvePathWithHome(stateDirOverride, cwd, home); - } - - const canonicalStateDir = join(home, ".openclaw"); - return canonicalStateDir; -} - -function resolveOpenclawRelayConfigPath( - env: RuntimeEnvInput, - options: ProxyConfigLoadOptions, -): string { - const home = resolveHomeDir(env, options.homeDir); - return join(home, CLAWDENTITY_CONFIG_DIR, OPENCLAW_RELAY_CONFIG_FILENAME); -} - -function mergeMissingEnvValues( - target: MutableEnv, - values: Record, -): void { - for (const [key, value] of Object.entries(values)) { - const existingValue = target[key]; - if (existingValue !== undefined && existingValue !== null) { - if (typeof existingValue !== "string" || existingValue.trim() !== "") { - continue; - } - } - - if (value.trim() === "") { - continue; - } - - target[key] = value; - } -} - -function parseDotEnvFile(filePath: string): Record { - try { - const raw = readFileSync(filePath, "utf8"); - return dotenv.parse(raw); - } catch (error) { - throw toConfigValidationError({ - fieldErrors: { - DOTENV: [`Unable to parse dotenv file at ${filePath}`], - }, - formErrors: [ - error instanceof Error ? error.message : "Unknown dotenv parse error", - ], - }); - } -} - -function loadEnvWithDotEnvFallback( - env: unknown, - options: ProxyConfigLoadOptions, -): MutableEnv { - const mergedEnv: MutableEnv = isRuntimeEnvInput(env) ? { ...env } : {}; - const cwd = options.cwd ?? 
resolveDefaultCwd(); - const cwdDotEnvPath = join(cwd, ".env"); - if (existsSync(cwdDotEnvPath)) { - mergeMissingEnvValues(mergedEnv, parseDotEnvFile(cwdDotEnvPath)); - } - - const stateDir = resolveStateDir(mergedEnv as RuntimeEnvInput, options); - const stateDotEnvPath = join(stateDir, ".env"); - if (existsSync(stateDotEnvPath)) { - mergeMissingEnvValues(mergedEnv, parseDotEnvFile(stateDotEnvPath)); - } - - return mergedEnv; -} - -function resolveBaseUrlFromRelayConfig( - env: RuntimeEnvInput, - options: ProxyConfigLoadOptions, -): string | undefined { - const configPath = resolveOpenclawRelayConfigPath(env, options); - if (!existsSync(configPath)) { - return undefined; - } - - let parsed: unknown; - try { - parsed = JSON.parse(readFileSync(configPath, "utf8")); - } catch (error) { - throw toConfigValidationError({ - fieldErrors: { - OPENCLAW_RELAY_CONFIG_PATH: [ - `Unable to parse relay config at ${configPath}`, - ], - }, - formErrors: [ - error instanceof Error ? error.message : "Unknown relay parse error", - ], - }); - } - - if (typeof parsed !== "object" || parsed === null) { - throw toConfigValidationError({ - fieldErrors: { - OPENCLAW_RELAY_CONFIG_PATH: ["Relay config root must be a JSON object"], - }, - formErrors: [], - }); - } - - const baseUrlValue = (parsed as Record).openclawBaseUrl; - if (typeof baseUrlValue !== "string" || baseUrlValue.trim().length === 0) { - throw toConfigValidationError({ - fieldErrors: { - OPENCLAW_RELAY_CONFIG_PATH: [ - "openclawBaseUrl must be a non-empty string", - ], - }, - formErrors: [], - }); - } - - const trimmed = baseUrlValue.trim(); - let parsedUrl: URL; - try { - parsedUrl = new URL(trimmed); - } catch { - throw toConfigValidationError({ - fieldErrors: { - OPENCLAW_RELAY_CONFIG_PATH: [ - "openclawBaseUrl must be a valid absolute URL", - ], - }, - formErrors: [], - }); - } - - if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { - throw toConfigValidationError({ - fieldErrors: { - 
OPENCLAW_RELAY_CONFIG_PATH: ["openclawBaseUrl must use http or https"], - }, - formErrors: [], - }); - } - - if ( - parsedUrl.pathname === "/" && - parsedUrl.search.length === 0 && - parsedUrl.hash.length === 0 - ) { - return parsedUrl.origin; - } - - return parsedUrl.toString(); -} - -function normalizeRuntimeEnv(input: unknown): Record { - const env: RuntimeEnvInput = isRuntimeEnvInput(input) ? input : {}; - - return { - LISTEN_PORT: firstNonEmpty(env, ["LISTEN_PORT", "PORT"]), - OPENCLAW_BASE_URL: firstNonEmpty(env, ["OPENCLAW_BASE_URL"]), - REGISTRY_URL: firstNonEmpty(env, [ - "REGISTRY_URL", - "CLAWDENTITY_REGISTRY_URL", - ]), - REGISTRY_INTERNAL_SERVICE_ID: firstNonEmpty(env, [ - "REGISTRY_INTERNAL_SERVICE_ID", - ]), - REGISTRY_INTERNAL_SERVICE_SECRET: firstNonEmpty(env, [ - "REGISTRY_INTERNAL_SERVICE_SECRET", - ]), - ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), - CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), - CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), - CRL_STALE_BEHAVIOR: firstNonEmpty(env, ["CRL_STALE_BEHAVIOR"]), - AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: firstNonEmpty(env, [ - "AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE", - ]), - AGENT_RATE_LIMIT_WINDOW_MS: firstNonEmpty(env, [ - "AGENT_RATE_LIMIT_WINDOW_MS", - ]), - INJECT_IDENTITY_INTO_MESSAGE: firstNonEmpty(env, [ - "INJECT_IDENTITY_INTO_MESSAGE", - ]), - RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: firstNonEmpty(env, [ - "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT", - ]), - RELAY_QUEUE_TTL_SECONDS: firstNonEmpty(env, ["RELAY_QUEUE_TTL_SECONDS"]), - RELAY_RETRY_INITIAL_MS: firstNonEmpty(env, ["RELAY_RETRY_INITIAL_MS"]), - RELAY_RETRY_MAX_MS: firstNonEmpty(env, ["RELAY_RETRY_MAX_MS"]), - RELAY_RETRY_MAX_ATTEMPTS: firstNonEmpty(env, ["RELAY_RETRY_MAX_ATTEMPTS"]), - RELAY_RETRY_JITTER_RATIO: firstNonEmpty(env, ["RELAY_RETRY_JITTER_RATIO"]), - RELAY_MAX_IN_FLIGHT_DELIVERIES: firstNonEmpty(env, [ - "RELAY_MAX_IN_FLIGHT_DELIVERIES", - ]), - RELAY_MAX_FRAME_BYTES: firstNonEmpty(env, 
["RELAY_MAX_FRAME_BYTES"]), - }; -} - -function assertNoDeprecatedAllowAllVerified(env: RuntimeEnvInput): void { - const value = env.ALLOW_ALL_VERIFIED; - if ( - value === undefined || - value === null || - (typeof value === "string" && value.trim().length === 0) - ) { - return; - } - - throw toConfigValidationError({ - fieldErrors: { - ALLOW_ALL_VERIFIED: ["ALLOW_ALL_VERIFIED is no longer supported."], - }, - formErrors: [], - }); -} - -function loadOpenclawBaseUrlFromFallback( - env: MutableEnv, - options: ProxyConfigLoadOptions, -): void { - if ( - firstNonEmpty(env as RuntimeEnvInput, ["OPENCLAW_BASE_URL"]) !== undefined - ) { - return; - } - - const openclawBaseUrl = resolveBaseUrlFromRelayConfig( - env as RuntimeEnvInput, - options, - ); - if (openclawBaseUrl !== undefined) { - env.OPENCLAW_BASE_URL = openclawBaseUrl; - } -} - -const REQUIRED_PROXY_RUNTIME_ENV_KEYS: readonly { - key: string; - aliases: readonly (keyof RuntimeEnvInput)[]; -}[] = [ - { - key: "ENVIRONMENT", - aliases: ["ENVIRONMENT"], - }, - { - key: "REGISTRY_URL", - aliases: ["REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL"], - }, - { - key: "REGISTRY_INTERNAL_SERVICE_ID", - aliases: ["REGISTRY_INTERNAL_SERVICE_ID"], - }, - { - key: "REGISTRY_INTERNAL_SERVICE_SECRET", - aliases: ["REGISTRY_INTERNAL_SERVICE_SECRET"], - }, -]; - -function assertRequiredProxyRuntimeKeys(env: RuntimeEnvInput): void { - const fieldErrors: Record = {}; - for (const requiredKey of REQUIRED_PROXY_RUNTIME_ENV_KEYS) { - const value = firstNonEmpty(env, requiredKey.aliases); - if (value !== undefined) { - continue; - } - - fieldErrors[requiredKey.key] = [`${requiredKey.key} is required`]; - } - - if (Object.keys(fieldErrors).length > 0) { - throw toConfigValidationError({ - fieldErrors, - formErrors: [], - }); - } -} - -export function parseProxyConfig( - env: unknown, - options: ParseProxyConfigOptions = {}, -): ProxyConfig { - const inputEnv: RuntimeEnvInput = isRuntimeEnvInput(env) ? 
env : {}; - assertNoDeprecatedAllowAllVerified(inputEnv); - if (options.requireRuntimeKeys === true) { - assertRequiredProxyRuntimeKeys(inputEnv); - } - - const parsedRuntimeEnv = proxyRuntimeEnvSchema.safeParse( - normalizeRuntimeEnv(inputEnv), - ); - if (!parsedRuntimeEnv.success) { - throw toConfigValidationError({ - fieldErrors: parsedRuntimeEnv.error.flatten().fieldErrors, - formErrors: parsedRuntimeEnv.error.flatten().formErrors, - }); - } - - const candidateConfig: Record = { - listenPort: parsedRuntimeEnv.data.LISTEN_PORT, - openclawBaseUrl: parsedRuntimeEnv.data.OPENCLAW_BASE_URL, - registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, - environment: parsedRuntimeEnv.data.ENVIRONMENT, - crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, - crlMaxAgeMs: parsedRuntimeEnv.data.CRL_MAX_AGE_MS, - crlStaleBehavior: parsedRuntimeEnv.data.CRL_STALE_BEHAVIOR, - agentRateLimitRequestsPerMinute: - parsedRuntimeEnv.data.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, - agentRateLimitWindowMs: parsedRuntimeEnv.data.AGENT_RATE_LIMIT_WINDOW_MS, - injectIdentityIntoMessage: - parsedRuntimeEnv.data.INJECT_IDENTITY_INTO_MESSAGE, - relayQueueMaxMessagesPerAgent: - parsedRuntimeEnv.data.RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, - relayQueueTtlSeconds: parsedRuntimeEnv.data.RELAY_QUEUE_TTL_SECONDS, - relayRetryInitialMs: parsedRuntimeEnv.data.RELAY_RETRY_INITIAL_MS, - relayRetryMaxMs: parsedRuntimeEnv.data.RELAY_RETRY_MAX_MS, - relayRetryMaxAttempts: parsedRuntimeEnv.data.RELAY_RETRY_MAX_ATTEMPTS, - relayRetryJitterRatio: parsedRuntimeEnv.data.RELAY_RETRY_JITTER_RATIO, - relayMaxInFlightDeliveries: - parsedRuntimeEnv.data.RELAY_MAX_IN_FLIGHT_DELIVERIES, - relayMaxFrameBytes: parsedRuntimeEnv.data.RELAY_MAX_FRAME_BYTES, - }; - if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID !== undefined) { - candidateConfig.registryInternalServiceId = - parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID; - } - if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET !== undefined) { - 
candidateConfig.registryInternalServiceSecret = - parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET; - } - - const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); - if (parsedConfig.success) { - const hasServiceId = - typeof parsedConfig.data.registryInternalServiceId === "string"; - const hasServiceSecret = - typeof parsedConfig.data.registryInternalServiceSecret === "string"; - if (hasServiceId !== hasServiceSecret) { - throw toConfigValidationError({ - fieldErrors: { - REGISTRY_INTERNAL_SERVICE_ID: [ - "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", - ], - REGISTRY_INTERNAL_SERVICE_SECRET: [ - "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", - ], - }, - formErrors: [], - }); - } - if ( - parsedConfig.data.relayRetryMaxMs < parsedConfig.data.relayRetryInitialMs - ) { - throw toConfigValidationError({ - fieldErrors: { - RELAY_RETRY_MAX_MS: [ - "RELAY_RETRY_MAX_MS must be greater than or equal to RELAY_RETRY_INITIAL_MS.", - ], - RELAY_RETRY_INITIAL_MS: [ - "RELAY_RETRY_MAX_MS must be greater than or equal to RELAY_RETRY_INITIAL_MS.", - ], - }, - formErrors: [], - }); - } - return parsedConfig.data; - } - - throw toConfigValidationError({ - fieldErrors: parsedConfig.error.flatten().fieldErrors, - formErrors: parsedConfig.error.flatten().formErrors, - }); -} - -export function loadProxyConfig( - env: unknown = resolveDefaultEnv(), - options: ProxyConfigLoadOptions & ParseProxyConfigOptions = {}, -): ProxyConfig { - const mergedEnv = loadEnvWithDotEnvFallback(env, options); - loadOpenclawBaseUrlFromFallback(mergedEnv, options); - return parseProxyConfig(mergedEnv, options); -} +export * from "./config/index.js"; diff --git a/apps/proxy/src/config/defaults.ts b/apps/proxy/src/config/defaults.ts new file mode 100644 index 0000000..82dd7b8 --- /dev/null +++ b/apps/proxy/src/config/defaults.ts @@ -0,0 +1,32 @@ +export type ProxyCrlStaleBehavior = "fail-open" | 
"fail-closed"; +export const proxyEnvironmentValues = [ + "local", + "development", + "production", + "test", +] as const; +export type ProxyEnvironment = (typeof proxyEnvironmentValues)[number]; + +export type ProxyConfigLoadOptions = { + cwd?: string; + homeDir?: string; +}; + +export const DEFAULT_PROXY_LISTEN_PORT = 4000; +export const DEFAULT_OPENCLAW_BASE_URL = "http://127.0.0.1:18789"; +export const DEFAULT_REGISTRY_URL = "https://registry.clawdentity.com"; +export const DEFAULT_PROXY_ENVIRONMENT: ProxyEnvironment = "development"; +export const DEFAULT_CRL_REFRESH_INTERVAL_MS = 5 * 60 * 1000; +export const DEFAULT_CRL_MAX_AGE_MS = 15 * 60 * 1000; +export const DEFAULT_CRL_STALE_BEHAVIOR: ProxyCrlStaleBehavior = "fail-open"; +export const DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE = 60; +export const DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS = 60 * 1000; +export const DEFAULT_INJECT_IDENTITY_INTO_MESSAGE = true; +export const DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT = 500; +export const DEFAULT_RELAY_QUEUE_TTL_SECONDS = 3600; +export const DEFAULT_RELAY_RETRY_INITIAL_MS = 1000; +export const DEFAULT_RELAY_RETRY_MAX_MS = 30_000; +export const DEFAULT_RELAY_RETRY_MAX_ATTEMPTS = 25; +export const DEFAULT_RELAY_RETRY_JITTER_RATIO = 0.2; +export const DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES = 5; +export const DEFAULT_RELAY_MAX_FRAME_BYTES = 1024 * 1024; diff --git a/apps/proxy/src/config/env-normalization.ts b/apps/proxy/src/config/env-normalization.ts new file mode 100644 index 0000000..4a18204 --- /dev/null +++ b/apps/proxy/src/config/env-normalization.ts @@ -0,0 +1,192 @@ +import { toConfigValidationError } from "./errors.js"; + +export type RuntimeEnvInput = { + LISTEN_PORT?: unknown; + PORT?: unknown; + OPENCLAW_BASE_URL?: unknown; + REGISTRY_URL?: unknown; + CLAWDENTITY_REGISTRY_URL?: unknown; + REGISTRY_INTERNAL_SERVICE_ID?: unknown; + REGISTRY_INTERNAL_SERVICE_SECRET?: unknown; + ENVIRONMENT?: unknown; + ALLOW_ALL_VERIFIED?: unknown; + 
CRL_REFRESH_INTERVAL_MS?: unknown; + CRL_MAX_AGE_MS?: unknown; + CRL_STALE_BEHAVIOR?: unknown; + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE?: unknown; + AGENT_RATE_LIMIT_WINDOW_MS?: unknown; + INJECT_IDENTITY_INTO_MESSAGE?: unknown; + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT?: unknown; + RELAY_QUEUE_TTL_SECONDS?: unknown; + RELAY_RETRY_INITIAL_MS?: unknown; + RELAY_RETRY_MAX_MS?: unknown; + RELAY_RETRY_MAX_ATTEMPTS?: unknown; + RELAY_RETRY_JITTER_RATIO?: unknown; + RELAY_MAX_IN_FLIGHT_DELIVERIES?: unknown; + RELAY_MAX_FRAME_BYTES?: unknown; + OPENCLAW_STATE_DIR?: unknown; + HOME?: unknown; + USERPROFILE?: unknown; +}; + +export type MutableEnv = Record; + +export function isRuntimeEnvInput(value: unknown): value is RuntimeEnvInput { + return typeof value === "object" && value !== null; +} + +export function firstNonEmpty( + env: RuntimeEnvInput, + keys: readonly (keyof RuntimeEnvInput)[], +): unknown { + for (const key of keys) { + const rawValue = env[key]; + if (rawValue === undefined || rawValue === null) { + continue; + } + + if (typeof rawValue === "string") { + const trimmed = rawValue.trim(); + if (trimmed.length === 0) { + continue; + } + + return trimmed; + } + + return rawValue; + } + + return undefined; +} + +export function firstNonEmptyString( + env: RuntimeEnvInput, + keys: readonly (keyof RuntimeEnvInput)[], +): string | undefined { + const value = firstNonEmpty(env, keys); + return typeof value === "string" ? value : undefined; +} + +export function resolveDefaultEnv(): unknown { + const nodeProcess = (globalThis as { process?: { env?: unknown } }).process; + return nodeProcess?.env ?? {}; +} + +export function resolveDefaultCwd(): string { + const nodeProcess = ( + globalThis as { + process?: { + cwd?: () => string; + }; + } + ).process; + if (typeof nodeProcess?.cwd === "function") { + return nodeProcess.cwd(); + } + + return "."; +} + +export function normalizeRuntimeEnv(input: unknown): Record { + const env: RuntimeEnvInput = isRuntimeEnvInput(input) ? 
input : {}; + + return { + LISTEN_PORT: firstNonEmpty(env, ["LISTEN_PORT", "PORT"]), + OPENCLAW_BASE_URL: firstNonEmpty(env, ["OPENCLAW_BASE_URL"]), + REGISTRY_URL: firstNonEmpty(env, [ + "REGISTRY_URL", + "CLAWDENTITY_REGISTRY_URL", + ]), + REGISTRY_INTERNAL_SERVICE_ID: firstNonEmpty(env, [ + "REGISTRY_INTERNAL_SERVICE_ID", + ]), + REGISTRY_INTERNAL_SERVICE_SECRET: firstNonEmpty(env, [ + "REGISTRY_INTERNAL_SERVICE_SECRET", + ]), + ENVIRONMENT: firstNonEmpty(env, ["ENVIRONMENT"]), + CRL_REFRESH_INTERVAL_MS: firstNonEmpty(env, ["CRL_REFRESH_INTERVAL_MS"]), + CRL_MAX_AGE_MS: firstNonEmpty(env, ["CRL_MAX_AGE_MS"]), + CRL_STALE_BEHAVIOR: firstNonEmpty(env, ["CRL_STALE_BEHAVIOR"]), + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: firstNonEmpty(env, [ + "AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE", + ]), + AGENT_RATE_LIMIT_WINDOW_MS: firstNonEmpty(env, [ + "AGENT_RATE_LIMIT_WINDOW_MS", + ]), + INJECT_IDENTITY_INTO_MESSAGE: firstNonEmpty(env, [ + "INJECT_IDENTITY_INTO_MESSAGE", + ]), + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: firstNonEmpty(env, [ + "RELAY_QUEUE_MAX_MESSAGES_PER_AGENT", + ]), + RELAY_QUEUE_TTL_SECONDS: firstNonEmpty(env, ["RELAY_QUEUE_TTL_SECONDS"]), + RELAY_RETRY_INITIAL_MS: firstNonEmpty(env, ["RELAY_RETRY_INITIAL_MS"]), + RELAY_RETRY_MAX_MS: firstNonEmpty(env, ["RELAY_RETRY_MAX_MS"]), + RELAY_RETRY_MAX_ATTEMPTS: firstNonEmpty(env, ["RELAY_RETRY_MAX_ATTEMPTS"]), + RELAY_RETRY_JITTER_RATIO: firstNonEmpty(env, ["RELAY_RETRY_JITTER_RATIO"]), + RELAY_MAX_IN_FLIGHT_DELIVERIES: firstNonEmpty(env, [ + "RELAY_MAX_IN_FLIGHT_DELIVERIES", + ]), + RELAY_MAX_FRAME_BYTES: firstNonEmpty(env, ["RELAY_MAX_FRAME_BYTES"]), + }; +} + +export function assertNoDeprecatedAllowAllVerified(env: RuntimeEnvInput): void { + const value = env.ALLOW_ALL_VERIFIED; + if ( + value === undefined || + value === null || + (typeof value === "string" && value.trim().length === 0) + ) { + return; + } + + throw toConfigValidationError({ + fieldErrors: { + ALLOW_ALL_VERIFIED: ["ALLOW_ALL_VERIFIED is no longer 
supported."], + }, + formErrors: [], + }); +} + +const REQUIRED_PROXY_RUNTIME_ENV_KEYS: readonly { + key: string; + aliases: readonly (keyof RuntimeEnvInput)[]; +}[] = [ + { + key: "ENVIRONMENT", + aliases: ["ENVIRONMENT"], + }, + { + key: "REGISTRY_URL", + aliases: ["REGISTRY_URL", "CLAWDENTITY_REGISTRY_URL"], + }, + { + key: "REGISTRY_INTERNAL_SERVICE_ID", + aliases: ["REGISTRY_INTERNAL_SERVICE_ID"], + }, + { + key: "REGISTRY_INTERNAL_SERVICE_SECRET", + aliases: ["REGISTRY_INTERNAL_SERVICE_SECRET"], + }, +]; + +export function assertRequiredProxyRuntimeKeys(env: RuntimeEnvInput): void { + const fieldErrors: Record = {}; + for (const requiredKey of REQUIRED_PROXY_RUNTIME_ENV_KEYS) { + const value = firstNonEmpty(env, requiredKey.aliases); + if (value !== undefined) { + continue; + } + + fieldErrors[requiredKey.key] = [`${requiredKey.key} is required`]; + } + + if (Object.keys(fieldErrors).length > 0) { + throw toConfigValidationError({ + fieldErrors, + formErrors: [], + }); + } +} diff --git a/apps/proxy/src/config/errors.ts b/apps/proxy/src/config/errors.ts new file mode 100644 index 0000000..aba8920 --- /dev/null +++ b/apps/proxy/src/config/errors.ts @@ -0,0 +1,18 @@ +export class ProxyConfigError extends Error { + readonly code = "CONFIG_VALIDATION_FAILED"; + readonly status = 500; + readonly expose = true; + readonly details: Record; + + constructor(message: string, details: Record) { + super(message); + this.name = "ProxyConfigError"; + this.details = details; + } +} + +export function toConfigValidationError( + details: Record, +): ProxyConfigError { + return new ProxyConfigError("Proxy configuration is invalid", details); +} diff --git a/apps/proxy/src/config/files.ts b/apps/proxy/src/config/files.ts new file mode 100644 index 0000000..d799dfe --- /dev/null +++ b/apps/proxy/src/config/files.ts @@ -0,0 +1,88 @@ +import { existsSync, readFileSync } from "node:fs"; +import { join } from "node:path"; +import dotenv from "dotenv"; +import type { 
ProxyConfigLoadOptions } from "./defaults.js"; +import { + firstNonEmpty, + isRuntimeEnvInput, + type MutableEnv, + type RuntimeEnvInput, + resolveDefaultCwd, +} from "./env-normalization.js"; +import { toConfigValidationError } from "./errors.js"; +import { resolveBaseUrlFromRelayConfig, resolveStateDir } from "./paths.js"; + +function mergeMissingEnvValues( + target: MutableEnv, + values: Record, +): void { + for (const [key, value] of Object.entries(values)) { + const existingValue = target[key]; + if (existingValue !== undefined && existingValue !== null) { + if (typeof existingValue !== "string" || existingValue.trim() !== "") { + continue; + } + } + + if (value.trim() === "") { + continue; + } + + target[key] = value; + } +} + +function parseDotEnvFile(filePath: string): Record { + try { + const raw = readFileSync(filePath, "utf8"); + return dotenv.parse(raw); + } catch (error) { + throw toConfigValidationError({ + fieldErrors: { + DOTENV: [`Unable to parse dotenv file at ${filePath}`], + }, + formErrors: [ + error instanceof Error ? error.message : "Unknown dotenv parse error", + ], + }); + } +} + +export function loadEnvWithDotEnvFallback( + env: unknown, + options: ProxyConfigLoadOptions, +): MutableEnv { + const mergedEnv: MutableEnv = isRuntimeEnvInput(env) ? { ...env } : {}; + const cwd = options.cwd ?? 
resolveDefaultCwd(); + const cwdDotEnvPath = join(cwd, ".env"); + if (existsSync(cwdDotEnvPath)) { + mergeMissingEnvValues(mergedEnv, parseDotEnvFile(cwdDotEnvPath)); + } + + const stateDir = resolveStateDir(mergedEnv as RuntimeEnvInput, options); + const stateDotEnvPath = join(stateDir, ".env"); + if (existsSync(stateDotEnvPath)) { + mergeMissingEnvValues(mergedEnv, parseDotEnvFile(stateDotEnvPath)); + } + + return mergedEnv; +} + +export function loadOpenclawBaseUrlFromFallback( + env: MutableEnv, + options: ProxyConfigLoadOptions, +): void { + if ( + firstNonEmpty(env as RuntimeEnvInput, ["OPENCLAW_BASE_URL"]) !== undefined + ) { + return; + } + + const openclawBaseUrl = resolveBaseUrlFromRelayConfig( + env as RuntimeEnvInput, + options, + ); + if (openclawBaseUrl !== undefined) { + env.OPENCLAW_BASE_URL = openclawBaseUrl; + } +} diff --git a/apps/proxy/src/config/index.ts b/apps/proxy/src/config/index.ts new file mode 100644 index 0000000..2a19d55 --- /dev/null +++ b/apps/proxy/src/config/index.ts @@ -0,0 +1,30 @@ +export type { + ProxyConfigLoadOptions, + ProxyCrlStaleBehavior, + ProxyEnvironment, +} from "./defaults.js"; +export { + DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, + DEFAULT_CRL_MAX_AGE_MS, + DEFAULT_CRL_REFRESH_INTERVAL_MS, + DEFAULT_CRL_STALE_BEHAVIOR, + DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, + DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_PROXY_ENVIRONMENT, + DEFAULT_PROXY_LISTEN_PORT, + DEFAULT_REGISTRY_URL, + DEFAULT_RELAY_MAX_FRAME_BYTES, + DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES, + DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + DEFAULT_RELAY_QUEUE_TTL_SECONDS, + DEFAULT_RELAY_RETRY_INITIAL_MS, + DEFAULT_RELAY_RETRY_JITTER_RATIO, + DEFAULT_RELAY_RETRY_MAX_ATTEMPTS, + DEFAULT_RELAY_RETRY_MAX_MS, + proxyEnvironmentValues, +} from "./defaults.js"; +export { ProxyConfigError } from "./errors.js"; +export type { ProxyConfig } from "./schema.js"; +export { proxyConfigSchema } from "./schema.js"; +export { 
loadProxyConfig, parseProxyConfig } from "./validation.js"; diff --git a/apps/proxy/src/config/paths.ts b/apps/proxy/src/config/paths.ts new file mode 100644 index 0000000..bc75a9d --- /dev/null +++ b/apps/proxy/src/config/paths.ts @@ -0,0 +1,149 @@ +import { existsSync, readFileSync } from "node:fs"; +import { homedir } from "node:os"; +import { isAbsolute, join, resolve } from "node:path"; +import type { ProxyConfigLoadOptions } from "./defaults.js"; +import { + firstNonEmptyString, + type RuntimeEnvInput, + resolveDefaultCwd, +} from "./env-normalization.js"; +import { toConfigValidationError } from "./errors.js"; + +const CLAWDENTITY_CONFIG_DIR = ".clawdentity"; +const OPENCLAW_RELAY_CONFIG_FILENAME = "openclaw-relay.json"; + +export function resolvePathWithHome( + inputPath: string, + cwd: string, + home: string, +): string { + const trimmed = inputPath.trim(); + if (trimmed === "~") { + return home; + } + + if (trimmed.startsWith("~/")) { + return resolve(home, trimmed.slice(2)); + } + + if (isAbsolute(trimmed)) { + return trimmed; + } + + return resolve(cwd, trimmed); +} + +export function resolveHomeDir( + env: RuntimeEnvInput, + homeDirOverride?: string, +): string { + if (homeDirOverride !== undefined && homeDirOverride.trim().length > 0) { + return homeDirOverride.trim(); + } + + return firstNonEmptyString(env, ["HOME", "USERPROFILE"]) ?? homedir(); +} + +export function resolveStateDir( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string { + const cwd = options.cwd ?? 
resolveDefaultCwd(); + const home = resolveHomeDir(env, options.homeDir); + const stateDirOverride = firstNonEmptyString(env, ["OPENCLAW_STATE_DIR"]); + + if (stateDirOverride !== undefined) { + return resolvePathWithHome(stateDirOverride, cwd, home); + } + + return join(home, ".openclaw"); +} + +export function resolveOpenclawRelayConfigPath( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string { + const home = resolveHomeDir(env, options.homeDir); + return join(home, CLAWDENTITY_CONFIG_DIR, OPENCLAW_RELAY_CONFIG_FILENAME); +} + +export function resolveBaseUrlFromRelayConfig( + env: RuntimeEnvInput, + options: ProxyConfigLoadOptions, +): string | undefined { + const configPath = resolveOpenclawRelayConfigPath(env, options); + if (!existsSync(configPath)) { + return undefined; + } + + let parsed: unknown; + try { + parsed = JSON.parse(readFileSync(configPath, "utf8")); + } catch (error) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: [ + `Unable to parse relay config at ${configPath}`, + ], + }, + formErrors: [ + error instanceof Error ? 
error.message : "Unknown relay parse error", + ], + }); + } + + if (typeof parsed !== "object" || parsed === null) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: ["Relay config root must be a JSON object"], + }, + formErrors: [], + }); + } + + const baseUrlValue = (parsed as Record).openclawBaseUrl; + if (typeof baseUrlValue !== "string" || baseUrlValue.trim().length === 0) { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: [ + "openclawBaseUrl must be a non-empty string", + ], + }, + formErrors: [], + }); + } + + const trimmed = baseUrlValue.trim(); + let parsedUrl: URL; + try { + parsedUrl = new URL(trimmed); + } catch { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: [ + "openclawBaseUrl must be a valid absolute URL", + ], + }, + formErrors: [], + }); + } + + if (parsedUrl.protocol !== "http:" && parsedUrl.protocol !== "https:") { + throw toConfigValidationError({ + fieldErrors: { + OPENCLAW_RELAY_CONFIG_PATH: ["openclawBaseUrl must use http or https"], + }, + formErrors: [], + }); + } + + if ( + parsedUrl.pathname === "/" && + parsedUrl.search.length === 0 && + parsedUrl.hash.length === 0 + ) { + return parsedUrl.origin; + } + + return parsedUrl.toString(); +} diff --git a/apps/proxy/src/config/schema.ts b/apps/proxy/src/config/schema.ts new file mode 100644 index 0000000..809cbbe --- /dev/null +++ b/apps/proxy/src/config/schema.ts @@ -0,0 +1,154 @@ +import { z } from "zod"; +import { + DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS, + DEFAULT_CRL_MAX_AGE_MS, + DEFAULT_CRL_REFRESH_INTERVAL_MS, + DEFAULT_CRL_STALE_BEHAVIOR, + DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, + DEFAULT_OPENCLAW_BASE_URL, + DEFAULT_PROXY_ENVIRONMENT, + DEFAULT_PROXY_LISTEN_PORT, + DEFAULT_REGISTRY_URL, + DEFAULT_RELAY_MAX_FRAME_BYTES, + DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES, + DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + DEFAULT_RELAY_QUEUE_TTL_SECONDS, + 
DEFAULT_RELAY_RETRY_INITIAL_MS, + DEFAULT_RELAY_RETRY_JITTER_RATIO, + DEFAULT_RELAY_RETRY_MAX_ATTEMPTS, + DEFAULT_RELAY_RETRY_MAX_MS, + proxyEnvironmentValues, +} from "./defaults.js"; + +const envBooleanSchema = z.preprocess((value) => { + if (typeof value === "string") { + const normalized = value.trim().toLowerCase(); + if ( + normalized === "true" || + normalized === "1" || + normalized === "yes" || + normalized === "on" + ) { + return true; + } + + if ( + normalized === "false" || + normalized === "0" || + normalized === "no" || + normalized === "off" + ) { + return false; + } + } + + return value; +}, z.boolean()); + +export const proxyRuntimeEnvSchema = z.object({ + LISTEN_PORT: z.coerce + .number() + .int() + .min(1) + .max(65535) + .default(DEFAULT_PROXY_LISTEN_PORT), + OPENCLAW_BASE_URL: z.string().trim().url().default(DEFAULT_OPENCLAW_BASE_URL), + REGISTRY_URL: z.string().trim().url().default(DEFAULT_REGISTRY_URL), + REGISTRY_INTERNAL_SERVICE_ID: z.string().trim().min(1).optional(), + REGISTRY_INTERNAL_SERVICE_SECRET: z.string().trim().min(1).optional(), + ENVIRONMENT: z + .enum(proxyEnvironmentValues) + .default(DEFAULT_PROXY_ENVIRONMENT), + CRL_REFRESH_INTERVAL_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_CRL_REFRESH_INTERVAL_MS), + CRL_MAX_AGE_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_CRL_MAX_AGE_MS), + CRL_STALE_BEHAVIOR: z + .enum(["fail-open", "fail-closed"]) + .default(DEFAULT_CRL_STALE_BEHAVIOR), + AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE), + AGENT_RATE_LIMIT_WINDOW_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_AGENT_RATE_LIMIT_WINDOW_MS), + INJECT_IDENTITY_INTO_MESSAGE: envBooleanSchema.default( + DEFAULT_INJECT_IDENTITY_INTO_MESSAGE, + ), + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_QUEUE_MAX_MESSAGES_PER_AGENT), + 
RELAY_QUEUE_TTL_SECONDS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_QUEUE_TTL_SECONDS), + RELAY_RETRY_INITIAL_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_RETRY_INITIAL_MS), + RELAY_RETRY_MAX_MS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_RETRY_MAX_MS), + RELAY_RETRY_MAX_ATTEMPTS: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_RETRY_MAX_ATTEMPTS), + RELAY_RETRY_JITTER_RATIO: z.coerce + .number() + .min(0) + .max(1) + .default(DEFAULT_RELAY_RETRY_JITTER_RATIO), + RELAY_MAX_IN_FLIGHT_DELIVERIES: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_MAX_IN_FLIGHT_DELIVERIES), + RELAY_MAX_FRAME_BYTES: z.coerce + .number() + .int() + .positive() + .default(DEFAULT_RELAY_MAX_FRAME_BYTES), +}); + +export const proxyConfigSchema = z.object({ + listenPort: z.number().int().min(1).max(65535), + openclawBaseUrl: z.string().url(), + registryUrl: z.string().url(), + registryInternalServiceId: z.string().min(1).optional(), + registryInternalServiceSecret: z.string().min(1).optional(), + environment: z.enum(proxyEnvironmentValues), + crlRefreshIntervalMs: z.number().int().positive(), + crlMaxAgeMs: z.number().int().positive(), + crlStaleBehavior: z.enum(["fail-open", "fail-closed"]), + agentRateLimitRequestsPerMinute: z.number().int().positive(), + agentRateLimitWindowMs: z.number().int().positive(), + injectIdentityIntoMessage: z.boolean(), + relayQueueMaxMessagesPerAgent: z.number().int().positive(), + relayQueueTtlSeconds: z.number().int().positive(), + relayRetryInitialMs: z.number().int().positive(), + relayRetryMaxMs: z.number().int().positive(), + relayRetryMaxAttempts: z.number().int().positive(), + relayRetryJitterRatio: z.number().min(0).max(1), + relayMaxInFlightDeliveries: z.number().int().positive(), + relayMaxFrameBytes: z.number().int().positive(), +}); + +export type ProxyConfig = z.infer; diff --git a/apps/proxy/src/config/validation.ts 
b/apps/proxy/src/config/validation.ts new file mode 100644 index 0000000..e6ba1ae --- /dev/null +++ b/apps/proxy/src/config/validation.ts @@ -0,0 +1,128 @@ +import type { ProxyConfigLoadOptions } from "./defaults.js"; +import { + assertNoDeprecatedAllowAllVerified, + assertRequiredProxyRuntimeKeys, + isRuntimeEnvInput, + normalizeRuntimeEnv, + type RuntimeEnvInput, + resolveDefaultEnv, +} from "./env-normalization.js"; +import { toConfigValidationError } from "./errors.js"; +import { + loadEnvWithDotEnvFallback, + loadOpenclawBaseUrlFromFallback, +} from "./files.js"; +import { + type ProxyConfig, + proxyConfigSchema, + proxyRuntimeEnvSchema, +} from "./schema.js"; + +type ParseProxyConfigOptions = { + requireRuntimeKeys?: boolean; +}; + +export function parseProxyConfig( + env: unknown, + options: ParseProxyConfigOptions = {}, +): ProxyConfig { + const inputEnv: RuntimeEnvInput = isRuntimeEnvInput(env) ? env : {}; + assertNoDeprecatedAllowAllVerified(inputEnv); + if (options.requireRuntimeKeys === true) { + assertRequiredProxyRuntimeKeys(inputEnv); + } + + const parsedRuntimeEnv = proxyRuntimeEnvSchema.safeParse( + normalizeRuntimeEnv(inputEnv), + ); + if (!parsedRuntimeEnv.success) { + throw toConfigValidationError({ + fieldErrors: parsedRuntimeEnv.error.flatten().fieldErrors, + formErrors: parsedRuntimeEnv.error.flatten().formErrors, + }); + } + + const candidateConfig: Record = { + listenPort: parsedRuntimeEnv.data.LISTEN_PORT, + openclawBaseUrl: parsedRuntimeEnv.data.OPENCLAW_BASE_URL, + registryUrl: parsedRuntimeEnv.data.REGISTRY_URL, + environment: parsedRuntimeEnv.data.ENVIRONMENT, + crlRefreshIntervalMs: parsedRuntimeEnv.data.CRL_REFRESH_INTERVAL_MS, + crlMaxAgeMs: parsedRuntimeEnv.data.CRL_MAX_AGE_MS, + crlStaleBehavior: parsedRuntimeEnv.data.CRL_STALE_BEHAVIOR, + agentRateLimitRequestsPerMinute: + parsedRuntimeEnv.data.AGENT_RATE_LIMIT_REQUESTS_PER_MINUTE, + agentRateLimitWindowMs: parsedRuntimeEnv.data.AGENT_RATE_LIMIT_WINDOW_MS, + 
injectIdentityIntoMessage: + parsedRuntimeEnv.data.INJECT_IDENTITY_INTO_MESSAGE, + relayQueueMaxMessagesPerAgent: + parsedRuntimeEnv.data.RELAY_QUEUE_MAX_MESSAGES_PER_AGENT, + relayQueueTtlSeconds: parsedRuntimeEnv.data.RELAY_QUEUE_TTL_SECONDS, + relayRetryInitialMs: parsedRuntimeEnv.data.RELAY_RETRY_INITIAL_MS, + relayRetryMaxMs: parsedRuntimeEnv.data.RELAY_RETRY_MAX_MS, + relayRetryMaxAttempts: parsedRuntimeEnv.data.RELAY_RETRY_MAX_ATTEMPTS, + relayRetryJitterRatio: parsedRuntimeEnv.data.RELAY_RETRY_JITTER_RATIO, + relayMaxInFlightDeliveries: + parsedRuntimeEnv.data.RELAY_MAX_IN_FLIGHT_DELIVERIES, + relayMaxFrameBytes: parsedRuntimeEnv.data.RELAY_MAX_FRAME_BYTES, + }; + if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID !== undefined) { + candidateConfig.registryInternalServiceId = + parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_ID; + } + if (parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET !== undefined) { + candidateConfig.registryInternalServiceSecret = + parsedRuntimeEnv.data.REGISTRY_INTERNAL_SERVICE_SECRET; + } + + const parsedConfig = proxyConfigSchema.safeParse(candidateConfig); + if (parsedConfig.success) { + const hasServiceId = + typeof parsedConfig.data.registryInternalServiceId === "string"; + const hasServiceSecret = + typeof parsedConfig.data.registryInternalServiceSecret === "string"; + if (hasServiceId !== hasServiceSecret) { + throw toConfigValidationError({ + fieldErrors: { + REGISTRY_INTERNAL_SERVICE_ID: [ + "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", + ], + REGISTRY_INTERNAL_SERVICE_SECRET: [ + "REGISTRY_INTERNAL_SERVICE_ID and REGISTRY_INTERNAL_SERVICE_SECRET must be set together.", + ], + }, + formErrors: [], + }); + } + if ( + parsedConfig.data.relayRetryMaxMs < parsedConfig.data.relayRetryInitialMs + ) { + throw toConfigValidationError({ + fieldErrors: { + RELAY_RETRY_MAX_MS: [ + "RELAY_RETRY_MAX_MS must be greater than or equal to RELAY_RETRY_INITIAL_MS.", + ], + 
RELAY_RETRY_INITIAL_MS: [ + "RELAY_RETRY_MAX_MS must be greater than or equal to RELAY_RETRY_INITIAL_MS.", + ], + }, + formErrors: [], + }); + } + return parsedConfig.data; + } + + throw toConfigValidationError({ + fieldErrors: parsedConfig.error.flatten().fieldErrors, + formErrors: parsedConfig.error.flatten().formErrors, + }); +} + +export function loadProxyConfig( + env: unknown = resolveDefaultEnv(), + options: ProxyConfigLoadOptions & ParseProxyConfigOptions = {}, +): ProxyConfig { + const mergedEnv = loadEnvWithDotEnvFallback(env, options); + loadOpenclawBaseUrlFromFallback(mergedEnv, options); + return parseProxyConfig(mergedEnv, options); +} From f28ed90bd6d1797b9845a12e2e33ee6fff1b76f5 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 22:35:36 +0530 Subject: [PATCH 148/190] refactor: split relay session, registry registration tests, and connector lifecycle --- apps/proxy/src/agent-relay-session/AGENTS.md | 8 + apps/proxy/src/agent-relay-session/core.ts | 448 ++-------- .../proxy/src/agent-relay-session/delivery.ts | 75 ++ .../src/agent-relay-session/queue-manager.ts | 270 ++++++ .../src/agent-relay-session/websocket.ts | 104 +++ apps/registry/src/server.test/AGENTS.md | 20 +- ...gent-registration-create.challenge.test.ts | 158 ++++ .../agent-registration-create.config.test.ts | 80 ++ .../agent-registration-create.success.test.ts | 227 +++++ .../agent-registration-create.test.ts | 779 ------------------ ...ent-registration-create.validation.test.ts | 184 +++++ apps/registry/src/server.test/helpers.ts | 5 + .../src/server.test/helpers/AGENTS.md | 2 +- .../server.test/helpers/agent-registration.ts | 127 +++ .../registry/src/server.test/helpers/index.ts | 7 + packages/connector/AGENTS.md | 5 + packages/connector/src/client.ts | 215 +++-- packages/connector/src/client/AGENTS.md | 8 + .../src/client/reconnect-scheduler.ts | 73 ++ .../connector/src/client/socket-events.ts | 20 + scripts/quality/check-file-size.mjs | 6 +- 21 files changed, 1558 
insertions(+), 1263 deletions(-) create mode 100644 apps/proxy/src/agent-relay-session/delivery.ts create mode 100644 apps/proxy/src/agent-relay-session/queue-manager.ts create mode 100644 apps/proxy/src/agent-relay-session/websocket.ts create mode 100644 apps/registry/src/server.test/agent-registration-create.challenge.test.ts create mode 100644 apps/registry/src/server.test/agent-registration-create.config.test.ts create mode 100644 apps/registry/src/server.test/agent-registration-create.success.test.ts delete mode 100644 apps/registry/src/server.test/agent-registration-create.test.ts create mode 100644 apps/registry/src/server.test/agent-registration-create.validation.test.ts create mode 100644 apps/registry/src/server.test/helpers/agent-registration.ts create mode 100644 packages/connector/src/client/reconnect-scheduler.ts create mode 100644 packages/connector/src/client/socket-events.ts diff --git a/apps/proxy/src/agent-relay-session/AGENTS.md b/apps/proxy/src/agent-relay-session/AGENTS.md index 6f5eb0e..e549c83 100644 --- a/apps/proxy/src/agent-relay-session/AGENTS.md +++ b/apps/proxy/src/agent-relay-session/AGENTS.md @@ -5,9 +5,17 @@ ## Rules - Keep `core.ts` as orchestration only (fetch/alarm lifecycle, queue + delivery flow), not as a utility dump. +- Keep queue persistence, pruning, retry sequencing, and alarm coordination in `queue-manager.ts`. +- Keep connector frame send + in-flight ack tracking in `delivery.ts`. +- Keep websocket frame/close/error dispatch in `websocket.ts`. - Keep socket liveness/heartbeat/pending-close tracking in `socket-tracker.ts`. - Keep frame construction/parsing helpers in `frames.ts`; do not duplicate frame payload logic in `core.ts`. - Keep queue receipt normalization/pruning/upsert/delete behavior in `queue-state.ts`. - Keep retry delay math in `policy.ts` and alarm scheduling in `scheduler.ts`. - Keep request payload validation in `parsers.ts` and RPC error envelopes in `rpc.ts`. 
- Keep shared relay constants in `constants.ts`; avoid repeating close codes and route paths inline. + +## Refactor Guidance +- Prefer extracting concrete collaborators (queue management, connector delivery transport, and RPC wiring) so `core.ts` stays a high-level orchestrator with well-defined dependencies. +- When adding new helpers, document the exported signatures and the direction of dependencies (e.g., `core.ts` → `queue-manager` → `queue-state`, `core.ts` → `rpc-handlers` → `parsers`). +- Preserve the existing request/queue/workflow contracts; refactors should not change how RPC paths, receipt state, or delivery retries behave. diff --git a/apps/proxy/src/agent-relay-session/core.ts b/apps/proxy/src/agent-relay-session/core.ts index 9926b9c..5095554 100644 --- a/apps/proxy/src/agent-relay-session/core.ts +++ b/apps/proxy/src/agent-relay-session/core.ts @@ -1,67 +1,50 @@ -import { - DEFAULT_RELAY_DELIVER_TIMEOUT_MS, - parseFrame, - serializeFrame, -} from "@clawdentity/connector"; import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; import { nowUtcMs, toIso } from "@clawdentity/sdk"; import { parseProxyConfig } from "../config.js"; import { CONNECTOR_AGENT_DID_HEADER, RELAY_HEARTBEAT_ACK_TIMEOUT_MS, - RELAY_QUEUE_STORAGE_KEY, RELAY_RPC_DELIVER_PATH, RELAY_RPC_GET_RECEIPT_PATH, RELAY_RPC_RECORD_RECEIPT_PATH, RELAY_SOCKET_STALE_CLOSE_CODE, RELAY_SOCKET_SUPERSEDED_CLOSE_CODE, } from "./constants.js"; +import { RelayDeliveryTransport } from "./delivery.js"; import { RelayQueueFullError } from "./errors.js"; -import { - getWebSocketMessageBytes, - toDeliverFrame, - toHeartbeatAckFrame, - toRelayDeliveryResult, -} from "./frames.js"; +import { toRelayDeliveryResult } from "./frames.js"; import { parseDeliveryInput, parseReceiptLookupInput, parseReceiptRecordInput, } from "./parsers.js"; -import { rejectPendingDeliveries } from "./pending-deliveries.js"; import { computeRetryDelayMs } from "./policy.js"; -import { - deleteQueuedReceipt, - 
isQueuedDelivery, - normalizeReceipts, - pruneExpiredQueueState, - upsertReceipt, -} from "./queue-state.js"; +import { RelayQueueManager } from "./queue-manager.js"; +import { upsertReceipt } from "./queue-state.js"; import { toErrorResponse } from "./rpc.js"; -import { scheduleNextRelayAlarm } from "./scheduler.js"; import { RelaySocketTracker } from "./socket-tracker.js"; import type { DurableObjectStateLike, - PendingDelivery, QueuedRelayDelivery, RelayDeliveryInput, RelayDeliveryPolicy, RelayDeliveryResult, - RelayQueueState, RelayReceiptLookupInput, RelayReceiptLookupResult, RelayReceiptRecordInput, } from "./types.js"; +import { + handleRelayWebSocketClose, + handleRelayWebSocketError, + handleRelayWebSocketMessage, +} from "./websocket.js"; export class AgentRelaySession { private readonly deliveryPolicy: RelayDeliveryPolicy; - private readonly pendingDeliveries = new Map(); private readonly socketTracker: RelaySocketTracker; + private readonly deliveryTransport: RelayDeliveryTransport; + private readonly queueManager: RelayQueueManager; private readonly state: DurableObjectStateLike; - private inMemoryQueueState: RelayQueueState = { - deliveries: [], - receipts: {}, - }; constructor(state: DurableObjectStateLike, env?: unknown) { this.state = state; @@ -80,6 +63,16 @@ export class AgentRelaySession { heartbeatAckTimeoutMs: RELAY_HEARTBEAT_ACK_TIMEOUT_MS, staleCloseCode: RELAY_SOCKET_STALE_CLOSE_CODE, }); + + this.deliveryTransport = new RelayDeliveryTransport(this.deliveryPolicy); + this.queueManager = new RelayQueueManager({ + state: this.state, + deliveryPolicy: this.deliveryPolicy, + getActiveSockets: (nowMs) => this.getActiveSockets(nowMs), + getPendingDeliveriesCount: () => this.deliveryTransport.getPendingCount(), + sendDeliverFrame: (socket, input) => + this.deliveryTransport.sendDeliverFrame(socket, input), + }); } async fetch(request: Request): Promise { @@ -158,20 +151,23 @@ export class AgentRelaySession { } } - const queueState = await 
this.loadQueueState(nowMs); - const queueMutated = await this.processQueueDeliveries(queueState, nowMs); + const queueState = await this.queueManager.loadQueueState(nowMs); + const queueMutated = await this.queueManager.processQueueDeliveries( + queueState, + nowMs, + ); if (queueMutated) { - await this.saveQueueState(queueState); + await this.queueManager.saveQueueState(queueState); } - await this.scheduleNextAlarm(queueState, nowMs); + await this.queueManager.scheduleNextAlarm(queueState, nowMs); } async deliverToConnector( input: RelayDeliveryInput, ): Promise { const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); + const queueState = await this.queueManager.loadQueueState(nowMs); const existingReceipt = queueState.receipts[input.requestId]; if ( @@ -195,11 +191,15 @@ export class AgentRelaySession { if ( sockets.length > 0 && - this.pendingDeliveries.size < this.deliveryPolicy.maxInFlightDeliveries + this.deliveryTransport.getPendingCount() < + this.deliveryPolicy.maxInFlightDeliveries ) { priorAttempts = 1; try { - const accepted = await this.sendDeliverFrame(sockets[0], input); + const accepted = await this.deliveryTransport.sendDeliverFrame( + sockets[0], + input, + ); if (accepted) { upsertReceipt(queueState, { requestId: input.requestId, @@ -210,8 +210,8 @@ export class AgentRelaySession { recipientAgentDid: input.recipientAgentDid, statusUpdatedAt: toIso(nowMs), }); - await this.saveQueueState(queueState); - await this.scheduleNextAlarm(queueState, nowMs); + await this.queueManager.saveQueueState(queueState); + await this.queueManager.scheduleNextAlarm(queueState, nowMs); return toRelayDeliveryResult({ deliveryId, @@ -262,8 +262,8 @@ export class AgentRelaySession { statusUpdatedAt: toIso(nowMs), }); - await this.saveQueueState(queueState); - await this.scheduleNextAlarm(queueState, nowMs); + await this.queueManager.saveQueueState(queueState); + await this.queueManager.scheduleNextAlarm(queueState, nowMs); return 
toRelayDeliveryResult({ deliveryId, @@ -275,7 +275,7 @@ export class AgentRelaySession { async recordDeliveryReceipt(input: RelayReceiptRecordInput): Promise { const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); + const queueState = await this.queueManager.loadQueueState(nowMs); const existing = queueState.receipts[input.requestId]; if (existing === undefined) { return; @@ -292,15 +292,15 @@ export class AgentRelaySession { existing.reason = input.reason; existing.expiresAtMs = nowMs + this.deliveryPolicy.queueTtlMs; existing.statusUpdatedAt = toIso(nowMs); - await this.saveQueueState(queueState); - await this.scheduleNextAlarm(queueState, nowMs); + await this.queueManager.saveQueueState(queueState); + await this.queueManager.scheduleNextAlarm(queueState, nowMs); } async getDeliveryReceipt( input: RelayReceiptLookupInput, ): Promise { const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); + const queueState = await this.queueManager.loadQueueState(nowMs); const existing = queueState.receipts[input.requestId]; if ( existing === undefined || @@ -319,55 +319,22 @@ export class AgentRelaySession { ws: WebSocket, message: string | ArrayBuffer, ): Promise { - const frameBytes = getWebSocketMessageBytes(message); - if (frameBytes > this.deliveryPolicy.maxFrameBytes) { - this.closeSocket(ws, 1009, "frame_too_large"); - await this.scheduleFromStorage(); - return; - } - - const nowMs = nowUtcMs(); - const frameResult = (() => { - try { - return parseFrame(message); - } catch { - return null; - } - })(); - - if (frameResult === null) { - await this.scheduleFromStorage(); - return; - } - - const frame = frameResult; - - if (frame.type === "heartbeat") { - this.socketTracker.touchSocketAck(ws, nowMs); - ws.send(toHeartbeatAckFrame(frame.id)); - await this.scheduleFromStorage(); - return; - } - - if (frame.type === "deliver_ack") { - this.socketTracker.touchSocketAck(ws, nowMs); - const pending = 
this.pendingDeliveries.get(frame.ackId); - if (pending) { - clearTimeout(pending.timeoutHandle); - this.pendingDeliveries.delete(frame.ackId); - pending.resolve(frame.accepted); - } - await this.scheduleFromStorage(); - return; - } - - if (frame.type === "heartbeat_ack") { - this.socketTracker.handleHeartbeatAck(frame.ackId, ws, nowMs); - await this.scheduleFromStorage(); - return; - } - - await this.scheduleFromStorage(); + await handleRelayWebSocketMessage({ + ws, + message, + maxFrameBytes: this.deliveryPolicy.maxFrameBytes, + socketTracker: this.socketTracker, + closeSocket: (socket, code, reason) => { + this.closeSocket(socket, code, reason); + }, + now: nowUtcMs, + onDeliverAck: (ackId, accepted) => { + this.deliveryTransport.resolveDeliverAck(ackId, accepted); + }, + onSchedule: async () => { + await this.queueManager.scheduleFromStorage(nowUtcMs()); + }, + }); } async webSocketClose( @@ -376,23 +343,33 @@ export class AgentRelaySession { _reason?: string, wasClean?: boolean, ): Promise { - if (ws !== undefined) { - this.socketTracker.onSocketClosed(ws); - } - - const gracefulClose = code === 1000 && (wasClean ?? 
true); - if (!gracefulClose && this.state.getWebSockets().length === 0) { - rejectPendingDeliveries( - this.pendingDeliveries, - new Error("Connector socket closed"), - ); - } - - await this.scheduleFromStorage(); + await handleRelayWebSocketClose({ + ws, + code, + wasClean, + socketTracker: this.socketTracker, + getSocketCount: () => this.state.getWebSockets().length, + rejectPending: (error) => { + this.deliveryTransport.rejectPending(error); + }, + onSchedule: async () => { + await this.queueManager.scheduleFromStorage(nowUtcMs()); + }, + }); } async webSocketError(ws?: WebSocket): Promise { - await this.webSocketClose(ws, 1011, "connector_socket_error", false); + await handleRelayWebSocketError({ + ws, + socketTracker: this.socketTracker, + getSocketCount: () => this.state.getWebSockets().length, + rejectPending: (error) => { + this.deliveryTransport.rejectPending(error); + }, + onSchedule: async () => { + await this.queueManager.scheduleFromStorage(nowUtcMs()); + }, + }); } private async handleConnect(request: Request): Promise { @@ -423,7 +400,7 @@ export class AgentRelaySession { this.state.acceptWebSocket(server, [connectorAgentDid]); this.socketTracker.touchSocketAck(server, nowMs); - void this.drainQueueOnReconnect(); + void this.queueManager.drainQueueOnReconnect(nowMs); return new Response(null, { status: 101, @@ -431,222 +408,6 @@ export class AgentRelaySession { }); } - private async loadQueueState(nowMs: number): Promise { - const fromStorage = this.state.storage.get - ? await this.state.storage.get(RELAY_QUEUE_STORAGE_KEY) - : this.inMemoryQueueState; - const rawState = - typeof fromStorage === "object" && fromStorage !== null - ? (fromStorage as Partial) - : undefined; - - const queueState: RelayQueueState = { - deliveries: Array.isArray(rawState?.deliveries) - ? 
rawState.deliveries.filter((entry) => isQueuedDelivery(entry)) - : [], - receipts: normalizeReceipts(rawState?.receipts), - }; - - const pruned = pruneExpiredQueueState(queueState, nowMs); - if (pruned) { - await this.saveQueueState(queueState); - } - - return queueState; - } - - private async saveQueueState(queueState: RelayQueueState): Promise { - const serialized: RelayQueueState = { - deliveries: [...queueState.deliveries], - receipts: { ...queueState.receipts }, - }; - - if (this.state.storage.put) { - await this.state.storage.put(RELAY_QUEUE_STORAGE_KEY, serialized); - return; - } - - this.inMemoryQueueState = serialized; - } - - private async processQueueDeliveries( - queueState: RelayQueueState, - nowMs: number, - ): Promise { - if (queueState.deliveries.length === 0) { - return false; - } - - const sockets = this.getActiveSockets(nowMs); - if (sockets.length === 0) { - let mutated = false; - for (const delivery of queueState.deliveries) { - if (delivery.nextAttemptAtMs <= nowMs) { - delivery.nextAttemptAtMs = - nowMs + - computeRetryDelayMs(this.deliveryPolicy, delivery.attemptCount); - mutated = true; - } - } - - return mutated; - } - - queueState.deliveries.sort((left, right) => { - if (left.nextAttemptAtMs !== right.nextAttemptAtMs) { - return left.nextAttemptAtMs - right.nextAttemptAtMs; - } - - return left.createdAtMs - right.createdAtMs; - }); - - let mutated = false; - const socket = sockets[0]; - - for (let index = 0; index < queueState.deliveries.length; ) { - if ( - this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries - ) { - break; - } - - const delivery = queueState.deliveries[index]; - - if (delivery.expiresAtMs <= nowMs) { - queueState.deliveries.splice(index, 1); - deleteQueuedReceipt( - queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - if (delivery.attemptCount >= this.deliveryPolicy.retryMaxAttempts) { - queueState.deliveries.splice(index, 1); - deleteQueuedReceipt( - 
queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - if (delivery.nextAttemptAtMs > nowMs) { - index += 1; - continue; - } - - let accepted = false; - let deliveryError = false; - try { - accepted = await this.sendDeliverFrame(socket, { - requestId: delivery.requestId, - senderAgentDid: delivery.senderAgentDid, - recipientAgentDid: delivery.recipientAgentDid, - conversationId: delivery.conversationId, - replyTo: delivery.replyTo, - payload: delivery.payload, - }); - } catch { - deliveryError = true; - } - - if (accepted) { - queueState.deliveries.splice(index, 1); - upsertReceipt(queueState, { - requestId: delivery.requestId, - deliveryId: delivery.deliveryId, - state: "delivered", - expiresAtMs: nowMs + this.deliveryPolicy.queueTtlMs, - senderAgentDid: delivery.senderAgentDid, - recipientAgentDid: delivery.recipientAgentDid, - statusUpdatedAt: toIso(nowMs), - }); - mutated = true; - continue; - } - - const nextAttemptCount = delivery.attemptCount + 1; - if (nextAttemptCount >= this.deliveryPolicy.retryMaxAttempts) { - queueState.deliveries.splice(index, 1); - deleteQueuedReceipt( - queueState, - delivery.requestId, - delivery.deliveryId, - ); - mutated = true; - continue; - } - - delivery.attemptCount = nextAttemptCount; - delivery.nextAttemptAtMs = - nowMs + computeRetryDelayMs(this.deliveryPolicy, delivery.attemptCount); - mutated = true; - index += 1; - - if (deliveryError) { - for ( - let remaining = index; - remaining < queueState.deliveries.length; - remaining += 1 - ) { - if (queueState.deliveries[remaining].nextAttemptAtMs <= nowMs) { - queueState.deliveries[remaining].nextAttemptAtMs = - nowMs + - computeRetryDelayMs( - this.deliveryPolicy, - queueState.deliveries[remaining].attemptCount, - ); - } - } - break; - } - } - - return mutated; - } - private async sendDeliverFrame( - socket: WebSocket, - input: RelayDeliveryInput, - ): Promise { - if ( - this.pendingDeliveries.size >= 
this.deliveryPolicy.maxInFlightDeliveries - ) { - throw new Error("Relay connector in-flight window is full"); - } - - const frame = toDeliverFrame(input); - const framePayload = serializeFrame(frame); - const frameBytes = new TextEncoder().encode(framePayload).byteLength; - if (frameBytes > this.deliveryPolicy.maxFrameBytes) { - throw new Error("Relay connector frame exceeds max allowed size"); - } - - return new Promise((resolve, reject) => { - const timeoutHandle = setTimeout(() => { - this.pendingDeliveries.delete(frame.id); - reject(new Error("Relay connector acknowledgement timed out")); - }, DEFAULT_RELAY_DELIVER_TIMEOUT_MS); - - this.pendingDeliveries.set(frame.id, { - resolve, - reject, - timeoutHandle, - }); - - try { - socket.send(framePayload); - } catch (error) { - clearTimeout(timeoutHandle); - this.pendingDeliveries.delete(frame.id); - reject(error); - } - }); - } - private getActiveSockets(nowMs: number): WebSocket[] { return this.socketTracker.getActiveSockets( this.state.getWebSockets(), @@ -661,45 +422,4 @@ export class AgentRelaySession { private closeSocket(socket: WebSocket, code: number, reason: string): void { this.socketTracker.closeSocket(socket, code, reason); } - - private async drainQueueOnReconnect(): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - let queueMutated = false; - - for (const delivery of queueState.deliveries) { - if (delivery.nextAttemptAtMs > nowMs) { - delivery.nextAttemptAtMs = nowMs; - queueMutated = true; - } - } - - if (await this.processQueueDeliveries(queueState, nowMs)) { - queueMutated = true; - } - - if (queueMutated) { - await this.saveQueueState(queueState); - } - - await this.scheduleNextAlarm(queueState, nowMs); - } - - private async scheduleFromStorage(): Promise { - const nowMs = nowUtcMs(); - const queueState = await this.loadQueueState(nowMs); - await this.scheduleNextAlarm(queueState, nowMs); - } - - private async scheduleNextAlarm( - queueState: 
RelayQueueState, - nowMs: number, - ): Promise { - await scheduleNextRelayAlarm({ - storage: this.state.storage, - queueState, - nowMs, - hasActiveSockets: this.getActiveSockets(nowMs).length > 0, - }); - } } diff --git a/apps/proxy/src/agent-relay-session/delivery.ts b/apps/proxy/src/agent-relay-session/delivery.ts new file mode 100644 index 0000000..505a411 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/delivery.ts @@ -0,0 +1,75 @@ +import { + DEFAULT_RELAY_DELIVER_TIMEOUT_MS, + serializeFrame, +} from "@clawdentity/connector"; +import { toDeliverFrame } from "./frames.js"; +import { rejectPendingDeliveries } from "./pending-deliveries.js"; +import type { + PendingDelivery, + RelayDeliveryInput, + RelayDeliveryPolicy, +} from "./types.js"; + +export class RelayDeliveryTransport { + private readonly pendingDeliveries = new Map(); + + constructor(private readonly deliveryPolicy: RelayDeliveryPolicy) {} + + getPendingCount(): number { + return this.pendingDeliveries.size; + } + + rejectPending(error: Error): void { + rejectPendingDeliveries(this.pendingDeliveries, error); + } + + resolveDeliverAck(ackId: string, accepted: boolean): void { + const pending = this.pendingDeliveries.get(ackId); + if (!pending) { + return; + } + + clearTimeout(pending.timeoutHandle); + this.pendingDeliveries.delete(ackId); + pending.resolve(accepted); + } + + async sendDeliverFrame( + socket: WebSocket, + input: RelayDeliveryInput, + ): Promise { + if ( + this.pendingDeliveries.size >= this.deliveryPolicy.maxInFlightDeliveries + ) { + throw new Error("Relay connector in-flight window is full"); + } + + const frame = toDeliverFrame(input); + const framePayload = serializeFrame(frame); + const frameBytes = new TextEncoder().encode(framePayload).byteLength; + if (frameBytes > this.deliveryPolicy.maxFrameBytes) { + throw new Error("Relay connector frame exceeds max allowed size"); + } + + return new Promise((resolve, reject) => { + const timeoutHandle = setTimeout(() => { + 
this.pendingDeliveries.delete(frame.id); + reject(new Error("Relay connector acknowledgement timed out")); + }, DEFAULT_RELAY_DELIVER_TIMEOUT_MS); + + this.pendingDeliveries.set(frame.id, { + resolve, + reject, + timeoutHandle, + }); + + try { + socket.send(framePayload); + } catch (error) { + clearTimeout(timeoutHandle); + this.pendingDeliveries.delete(frame.id); + reject(error); + } + }); + } +} diff --git a/apps/proxy/src/agent-relay-session/queue-manager.ts b/apps/proxy/src/agent-relay-session/queue-manager.ts new file mode 100644 index 0000000..bd3e58e --- /dev/null +++ b/apps/proxy/src/agent-relay-session/queue-manager.ts @@ -0,0 +1,270 @@ +import { toIso } from "@clawdentity/sdk"; +import { RELAY_QUEUE_STORAGE_KEY } from "./constants.js"; +import { computeRetryDelayMs } from "./policy.js"; +import { + deleteQueuedReceipt, + isQueuedDelivery, + normalizeReceipts, + pruneExpiredQueueState, + upsertReceipt, +} from "./queue-state.js"; +import { scheduleNextRelayAlarm } from "./scheduler.js"; +import type { + DurableObjectStateLike, + RelayDeliveryInput, + RelayDeliveryPolicy, + RelayQueueState, +} from "./types.js"; + +type RelayQueueManagerInput = { + state: DurableObjectStateLike; + deliveryPolicy: RelayDeliveryPolicy; + getActiveSockets: (nowMs: number) => WebSocket[]; + getPendingDeliveriesCount: () => number; + sendDeliverFrame: ( + socket: WebSocket, + input: RelayDeliveryInput, + ) => Promise; +}; + +export class RelayQueueManager { + private readonly state: DurableObjectStateLike; + private readonly deliveryPolicy: RelayDeliveryPolicy; + private readonly getActiveSockets: (nowMs: number) => WebSocket[]; + private readonly getPendingDeliveriesCount: () => number; + private readonly sendDeliverFrame: ( + socket: WebSocket, + input: RelayDeliveryInput, + ) => Promise; + private inMemoryQueueState: RelayQueueState = { + deliveries: [], + receipts: {}, + }; + + constructor(input: RelayQueueManagerInput) { + this.state = input.state; + this.deliveryPolicy = 
input.deliveryPolicy; + this.getActiveSockets = input.getActiveSockets; + this.getPendingDeliveriesCount = input.getPendingDeliveriesCount; + this.sendDeliverFrame = input.sendDeliverFrame; + } + + async loadQueueState(nowMs: number): Promise { + const fromStorage = this.state.storage.get + ? await this.state.storage.get(RELAY_QUEUE_STORAGE_KEY) + : this.inMemoryQueueState; + const rawState = + typeof fromStorage === "object" && fromStorage !== null + ? (fromStorage as Partial) + : undefined; + + const queueState: RelayQueueState = { + deliveries: Array.isArray(rawState?.deliveries) + ? rawState.deliveries.filter((entry) => isQueuedDelivery(entry)) + : [], + receipts: normalizeReceipts(rawState?.receipts), + }; + + const pruned = pruneExpiredQueueState(queueState, nowMs); + if (pruned) { + await this.saveQueueState(queueState); + } + + return queueState; + } + + async saveQueueState(queueState: RelayQueueState): Promise { + const serialized: RelayQueueState = { + deliveries: [...queueState.deliveries], + receipts: { ...queueState.receipts }, + }; + + if (this.state.storage.put) { + await this.state.storage.put(RELAY_QUEUE_STORAGE_KEY, serialized); + return; + } + + this.inMemoryQueueState = serialized; + } + + async processQueueDeliveries( + queueState: RelayQueueState, + nowMs: number, + ): Promise { + if (queueState.deliveries.length === 0) { + return false; + } + + const sockets = this.getActiveSockets(nowMs); + if (sockets.length === 0) { + let mutated = false; + for (const delivery of queueState.deliveries) { + if (delivery.nextAttemptAtMs <= nowMs) { + delivery.nextAttemptAtMs = + nowMs + + computeRetryDelayMs(this.deliveryPolicy, delivery.attemptCount); + mutated = true; + } + } + + return mutated; + } + + queueState.deliveries.sort((left, right) => { + if (left.nextAttemptAtMs !== right.nextAttemptAtMs) { + return left.nextAttemptAtMs - right.nextAttemptAtMs; + } + + return left.createdAtMs - right.createdAtMs; + }); + + let mutated = false; + const socket 
= sockets[0]; + + for (let index = 0; index < queueState.deliveries.length; ) { + if ( + this.getPendingDeliveriesCount() >= + this.deliveryPolicy.maxInFlightDeliveries + ) { + break; + } + + const delivery = queueState.deliveries[index]; + + if (delivery.expiresAtMs <= nowMs) { + queueState.deliveries.splice(index, 1); + deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + if (delivery.attemptCount >= this.deliveryPolicy.retryMaxAttempts) { + queueState.deliveries.splice(index, 1); + deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + if (delivery.nextAttemptAtMs > nowMs) { + index += 1; + continue; + } + + let accepted = false; + let deliveryError = false; + try { + accepted = await this.sendDeliverFrame(socket, { + requestId: delivery.requestId, + senderAgentDid: delivery.senderAgentDid, + recipientAgentDid: delivery.recipientAgentDid, + conversationId: delivery.conversationId, + replyTo: delivery.replyTo, + payload: delivery.payload, + }); + } catch { + deliveryError = true; + } + + if (accepted) { + queueState.deliveries.splice(index, 1); + upsertReceipt(queueState, { + requestId: delivery.requestId, + deliveryId: delivery.deliveryId, + state: "delivered", + expiresAtMs: nowMs + this.deliveryPolicy.queueTtlMs, + senderAgentDid: delivery.senderAgentDid, + recipientAgentDid: delivery.recipientAgentDid, + statusUpdatedAt: toIso(nowMs), + }); + mutated = true; + continue; + } + + const nextAttemptCount = delivery.attemptCount + 1; + if (nextAttemptCount >= this.deliveryPolicy.retryMaxAttempts) { + queueState.deliveries.splice(index, 1); + deleteQueuedReceipt( + queueState, + delivery.requestId, + delivery.deliveryId, + ); + mutated = true; + continue; + } + + delivery.attemptCount = nextAttemptCount; + delivery.nextAttemptAtMs = + nowMs + computeRetryDelayMs(this.deliveryPolicy, delivery.attemptCount); + mutated = true; + 
index += 1; + + if (deliveryError) { + for ( + let remaining = index; + remaining < queueState.deliveries.length; + remaining += 1 + ) { + const pendingDelivery = queueState.deliveries[remaining]; + if (pendingDelivery.nextAttemptAtMs <= nowMs) { + pendingDelivery.nextAttemptAtMs = + nowMs + + computeRetryDelayMs( + this.deliveryPolicy, + pendingDelivery.attemptCount, + ); + } + } + break; + } + } + + return mutated; + } + + async drainQueueOnReconnect(nowMs: number): Promise { + const queueState = await this.loadQueueState(nowMs); + let queueMutated = false; + + for (const delivery of queueState.deliveries) { + if (delivery.nextAttemptAtMs > nowMs) { + delivery.nextAttemptAtMs = nowMs; + queueMutated = true; + } + } + + if (await this.processQueueDeliveries(queueState, nowMs)) { + queueMutated = true; + } + + if (queueMutated) { + await this.saveQueueState(queueState); + } + + await this.scheduleNextAlarm(queueState, nowMs); + } + + async scheduleFromStorage(nowMs: number): Promise { + const queueState = await this.loadQueueState(nowMs); + await this.scheduleNextAlarm(queueState, nowMs); + } + + async scheduleNextAlarm( + queueState: RelayQueueState, + nowMs: number, + ): Promise { + await scheduleNextRelayAlarm({ + storage: this.state.storage, + queueState, + nowMs, + hasActiveSockets: this.getActiveSockets(nowMs).length > 0, + }); + } +} diff --git a/apps/proxy/src/agent-relay-session/websocket.ts b/apps/proxy/src/agent-relay-session/websocket.ts new file mode 100644 index 0000000..5d931c9 --- /dev/null +++ b/apps/proxy/src/agent-relay-session/websocket.ts @@ -0,0 +1,104 @@ +import { parseFrame } from "@clawdentity/connector"; +import { getWebSocketMessageBytes, toHeartbeatAckFrame } from "./frames.js"; +import type { RelaySocketTracker } from "./socket-tracker.js"; + +type RelayWebSocketMessageInput = { + ws: WebSocket; + message: string | ArrayBuffer; + maxFrameBytes: number; + socketTracker: RelaySocketTracker; + closeSocket: (socket: WebSocket, code: number, 
reason: string) => void; + now: () => number; + onDeliverAck: (ackId: string, accepted: boolean) => void; + onSchedule: () => Promise; +}; + +export async function handleRelayWebSocketMessage( + input: RelayWebSocketMessageInput, +): Promise { + const frameBytes = getWebSocketMessageBytes(input.message); + if (frameBytes > input.maxFrameBytes) { + input.closeSocket(input.ws, 1009, "frame_too_large"); + await input.onSchedule(); + return; + } + + const nowMs = input.now(); + const frameResult = (() => { + try { + return parseFrame(input.message); + } catch { + return null; + } + })(); + + if (frameResult === null) { + await input.onSchedule(); + return; + } + + if (frameResult.type === "heartbeat") { + input.socketTracker.touchSocketAck(input.ws, nowMs); + input.ws.send(toHeartbeatAckFrame(frameResult.id)); + await input.onSchedule(); + return; + } + + if (frameResult.type === "deliver_ack") { + input.socketTracker.touchSocketAck(input.ws, nowMs); + input.onDeliverAck(frameResult.ackId, frameResult.accepted); + await input.onSchedule(); + return; + } + + if (frameResult.type === "heartbeat_ack") { + input.socketTracker.handleHeartbeatAck(frameResult.ackId, input.ws, nowMs); + await input.onSchedule(); + return; + } + + await input.onSchedule(); +} + +type RelayWebSocketCloseInput = { + ws?: WebSocket; + code?: number; + wasClean?: boolean; + socketTracker: RelaySocketTracker; + getSocketCount: () => number; + rejectPending: (error: Error) => void; + onSchedule: () => Promise; +}; + +export async function handleRelayWebSocketClose( + input: RelayWebSocketCloseInput, +): Promise { + if (input.ws !== undefined) { + input.socketTracker.onSocketClosed(input.ws); + } + + const gracefulClose = input.code === 1000 && (input.wasClean ?? 
true); + if (!gracefulClose && input.getSocketCount() === 0) { + input.rejectPending(new Error("Connector socket closed")); + } + + await input.onSchedule(); +} + +export async function handleRelayWebSocketError(input: { + ws?: WebSocket; + socketTracker: RelaySocketTracker; + getSocketCount: () => number; + rejectPending: (error: Error) => void; + onSchedule: () => Promise; +}): Promise { + await handleRelayWebSocketClose({ + ws: input.ws, + code: 1011, + wasClean: false, + socketTracker: input.socketTracker, + getSocketCount: input.getSocketCount, + rejectPending: input.rejectPending, + onSchedule: input.onSchedule, + }); +} diff --git a/apps/registry/src/server.test/AGENTS.md b/apps/registry/src/server.test/AGENTS.md index 144c117..40743f5 100644 --- a/apps/registry/src/server.test/AGENTS.md +++ b/apps/registry/src/server.test/AGENTS.md @@ -8,10 +8,23 @@ - Keep each `*.test.ts` file focused on one route or tightly related route concern. - Keep each `*.test.ts` file under 800 lines. - Keep `helpers.ts` as a thin public export shim used by tests. -- Place shared helper implementation in `helpers/**` with focused modules (`claims`, `crypto`, `pat`, `db/*`); do not duplicate harness logic across test files. +- Place shared helper implementation in `helpers/**` with focused modules (`claims`, `crypto`, `pat`, `agent-registration`, `db/*`); do not duplicate harness logic across test files. - Prefer adding small helper functions in the appropriate `helpers/**` module when setup repeats 3+ times. - Keep every file under `server.test` (including `helpers/**`) below 800 lines. +## Agent registration create test split +- Split `agent-registration-create` specs into focused files: + - `agent-registration-create-validation.test.ts` for auth, general payload validation, and environment-specific error messaging. + - `agent-registration-create-challenge.test.ts` for challenge lifecycle errors (missing, invalid proof, replayed challenge) and shared challenge fixtures. 
+ - `agent-registration-create.success.test.ts` for the happy path responses, default values, persisted records, and AIT verification. + - `agent-registration-create.config.test.ts` for configuration/500-level failures such as missing/mismatched signing keys. +- Each file should `import { createRegistryApp } from "../server.js"` and only add helpers needed for that concern. + +## Shared helpers +- Add a `helpers/agent-registration.ts` module that exposes curated builders (e.g., `makeRegistrationChallenge`, `makeValidRegistrationPayload`, `makeRegistrySigningKeys`) so new files re-use deterministic data and signing-key sets. +- Keep `helpers/pat.ts` focused on PAT fixtures, and reuse `helpers/crypto.ts` for keypair/signature helpers across all registration tests. +- Export any new helper from `helpers.ts` so new spec files can `import { makeRegistrySigningKeys } from "./helpers.js"` and stay concise. + ## Change Rules - Preserve existing assertions and response contracts when refactoring test structure. - When adding tests, keep test names explicit about endpoint, auth mode, and expected status.
@@ -29,6 +42,11 @@ - agent lifecycle (delete/reissue) - registration challenge/create - agent auth refresh/validate/revoke +- Keep `POST /v1/agents` registration-create coverage split by concern: + - `agent-registration-create.validation.test.ts` + - `agent-registration-create.challenge.test.ts` + - `agent-registration-create.success.test.ts` + - `agent-registration-create.config.test.ts` ## Validation - For server test changes, run: diff --git a/apps/registry/src/server.test/agent-registration-create.challenge.test.ts b/apps/registry/src/server.test/agent-registration-create.challenge.test.ts new file mode 100644 index 0000000..b306bcb --- /dev/null +++ b/apps/registry/src/server.test/agent-registration-create.challenge.test.ts @@ -0,0 +1,158 @@ +import { encodeBase64url, generateUlid } from "@clawdentity/protocol"; +import { + encodeEd25519SignatureBase64url, + generateEd25519Keypair, +} from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { + createDefaultRegistrySigning, + createFakeDb, + createTestBindings, + makeValidPatContext, + signRegistrationChallenge, +} from "./helpers.js"; + +describe("POST /v1/agents", () => { + it("returns 400 when registration challenge is missing", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const { signingEnv } = await createDefaultRegistrySigning(); + const agentKeypair = await generateEd25519Keypair(); + const challengeSignature = encodeEd25519SignatureBase64url( + Uint8Array.from({ length: 64 }, (_, index) => index + 1), + ); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-missing-challenge", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: generateUlid(1700000000000), + 
challengeSignature, + }), + }, + createTestBindings(database, signingEnv), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_NOT_FOUND"); + }); + + it("returns 400 when challenge signature is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { signingEnv } = await createDefaultRegistrySigning(); + const agentKeypair = await generateEd25519Keypair(); + const challengeId = generateUlid(1700000010000); + const challengeNonce = encodeBase64url( + Uint8Array.from({ length: 24 }, (_, index) => index + 3), + ); + const { database } = createFakeDb([authRow], [], { + registrationChallengeRows: [ + { + id: challengeId, + ownerId: "human-1", + publicKey: encodeBase64url(agentKeypair.publicKey), + nonce: challengeNonce, + status: "pending", + expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), + usedAt: null, + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const invalidSignature = await signRegistrationChallenge({ + challengeId, + nonce: challengeNonce, + ownerDid: authRow.humanDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "wrong-name", + secretKey: agentKeypair.secretKey, + }); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-proof-invalid", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId, + challengeSignature: invalidSignature, + }), + }, + createTestBindings(database, signingEnv), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_PROOF_INVALID"); + }); + + it("returns 400 when challenge has already been used", async () => { + const { token, 
authRow } = await makeValidPatContext(); + const { signingEnv } = await createDefaultRegistrySigning(); + const agentKeypair = await generateEd25519Keypair(); + const challengeId = generateUlid(1700000011000); + const challengeNonce = encodeBase64url( + Uint8Array.from({ length: 24 }, (_, index) => index + 5), + ); + const { database } = createFakeDb([authRow], [], { + registrationChallengeRows: [ + { + id: challengeId, + ownerId: "human-1", + publicKey: encodeBase64url(agentKeypair.publicKey), + nonce: challengeNonce, + status: "used", + expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), + usedAt: new Date(Date.now() - 60 * 1000).toISOString(), + createdAt: "2026-01-01T00:00:00.000Z", + updatedAt: "2026-01-01T00:00:00.000Z", + }, + ], + }); + const signature = await signRegistrationChallenge({ + challengeId, + nonce: challengeNonce, + ownerDid: authRow.humanDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-challenge-replayed", + secretKey: agentKeypair.secretKey, + }); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-challenge-replayed", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId, + challengeSignature: signature, + }), + }, + createTestBindings(database, signingEnv), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { error: { code: string } }; + expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_REPLAYED"); + }); +}); diff --git a/apps/registry/src/server.test/agent-registration-create.config.test.ts b/apps/registry/src/server.test/agent-registration-create.config.test.ts new file mode 100644 index 0000000..4a8952b --- /dev/null +++ b/apps/registry/src/server.test/agent-registration-create.config.test.ts @@ -0,0 +1,80 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { 
generateEd25519Keypair } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { + createDefaultRegistrySigning, + createFakeDb, + createRegistrySigningEnv, + createTestBindings, + makeValidPatContext, + requestRegistrationChallenge, + signRegistrationChallenge, +} from "./helpers.js"; + +describe("POST /v1/agents", () => { + it("returns 500 when signer secret does not match any active published key", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const { signer } = await createDefaultRegistrySigning(); + const wrongPublishedKey = await generateEd25519Keypair(); + const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + + const { response: challengeResponse, body: challengeBody } = + await requestRegistrationChallenge({ + app: appInstance, + token, + publicKey: encodeBase64url(agentKeypair.publicKey), + bindings: createTestBindings(database), + }); + expect(challengeResponse.status).toBe(201); + + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-signer-mismatch", + secretKey: agentKeypair.secretKey, + }); + + const mismatchedSigningEnv = createRegistrySigningEnv({ + kid: "reg-key-2", + publicKey: wrongPublishedKey.publicKey, + secretKey: signer.secretKey, + }); + + const res = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-signer-mismatch", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, + }), + }, + createTestBindings(database, mismatchedSigningEnv), + ); + + 
expect(res.status).toBe(500); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); + expect(body.error.message).toBe("Registry configuration is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + REGISTRY_SIGNING_KEYS: expect.any(Array), + }); + }); +}); diff --git a/apps/registry/src/server.test/agent-registration-create.success.test.ts b/apps/registry/src/server.test/agent-registration-create.success.test.ts new file mode 100644 index 0000000..0a933c6 --- /dev/null +++ b/apps/registry/src/server.test/agent-registration-create.success.test.ts @@ -0,0 +1,227 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { generateEd25519Keypair, verifyAIT } from "@clawdentity/sdk"; +import { describe, expect, it } from "vitest"; +import { + DEFAULT_AGENT_FRAMEWORK, + DEFAULT_AGENT_TTL_DAYS, +} from "../agent-registration.js"; +import { createRegistryApp } from "../server.js"; +import { + createDefaultRegistrySigning, + createFakeDb, + createTestBindings, + makeValidPatContext, + requestRegistrationChallenge, + signRegistrationChallenge, +} from "./helpers.js"; + +describe("POST /v1/agents", () => { + it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { + const { token, authRow } = await makeValidPatContext(); + const { + database, + agentInserts, + agentAuthSessionInserts, + agentAuthEventInserts, + } = createFakeDb([authRow]); + const { signingEnv } = await createDefaultRegistrySigning(); + const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + + const { response: challengeResponse, body: challengeBody } = + await requestRegistrationChallenge({ + app: appInstance, + token, + publicKey: encodeBase64url(agentKeypair.publicKey), + bindings: createTestBindings(database, signingEnv), + }); + 
expect(challengeResponse.status).toBe(201); + + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-01", + secretKey: agentKeypair.secretKey, + }); + + const res = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-01", + publicKey: encodeBase64url(agentKeypair.publicKey), + challengeId: challengeBody.challengeId, + challengeSignature, + }), + }, + createTestBindings(database, signingEnv), + ); + + expect(res.status).toBe(201); + const body = (await res.json()) as { + agent: { + id: string; + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + ttlDays: number; + status: string; + expiresAt: string; + createdAt: string; + updatedAt: string; + }; + ait: string; + agentAuth: { + tokenType: string; + accessToken: string; + accessExpiresAt: string; + refreshToken: string; + refreshExpiresAt: string; + }; + }; + + expect(body.agent.name).toBe("agent-01"); + expect(body.agent.framework).toBe(DEFAULT_AGENT_FRAMEWORK); + expect(body.agent.ttlDays).toBe(DEFAULT_AGENT_TTL_DAYS); + expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(body.agent.status).toBe("active"); + expect(body.ait).toEqual(expect.any(String)); + expect(body.agentAuth.tokenType).toBe("Bearer"); + expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); + expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); + expect(Date.parse(body.agentAuth.accessExpiresAt)).toBeGreaterThan( + Date.now(), + ); + expect(Date.parse(body.agentAuth.refreshExpiresAt)).toBeGreaterThan( + Date.now(), + ); + + expect(agentInserts).toHaveLength(1); + const inserted = 
agentInserts[0]; + expect(inserted?.owner_id).toBe("human-1"); + expect(inserted?.name).toBe("agent-01"); + expect(inserted?.framework).toBe(DEFAULT_AGENT_FRAMEWORK); + expect(inserted?.public_key).toBe(encodeBase64url(agentKeypair.publicKey)); + expect(inserted?.current_jti).toBe(body.agent.currentJti); + expect(inserted?.expires_at).toBe(body.agent.expiresAt); + expect(agentAuthSessionInserts).toHaveLength(1); + expect(agentAuthSessionInserts[0]).toMatchObject({ + agent_id: body.agent.id, + status: "active", + }); + expect(agentAuthEventInserts).toHaveLength(1); + expect(agentAuthEventInserts[0]).toMatchObject({ + agent_id: body.agent.id, + event_type: "issued", + }); + }); + + it("returns verifiable AIT using published keyset", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const { signingEnv } = await createDefaultRegistrySigning(); + const agentKeypair = await generateEd25519Keypair(); + const appInstance = createRegistryApp(); + + const { response: challengeResponse, body: challengeBody } = + await requestRegistrationChallenge({ + app: appInstance, + token, + publicKey: encodeBase64url(agentKeypair.publicKey), + bindings: createTestBindings(database, signingEnv), + }); + expect(challengeResponse.status).toBe(201); + + const challengeSignature = await signRegistrationChallenge({ + challengeId: challengeBody.challengeId, + nonce: challengeBody.nonce, + ownerDid: challengeBody.ownerDid, + publicKey: encodeBase64url(agentKeypair.publicKey), + name: "agent-registry-verify", + framework: "openclaw", + ttlDays: 10, + secretKey: agentKeypair.secretKey, + }); + + const registerResponse = await appInstance.request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "agent-registry-verify", + framework: "openclaw", + ttlDays: 10, + publicKey: encodeBase64url(agentKeypair.publicKey), + 
challengeId: challengeBody.challengeId, + challengeSignature, + }), + }, + createTestBindings(database, signingEnv), + ); + + expect(registerResponse.status).toBe(201); + const registerBody = (await registerResponse.json()) as { + agent: { + did: string; + ownerDid: string; + name: string; + framework: string; + publicKey: string; + currentJti: string; + }; + ait: string; + }; + + const keysResponse = await appInstance.request( + "/.well-known/claw-keys.json", + {}, + createTestBindings(database, signingEnv), + ); + const keysBody = (await keysResponse.json()) as { + keys: Array<{ + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; + }>; + }; + + const claims = await verifyAIT({ + token: registerBody.ait, + expectedIssuer: "https://dev.registry.clawdentity.com", + registryKeys: keysBody.keys + .filter((key) => key.status === "active") + .map((key) => ({ + kid: key.kid, + jwk: { + kty: "OKP" as const, + crv: key.crv, + x: key.x, + }, + })), + }); + + expect(claims.iss).toBe("https://dev.registry.clawdentity.com"); + expect(claims.sub).toBe(registerBody.agent.did); + expect(claims.ownerDid).toBe(registerBody.agent.ownerDid); + expect(claims.name).toBe(registerBody.agent.name); + expect(claims.framework).toBe(registerBody.agent.framework); + expect(claims.cnf.jwk.x).toBe(registerBody.agent.publicKey); + expect(claims.jti).toBe(registerBody.agent.currentJti); + }); +}); diff --git a/apps/registry/src/server.test/agent-registration-create.test.ts b/apps/registry/src/server.test/agent-registration-create.test.ts deleted file mode 100644 index 80315ce..0000000 --- a/apps/registry/src/server.test/agent-registration-create.test.ts +++ /dev/null @@ -1,779 +0,0 @@ -import { - AGENT_REGISTRATION_CHALLENGE_PATH, - encodeBase64url, - generateUlid, -} from "@clawdentity/protocol"; -import { - encodeEd25519SignatureBase64url, - generateEd25519Keypair, - verifyAIT, -} from "@clawdentity/sdk"; -import { describe, expect, it } from "vitest"; 
-import { - DEFAULT_AGENT_FRAMEWORK, - DEFAULT_AGENT_TTL_DAYS, -} from "../agent-registration.js"; -import { createRegistryApp } from "../server.js"; -import { - createFakeDb, - makeValidPatContext, - signRegistrationChallenge, -} from "./helpers.js"; - -describe("POST /v1/agents", () => { - it("returns 401 when PAT is missing", async () => { - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { "content-type": "application/json" }, - body: JSON.stringify({ - name: "agent-01", - publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - }), - }, - { DB: {} as D1Database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(401); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("API_KEY_MISSING"); - }); - - it("returns 400 when request payload is invalid", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "!!!", - framework: "", - publicKey: "not-base64url", - ttlDays: 0, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Agent registration payload is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - name: 
expect.any(Array), - framework: expect.any(Array), - publicKey: expect.any(Array), - ttlDays: expect.any(Array), - challengeId: expect.any(Array), - challengeSignature: expect.any(Array), - }); - }); - - it("returns verbose malformed-json error in test", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: '{"name":"agent-01"', - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Request body must be valid JSON"); - expect(body.error.details).toBeUndefined(); - }); - - it("returns generic malformed-json error in production", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: '{"name":"agent-01"', - }, - { - DB: database, - ENVIRONMENT: "production", - PROXY_URL: "https://proxy.clawdentity.com", - REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", - EVENT_BUS_BACKEND: "memory", - BOOTSTRAP_SECRET: "bootstrap-secret", - REGISTRY_SIGNING_KEY: "test-signing-key", - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { 
fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Request could not be processed"); - expect(body.error.details).toBeUndefined(); - }); - - it("returns generic validation error details in production", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "!!!", - publicKey: "not-base64url", - }), - }, - { - DB: database, - ENVIRONMENT: "production", - PROXY_URL: "https://proxy.clawdentity.com", - REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", - EVENT_BUS_BACKEND: "memory", - BOOTSTRAP_SECRET: "bootstrap-secret", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); - expect(body.error.message).toBe("Request could not be processed"); - expect(body.error.details).toBeUndefined(); - }); - - it("returns 400 when registration challenge is missing", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const challengeSignature = encodeEd25519SignatureBase64url( - Uint8Array.from({ length: 64 }, (_, index) => index + 1), - ); - - const res = await createRegistryApp().request( - 
"/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-missing-challenge", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: generateUlid(1700000000000), - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_NOT_FOUND"); - }); - - it("returns 400 when challenge signature is invalid", async () => { - const { token, authRow } = await makeValidPatContext(); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const challengeId = generateUlid(1700000010000); - const challengeNonce = encodeBase64url( - Uint8Array.from({ length: 24 }, (_, index) => index + 3), - ); - const { database } = createFakeDb([authRow], [], { - registrationChallengeRows: [ - { - id: challengeId, - ownerId: "human-1", - publicKey: encodeBase64url(agentKeypair.publicKey), - nonce: challengeNonce, - status: "pending", - expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - usedAt: null, - createdAt: "2026-01-01T00:00:00.000Z", - updatedAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - const invalidSignature = await signRegistrationChallenge({ - challengeId, - nonce: challengeNonce, - ownerDid: authRow.humanDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "wrong-name", - secretKey: agentKeypair.secretKey, - }); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": 
"application/json", - }, - body: JSON.stringify({ - name: "agent-proof-invalid", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId, - challengeSignature: invalidSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_REGISTRATION_PROOF_INVALID"); - }); - - it("returns 400 when challenge has already been used", async () => { - const { token, authRow } = await makeValidPatContext(); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const challengeId = generateUlid(1700000011000); - const challengeNonce = encodeBase64url( - Uint8Array.from({ length: 24 }, (_, index) => index + 5), - ); - const { database } = createFakeDb([authRow], [], { - registrationChallengeRows: [ - { - id: challengeId, - ownerId: "human-1", - publicKey: encodeBase64url(agentKeypair.publicKey), - nonce: challengeNonce, - status: "used", - expiresAt: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - usedAt: new Date(Date.now() - 60 * 1000).toISOString(), - createdAt: "2026-01-01T00:00:00.000Z", - updatedAt: "2026-01-01T00:00:00.000Z", - }, - ], - }); - const signature = await signRegistrationChallenge({ - challengeId, - nonce: challengeNonce, - ownerDid: authRow.humanDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "agent-challenge-replayed", - secretKey: agentKeypair.secretKey, - }); - - const res = await createRegistryApp().request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-challenge-replayed", - 
publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId, - challengeSignature: signature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(400); - const body = (await res.json()) as { error: { code: string } }; - expect(body.error.code).toBe("AGENT_REGISTRATION_CHALLENGE_REPLAYED"); - }); - - it("creates an agent, defaults framework/ttl, and persists current_jti + expires_at", async () => { - const { token, authRow } = await makeValidPatContext(); - const { - database, - agentInserts, - agentAuthSessionInserts, - agentAuthEventInserts, - } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - - const challengeResponse = await appInstance.request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - expect(challengeResponse.status).toBe(201); - const challengeBody = (await challengeResponse.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - }; - const challengeSignature = await signRegistrationChallenge({ - challengeId: challengeBody.challengeId, - nonce: challengeBody.nonce, - ownerDid: challengeBody.ownerDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: 
"agent-01", - secretKey: agentKeypair.secretKey, - }); - - const res = await appInstance.request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-01", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: challengeBody.challengeId, - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(201); - const body = (await res.json()) as { - agent: { - id: string; - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - ttlDays: number; - status: string; - expiresAt: string; - createdAt: string; - updatedAt: string; - }; - ait: string; - agentAuth: { - tokenType: string; - accessToken: string; - accessExpiresAt: string; - refreshToken: string; - refreshExpiresAt: string; - }; - }; - - expect(body.agent.name).toBe("agent-01"); - expect(body.agent.framework).toBe(DEFAULT_AGENT_FRAMEWORK); - expect(body.agent.ttlDays).toBe(DEFAULT_AGENT_TTL_DAYS); - expect(body.agent.publicKey).toBe(encodeBase64url(agentKeypair.publicKey)); - expect(body.agent.status).toBe("active"); - expect(body.ait).toEqual(expect.any(String)); - expect(body.agentAuth.tokenType).toBe("Bearer"); - expect(body.agentAuth.accessToken.startsWith("clw_agt_")).toBe(true); - expect(body.agentAuth.refreshToken.startsWith("clw_rft_")).toBe(true); - expect(Date.parse(body.agentAuth.accessExpiresAt)).toBeGreaterThan( - Date.now(), - ); - expect(Date.parse(body.agentAuth.refreshExpiresAt)).toBeGreaterThan( - Date.now(), - ); - - expect(agentInserts).toHaveLength(1); - const inserted = agentInserts[0]; - 
expect(inserted?.owner_id).toBe("human-1"); - expect(inserted?.name).toBe("agent-01"); - expect(inserted?.framework).toBe(DEFAULT_AGENT_FRAMEWORK); - expect(inserted?.public_key).toBe(encodeBase64url(agentKeypair.publicKey)); - expect(inserted?.current_jti).toBe(body.agent.currentJti); - expect(inserted?.expires_at).toBe(body.agent.expiresAt); - expect(agentAuthSessionInserts).toHaveLength(1); - expect(agentAuthSessionInserts[0]).toMatchObject({ - agent_id: body.agent.id, - status: "active", - }); - expect(agentAuthEventInserts).toHaveLength(1); - expect(agentAuthEventInserts[0]).toMatchObject({ - agent_id: body.agent.id, - event_type: "issued", - }); - }); - - it("returns verifiable AIT using published keyset", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - const signingKeyset = JSON.stringify([ - { - kid: "reg-key-1", - alg: "EdDSA", - crv: "Ed25519", - x: encodeBase64url(signer.publicKey), - status: "active", - }, - ]); - - const challengeResponse = await appInstance.request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - expect(challengeResponse.status).toBe(201); - const challengeBody = (await challengeResponse.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - }; - const challengeSignature = await signRegistrationChallenge({ - challengeId: challengeBody.challengeId, - nonce: challengeBody.nonce, - ownerDid: challengeBody.ownerDid, - publicKey: 
encodeBase64url(agentKeypair.publicKey), - name: "agent-registry-verify", - framework: "openclaw", - ttlDays: 10, - secretKey: agentKeypair.secretKey, - }); - - const registerResponse = await appInstance.request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-registry-verify", - framework: "openclaw", - ttlDays: 10, - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: challengeBody.challengeId, - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - - expect(registerResponse.status).toBe(201); - const registerBody = (await registerResponse.json()) as { - agent: { - did: string; - ownerDid: string; - name: string; - framework: string; - publicKey: string; - currentJti: string; - }; - ait: string; - }; - - const keysResponse = await appInstance.request( - "/.well-known/claw-keys.json", - {}, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: signingKeyset, - }, - ); - const keysBody = (await keysResponse.json()) as { - keys: Array<{ - kid: string; - alg: "EdDSA"; - crv: "Ed25519"; - x: string; - status: "active" | "revoked"; - }>; - }; - - const claims = await verifyAIT({ - token: registerBody.ait, - expectedIssuer: "https://dev.registry.clawdentity.com", - registryKeys: keysBody.keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP" as const, - crv: key.crv, - x: key.x, - }, - })), - }); - - expect(claims.iss).toBe("https://dev.registry.clawdentity.com"); - expect(claims.sub).toBe(registerBody.agent.did); - expect(claims.ownerDid).toBe(registerBody.agent.ownerDid); - expect(claims.name).toBe(registerBody.agent.name); - 
expect(claims.framework).toBe(registerBody.agent.framework); - expect(claims.cnf.jwk.x).toBe(registerBody.agent.publicKey); - expect(claims.jti).toBe(registerBody.agent.currentJti); - }); - - it("returns 500 when signer secret does not match any active published key", async () => { - const { token, authRow } = await makeValidPatContext(); - const { database } = createFakeDb([authRow]); - const signer = await generateEd25519Keypair(); - const wrongPublishedKey = await generateEd25519Keypair(); - const agentKeypair = await generateEd25519Keypair(); - const appInstance = createRegistryApp(); - - const challengeResponse = await appInstance.request( - AGENT_REGISTRATION_CHALLENGE_PATH, - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - publicKey: encodeBase64url(agentKeypair.publicKey), - }), - }, - { - DB: database, - ENVIRONMENT: "test", - }, - ); - expect(challengeResponse.status).toBe(201); - const challengeBody = (await challengeResponse.json()) as { - challengeId: string; - nonce: string; - ownerDid: string; - }; - const challengeSignature = await signRegistrationChallenge({ - challengeId: challengeBody.challengeId, - nonce: challengeBody.nonce, - ownerDid: challengeBody.ownerDid, - publicKey: encodeBase64url(agentKeypair.publicKey), - name: "agent-signer-mismatch", - secretKey: agentKeypair.secretKey, - }); - - const res = await appInstance.request( - "/v1/agents", - { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "content-type": "application/json", - }, - body: JSON.stringify({ - name: "agent-signer-mismatch", - publicKey: encodeBase64url(agentKeypair.publicKey), - challengeId: challengeBody.challengeId, - challengeSignature, - }), - }, - { - DB: database, - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), - REGISTRY_SIGNING_KEYS: JSON.stringify([ - { - kid: "reg-key-2", - alg: "EdDSA", - crv: "Ed25519", - x: 
encodeBase64url(wrongPublishedKey.publicKey), - status: "active", - }, - ]), - }, - ); - - expect(res.status).toBe(500); - const body = (await res.json()) as { - error: { - code: string; - message: string; - details?: { fieldErrors?: Record }; - }; - }; - expect(body.error.code).toBe("CONFIG_VALIDATION_FAILED"); - expect(body.error.message).toBe("Registry configuration is invalid"); - expect(body.error.details?.fieldErrors).toMatchObject({ - REGISTRY_SIGNING_KEYS: expect.any(Array), - }); - }); -}); diff --git a/apps/registry/src/server.test/agent-registration-create.validation.test.ts b/apps/registry/src/server.test/agent-registration-create.validation.test.ts new file mode 100644 index 0000000..8bc5f16 --- /dev/null +++ b/apps/registry/src/server.test/agent-registration-create.validation.test.ts @@ -0,0 +1,184 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { + createDefaultRegistrySigning, + createFakeDb, + createProductionBindings, + createTestBindings, + makeValidPatContext, +} from "./helpers.js"; + +describe("POST /v1/agents", () => { + it("returns 401 when PAT is missing", async () => { + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + name: "agent-01", + publicKey: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + }), + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 400 when request payload is invalid", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const { signingEnv } = await createDefaultRegistrySigning(); + + const res = await createRegistryApp().request( + 
"/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "!!!", + framework: "", + publicKey: "not-base64url", + ttlDays: 0, + }), + }, + createTestBindings(database, signingEnv), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Agent registration payload is invalid"); + expect(body.error.details?.fieldErrors).toMatchObject({ + name: expect.any(Array), + framework: expect.any(Array), + publicKey: expect.any(Array), + ttlDays: expect.any(Array), + challengeId: expect.any(Array), + challengeSignature: expect.any(Array), + }); + }); + + it("returns verbose malformed-json error in test", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: '{"name":"agent-01"', + }, + createTestBindings(database), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request body must be valid JSON"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns generic malformed-json error in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + 
}, + body: '{"name":"agent-01"', + }, + createProductionBindings(database, { + REGISTRY_SIGNING_KEY: "test-signing-key", + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyA", + status: "active", + }, + ]), + }), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); + + it("returns generic validation error details in production", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database } = createFakeDb([authRow]); + const { signer, signingEnv } = await createDefaultRegistrySigning(); + + const res = await createRegistryApp().request( + "/v1/agents", + { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + name: "!!!", + publicKey: "not-base64url", + }), + }, + createProductionBindings(database, { + REGISTRY_SIGNING_KEY: encodeBase64url(signer.secretKey), + REGISTRY_SIGNING_KEYS: signingEnv.REGISTRY_SIGNING_KEYS, + }), + ); + + expect(res.status).toBe(400); + const body = (await res.json()) as { + error: { + code: string; + message: string; + details?: { fieldErrors?: Record }; + }; + }; + expect(body.error.code).toBe("AGENT_REGISTRATION_INVALID"); + expect(body.error.message).toBe("Request could not be processed"); + expect(body.error.details).toBeUndefined(); + }); +}); diff --git a/apps/registry/src/server.test/helpers.ts b/apps/registry/src/server.test/helpers.ts index fe04a6b..f765ebd 100644 --- a/apps/registry/src/server.test/helpers.ts +++ b/apps/registry/src/server.test/helpers.ts @@ -1,8 +1,13 @@ export type { FakeD1Row } from "./helpers/index.js"; 
export { + createDefaultRegistrySigning, createFakeDb, + createProductionBindings, + createRegistrySigningEnv, createSignedAgentRefreshRequest, + createTestBindings, makeAitClaims, makeValidPatContext, + requestRegistrationChallenge, signRegistrationChallenge, } from "./helpers/index.js"; diff --git a/apps/registry/src/server.test/helpers/AGENTS.md b/apps/registry/src/server.test/helpers/AGENTS.md index 32e775b..7bb7c83 100644 --- a/apps/registry/src/server.test/helpers/AGENTS.md +++ b/apps/registry/src/server.test/helpers/AGENTS.md @@ -6,7 +6,7 @@ ## Structure Rules - Keep `../helpers.ts` as a stable export-only shim for tests. - Group helper implementations by concern: - - `claims.ts`, `crypto.ts`, `pat.ts` for top-level helper APIs. + - `claims.ts`, `crypto.ts`, `pat.ts`, `agent-registration.ts` for top-level helper APIs. - `db/types.ts`, `db/parse.ts`, `db/resolvers.ts`, `db/mock.ts`, `db/run-handlers*.ts` for fake D1 behavior. - Keep each helper file under 800 lines; split further when a file approaches the limit. 
diff --git a/apps/registry/src/server.test/helpers/agent-registration.ts b/apps/registry/src/server.test/helpers/agent-registration.ts new file mode 100644 index 0000000..2f7dcc5 --- /dev/null +++ b/apps/registry/src/server.test/helpers/agent-registration.ts @@ -0,0 +1,127 @@ +import { + AGENT_REGISTRATION_CHALLENGE_PATH, + encodeBase64url, +} from "@clawdentity/protocol"; +import { generateEd25519Keypair } from "@clawdentity/sdk"; + +export type RegistrationChallengeBody = { + challengeId: string; + nonce: string; + ownerDid: string; +}; + +export type RegistrySigningEnv = { + REGISTRY_SIGNING_KEY: string; + REGISTRY_SIGNING_KEYS: string; +}; + +export type Ed25519Keypair = Awaited>; + +export function createRegistrySigningEnv(input: { + publicKey: Uint8Array; + secretKey: Uint8Array; + kid?: string; +}): RegistrySigningEnv { + return { + REGISTRY_SIGNING_KEY: encodeBase64url(input.secretKey), + REGISTRY_SIGNING_KEYS: JSON.stringify([ + { + kid: input.kid ?? "reg-key-1", + alg: "EdDSA", + crv: "Ed25519", + x: encodeBase64url(input.publicKey), + status: "active", + }, + ]), + }; +} + +export async function createDefaultRegistrySigning(input?: { + kid?: string; +}): Promise<{ + signer: Ed25519Keypair; + signingEnv: RegistrySigningEnv; +}> { + const signer = await generateEd25519Keypair(); + return { + signer, + signingEnv: createRegistrySigningEnv({ + publicKey: signer.publicKey, + secretKey: signer.secretKey, + kid: input?.kid, + }), + }; +} + +export function createTestBindings( + database: D1Database, + extra: Record = {}, +): { DB: D1Database; ENVIRONMENT: "test" } & Record { + return { + DB: database, + ENVIRONMENT: "test", + ...extra, + }; +} + +export function createProductionBindings( + database: D1Database, + extra: Record = {}, +): { + DB: D1Database; + ENVIRONMENT: "production"; + PROXY_URL: string; + REGISTRY_ISSUER_URL: string; + EVENT_BUS_BACKEND: "memory"; + BOOTSTRAP_SECRET: string; +} & Record { + return { + DB: database, + ENVIRONMENT: 
"production", + PROXY_URL: "https://proxy.clawdentity.com", + REGISTRY_ISSUER_URL: "https://registry.clawdentity.com", + EVENT_BUS_BACKEND: "memory", + BOOTSTRAP_SECRET: "bootstrap-secret", + ...extra, + }; +} + +export async function requestRegistrationChallenge(input: { + app: unknown; + token: string; + publicKey: string; + bindings: unknown; +}): Promise<{ + response: Response; + body: RegistrationChallengeBody; +}> { + const request = ( + input.app as { + request: ( + path: URL | RequestInfo, + requestInit?: RequestInit, + bindings?: unknown, + ) => Response | Promise; + } + ).request; + + const response = await Promise.resolve( + request( + AGENT_REGISTRATION_CHALLENGE_PATH, + { + method: "POST", + headers: { + Authorization: `Bearer ${input.token}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + publicKey: input.publicKey, + }), + }, + input.bindings, + ), + ); + + const body = (await response.json()) as RegistrationChallengeBody; + return { response, body }; +} diff --git a/apps/registry/src/server.test/helpers/index.ts b/apps/registry/src/server.test/helpers/index.ts index 3fb0ec9..5f3c10e 100644 --- a/apps/registry/src/server.test/helpers/index.ts +++ b/apps/registry/src/server.test/helpers/index.ts @@ -1,3 +1,10 @@ +export { + createDefaultRegistrySigning, + createProductionBindings, + createRegistrySigningEnv, + createTestBindings, + requestRegistrationChallenge, +} from "./agent-registration.js"; export { makeAitClaims } from "./claims.js"; export { createSignedAgentRefreshRequest, diff --git a/packages/connector/AGENTS.md b/packages/connector/AGENTS.md index d650daa..764d0ed 100644 --- a/packages/connector/AGENTS.md +++ b/packages/connector/AGENTS.md @@ -28,3 +28,8 @@ - Client tests must mock WebSocket/fetch and verify heartbeat ack, delivery forwarding, reconnect, and outbound queue flush behavior. 
- Inbox tests must cover persistence, dedupe by request id, cap enforcement, and replay state transitions (`markReplayFailure`/`markDelivered`). - Keep tests fully offline and deterministic (fake timers where timing matters). + +## Modularization Notes +- Treat `ConnectorClient` as the orchestration entry point; extract lifecycle, socket event handling, and reconnect scheduling into explicit helper modules so the public API stays stable while internals become easier to unit test. +- New helper modules should expose narrow interfaces (start/stop, attach/detach, schedule/clear) and accept injected dependencies like `Logger`, heartbeat/metric helpers, and hooks so they are replaceable during testing. +- Document any new helper modules in the respective `client/AGENTS.md` so future contributors can quickly see the division of responsibilities. diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index c96fbdd..4f58469 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -20,7 +20,8 @@ import { handleIncomingConnectorMessage } from "./client/inbound.js"; import { handleInboundDeliverFrame } from "./client/inbound-delivery.js"; import { ConnectorClientMetricsTracker } from "./client/metrics.js"; import { ConnectorOutboundQueueManager } from "./client/queue.js"; -import { computeJitteredBackoffDelayMs } from "./client/retry.js"; +import { ConnectorReconnectScheduler } from "./client/reconnect-scheduler.js"; +import { attachConnectorSocketEventListeners } from "./client/socket-events.js"; import type { ConnectorClientHooks, ConnectorClientMetricsSnapshot, @@ -97,11 +98,10 @@ export class ConnectorClient { private readonly outboundQueue: ConnectorOutboundQueueManager; private readonly localOpenclawDelivery: LocalOpenclawDeliveryClient; private readonly metricsTracker: ConnectorClientMetricsTracker; + private readonly reconnectScheduler: ConnectorReconnectScheduler; private socket: ConnectorWebSocket | undefined; 
- private reconnectTimeout: ReturnType | undefined; private connectTimeout: ReturnType | undefined; - private reconnectAttempt = 0; private authUpgradeImmediateRetryUsed = false; private started = false; @@ -196,6 +196,19 @@ export class ConnectorClient { logger: this.logger, }); this.metricsTracker = new ConnectorClientMetricsTracker(this.now); + this.reconnectScheduler = new ConnectorReconnectScheduler({ + minDelayMs: this.reconnectMinDelayMs, + maxDelayMs: this.reconnectMaxDelayMs, + backoffFactor: this.reconnectBackoffFactor, + jitterRatio: this.reconnectJitterRatio, + random: this.random, + onSchedule: () => { + this.metricsTracker.onReconnectScheduled(); + }, + onReconnect: () => { + void this.connectSocket(); + }, + }); const openclawHookUrl = toOpenclawHookUrl( options.openclawBaseUrl, @@ -231,7 +244,7 @@ export class ConnectorClient { disconnect(): void { this.started = false; - this.clearReconnectTimeout(); + this.reconnectScheduler.clear(); this.clearSocketState(); if (this.socket !== undefined) { @@ -275,7 +288,7 @@ export class ConnectorClient { } private async connectSocket(): Promise { - this.clearReconnectTimeout(); + this.reconnectScheduler.clear(); this.metricsTracker.onConnectAttempt(); if (this.outboundQueuePersistence !== undefined) { @@ -314,97 +327,95 @@ export class ConnectorClient { const socket = this.socket; this.startConnectTimeout(socket); - socket.addEventListener("open", () => { - if (this.socket !== socket) { - return; - } - - this.clearConnectTimeout(); - this.reconnectAttempt = 0; - this.authUpgradeImmediateRetryUsed = false; - this.metricsTracker.onSocketConnected(this.makeTimestamp()); - this.logger.info("connector.websocket.connected", { - url: this.connectorUrl, - }); - this.startHeartbeatInterval(); - this.flushOutboundQueue(); - this.hooks.onConnected?.(); - }); - - socket.addEventListener("message", (event) => { - if (this.socket !== socket) { - return; - } - - void this.handleIncomingMessage(readMessageEventData(event)); - 
}); - - socket.addEventListener("close", (event) => { - if (!this.detachSocket(socket)) { - return; - } - - const closeEvent = readCloseEvent(event); + attachConnectorSocketEventListeners(socket, { + onOpen: () => { + if (this.socket !== socket) { + return; + } + + this.clearConnectTimeout(); + this.reconnectScheduler.resetAttempts(); + this.authUpgradeImmediateRetryUsed = false; + this.metricsTracker.onSocketConnected(this.makeTimestamp()); + this.logger.info("connector.websocket.connected", { + url: this.connectorUrl, + }); + this.startHeartbeatInterval(); + this.flushOutboundQueue(); + this.hooks.onConnected?.(); + }, + onMessage: (event) => { + if (this.socket !== socket) { + return; + } - this.logger.warn("connector.websocket.closed", { - closeCode: closeEvent.code, - reason: closeEvent.reason, - wasClean: closeEvent.wasClean, - }); + void this.handleIncomingMessage(readMessageEventData(event)); + }, + onClose: (event) => { + if (!this.detachSocket(socket)) { + return; + } - this.hooks.onDisconnected?.({ - code: closeEvent.code, - reason: closeEvent.reason, - wasClean: closeEvent.wasClean, - }); + const closeEvent = readCloseEvent(event); - if (this.started) { - this.scheduleReconnect(); - } - }); + this.logger.warn("connector.websocket.closed", { + closeCode: closeEvent.code, + reason: closeEvent.reason, + wasClean: closeEvent.wasClean, + }); - socket.addEventListener("error", (event) => { - if (this.socket !== socket) { - return; - } + this.hooks.onDisconnected?.({ + code: closeEvent.code, + reason: closeEvent.reason, + wasClean: closeEvent.wasClean, + }); - const readyState = socket.readyState; - const shouldForceReconnect = - readyState !== WS_READY_STATE_OPEN && - readyState !== WS_READY_STATE_CONNECTING; - if (!shouldForceReconnect) { + if (this.started) { + this.scheduleReconnect(); + } + }, + onError: (event) => { + if (this.socket !== socket) { + return; + } + + const readyState = socket.readyState; + const shouldForceReconnect = + readyState !== 
WS_READY_STATE_OPEN && + readyState !== WS_READY_STATE_CONNECTING; + if (!shouldForceReconnect) { + this.logger.warn("connector.websocket.error", { + url: this.connectorUrl, + reason: readErrorEventReason(event), + readyState, + }); + return; + } + + if (!this.detachSocket(socket)) { + return; + } + + const reason = readErrorEventReason(event); this.logger.warn("connector.websocket.error", { url: this.connectorUrl, - reason: readErrorEventReason(event), - readyState, + reason, }); - return; - } - - if (!this.detachSocket(socket)) { - return; - } - - const reason = readErrorEventReason(event); - this.logger.warn("connector.websocket.error", { - url: this.connectorUrl, - reason, - }); - this.closeSocketQuietly(socket, 1011, "websocket error"); - - this.hooks.onDisconnected?.({ - code: 1006, - reason, - wasClean: false, - }); + this.closeSocketQuietly(socket, 1011, "websocket error"); - if (this.started) { - this.scheduleReconnect(); - } - }); + this.hooks.onDisconnected?.({ + code: 1006, + reason, + wasClean: false, + }); - socket.addEventListener("unexpected-response", (event) => { - void this.handleUnexpectedResponse(socket, event); + if (this.started) { + this.scheduleReconnect(); + } + }, + onUnexpectedResponse: (event) => { + void this.handleUnexpectedResponse(socket, event); + }, }); } @@ -416,37 +427,7 @@ export class ConnectorClient { return; } - this.clearReconnectTimeout(); - - let delayMs: number; - if (options?.delayMs !== undefined) { - delayMs = Math.max(0, Math.floor(options.delayMs)); - } else { - delayMs = computeJitteredBackoffDelayMs({ - minDelayMs: this.reconnectMinDelayMs, - maxDelayMs: this.reconnectMaxDelayMs, - backoffFactor: this.reconnectBackoffFactor, - attempt: this.reconnectAttempt, - jitterRatio: this.reconnectJitterRatio, - random: this.random, - }); - } - - if (options?.incrementAttempt ?? 
true) { - this.reconnectAttempt += 1; - } - this.metricsTracker.onReconnectScheduled(); - - this.reconnectTimeout = setTimeout(() => { - void this.connectSocket(); - }, delayMs); - } - - private clearReconnectTimeout(): void { - if (this.reconnectTimeout !== undefined) { - clearTimeout(this.reconnectTimeout); - this.reconnectTimeout = undefined; - } + this.reconnectScheduler.schedule(options); } private startConnectTimeout(socket: ConnectorWebSocket): void { diff --git a/packages/connector/src/client/AGENTS.md b/packages/connector/src/client/AGENTS.md index bb76c98..7f7942d 100644 --- a/packages/connector/src/client/AGENTS.md +++ b/packages/connector/src/client/AGENTS.md @@ -4,6 +4,9 @@ - Keep `ConnectorClient` internals modular, testable, and deterministic. ## Rules +- Keep `client.ts` as orchestration for public API methods (`connect`, `disconnect`, `enqueueOutbound`) and high-level flow only. +- Keep reconnect timer/attempt scheduling logic in `reconnect-scheduler.ts`. +- Keep websocket listener registration wiring in `socket-events.ts`. - Keep frame/event parsing and sanitization in `helpers.ts` as pure functions. - Keep inbound frame parsing + frame-type dispatch in `inbound.ts` so `client.ts` only wires handlers. - Keep connector transport/inbound delivery metrics state in `metrics.ts` to avoid duplicating counters in `client.ts`. @@ -11,3 +14,8 @@ - Keep heartbeat tracking and metrics centralized in `heartbeat.ts`. - Keep outbound queue persistence and load/flush semantics centralized in `queue.ts`. - Keep local OpenClaw delivery/retry behavior in `delivery.ts` and inbound ack orchestration in `inbound-delivery.ts`. +- Design additional helper modules with narrow interfaces: + - `lifecycle.ts` should orchestrate `connect`/`disconnect`, queue hydration, heartbeat lifecycle, and hook invocation while exposing start/stop/attached-state APIs invoked by `ConnectorClient`. 
+ - `socket-events.ts` should register WebSocket listeners (`open`, `message`, `close`, `error`, `unexpected-response`) via dependency-injected callbacks (logger, hooks, heartbeat manager, reconnect scheduler) so event handling remains testable. + - `reconnect.ts` should own reconnection timers/backoff (`schedule`, `clear`) using injected timing/random utilities plus a pluggable callback instead of inline timeout tracking inside `client.ts`. + - Each helper module must accept only the dependencies it truly needs (e.g., logger, metrics tracker, heartbeat/reconnect interfaces, hooks) so wiring in `ConnectorClient` stays declarative and easy to mock. diff --git a/packages/connector/src/client/reconnect-scheduler.ts b/packages/connector/src/client/reconnect-scheduler.ts new file mode 100644 index 0000000..3448991 --- /dev/null +++ b/packages/connector/src/client/reconnect-scheduler.ts @@ -0,0 +1,73 @@ +import { computeJitteredBackoffDelayMs } from "./retry.js"; + +type ConnectorReconnectSchedulerOptions = { + minDelayMs: number; + maxDelayMs: number; + backoffFactor: number; + jitterRatio: number; + random: () => number; + onSchedule: () => void; + onReconnect: () => void; +}; + +export class ConnectorReconnectScheduler { + private readonly minDelayMs: number; + private readonly maxDelayMs: number; + private readonly backoffFactor: number; + private readonly jitterRatio: number; + private readonly random: () => number; + private readonly onSchedule: () => void; + private readonly onReconnect: () => void; + + private attempt = 0; + private timeout: ReturnType | undefined; + + constructor(options: ConnectorReconnectSchedulerOptions) { + this.minDelayMs = options.minDelayMs; + this.maxDelayMs = options.maxDelayMs; + this.backoffFactor = options.backoffFactor; + this.jitterRatio = options.jitterRatio; + this.random = options.random; + this.onSchedule = options.onSchedule; + this.onReconnect = options.onReconnect; + } + + schedule(options?: { delayMs?: number; 
incrementAttempt?: boolean }): void { + this.clear(); + + let delayMs: number; + if (options?.delayMs !== undefined) { + delayMs = Math.max(0, Math.floor(options.delayMs)); + } else { + delayMs = computeJitteredBackoffDelayMs({ + minDelayMs: this.minDelayMs, + maxDelayMs: this.maxDelayMs, + backoffFactor: this.backoffFactor, + attempt: this.attempt, + jitterRatio: this.jitterRatio, + random: this.random, + }); + } + + if (options?.incrementAttempt ?? true) { + this.attempt += 1; + } + + this.onSchedule(); + this.timeout = setTimeout(() => { + this.timeout = undefined; + this.onReconnect(); + }, delayMs); + } + + clear(): void { + if (this.timeout !== undefined) { + clearTimeout(this.timeout); + this.timeout = undefined; + } + } + + resetAttempts(): void { + this.attempt = 0; + } +} diff --git a/packages/connector/src/client/socket-events.ts b/packages/connector/src/client/socket-events.ts new file mode 100644 index 0000000..cbbf35d --- /dev/null +++ b/packages/connector/src/client/socket-events.ts @@ -0,0 +1,20 @@ +import type { ConnectorWebSocket } from "./types.js"; + +type ConnectorSocketEventHandlers = { + onOpen: () => void; + onMessage: (event: unknown) => void; + onClose: (event: unknown) => void; + onError: (event: unknown) => void; + onUnexpectedResponse: (event: unknown) => void; +}; + +export function attachConnectorSocketEventListeners( + socket: ConnectorWebSocket, + handlers: ConnectorSocketEventHandlers, +): void { + socket.addEventListener("open", handlers.onOpen); + socket.addEventListener("message", handlers.onMessage); + socket.addEventListener("close", handlers.onClose); + socket.addEventListener("error", handlers.onError); + socket.addEventListener("unexpected-response", handlers.onUnexpectedResponse); +} diff --git a/scripts/quality/check-file-size.mjs b/scripts/quality/check-file-size.mjs index f3bc413..28698d0 100644 --- a/scripts/quality/check-file-size.mjs +++ b/scripts/quality/check-file-size.mjs @@ -1,7 +1,7 @@ #!/usr/bin/env node import 
{ execFileSync } from "node:child_process"; -import { readFileSync } from "node:fs"; +import { existsSync, readFileSync } from "node:fs"; const MAX_LINES = 800; const EXCLUDED_DIR_SEGMENTS = new Set([ @@ -85,6 +85,10 @@ function countLines(filePath) { } const sourceFiles = trackedFiles.filter((filePath) => { + if (!existsSync(filePath)) { + return false; + } + if (isExcluded(filePath)) { return false; } From 5cd102b199a8897c5a397bac1b02c128b04c533e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Fri, 20 Feb 2026 23:06:19 +0530 Subject: [PATCH 149/190] refactor: split auth, connector, and test helpers by responsibility --- .../src/agent-relay-session.connect.test.ts | 248 +++++++ .../src/agent-relay-session.delivery.test.ts | 283 ++++++++ .../proxy/src/agent-relay-session.rpc.test.ts | 111 +++ .../src/agent-relay-session.test-helpers.ts | 70 ++ apps/proxy/src/agent-relay-session.test.ts | 679 ------------------ apps/proxy/src/auth-middleware.ts | 674 +---------------- apps/proxy/src/auth-middleware/AGENTS.md | 18 + apps/proxy/src/auth-middleware/errors.ts | 32 + apps/proxy/src/auth-middleware/middleware.ts | 425 +++++++++++ .../src/auth-middleware/registry-keys.ts | 57 ++ .../proxy/src/auth-middleware/request-auth.ts | 86 +++ apps/proxy/src/auth-middleware/types.ts | 57 ++ apps/proxy/src/auth-middleware/url.ts | 58 ++ .../src/server.test/agents-delete.test.ts | 215 ++++++ ...reissue.test.ts => agents-reissue.test.ts} | 211 ------ .../src/server.test/helpers/db/AGENTS.md | 12 + .../src/server.test/helpers/db/resolvers.ts | 669 +---------------- .../helpers/db/resolvers/AGENTS.md | 11 + .../db/resolvers/agent-auth-sessions.ts | 112 +++ .../agent-registration-challenges.ts | 82 +++ .../helpers/db/resolvers/agents.ts | 161 +++++ .../helpers/db/resolvers/api-keys.ts | 110 +++ .../server.test/helpers/db/resolvers/crl.ts | 65 ++ .../helpers/db/resolvers/humans.ts | 74 ++ .../server.test/helpers/db/resolvers/index.ts | 23 + .../helpers/db/resolvers/invites.ts | 83 +++ 
packages/connector/src/client.ts | 266 +++---- packages/connector/src/client/AGENTS.md | 12 + .../connector/src/client/inbound-router.ts | 67 ++ .../connector/src/client/outbound-flush.ts | 52 ++ .../connector/src/client/socket-session.ts | 173 +++++ 31 files changed, 2802 insertions(+), 2394 deletions(-) create mode 100644 apps/proxy/src/agent-relay-session.connect.test.ts create mode 100644 apps/proxy/src/agent-relay-session.delivery.test.ts create mode 100644 apps/proxy/src/agent-relay-session.rpc.test.ts create mode 100644 apps/proxy/src/agent-relay-session.test-helpers.ts delete mode 100644 apps/proxy/src/agent-relay-session.test.ts create mode 100644 apps/proxy/src/auth-middleware/AGENTS.md create mode 100644 apps/proxy/src/auth-middleware/errors.ts create mode 100644 apps/proxy/src/auth-middleware/middleware.ts create mode 100644 apps/proxy/src/auth-middleware/registry-keys.ts create mode 100644 apps/proxy/src/auth-middleware/request-auth.ts create mode 100644 apps/proxy/src/auth-middleware/types.ts create mode 100644 apps/proxy/src/auth-middleware/url.ts create mode 100644 apps/registry/src/server.test/agents-delete.test.ts rename apps/registry/src/server.test/{agents-delete-reissue.test.ts => agents-reissue.test.ts} (69%) create mode 100644 apps/registry/src/server.test/helpers/db/AGENTS.md create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/AGENTS.md create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/agent-auth-sessions.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/agent-registration-challenges.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/agents.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/api-keys.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/crl.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/humans.ts create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/index.ts 
create mode 100644 apps/registry/src/server.test/helpers/db/resolvers/invites.ts create mode 100644 packages/connector/src/client/inbound-router.ts create mode 100644 packages/connector/src/client/outbound-flush.ts create mode 100644 packages/connector/src/client/socket-session.ts diff --git a/apps/proxy/src/agent-relay-session.connect.test.ts b/apps/proxy/src/agent-relay-session.connect.test.ts new file mode 100644 index 0000000..9503d6b --- /dev/null +++ b/apps/proxy/src/agent-relay-session.connect.test.ts @@ -0,0 +1,248 @@ +import { parseFrame } from "@clawdentity/connector"; +import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { AgentRelaySession } from "./agent-relay-session.js"; +import { + createMockSocket, + createStateHarness, + RECIPIENT_AGENT_DID, + RELAY_QUEUE_STORAGE_KEY, + SENDER_AGENT_DID, + withMockWebSocketPair, +} from "./agent-relay-session.test-helpers.js"; + +describe("AgentRelaySession connect", () => { + it("accepts websocket connects with hibernation state and schedules heartbeat alarm", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + await withMockWebSocketPair(pairClient, pairServer, async () => { + const request = new Request( + `https://relay.example.test${RELAY_CONNECT_PATH}`, + { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }, + ); + + let connectResponse: Response | undefined; + let connectError: unknown; + try { + connectResponse = await relaySession.fetch(request); + } catch (error) { + connectError = error; + } + + expect(harness.state.acceptWebSocket).toHaveBeenCalledTimes(1); + expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ + "did:claw:agent:connector", + ]); + 
expect(harness.storage.setAlarm.mock.calls.length).toBeGreaterThanOrEqual( + 1, + ); + + // Node's WHATWG Response may reject status 101 in tests; Workers runtime accepts it. + if (connectResponse !== undefined) { + expect(connectResponse.status).toBe(101); + } else { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + }); + + it("returns 426 for non-websocket connect requests", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + + const response = await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + }), + ); + + expect(response.status).toBe(426); + expect(harness.state.acceptWebSocket).not.toHaveBeenCalled(); + }); + + it("returns websocket upgrade quickly while reconnect drain runs in background", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + RELAY_RETRY_INITIAL_MS: "1", + }); + + await relaySession.deliverToConnector({ + requestId: "req-upgrade-fast", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + + const connectState = await withMockWebSocketPair( + pairClient, + pairServer, + async () => { + const connectAttempt = relaySession + .fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ) + .then( + () => "settled" as const, + () => "settled" as const, + ); + + return Promise.race([ + connectAttempt, + new Promise<"pending">((resolve) => { + setTimeout(() => resolve("pending"), 50); + }), + ]); + }, + ); + + expect(connectState).toBe("settled"); + }); + + it("supersedes an existing socket when a new connector session 
connects", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state); + const oldSocket = createMockSocket(); + const oldWs = oldSocket as unknown as WebSocket; + oldSocket.close.mockImplementation(() => { + harness.connectedSockets.splice( + harness.connectedSockets.indexOf(oldWs), + 1, + ); + }); + harness.connectedSockets.push(oldWs); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + + await withMockWebSocketPair(pairClient, pairServer, async () => { + let connectError: unknown; + try { + await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ); + } catch (error) { + connectError = error; + } + + if (connectError !== undefined) { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + + expect(oldSocket.close).toHaveBeenCalledWith( + 1000, + "superseded_by_new_connection", + ); + expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ + "did:claw:agent:connector", + ]); + expect(oldSocket.close.mock.invocationCallOrder[0]).toBeLessThan( + harness.state.acceptWebSocket.mock.invocationCallOrder[0], + ); + }); + + it("drains queued messages immediately after connector reconnects", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + RELAY_RETRY_INITIAL_MS: "1", + }); + + await relaySession.deliverToConnector({ + requestId: "req-3", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + const ws = pairServer as unknown as WebSocket; + + pairServer.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if 
(frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 2), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + await withMockWebSocketPair(pairClient, pairServer, async () => { + let connectError: unknown; + try { + await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ); + } catch (error) { + connectError = error; + } + + if (connectError !== undefined) { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + + await new Promise((resolve) => setTimeout(resolve, 10)); + + const sendFrames = pairServer.send.mock.calls + .map((call) => parseFrame(call[0])) + .filter((frame) => frame.type === "deliver"); + expect(sendFrames).toHaveLength(1); + + const dedupedResult = await relaySession.deliverToConnector({ + requestId: "req-3", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + expect(dedupedResult.state).toBe("delivered"); + expect(dedupedResult.queueDepth).toBe(0); + + const persisted = harness.storageMap.get(RELAY_QUEUE_STORAGE_KEY) as + | { deliveries: Array<{ requestId: string }> } + | undefined; + expect(persisted?.deliveries ?? 
[]).toHaveLength(0); + }); +}); diff --git a/apps/proxy/src/agent-relay-session.delivery.test.ts b/apps/proxy/src/agent-relay-session.delivery.test.ts new file mode 100644 index 0000000..f2fbd01 --- /dev/null +++ b/apps/proxy/src/agent-relay-session.delivery.test.ts @@ -0,0 +1,283 @@ +import { parseFrame } from "@clawdentity/connector"; +import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { describe, expect, it, vi } from "vitest"; +import { AgentRelaySession } from "./agent-relay-session.js"; +import { + createMockSocket, + createStateHarness, + RECIPIENT_AGENT_DID, + SENDER_AGENT_DID, + withMockWebSocketPair, +} from "./agent-relay-session.test-helpers.js"; + +describe("AgentRelaySession delivery", () => { + it("delivers relay frames to active websocket connectors", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + connectorSocket.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 1), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + const result = await relaySession.deliverToConnector({ + requestId: "req-1", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + expect(result.delivered).toBe(true); + expect(result.queued).toBe(false); + expect(result.state).toBe("delivered"); + expect(result.queueDepth).toBe(0); + expect(result.connectedSockets).toBe(1); + expect(result.deliveryId).toBeTruthy(); + + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + const relayPayload = 
parseFrame(connectorSocket.send.mock.calls[0]?.[0]); + expect(relayPayload.type).toBe("deliver"); + if (relayPayload.type === "deliver") { + expect(relayPayload.fromAgentDid).toBe(SENDER_AGENT_DID); + expect(relayPayload.toAgentDid).toBe(RECIPIENT_AGENT_DID); + } + }); + + it("queues relay frames when no connector socket is active", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + + const result = await relaySession.deliverToConnector({ + requestId: "req-2", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + + expect(result.delivered).toBe(false); + expect(result.queued).toBe(true); + expect(result.state).toBe("queued"); + expect(result.queueDepth).toBe(1); + expect(result.connectedSockets).toBe(0); + }); + + it("evicts stale sockets during alarm heartbeat sweep", async () => { + vi.useFakeTimers(); + const nowMs = Date.now(); + vi.setSystemTime(nowMs); + + try { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const staleSocket = createMockSocket(); + const ws = staleSocket as unknown as WebSocket; + staleSocket.close.mockImplementation(() => { + harness.connectedSockets.splice( + harness.connectedSockets.indexOf(ws), + 1, + ); + }); + harness.connectedSockets.push(ws); + + await relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "heartbeat_ack", + id: generateUlid(nowMs + 1), + ts: new Date(nowMs + 1).toISOString(), + ackId: generateUlid(nowMs + 2), + }), + ); + + vi.advanceTimersByTime(60_001); + await relaySession.alarm(); + + expect(staleSocket.close).toHaveBeenCalledWith( + 1011, + "heartbeat_ack_timeout", + ); + const outboundHeartbeats = staleSocket.send.mock.calls + .map((call) => parseFrame(call[0])) + .filter((frame) => frame.type === "heartbeat"); + 
expect(outboundHeartbeats).toHaveLength(0); + } finally { + vi.useRealTimers(); + } + }); + + it("keeps superseded sockets inactive even when late frames arrive", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const oldSocket = createMockSocket(); + const oldWs = oldSocket as unknown as WebSocket; + harness.connectedSockets.push(oldWs); + + const pairClient = createMockSocket(); + const pairServer = createMockSocket(); + const newWs = pairServer as unknown as WebSocket; + pairServer.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + newWs, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 3), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + await withMockWebSocketPair(pairClient, pairServer, async () => { + let connectError: unknown; + try { + await relaySession.fetch( + new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { + method: "GET", + headers: { + upgrade: "websocket", + "x-claw-connector-agent-did": "did:claw:agent:connector", + }, + }), + ); + } catch (error) { + connectError = error; + } + + if (connectError !== undefined) { + expect(connectError).toBeInstanceOf(RangeError); + } + }); + + await relaySession.webSocketMessage( + oldWs, + JSON.stringify({ + v: 1, + type: "heartbeat_ack", + id: generateUlid(Date.now() + 4), + ts: new Date().toISOString(), + ackId: generateUlid(Date.now() + 5), + }), + ); + + const deliveryState = await Promise.race([ + relaySession + .deliverToConnector({ + requestId: "req-superseded-socket", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }) + .then((result) => result.state), + new Promise<"pending">((resolve) => { + setTimeout(() => 
resolve("pending"), 50); + }), + ]); + + expect(deliveryState).toBe("delivered"); + expect(oldSocket.send).not.toHaveBeenCalled(); + expect(pairServer.send).toHaveBeenCalled(); + }); + + it("does not reject pending deliveries on clean close code 1000", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + const pendingDelivery = relaySession.deliverToConnector({ + requestId: "req-clean-close", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + await vi.waitFor(() => { + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + }); + + harness.connectedSockets.splice(harness.connectedSockets.indexOf(ws), 1); + await relaySession.webSocketClose(ws, 1000, "normal", true); + + const settleState = await Promise.race([ + pendingDelivery.then( + () => "settled", + () => "settled", + ), + new Promise<"pending">((resolve) => { + setTimeout(() => resolve("pending"), 5); + }), + ]); + expect(settleState).toBe("pending"); + + await relaySession.webSocketClose(ws, 1011, "unclean", false); + const queuedAfterUnclean = await pendingDelivery; + expect(queuedAfterUnclean.state).toBe("queued"); + expect(queuedAfterUnclean.queued).toBe(true); + }); + + it("rejects pending deliveries on unclean close when no sockets remain", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + const pendingDelivery = relaySession.deliverToConnector({ + requestId: "req-unclean-close", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: 
RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }); + await vi.waitFor(() => { + expect(connectorSocket.send).toHaveBeenCalledTimes(1); + }); + + harness.connectedSockets.splice(harness.connectedSockets.indexOf(ws), 1); + await relaySession.webSocketClose(ws, 1011, "socket_error", false); + + const settleState = await Promise.race([ + pendingDelivery.then((result) => result.state), + new Promise<"timeout">((resolve) => { + setTimeout(() => resolve("timeout"), 20); + }), + ]); + expect(settleState).toBe("queued"); + }); +}); diff --git a/apps/proxy/src/agent-relay-session.rpc.test.ts b/apps/proxy/src/agent-relay-session.rpc.test.ts new file mode 100644 index 0000000..ae49d3c --- /dev/null +++ b/apps/proxy/src/agent-relay-session.rpc.test.ts @@ -0,0 +1,111 @@ +import { parseFrame } from "@clawdentity/connector"; +import { generateUlid } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { AgentRelaySession } from "./agent-relay-session.js"; +import { + createMockSocket, + createStateHarness, + RECIPIENT_AGENT_DID, + SENDER_AGENT_DID, +} from "./agent-relay-session.test-helpers.js"; + +describe("AgentRelaySession RPC", () => { + it("supports fetch RPC delivery endpoint for compatibility", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_RETRY_JITTER_RATIO: "0", + }); + const connectorSocket = createMockSocket(); + const ws = connectorSocket as unknown as WebSocket; + harness.connectedSockets.push(ws); + + connectorSocket.send.mockImplementation((payload: unknown) => { + const frame = parseFrame(payload); + if (frame.type !== "deliver") { + return; + } + + void relaySession.webSocketMessage( + ws, + JSON.stringify({ + v: 1, + type: "deliver_ack", + id: generateUlid(Date.now() + 3), + ts: new Date().toISOString(), + ackId: frame.id, + accepted: true, + }), + ); + }); + + const response = await relaySession.fetch( + new 
Request("https://relay.example.test/rpc/deliver-to-connector", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + requestId: "req-4", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }), + }), + ); + + expect(response.status).toBe(202); + const body = (await response.json()) as { + deliveryId: string; + state: string; + delivered: boolean; + }; + expect(body.deliveryId).toBeTruthy(); + expect(body.state).toBe("delivered"); + expect(body.delivered).toBe(true); + }); + + it("returns queue-full error from RPC when buffer is full", async () => { + const harness = createStateHarness(); + const relaySession = new AgentRelaySession(harness.state, { + RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: "1", + RELAY_RETRY_JITTER_RATIO: "0", + }); + + const firstResponse = await relaySession.fetch( + new Request("https://relay.example.test/rpc/deliver-to-connector", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + requestId: "req-5", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }), + }), + ); + expect(firstResponse.status).toBe(202); + + const secondResponse = await relaySession.fetch( + new Request("https://relay.example.test/rpc/deliver-to-connector", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify({ + requestId: "req-6", + senderAgentDid: SENDER_AGENT_DID, + recipientAgentDid: RECIPIENT_AGENT_DID, + payload: { event: "agent.started" }, + }), + }), + ); + + expect(secondResponse.status).toBe(507); + const body = (await secondResponse.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("PROXY_RELAY_QUEUE_FULL"); + }); +}); diff --git a/apps/proxy/src/agent-relay-session.test-helpers.ts b/apps/proxy/src/agent-relay-session.test-helpers.ts new file mode 100644 index 
0000000..4fac2a0 --- /dev/null +++ b/apps/proxy/src/agent-relay-session.test-helpers.ts @@ -0,0 +1,70 @@ +import { vi } from "vitest"; + +export type MockWebSocket = { + send: ReturnType; + close: ReturnType; +}; + +export const SENDER_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7"; +export const RECIPIENT_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8"; +export const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; + +export function createMockSocket(): MockWebSocket { + return { + send: vi.fn(), + close: vi.fn(), + }; +} + +export async function withMockWebSocketPair( + pairClient: MockWebSocket, + pairServer: MockWebSocket, + callback: () => Promise, +): Promise { + const originalWebSocketPair = (globalThis as { WebSocketPair?: unknown }) + .WebSocketPair; + + (globalThis as unknown as { WebSocketPair: unknown }).WebSocketPair = class { + 0 = pairClient as unknown as WebSocket; + 1 = pairServer as unknown as WebSocket; + }; + + try { + return await callback(); + } finally { + if (originalWebSocketPair === undefined) { + delete (globalThis as { WebSocketPair?: unknown }).WebSocketPair; + } else { + (globalThis as { WebSocketPair?: unknown }).WebSocketPair = + originalWebSocketPair; + } + } +} + +export function createStateHarness() { + const connectedSockets: WebSocket[] = []; + const storageMap = new Map(); + const storage = { + get: vi.fn(async (key: string) => storageMap.get(key) as T | undefined), + put: vi.fn(async (key: string, value: T) => { + storageMap.set(key, value); + }), + setAlarm: vi.fn(async (_scheduled: number | Date) => {}), + deleteAlarm: vi.fn(async () => {}), + }; + + const state = { + acceptWebSocket: vi.fn((socket: WebSocket) => { + connectedSockets.push(socket); + }), + getWebSockets: vi.fn(() => connectedSockets), + storage, + }; + + return { + state, + storage, + storageMap, + connectedSockets, + }; +} diff --git a/apps/proxy/src/agent-relay-session.test.ts b/apps/proxy/src/agent-relay-session.test.ts deleted file mode 
100644 index de0ff88..0000000 --- a/apps/proxy/src/agent-relay-session.test.ts +++ /dev/null @@ -1,679 +0,0 @@ -import { parseFrame } from "@clawdentity/connector"; -import { generateUlid, RELAY_CONNECT_PATH } from "@clawdentity/protocol"; -import { describe, expect, it, vi } from "vitest"; -import { AgentRelaySession } from "./agent-relay-session.js"; - -type MockWebSocket = { - send: ReturnType; - close: ReturnType; -}; - -const SENDER_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB7"; -const RECIPIENT_AGENT_DID = "did:claw:agent:01HF7YAT31JZHSMW1CG6Q6MHB8"; -const RELAY_QUEUE_STORAGE_KEY = "relay:delivery-queue"; - -function createMockSocket(): MockWebSocket { - return { - send: vi.fn(), - close: vi.fn(), - }; -} - -async function withMockWebSocketPair( - pairClient: MockWebSocket, - pairServer: MockWebSocket, - callback: () => Promise, -): Promise { - const originalWebSocketPair = (globalThis as { WebSocketPair?: unknown }) - .WebSocketPair; - - (globalThis as unknown as { WebSocketPair: unknown }).WebSocketPair = class { - 0 = pairClient as unknown as WebSocket; - 1 = pairServer as unknown as WebSocket; - }; - - try { - return await callback(); - } finally { - if (originalWebSocketPair === undefined) { - delete (globalThis as { WebSocketPair?: unknown }).WebSocketPair; - } else { - (globalThis as { WebSocketPair?: unknown }).WebSocketPair = - originalWebSocketPair; - } - } -} - -function createStateHarness() { - const connectedSockets: WebSocket[] = []; - const storageMap = new Map(); - const storage = { - get: vi.fn(async (key: string) => storageMap.get(key) as T | undefined), - put: vi.fn(async (key: string, value: T) => { - storageMap.set(key, value); - }), - setAlarm: vi.fn(async (_scheduled: number | Date) => {}), - deleteAlarm: vi.fn(async () => {}), - }; - - const state = { - acceptWebSocket: vi.fn((socket: WebSocket) => { - connectedSockets.push(socket); - }), - getWebSockets: vi.fn(() => connectedSockets), - storage, - }; - - return { - state, - 
storage, - storageMap, - connectedSockets, - }; -} - -describe("AgentRelaySession", () => { - it("accepts websocket connects with hibernation state and schedules heartbeat alarm", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); - - const pairClient = createMockSocket(); - const pairServer = createMockSocket(); - await withMockWebSocketPair(pairClient, pairServer, async () => { - const request = new Request( - `https://relay.example.test${RELAY_CONNECT_PATH}`, - { - method: "GET", - headers: { - upgrade: "websocket", - "x-claw-connector-agent-did": "did:claw:agent:connector", - }, - }, - ); - - let connectResponse: Response | undefined; - let connectError: unknown; - try { - connectResponse = await relaySession.fetch(request); - } catch (error) { - connectError = error; - } - - expect(harness.state.acceptWebSocket).toHaveBeenCalledTimes(1); - expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ - "did:claw:agent:connector", - ]); - expect(harness.storage.setAlarm.mock.calls.length).toBeGreaterThanOrEqual( - 1, - ); - - // Node's WHATWG Response may reject status 101 in tests; Workers runtime accepts it. 
- if (connectResponse !== undefined) { - expect(connectResponse.status).toBe(101); - } else { - expect(connectError).toBeInstanceOf(RangeError); - } - }); - }); - - it("returns 426 for non-websocket connect requests", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); - - const response = await relaySession.fetch( - new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { - method: "GET", - }), - ); - - expect(response.status).toBe(426); - expect(harness.state.acceptWebSocket).not.toHaveBeenCalled(); - }); - - it("delivers relay frames to active websocket connectors", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - const connectorSocket = createMockSocket(); - const ws = connectorSocket as unknown as WebSocket; - harness.connectedSockets.push(ws); - - connectorSocket.send.mockImplementation((payload: unknown) => { - const frame = parseFrame(payload); - if (frame.type !== "deliver") { - return; - } - - void relaySession.webSocketMessage( - ws, - JSON.stringify({ - v: 1, - type: "deliver_ack", - id: generateUlid(Date.now() + 1), - ts: new Date().toISOString(), - ackId: frame.id, - accepted: true, - }), - ); - }); - - const result = await relaySession.deliverToConnector({ - requestId: "req-1", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - - expect(result.delivered).toBe(true); - expect(result.queued).toBe(false); - expect(result.state).toBe("delivered"); - expect(result.queueDepth).toBe(0); - expect(result.connectedSockets).toBe(1); - expect(result.deliveryId).toBeTruthy(); - - expect(connectorSocket.send).toHaveBeenCalledTimes(1); - const relayPayload = parseFrame(connectorSocket.send.mock.calls[0]?.[0]); - expect(relayPayload.type).toBe("deliver"); - if (relayPayload.type === "deliver") { - 
expect(relayPayload.fromAgentDid).toBe(SENDER_AGENT_DID); - expect(relayPayload.toAgentDid).toBe(RECIPIENT_AGENT_DID); - } - }); - - it("queues relay frames when no connector socket is active", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - - const result = await relaySession.deliverToConnector({ - requestId: "req-2", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - - expect(result.delivered).toBe(false); - expect(result.queued).toBe(true); - expect(result.state).toBe("queued"); - expect(result.queueDepth).toBe(1); - expect(result.connectedSockets).toBe(0); - - const persisted = harness.storageMap.get(RELAY_QUEUE_STORAGE_KEY) as { - deliveries: Array<{ requestId: string }>; - }; - expect(persisted.deliveries).toHaveLength(1); - expect(persisted.deliveries[0]?.requestId).toBe("req-2"); - }); - - it("drains queued messages immediately after connector reconnects", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - RELAY_RETRY_INITIAL_MS: "1", - }); - - await relaySession.deliverToConnector({ - requestId: "req-3", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - - const pairClient = createMockSocket(); - const pairServer = createMockSocket(); - const ws = pairServer as unknown as WebSocket; - - pairServer.send.mockImplementation((payload: unknown) => { - const frame = parseFrame(payload); - if (frame.type !== "deliver") { - return; - } - - void relaySession.webSocketMessage( - ws, - JSON.stringify({ - v: 1, - type: "deliver_ack", - id: generateUlid(Date.now() + 2), - ts: new Date().toISOString(), - ackId: frame.id, - accepted: true, - }), - ); - }); - - await withMockWebSocketPair(pairClient, pairServer, async () 
=> { - let connectError: unknown; - try { - await relaySession.fetch( - new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { - method: "GET", - headers: { - upgrade: "websocket", - "x-claw-connector-agent-did": "did:claw:agent:connector", - }, - }), - ); - } catch (error) { - connectError = error; - } - - if (connectError !== undefined) { - expect(connectError).toBeInstanceOf(RangeError); - } - }); - - await new Promise((resolve) => setTimeout(resolve, 10)); - - const sendFrames = pairServer.send.mock.calls - .map((call) => parseFrame(call[0])) - .filter((frame) => frame.type === "deliver"); - expect(sendFrames).toHaveLength(1); - - const dedupedResult = await relaySession.deliverToConnector({ - requestId: "req-3", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - expect(dedupedResult.state).toBe("delivered"); - expect(dedupedResult.queueDepth).toBe(0); - }); - - it("returns websocket upgrade quickly while reconnect drain runs in background", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - RELAY_RETRY_INITIAL_MS: "1", - }); - - await relaySession.deliverToConnector({ - requestId: "req-upgrade-fast", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - - const pairClient = createMockSocket(); - const pairServer = createMockSocket(); - - const connectState = await withMockWebSocketPair( - pairClient, - pairServer, - async () => { - const connectAttempt = relaySession - .fetch( - new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { - method: "GET", - headers: { - upgrade: "websocket", - "x-claw-connector-agent-did": "did:claw:agent:connector", - }, - }), - ) - .then( - () => "settled" as const, - () => "settled" as const, - ); - - return Promise.race([ - connectAttempt, - new 
Promise<"pending">((resolve) => { - setTimeout(() => resolve("pending"), 50); - }), - ]); - }, - ); - - expect(connectState).toBe("settled"); - }); - - it("evicts stale sockets during alarm heartbeat sweep", async () => { - vi.useFakeTimers(); - const nowMs = Date.now(); - vi.setSystemTime(nowMs); - - try { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - const staleSocket = createMockSocket(); - const ws = staleSocket as unknown as WebSocket; - staleSocket.close.mockImplementation(() => { - harness.connectedSockets.splice( - harness.connectedSockets.indexOf(ws), - 1, - ); - }); - harness.connectedSockets.push(ws); - - await relaySession.webSocketMessage( - ws, - JSON.stringify({ - v: 1, - type: "heartbeat_ack", - id: generateUlid(nowMs + 1), - ts: new Date(nowMs + 1).toISOString(), - ackId: generateUlid(nowMs + 2), - }), - ); - - vi.advanceTimersByTime(60_001); - await relaySession.alarm(); - - expect(staleSocket.close).toHaveBeenCalledWith( - 1011, - "heartbeat_ack_timeout", - ); - const outboundHeartbeats = staleSocket.send.mock.calls - .map((call) => parseFrame(call[0])) - .filter((frame) => frame.type === "heartbeat"); - expect(outboundHeartbeats).toHaveLength(0); - } finally { - vi.useRealTimers(); - } - }); - - it("supersedes an existing socket when a new connector session connects", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state); - const oldSocket = createMockSocket(); - const oldWs = oldSocket as unknown as WebSocket; - oldSocket.close.mockImplementation(() => { - harness.connectedSockets.splice( - harness.connectedSockets.indexOf(oldWs), - 1, - ); - }); - harness.connectedSockets.push(oldWs); - - const pairClient = createMockSocket(); - const pairServer = createMockSocket(); - - await withMockWebSocketPair(pairClient, pairServer, async () => { - let connectError: unknown; - try { - await 
relaySession.fetch( - new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { - method: "GET", - headers: { - upgrade: "websocket", - "x-claw-connector-agent-did": "did:claw:agent:connector", - }, - }), - ); - } catch (error) { - connectError = error; - } - - if (connectError !== undefined) { - expect(connectError).toBeInstanceOf(RangeError); - } - }); - - expect(oldSocket.close).toHaveBeenCalledWith( - 1000, - "superseded_by_new_connection", - ); - expect(harness.state.acceptWebSocket).toHaveBeenCalledWith(pairServer, [ - "did:claw:agent:connector", - ]); - expect(oldSocket.close.mock.invocationCallOrder[0]).toBeLessThan( - harness.state.acceptWebSocket.mock.invocationCallOrder[0], - ); - }); - - it("keeps superseded sockets inactive even when late frames arrive", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - const oldSocket = createMockSocket(); - const oldWs = oldSocket as unknown as WebSocket; - harness.connectedSockets.push(oldWs); - - const pairClient = createMockSocket(); - const pairServer = createMockSocket(); - const newWs = pairServer as unknown as WebSocket; - pairServer.send.mockImplementation((payload: unknown) => { - const frame = parseFrame(payload); - if (frame.type !== "deliver") { - return; - } - - void relaySession.webSocketMessage( - newWs, - JSON.stringify({ - v: 1, - type: "deliver_ack", - id: generateUlid(Date.now() + 3), - ts: new Date().toISOString(), - ackId: frame.id, - accepted: true, - }), - ); - }); - - await withMockWebSocketPair(pairClient, pairServer, async () => { - let connectError: unknown; - try { - await relaySession.fetch( - new Request(`https://relay.example.test${RELAY_CONNECT_PATH}`, { - method: "GET", - headers: { - upgrade: "websocket", - "x-claw-connector-agent-did": "did:claw:agent:connector", - }, - }), - ); - } catch (error) { - connectError = error; - } - - if (connectError !== undefined) { - 
expect(connectError).toBeInstanceOf(RangeError); - } - }); - - await relaySession.webSocketMessage( - oldWs, - JSON.stringify({ - v: 1, - type: "heartbeat_ack", - id: generateUlid(Date.now() + 4), - ts: new Date().toISOString(), - ackId: generateUlid(Date.now() + 5), - }), - ); - - const deliveryState = await Promise.race([ - relaySession - .deliverToConnector({ - requestId: "req-superseded-socket", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }) - .then((result) => result.state), - new Promise<"pending">((resolve) => { - setTimeout(() => resolve("pending"), 50); - }), - ]); - - expect(deliveryState).toBe("delivered"); - expect(oldSocket.send).not.toHaveBeenCalled(); - expect(pairServer.send).toHaveBeenCalled(); - }); - - it("does not reject pending deliveries on clean close code 1000", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - const connectorSocket = createMockSocket(); - const ws = connectorSocket as unknown as WebSocket; - harness.connectedSockets.push(ws); - - const pendingDelivery = relaySession.deliverToConnector({ - requestId: "req-clean-close", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - await vi.waitFor(() => { - expect(connectorSocket.send).toHaveBeenCalledTimes(1); - }); - - harness.connectedSockets.splice(harness.connectedSockets.indexOf(ws), 1); - await relaySession.webSocketClose(ws, 1000, "normal", true); - - const settleState = await Promise.race([ - pendingDelivery.then( - () => "settled", - () => "settled", - ), - new Promise<"pending">((resolve) => { - setTimeout(() => resolve("pending"), 5); - }), - ]); - expect(settleState).toBe("pending"); - - await relaySession.webSocketClose(ws, 1011, "unclean", false); - const queuedAfterUnclean = await pendingDelivery; - 
expect(queuedAfterUnclean.state).toBe("queued"); - expect(queuedAfterUnclean.queued).toBe(true); - }); - - it("rejects pending deliveries on unclean close when no sockets remain", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - const connectorSocket = createMockSocket(); - const ws = connectorSocket as unknown as WebSocket; - harness.connectedSockets.push(ws); - - const pendingDelivery = relaySession.deliverToConnector({ - requestId: "req-unclean-close", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }); - await vi.waitFor(() => { - expect(connectorSocket.send).toHaveBeenCalledTimes(1); - }); - - harness.connectedSockets.splice(harness.connectedSockets.indexOf(ws), 1); - await relaySession.webSocketClose(ws, 1011, "socket_error", false); - - const settleState = await Promise.race([ - pendingDelivery.then((result) => result.state), - new Promise<"timeout">((resolve) => { - setTimeout(() => resolve("timeout"), 20); - }), - ]); - expect(settleState).toBe("queued"); - }); - - it("supports fetch RPC delivery endpoint for compatibility", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_RETRY_JITTER_RATIO: "0", - }); - const connectorSocket = createMockSocket(); - const ws = connectorSocket as unknown as WebSocket; - harness.connectedSockets.push(ws); - - connectorSocket.send.mockImplementation((payload: unknown) => { - const frame = parseFrame(payload); - if (frame.type !== "deliver") { - return; - } - - void relaySession.webSocketMessage( - ws, - JSON.stringify({ - v: 1, - type: "deliver_ack", - id: generateUlid(Date.now() + 3), - ts: new Date().toISOString(), - ackId: frame.id, - accepted: true, - }), - ); - }); - - const response = await relaySession.fetch( - new 
Request("https://relay.example.test/rpc/deliver-to-connector", { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - requestId: "req-4", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }), - }), - ); - - expect(response.status).toBe(202); - const body = (await response.json()) as { - deliveryId: string; - state: string; - delivered: boolean; - }; - expect(body.deliveryId).toBeTruthy(); - expect(body.state).toBe("delivered"); - expect(body.delivered).toBe(true); - }); - - it("returns queue-full error from RPC when buffer is full", async () => { - const harness = createStateHarness(); - const relaySession = new AgentRelaySession(harness.state, { - RELAY_QUEUE_MAX_MESSAGES_PER_AGENT: "1", - RELAY_RETRY_JITTER_RATIO: "0", - }); - - const firstResponse = await relaySession.fetch( - new Request("https://relay.example.test/rpc/deliver-to-connector", { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - requestId: "req-5", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }), - }), - ); - expect(firstResponse.status).toBe(202); - - const secondResponse = await relaySession.fetch( - new Request("https://relay.example.test/rpc/deliver-to-connector", { - method: "POST", - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - requestId: "req-6", - senderAgentDid: SENDER_AGENT_DID, - recipientAgentDid: RECIPIENT_AGENT_DID, - payload: { event: "agent.started" }, - }), - }), - ); - - expect(secondResponse.status).toBe(507); - const body = (await secondResponse.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("PROXY_RELAY_QUEUE_FULL"); - }); -}); diff --git a/apps/proxy/src/auth-middleware.ts b/apps/proxy/src/auth-middleware.ts index 0b6586b..292113d 100644 --- 
a/apps/proxy/src/auth-middleware.ts +++ b/apps/proxy/src/auth-middleware.ts @@ -1,664 +1,10 @@ -import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; -import { - AGENT_AUTH_VALIDATE_PATH, - decodeBase64url, - RELAY_CONNECT_PATH, - RELAY_DELIVERY_RECEIPTS_PATH, -} from "@clawdentity/protocol"; -import { - AitJwtError, - AppError, - type CrlCache, - CrlJwtError, - createCrlCache, - createNonceCache, - type Logger, - type NonceCache, - parseRegistryConfig, - type RequestContextVariables, - type VerifyHttpRequestInput, - verifyAIT, - verifyCRL, - verifyHttpRequest, -} from "@clawdentity/sdk"; -import { createMiddleware } from "hono/factory"; -import type { ProxyConfig } from "./config.js"; -import { - PAIR_CONFIRM_PATH, - PAIR_START_PATH, - PAIR_STATUS_PATH, -} from "./pairing-constants.js"; -import type { ProxyTrustStore } from "./proxy-trust-store.js"; -import { assertKnownTrustedAgent } from "./trust-policy.js"; - -export const DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS = 60 * 60 * 1000; -export const DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS = 300; - -type RegistrySigningKey = NonNullable< - ReturnType["REGISTRY_SIGNING_KEYS"] ->[number]; - -type VerificationKey = { - kid: string; - jwk: { - kty: "OKP"; - crv: "Ed25519"; - x: string; - }; -}; - -export type ProxyAuthContext = { - agentDid: string; - ownerDid: string; - aitJti: string; - issuer: string; - cnfPublicKey: string; -}; - -export type ProxyRequestVariables = RequestContextVariables & { - auth?: ProxyAuthContext; -}; - -export type ProxyAuthMiddlewareOptions = { - config: ProxyConfig; - logger: Logger; - trustStore: ProxyTrustStore; - fetchImpl?: typeof fetch; - clock?: () => number; - nonceCache?: NonceCache; - crlCache?: CrlCache; - maxTimestampSkewSeconds?: number; - registryKeysCacheTtlMs?: number; -}; - -type RegistryKeysCache = { - fetchedAtMs: number; - keys: VerificationKey[]; -}; - -function isRecord(value: unknown): value is Record { - return typeof value === "object" && value !== 
null; -} - -function toErrorMessage(error: unknown): string { - return error instanceof Error ? error.message : "unknown"; -} - -function toPathWithQuery(url: string): string { - const parsed = new URL(url, "http://localhost"); - return `${parsed.pathname}${parsed.search}`; -} - -function normalizeRegistryUrl(registryUrl: string): string { - try { - return new URL(registryUrl).toString(); - } catch { - throw new AppError({ - code: "PROXY_AUTH_INVALID_REGISTRY_URL", - message: "Proxy registry URL is invalid", - status: 500, - expose: true, - }); - } -} - -function toRegistryUrl(registryUrl: string, path: string): string { - const normalizedBaseUrl = registryUrl.endsWith("/") - ? registryUrl - : `${registryUrl}/`; - return new URL(path, normalizedBaseUrl).toString(); -} - -function parseAgentAccessHeader(value: string | undefined): string { - if (typeof value !== "string" || value.trim().length === 0) { - throw unauthorizedError({ - code: "PROXY_AGENT_ACCESS_REQUIRED", - message: "X-Claw-Agent-Access header is required", - }); - } - - return value.trim(); -} - -function unauthorizedError(options: { - code: string; - message: string; - details?: Record; -}): AppError { - return new AppError({ - code: options.code, - message: options.message, - status: 401, - details: options.details, - expose: true, - }); -} - -function dependencyUnavailableError(options: { - message: string; - details?: Record; -}): AppError { - return new AppError({ - code: "PROXY_AUTH_DEPENDENCY_UNAVAILABLE", - message: options.message, - status: 503, - details: options.details, - expose: true, - }); -} - -function shouldSkipKnownAgentCheck(path: string): boolean { - return ( - path === PAIR_START_PATH || - path === PAIR_CONFIRM_PATH || - path === PAIR_STATUS_PATH || - path === RELAY_CONNECT_PATH - ); -} - -export function parseClawAuthorizationHeader(authorization?: string): string { - if (typeof authorization !== "string" || authorization.trim().length === 0) { - throw unauthorizedError({ - code: 
"PROXY_AUTH_MISSING_TOKEN", - message: "Authorization header is required", - }); - } - - const parsed = authorization.trim().match(/^Claw\s+(\S+)$/); - if (!parsed || parsed[1].trim().length === 0) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_SCHEME", - message: "Authorization must be in the format 'Claw '", - }); - } - - return parsed[1].trim(); -} - -export function resolveExpectedIssuer(registryUrl: string): string | undefined { - try { - const hostname = new URL(registryUrl).hostname; - if (hostname === "registry.clawdentity.com") { - return "https://registry.clawdentity.com"; - } - - if (hostname === "dev.registry.clawdentity.com") { - return "https://dev.registry.clawdentity.com"; - } - - return undefined; - } catch { - return undefined; - } -} - -function parseRegistrySigningKeys(payload: unknown): RegistrySigningKey[] { - if (!isRecord(payload) || !Array.isArray(payload.keys)) { - throw dependencyUnavailableError({ - message: "Registry signing keys payload is invalid", - }); - } - - const parsed = (() => { - try { - return parseRegistryConfig({ - ENVIRONMENT: "test", - REGISTRY_SIGNING_KEYS: JSON.stringify(payload.keys), - }); - } catch (error) { - throw dependencyUnavailableError({ - message: "Registry signing keys are invalid", - details: { - reason: toErrorMessage(error), - }, - }); - } - })(); - - const keys = parsed.REGISTRY_SIGNING_KEYS ?? 
[]; - if (keys.length === 0) { - throw dependencyUnavailableError({ - message: "Registry signing keys are unavailable", - }); - } - - return keys; -} - -function toVerificationKeys(keys: RegistrySigningKey[]): VerificationKey[] { - return keys - .filter((key) => key.status === "active") - .map((key) => ({ - kid: key.kid, - jwk: { - kty: "OKP", - crv: "Ed25519", - x: key.x, - }, - })); -} - -function parseUnixTimestamp(headerValue: string): number { - if (!/^\d+$/.test(headerValue)) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_TIMESTAMP", - message: "X-Claw-Timestamp must be a unix seconds integer", - }); - } - - const timestamp = Number.parseInt(headerValue, 10); - if (!Number.isInteger(timestamp) || timestamp < 0) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_TIMESTAMP", - message: "X-Claw-Timestamp must be a unix seconds integer", - }); - } - - return timestamp; -} - -function assertTimestampWithinSkew(options: { - clock: () => number; - maxSkewSeconds: number; - timestampSeconds: number; -}): void { - const nowSeconds = Math.floor(options.clock() / 1000); - const skew = Math.abs(nowSeconds - options.timestampSeconds); - if (skew > options.maxSkewSeconds) { - throw unauthorizedError({ - code: "PROXY_AUTH_TIMESTAMP_SKEW", - message: "X-Claw-Timestamp is outside the allowed skew window", - details: { - maxSkewSeconds: options.maxSkewSeconds, - }, - }); - } -} - -function toProofVerificationInput(input: { - method: string; - pathWithQuery: string; - headers: Headers; - body: Uint8Array; - publicKey: Uint8Array; -}): VerifyHttpRequestInput { - const headers = Object.fromEntries(input.headers.entries()); - return { - method: input.method, - pathWithQuery: input.pathWithQuery, - headers, - body: input.body, - publicKey: input.publicKey, - }; -} - -export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { - const fetchImpl = options.fetchImpl ?? fetch; - const clock = options.clock ?? 
Date.now; - const nonceCache = options.nonceCache ?? createNonceCache(); - const maxTimestampSkewSeconds = - options.maxTimestampSkewSeconds ?? DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS; - const registryKeysCacheTtlMs = - options.registryKeysCacheTtlMs ?? DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS; - const registryUrl = normalizeRegistryUrl(options.config.registryUrl); - const expectedIssuer = resolveExpectedIssuer(registryUrl); - const agentAuthValidateUrl = toRegistryUrl( - registryUrl, - AGENT_AUTH_VALIDATE_PATH, - ); - - let registryKeysCache: RegistryKeysCache | undefined; - - async function getActiveRegistryKeys(input?: { - forceRefresh?: boolean; - }): Promise { - const forceRefresh = input?.forceRefresh === true; - if ( - !forceRefresh && - registryKeysCache && - clock() - registryKeysCache.fetchedAtMs <= registryKeysCacheTtlMs - ) { - return registryKeysCache.keys; - } - - let response: Response; - try { - response = await fetchImpl( - toRegistryUrl(registryUrl, "/.well-known/claw-keys.json"), - ); - } catch (error) { - throw dependencyUnavailableError({ - message: "Registry signing keys are unavailable", - details: { - reason: toErrorMessage(error), - }, - }); - } - - if (!response.ok) { - throw dependencyUnavailableError({ - message: "Registry signing keys are unavailable", - details: { - status: response.status, - }, - }); - } - - const parsedKeys = parseRegistrySigningKeys( - await parseJsonResponse(response), - ); - const verificationKeys = toVerificationKeys(parsedKeys); - if (verificationKeys.length === 0) { - throw dependencyUnavailableError({ - message: "Registry signing keys are unavailable", - }); - } - - registryKeysCache = { - fetchedAtMs: clock(), - keys: verificationKeys, - }; - - return verificationKeys; - } - - async function fetchLatestCrlClaims(): Promise { - let response: Response; - try { - response = await fetchImpl(toRegistryUrl(registryUrl, "/v1/crl")); - } catch (error) { - throw dependencyUnavailableError({ - message: "Registry CRL is 
unavailable", - details: { - reason: toErrorMessage(error), - }, - }); - } - - if (!response.ok) { - throw dependencyUnavailableError({ - message: "Registry CRL is unavailable", - details: { - status: response.status, - }, - }); - } - - const payload = await parseJsonResponse(response); - if (!isRecord(payload) || typeof payload.crl !== "string") { - throw dependencyUnavailableError({ - message: "Registry CRL payload is invalid", - }); - } - const crlToken = payload.crl; - - const verifyWithKeys = async (registryKeys: VerificationKey[]) => - verifyCRL({ - token: crlToken, - registryKeys, - expectedIssuer, - }); - - try { - const verificationKeys = await getActiveRegistryKeys(); - return await verifyWithKeys(verificationKeys); - } catch (error) { - if (error instanceof CrlJwtError && error.code === "UNKNOWN_CRL_KID") { - try { - const refreshedKeys = await getActiveRegistryKeys({ - forceRefresh: true, - }); - return await verifyWithKeys(refreshedKeys); - } catch (refreshedError) { - throw dependencyUnavailableError({ - message: "Registry CRL is invalid", - details: { - reason: toErrorMessage(refreshedError), - }, - }); - } - } - - throw dependencyUnavailableError({ - message: "Registry CRL is invalid", - details: { - reason: toErrorMessage(error), - }, - }); - } - } - - const crlCache = - options.crlCache ?? 
- createCrlCache({ - fetchLatest: fetchLatestCrlClaims, - refreshIntervalMs: options.config.crlRefreshIntervalMs, - maxAgeMs: options.config.crlMaxAgeMs, - staleBehavior: options.config.crlStaleBehavior, - clock, - }); - - async function verifyAitClaims(token: string) { - const verifyWithKeys = async (registryKeys: VerificationKey[]) => - verifyAIT({ - token, - registryKeys, - expectedIssuer, - }); - - const verificationKeys = await getActiveRegistryKeys(); - try { - return await verifyWithKeys(verificationKeys); - } catch (error) { - if (error instanceof AitJwtError && error.code === "UNKNOWN_AIT_KID") { - const refreshedKeys = await getActiveRegistryKeys({ - forceRefresh: true, - }); - try { - return await verifyWithKeys(refreshedKeys); - } catch (refreshedError) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_AIT", - message: "AIT verification failed", - details: { - reason: toErrorMessage(refreshedError), - }, - }); - } - } - - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_AIT", - message: "AIT verification failed", - details: { - reason: toErrorMessage(error), - }, - }); - } - } - - return createMiddleware<{ Variables: ProxyRequestVariables }>( - async (c, next) => { - if (c.req.path === "/health") { - await next(); - return; - } - const authorizationHeader = c.req.header("authorization"); - const token = parseClawAuthorizationHeader(authorizationHeader); - const claims = await verifyAitClaims(token); - - const timestampHeader = c.req.header("x-claw-timestamp"); - if (typeof timestampHeader !== "string") { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_TIMESTAMP", - message: "X-Claw-Timestamp header is required", - }); - } - - assertTimestampWithinSkew({ - clock, - maxSkewSeconds: maxTimestampSkewSeconds, - timestampSeconds: parseUnixTimestamp(timestampHeader), - }); - - const bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer()); - const pathWithQuery = toPathWithQuery(c.req.url); - - let cnfPublicKey: Uint8Array; - try 
{ - cnfPublicKey = decodeBase64url(claims.cnf.jwk.x); - } catch (error) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_AIT", - message: "AIT public key is invalid", - details: { - reason: toErrorMessage(error), - }, - }); - } - - try { - await verifyHttpRequest( - toProofVerificationInput({ - method: c.req.method, - pathWithQuery, - headers: c.req.raw.headers, - body: bodyBytes, - publicKey: cnfPublicKey, - }), - ); - } catch (error) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_PROOF", - message: "PoP verification failed", - details: { - reason: toErrorMessage(error), - }, - }); - } - - const nonceHeader = c.req.header("x-claw-nonce"); - const nonce = typeof nonceHeader === "string" ? nonceHeader : ""; - const nonceResult = (() => { - try { - return nonceCache.tryAcceptNonce({ - agentDid: claims.sub, - nonce, - }); - } catch (error) { - throw unauthorizedError({ - code: "PROXY_AUTH_INVALID_NONCE", - message: "Nonce validation failed", - details: { - reason: toErrorMessage(error), - }, - }); - } - })(); - - if (!nonceResult.accepted) { - throw unauthorizedError({ - code: "PROXY_AUTH_REPLAY", - message: "Replay detected", - }); - } - - let isRevoked: boolean; - try { - isRevoked = await crlCache.isRevoked(claims.jti); - } catch (error) { - if ( - error instanceof AppError && - error.code === "PROXY_AUTH_DEPENDENCY_UNAVAILABLE" - ) { - throw error; - } - - throw dependencyUnavailableError({ - message: "Registry CRL is unavailable", - details: { - reason: toErrorMessage(error), - }, - }); - } - - if (isRevoked) { - throw unauthorizedError({ - code: "PROXY_AUTH_REVOKED", - message: "AIT has been revoked", - }); - } - - if (!shouldSkipKnownAgentCheck(c.req.path)) { - await assertKnownTrustedAgent({ - trustStore: options.trustStore, - agentDid: claims.sub, - }); - } - - if ( - c.req.path === "/hooks/agent" || - c.req.path === RELAY_CONNECT_PATH || - c.req.path === RELAY_DELIVERY_RECEIPTS_PATH - ) { - const accessToken = parseAgentAccessHeader( - 
c.req.header("x-claw-agent-access"), - ); - - let validateResponse: Response; - try { - validateResponse = await fetchImpl(agentAuthValidateUrl, { - method: "POST", - headers: { - "content-type": "application/json", - "x-claw-agent-access": accessToken, - }, - body: JSON.stringify({ - agentDid: claims.sub, - aitJti: claims.jti, - }), - }); - } catch (error) { - throw dependencyUnavailableError({ - message: "Registry agent auth validation is unavailable", - details: { - reason: toErrorMessage(error), - }, - }); - } - - if (validateResponse.status === 401) { - throw unauthorizedError({ - code: "PROXY_AGENT_ACCESS_INVALID", - message: "Agent access token is invalid or expired", - }); - } - - if (validateResponse.status !== 204) { - throw dependencyUnavailableError({ - message: "Registry agent auth validation is unavailable", - details: { - status: validateResponse.status, - }, - }); - } - } - - c.set("auth", { - agentDid: claims.sub, - ownerDid: claims.ownerDid, - aitJti: claims.jti, - issuer: claims.iss, - cnfPublicKey: claims.cnf.jwk.x, - }); - - options.logger.info("proxy.auth.verified", { - agentDid: claims.sub, - ownerDid: claims.ownerDid, - jti: claims.jti, - }); - - await next(); - }, - ); -} +export { createProxyAuthMiddleware } from "./auth-middleware/middleware.js"; +export { parseClawAuthorizationHeader } from "./auth-middleware/request-auth.js"; +export { + DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS, + DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS, + type ProxyAuthContext, + type ProxyAuthMiddlewareOptions, + type ProxyRequestVariables, +} from "./auth-middleware/types.js"; +export { resolveExpectedIssuer } from "./auth-middleware/url.js"; diff --git a/apps/proxy/src/auth-middleware/AGENTS.md b/apps/proxy/src/auth-middleware/AGENTS.md new file mode 100644 index 0000000..50368da --- /dev/null +++ b/apps/proxy/src/auth-middleware/AGENTS.md @@ -0,0 +1,18 @@ +# AGENTS.md (apps/proxy/src/auth-middleware) + +## Purpose +- Keep proxy auth verification modular, testable, and 
failure-mode explicit. + +## Module ownership +- `middleware.ts`: orchestration only (verification flow and context wiring). +- `request-auth.ts`: header parsing, timestamp skew validation, proof input shaping. +- `registry-keys.ts`: registry key payload parsing + verification-key projection. +- `url.ts`: registry URL normalization, issuer resolution, path helpers. +- `errors.ts`: standardized auth/dependency error constructors. +- `types.ts`: shared types and defaults. + +## Rules +- Keep error codes/messages stable; route tests depend on them. +- Prefer pure helpers in leaf modules and inject side-effectful dependencies (`fetch`, caches, clock) from `middleware.ts`. +- Do not mix registry fetch/parsing logic into request-header helpers. +- Keep replay/nonce and CRL decisions fail-safe and explicit. diff --git a/apps/proxy/src/auth-middleware/errors.ts b/apps/proxy/src/auth-middleware/errors.ts new file mode 100644 index 0000000..085bfb7 --- /dev/null +++ b/apps/proxy/src/auth-middleware/errors.ts @@ -0,0 +1,32 @@ +import { AppError } from "@clawdentity/sdk"; + +export function toErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : "unknown"; +} + +export function unauthorizedError(options: { + code: string; + message: string; + details?: Record; +}): AppError { + return new AppError({ + code: options.code, + message: options.message, + status: 401, + details: options.details, + expose: true, + }); +} + +export function dependencyUnavailableError(options: { + message: string; + details?: Record; +}): AppError { + return new AppError({ + code: "PROXY_AUTH_DEPENDENCY_UNAVAILABLE", + message: options.message, + status: 503, + details: options.details, + expose: true, + }); +} diff --git a/apps/proxy/src/auth-middleware/middleware.ts b/apps/proxy/src/auth-middleware/middleware.ts new file mode 100644 index 0000000..88ab703 --- /dev/null +++ b/apps/proxy/src/auth-middleware/middleware.ts @@ -0,0 +1,425 @@ +import { parseJsonResponseSafe as parseJsonResponse } from "@clawdentity/common"; +import { + AGENT_AUTH_VALIDATE_PATH, + decodeBase64url, + RELAY_CONNECT_PATH, + RELAY_DELIVERY_RECEIPTS_PATH, +} from "@clawdentity/protocol"; +import { + AitJwtError, + AppError, + CrlJwtError, + createCrlCache, + createNonceCache, + verifyAIT, + verifyCRL, + verifyHttpRequest, +} from "@clawdentity/sdk"; +import { createMiddleware } from "hono/factory"; +import { assertKnownTrustedAgent } from "../trust-policy.js"; +import { + dependencyUnavailableError, + toErrorMessage, + unauthorizedError, +} from "./errors.js"; +import { + parseRegistrySigningKeys, + toVerificationKeys, +} from "./registry-keys.js"; +import { + assertTimestampWithinSkew, + parseAgentAccessHeader, + parseClawAuthorizationHeader, + parseUnixTimestamp, + toProofVerificationInput, +} from "./request-auth.js"; +import { + DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS, + DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS, + type ProxyAuthMiddlewareOptions, + type ProxyRequestVariables, + type RegistryKeysCache, + type VerificationKey, +} from "./types.js"; +import { + normalizeRegistryUrl, + resolveExpectedIssuer, + shouldSkipKnownAgentCheck, + 
toPathWithQuery, + toRegistryUrl, +} from "./url.js"; + +function isRecord(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +export function createProxyAuthMiddleware(options: ProxyAuthMiddlewareOptions) { + const fetchImpl = options.fetchImpl ?? fetch; + const clock = options.clock ?? Date.now; + const nonceCache = options.nonceCache ?? createNonceCache(); + const maxTimestampSkewSeconds = + options.maxTimestampSkewSeconds ?? DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS; + const registryKeysCacheTtlMs = + options.registryKeysCacheTtlMs ?? DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS; + const registryUrl = normalizeRegistryUrl(options.config.registryUrl); + const expectedIssuer = resolveExpectedIssuer(registryUrl); + const agentAuthValidateUrl = toRegistryUrl( + registryUrl, + AGENT_AUTH_VALIDATE_PATH, + ); + + let registryKeysCache: RegistryKeysCache | undefined; + + async function getActiveRegistryKeys(input?: { + forceRefresh?: boolean; + }): Promise { + const forceRefresh = input?.forceRefresh === true; + if ( + !forceRefresh && + registryKeysCache && + clock() - registryKeysCache.fetchedAtMs <= registryKeysCacheTtlMs + ) { + return registryKeysCache.keys; + } + + let response: Response; + try { + response = await fetchImpl( + toRegistryUrl(registryUrl, "/.well-known/claw-keys.json"), + ); + } catch (error) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (!response.ok) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + details: { + status: response.status, + }, + }); + } + + const parsedKeys = parseRegistrySigningKeys( + await parseJsonResponse(response), + ); + const verificationKeys = toVerificationKeys(parsedKeys); + if (verificationKeys.length === 0) { + throw dependencyUnavailableError({ + message: "Registry signing keys are unavailable", + }); + } + + registryKeysCache = { + 
fetchedAtMs: clock(), + keys: verificationKeys, + }; + + return verificationKeys; + } + + async function fetchLatestCrlClaims(): Promise { + let response: Response; + try { + response = await fetchImpl(toRegistryUrl(registryUrl, "/v1/crl")); + } catch (error) { + throw dependencyUnavailableError({ + message: "Registry CRL is unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (!response.ok) { + throw dependencyUnavailableError({ + message: "Registry CRL is unavailable", + details: { + status: response.status, + }, + }); + } + + const payload = await parseJsonResponse(response); + if (!isRecord(payload) || typeof payload.crl !== "string") { + throw dependencyUnavailableError({ + message: "Registry CRL payload is invalid", + }); + } + const crlToken = payload.crl; + + const verifyWithKeys = async (registryKeys: VerificationKey[]) => + verifyCRL({ + token: crlToken, + registryKeys, + expectedIssuer, + }); + + try { + const verificationKeys = await getActiveRegistryKeys(); + return await verifyWithKeys(verificationKeys); + } catch (error) { + if (error instanceof CrlJwtError && error.code === "UNKNOWN_CRL_KID") { + try { + const refreshedKeys = await getActiveRegistryKeys({ + forceRefresh: true, + }); + return await verifyWithKeys(refreshedKeys); + } catch (refreshedError) { + throw dependencyUnavailableError({ + message: "Registry CRL is invalid", + details: { + reason: toErrorMessage(refreshedError), + }, + }); + } + } + + throw dependencyUnavailableError({ + message: "Registry CRL is invalid", + details: { + reason: toErrorMessage(error), + }, + }); + } + } + + const crlCache = + options.crlCache ?? 
+ createCrlCache({ + fetchLatest: fetchLatestCrlClaims, + refreshIntervalMs: options.config.crlRefreshIntervalMs, + maxAgeMs: options.config.crlMaxAgeMs, + staleBehavior: options.config.crlStaleBehavior, + clock, + }); + + async function verifyAitClaims(token: string) { + const verifyWithKeys = async (registryKeys: VerificationKey[]) => + verifyAIT({ + token, + registryKeys, + expectedIssuer, + }); + + const verificationKeys = await getActiveRegistryKeys(); + try { + return await verifyWithKeys(verificationKeys); + } catch (error) { + if (error instanceof AitJwtError && error.code === "UNKNOWN_AIT_KID") { + const refreshedKeys = await getActiveRegistryKeys({ + forceRefresh: true, + }); + try { + return await verifyWithKeys(refreshedKeys); + } catch (refreshedError) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_AIT", + message: "AIT verification failed", + details: { + reason: toErrorMessage(refreshedError), + }, + }); + } + } + + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_AIT", + message: "AIT verification failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + } + + return createMiddleware<{ Variables: ProxyRequestVariables }>( + async (c, next) => { + if (c.req.path === "/health") { + await next(); + return; + } + const authorizationHeader = c.req.header("authorization"); + const token = parseClawAuthorizationHeader(authorizationHeader); + const claims = await verifyAitClaims(token); + + const timestampHeader = c.req.header("x-claw-timestamp"); + if (typeof timestampHeader !== "string") { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_TIMESTAMP", + message: "X-Claw-Timestamp header is required", + }); + } + + assertTimestampWithinSkew({ + clock, + maxSkewSeconds: maxTimestampSkewSeconds, + timestampSeconds: parseUnixTimestamp(timestampHeader), + }); + + const bodyBytes = new Uint8Array(await c.req.raw.clone().arrayBuffer()); + const pathWithQuery = toPathWithQuery(c.req.url); + + let cnfPublicKey: Uint8Array; + try 
{ + cnfPublicKey = decodeBase64url(claims.cnf.jwk.x); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_AIT", + message: "AIT public key is invalid", + details: { + reason: toErrorMessage(error), + }, + }); + } + + try { + await verifyHttpRequest( + toProofVerificationInput({ + method: c.req.method, + pathWithQuery, + headers: c.req.raw.headers, + body: bodyBytes, + publicKey: cnfPublicKey, + }), + ); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_PROOF", + message: "PoP verification failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + + const nonceHeader = c.req.header("x-claw-nonce"); + const nonce = typeof nonceHeader === "string" ? nonceHeader : ""; + const nonceResult = (() => { + try { + return nonceCache.tryAcceptNonce({ + agentDid: claims.sub, + nonce, + }); + } catch (error) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_NONCE", + message: "Nonce validation failed", + details: { + reason: toErrorMessage(error), + }, + }); + } + })(); + + if (!nonceResult.accepted) { + throw unauthorizedError({ + code: "PROXY_AUTH_REPLAY", + message: "Replay detected", + }); + } + + let isRevoked: boolean; + try { + isRevoked = await crlCache.isRevoked(claims.jti); + } catch (error) { + if ( + error instanceof AppError && + error.code === "PROXY_AUTH_DEPENDENCY_UNAVAILABLE" + ) { + throw error; + } + + throw dependencyUnavailableError({ + message: "Registry CRL is unavailable", + details: { + reason: toErrorMessage(error), + }, + }); + } + + if (isRevoked) { + throw unauthorizedError({ + code: "PROXY_AUTH_REVOKED", + message: "AIT has been revoked", + }); + } + + if (!shouldSkipKnownAgentCheck(c.req.path)) { + await assertKnownTrustedAgent({ + trustStore: options.trustStore, + agentDid: claims.sub, + }); + } + + if ( + c.req.path === "/hooks/agent" || + c.req.path === RELAY_CONNECT_PATH || + c.req.path === RELAY_DELIVERY_RECEIPTS_PATH + ) { + const accessToken = parseAgentAccessHeader( + 
c.req.header("x-claw-agent-access"),
+        );
+
+        let validateResponse: Response;
+        try {
+          validateResponse = await fetchImpl(agentAuthValidateUrl, {
+            method: "POST",
+            headers: {
+              "content-type": "application/json",
+              "x-claw-agent-access": accessToken,
+            },
+            body: JSON.stringify({
+              agentDid: claims.sub,
+              aitJti: claims.jti,
+            }),
+          });
+        } catch (error) {
+          throw dependencyUnavailableError({
+            message: "Registry agent auth validation is unavailable",
+            details: {
+              reason: toErrorMessage(error),
+            },
+          });
+        }
+
+        if (validateResponse.status === 401) {
+          throw unauthorizedError({
+            code: "PROXY_AGENT_ACCESS_INVALID",
+            message: "Agent access token is invalid or expired",
+          });
+        }
+
+        if (validateResponse.status !== 204) {
+          throw dependencyUnavailableError({
+            message: "Registry agent auth validation is unavailable",
+            details: {
+              status: validateResponse.status,
+            },
+          });
+        }
+      }
+
+      c.set("auth", {
+        agentDid: claims.sub,
+        ownerDid: claims.ownerDid,
+        aitJti: claims.jti,
+        issuer: claims.iss,
+        cnfPublicKey: claims.cnf.jwk.x,
+      });
+
+      options.logger.info("proxy.auth.verified", {
+        agentDid: claims.sub,
+        ownerDid: claims.ownerDid,
+        jti: claims.jti,
+      });
+
+      await next();
+    },
+  );
+}
diff --git a/apps/proxy/src/auth-middleware/registry-keys.ts b/apps/proxy/src/auth-middleware/registry-keys.ts
new file mode 100644
index 0000000..baddbef
--- /dev/null
+++ b/apps/proxy/src/auth-middleware/registry-keys.ts
@@ -0,0 +1,57 @@
+import { parseRegistryConfig } from "@clawdentity/sdk";
+import { dependencyUnavailableError, toErrorMessage } from "./errors.js";
+import type { RegistrySigningKey, VerificationKey } from "./types.js";
+
+function isRecord(value: unknown): value is Record<string, unknown> {
+  return typeof value === "object" && value !== null;
+}
+
+export function parseRegistrySigningKeys(
+  payload: unknown,
+): RegistrySigningKey[] {
+  if (!isRecord(payload) || !Array.isArray(payload.keys)) {
+    throw dependencyUnavailableError({
+      message: "Registry signing keys 
payload is invalid",
+    });
+  }
+
+  const parsed = (() => {
+    try {
+      return parseRegistryConfig({
+        ENVIRONMENT: "test",
+        REGISTRY_SIGNING_KEYS: JSON.stringify(payload.keys),
+      });
+    } catch (error) {
+      throw dependencyUnavailableError({
+        message: "Registry signing keys are invalid",
+        details: {
+          reason: toErrorMessage(error),
+        },
+      });
+    }
+  })();
+
+  const keys = parsed.REGISTRY_SIGNING_KEYS ?? [];
+  if (keys.length === 0) {
+    throw dependencyUnavailableError({
+      message: "Registry signing keys are unavailable",
+    });
+  }
+
+  return keys;
+}
+
+export function toVerificationKeys(
+  keys: RegistrySigningKey[],
+): VerificationKey[] {
+  return keys
+    .filter((key) => key.status === "active")
+    .map((key) => ({
+      kid: key.kid,
+      jwk: {
+        kty: "OKP",
+        crv: "Ed25519",
+        x: key.x,
+      },
+    }));
+}
diff --git a/apps/proxy/src/auth-middleware/request-auth.ts b/apps/proxy/src/auth-middleware/request-auth.ts
new file mode 100644
index 0000000..3213525
--- /dev/null
+++ b/apps/proxy/src/auth-middleware/request-auth.ts
@@ -0,0 +1,86 @@
+import type { VerifyHttpRequestInput } from "@clawdentity/sdk";
+import { unauthorizedError } from "./errors.js";
+
+export function parseClawAuthorizationHeader(authorization?: string): string {
+  if (typeof authorization !== "string" || authorization.trim().length === 0) {
+    throw unauthorizedError({
+      code: "PROXY_AUTH_MISSING_TOKEN",
+      message: "Authorization header is required",
+    });
+  }
+
+  const parsed = authorization.trim().match(/^Claw\s+(\S+)$/);
+  if (!parsed || parsed[1].trim().length === 0) {
+    throw unauthorizedError({
+      code: "PROXY_AUTH_INVALID_SCHEME",
+      message: "Authorization must be in the format 'Claw <token>'",
+    });
+  }
+
+  return parsed[1].trim();
+}
+
+export function parseAgentAccessHeader(value: string | undefined): string {
+  if (typeof value !== "string" || value.trim().length === 0) {
+    throw unauthorizedError({
+      code: "PROXY_AGENT_ACCESS_REQUIRED",
+      message: "X-Claw-Agent-Access header is required",
+    });
+  }
+
+ return value.trim(); +} + +export function parseUnixTimestamp(headerValue: string): number { + if (!/^\d+$/.test(headerValue)) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_TIMESTAMP", + message: "X-Claw-Timestamp must be a unix seconds integer", + }); + } + + const timestamp = Number.parseInt(headerValue, 10); + if (!Number.isInteger(timestamp) || timestamp < 0) { + throw unauthorizedError({ + code: "PROXY_AUTH_INVALID_TIMESTAMP", + message: "X-Claw-Timestamp must be a unix seconds integer", + }); + } + + return timestamp; +} + +export function assertTimestampWithinSkew(options: { + clock: () => number; + maxSkewSeconds: number; + timestampSeconds: number; +}): void { + const nowSeconds = Math.floor(options.clock() / 1000); + const skew = Math.abs(nowSeconds - options.timestampSeconds); + if (skew > options.maxSkewSeconds) { + throw unauthorizedError({ + code: "PROXY_AUTH_TIMESTAMP_SKEW", + message: "X-Claw-Timestamp is outside the allowed skew window", + details: { + maxSkewSeconds: options.maxSkewSeconds, + }, + }); + } +} + +export function toProofVerificationInput(input: { + method: string; + pathWithQuery: string; + headers: Headers; + body: Uint8Array; + publicKey: Uint8Array; +}): VerifyHttpRequestInput { + const headers = Object.fromEntries(input.headers.entries()); + return { + method: input.method, + pathWithQuery: input.pathWithQuery, + headers, + body: input.body, + publicKey: input.publicKey, + }; +} diff --git a/apps/proxy/src/auth-middleware/types.ts b/apps/proxy/src/auth-middleware/types.ts new file mode 100644 index 0000000..36b28ce --- /dev/null +++ b/apps/proxy/src/auth-middleware/types.ts @@ -0,0 +1,57 @@ +import type { + CrlCache, + Logger, + NonceCache, + RequestContextVariables, +} from "@clawdentity/sdk"; +import type { ProxyConfig } from "../config.js"; +import type { ProxyTrustStore } from "../proxy-trust-store.js"; + +export const DEFAULT_REGISTRY_KEYS_CACHE_TTL_MS = 60 * 60 * 1000; +export const 
DEFAULT_MAX_TIMESTAMP_SKEW_SECONDS = 300; + +export type RegistrySigningKey = { + kid: string; + alg: "EdDSA"; + crv: "Ed25519"; + x: string; + status: "active" | "revoked"; +}; + +export type VerificationKey = { + kid: string; + jwk: { + kty: "OKP"; + crv: "Ed25519"; + x: string; + }; +}; + +export type ProxyAuthContext = { + agentDid: string; + ownerDid: string; + aitJti: string; + issuer: string; + cnfPublicKey: string; +}; + +export type ProxyRequestVariables = RequestContextVariables & { + auth?: ProxyAuthContext; +}; + +export type ProxyAuthMiddlewareOptions = { + config: ProxyConfig; + logger: Logger; + trustStore: ProxyTrustStore; + fetchImpl?: typeof fetch; + clock?: () => number; + nonceCache?: NonceCache; + crlCache?: CrlCache; + maxTimestampSkewSeconds?: number; + registryKeysCacheTtlMs?: number; +}; + +export type RegistryKeysCache = { + fetchedAtMs: number; + keys: VerificationKey[]; +}; diff --git a/apps/proxy/src/auth-middleware/url.ts b/apps/proxy/src/auth-middleware/url.ts new file mode 100644 index 0000000..af9e0ed --- /dev/null +++ b/apps/proxy/src/auth-middleware/url.ts @@ -0,0 +1,58 @@ +import { RELAY_CONNECT_PATH } from "@clawdentity/protocol"; +import { AppError } from "@clawdentity/sdk"; +import { + PAIR_CONFIRM_PATH, + PAIR_START_PATH, + PAIR_STATUS_PATH, +} from "../pairing-constants.js"; + +export function toPathWithQuery(url: string): string { + const parsed = new URL(url, "http://localhost"); + return `${parsed.pathname}${parsed.search}`; +} + +export function normalizeRegistryUrl(registryUrl: string): string { + try { + return new URL(registryUrl).toString(); + } catch { + throw new AppError({ + code: "PROXY_AUTH_INVALID_REGISTRY_URL", + message: "Proxy registry URL is invalid", + status: 500, + expose: true, + }); + } +} + +export function toRegistryUrl(registryUrl: string, path: string): string { + const normalizedBaseUrl = registryUrl.endsWith("/") + ? 
registryUrl + : `${registryUrl}/`; + return new URL(path, normalizedBaseUrl).toString(); +} + +export function resolveExpectedIssuer(registryUrl: string): string | undefined { + try { + const hostname = new URL(registryUrl).hostname; + if (hostname === "registry.clawdentity.com") { + return "https://registry.clawdentity.com"; + } + + if (hostname === "dev.registry.clawdentity.com") { + return "https://dev.registry.clawdentity.com"; + } + + return undefined; + } catch { + return undefined; + } +} + +export function shouldSkipKnownAgentCheck(path: string): boolean { + return ( + path === PAIR_START_PATH || + path === PAIR_CONFIRM_PATH || + path === PAIR_STATUS_PATH || + path === RELAY_CONNECT_PATH + ); +} diff --git a/apps/registry/src/server.test/agents-delete.test.ts b/apps/registry/src/server.test/agents-delete.test.ts new file mode 100644 index 0000000..9806a09 --- /dev/null +++ b/apps/registry/src/server.test/agents-delete.test.ts @@ -0,0 +1,215 @@ +import { generateUlid, makeAgentDid } from "@clawdentity/protocol"; +import { describe, expect, it } from "vitest"; +import { createRegistryApp } from "../server.js"; +import { createFakeDb, makeValidPatContext } from "./helpers.js"; + +describe("DELETE /v1/agents/:id", () => { + it("returns 401 when PAT is missing", async () => { + const agentId = generateUlid(1700200000000); + const res = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + }, + { DB: {} as D1Database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(401); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("API_KEY_MISSING"); + }); + + it("returns 404 when agent does not exist", async () => { + const { token, authRow } = await makeValidPatContext(); + const { database, agentUpdates, revocationInserts } = createFakeDb([ + authRow, + ]); + const agentId = generateUlid(1700200000100); + + const res = await createRegistryApp().request( + 
`/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string; message: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("returns 404 when agent is owned by another human", async () => { + const { token, authRow } = await makeValidPatContext(); + const foreignAgentId = generateUlid(1700200000200); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: foreignAgentId, + did: makeAgentDid(foreignAgentId), + ownerId: "human-2", + name: "foreign-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: generateUlid(1700200000201), + }, + ], + ); + + const res = await createRegistryApp().request( + `/v1/agents/${foreignAgentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(404); + const body = (await res.json()) as { + error: { code: string }; + }; + expect(body.error.code).toBe("AGENT_NOT_FOUND"); + expect(agentUpdates).toHaveLength(0); + expect(revocationInserts).toHaveLength(0); + }); + + it("revokes owned agent and inserts revocation record", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000300); + const agentJti = generateUlid(1700200000301); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: agentJti, + }, + ], + ); + + const res = await createRegistryApp().request( + 
`/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(res.status).toBe(204); + expect(agentUpdates).toHaveLength(1); + expect(agentUpdates[0]).toMatchObject({ + id: agentId, + status: "revoked", + updated_at: expect.any(String), + }); + expect(revocationInserts).toHaveLength(1); + expect(revocationInserts[0]).toMatchObject({ + agent_id: agentId, + jti: agentJti, + reason: null, + revoked_at: expect.any(String), + }); + }); + + it("is idempotent for repeat revoke requests", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000400); + const agentJti = generateUlid(1700200000401); + const { database, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: "2026-04-01T00:00:00.000Z", + currentJti: agentJti, + }, + ], + ); + + const first = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + const second = await createRegistryApp().request( + `/v1/agents/${agentId}`, + { + method: "DELETE", + headers: { Authorization: `Bearer ${token}` }, + }, + { DB: database, ENVIRONMENT: "test" }, + ); + + expect(first.status).toBe(204); + expect(second.status).toBe(204); + expect(revocationInserts).toHaveLength(1); + }); + + it("returns 409 when owned agent has missing current_jti", async () => { + const { token, authRow } = await makeValidPatContext(); + const agentId = generateUlid(1700200000500); + const { database, agentUpdates, revocationInserts } = createFakeDb( + [authRow], + [ + { + id: agentId, + did: makeAgentDid(agentId), + ownerId: "human-1", + name: "owned-agent", + framework: "openclaw", + status: "active", + expiresAt: 
"2026-04-01T00:00:00.000Z",
+          currentJti: null,
+        },
+      ],
+    );
+
+    const res = await createRegistryApp().request(
+      `/v1/agents/${agentId}`,
+      {
+        method: "DELETE",
+        headers: { Authorization: `Bearer ${token}` },
+      },
+      { DB: database, ENVIRONMENT: "test" },
+    );
+
+    expect(res.status).toBe(409);
+    const body = (await res.json()) as {
+      error: {
+        code: string;
+        details?: { fieldErrors?: Record<string, string[]> };
+      };
+    };
+    expect(body.error.code).toBe("AGENT_REVOKE_INVALID_STATE");
+    expect(body.error.details?.fieldErrors).toMatchObject({
+      currentJti: expect.any(Array),
+    });
+    expect(agentUpdates).toHaveLength(0);
+    expect(revocationInserts).toHaveLength(0);
+  });
+});
diff --git a/apps/registry/src/server.test/agents-delete-reissue.test.ts b/apps/registry/src/server.test/agents-reissue.test.ts
similarity index 69%
rename from apps/registry/src/server.test/agents-delete-reissue.test.ts
rename to apps/registry/src/server.test/agents-reissue.test.ts
index ac6d18b..0de7d08 100644
--- a/apps/registry/src/server.test/agents-delete-reissue.test.ts
+++ b/apps/registry/src/server.test/agents-reissue.test.ts
@@ -8,217 +8,6 @@ import { describe, expect, it } from "vitest";
 import { createRegistryApp } from "../server.js";
 import { createFakeDb, makeValidPatContext } from "./helpers.js";
 
-describe("DELETE /v1/agents/:id", () => {
-  it("returns 401 when PAT is missing", async () => {
-    const agentId = generateUlid(1700200000000);
-    const res = await createRegistryApp().request(
-      `/v1/agents/${agentId}`,
-      {
-        method: "DELETE",
-      },
-      { DB: {} as D1Database, ENVIRONMENT: "test" },
-    );
-
-    expect(res.status).toBe(401);
-    const body = (await res.json()) as {
-      error: { code: string };
-    };
-    expect(body.error.code).toBe("API_KEY_MISSING");
-  });
-
-  it("returns 404 when agent does not exist", async () => {
-    const { token, authRow } = await makeValidPatContext();
-    const { database, agentUpdates, revocationInserts } = createFakeDb([
-      authRow,
-    ]);
-    const agentId = 
generateUlid(1700200000100); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { - error: { code: string; message: string }; - }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("returns 404 when agent is owned by another human", async () => { - const { token, authRow } = await makeValidPatContext(); - const foreignAgentId = generateUlid(1700200000200); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: foreignAgentId, - did: makeAgentDid(foreignAgentId), - ownerId: "human-2", - name: "foreign-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: generateUlid(1700200000201), - }, - ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${foreignAgentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(404); - const body = (await res.json()) as { - error: { code: string }; - }; - expect(body.error.code).toBe("AGENT_NOT_FOUND"); - expect(agentUpdates).toHaveLength(0); - expect(revocationInserts).toHaveLength(0); - }); - - it("revokes owned agent and inserts revocation record", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700200000300); - const agentJti = generateUlid(1700200000301); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: agentJti, - }, 
- ], - ); - - const res = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(res.status).toBe(204); - expect(agentUpdates).toHaveLength(1); - expect(agentUpdates[0]).toMatchObject({ - id: agentId, - status: "revoked", - updated_at: expect.any(String), - }); - expect(revocationInserts).toHaveLength(1); - expect(revocationInserts[0]).toMatchObject({ - agent_id: agentId, - jti: agentJti, - reason: null, - revoked_at: expect.any(String), - }); - }); - - it("is idempotent for repeat revoke requests", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700200000400); - const agentJti = generateUlid(1700200000401); - const { database, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - framework: "openclaw", - status: "active", - expiresAt: "2026-04-01T00:00:00.000Z", - currentJti: agentJti, - }, - ], - ); - - const first = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - const second = await createRegistryApp().request( - `/v1/agents/${agentId}`, - { - method: "DELETE", - headers: { Authorization: `Bearer ${token}` }, - }, - { DB: database, ENVIRONMENT: "test" }, - ); - - expect(first.status).toBe(204); - expect(second.status).toBe(204); - expect(revocationInserts).toHaveLength(1); - }); - - it("returns 409 when owned agent has missing current_jti", async () => { - const { token, authRow } = await makeValidPatContext(); - const agentId = generateUlid(1700200000500); - const { database, agentUpdates, revocationInserts } = createFakeDb( - [authRow], - [ - { - id: agentId, - did: makeAgentDid(agentId), - ownerId: "human-1", - name: "owned-agent", - 
framework: "openclaw",
-          status: "active",
-          expiresAt: "2026-04-01T00:00:00.000Z",
-          currentJti: null,
-        },
-      ],
-    );
-
-    const res = await createRegistryApp().request(
-      `/v1/agents/${agentId}`,
-      {
-        method: "DELETE",
-        headers: { Authorization: `Bearer ${token}` },
-      },
-      { DB: database, ENVIRONMENT: "test" },
-    );
-
-    expect(res.status).toBe(409);
-    const body = (await res.json()) as {
-      error: {
-        code: string;
-        details?: { fieldErrors?: Record<string, string[]> };
-      };
-    };
-    expect(body.error.code).toBe("AGENT_REVOKE_INVALID_STATE");
-    expect(body.error.details?.fieldErrors).toMatchObject({
-      currentJti: expect.any(Array),
-    });
-    expect(agentUpdates).toHaveLength(0);
-    expect(revocationInserts).toHaveLength(0);
-  });
-});
-
 describe("POST /v1/agents/:id/reissue", () => {
   it("returns 401 when PAT is missing", async () => {
     const agentId = generateUlid(1700300000000);
diff --git a/apps/registry/src/server.test/helpers/db/AGENTS.md b/apps/registry/src/server.test/helpers/db/AGENTS.md
new file mode 100644
index 0000000..a6dc07d
--- /dev/null
+++ b/apps/registry/src/server.test/helpers/db/AGENTS.md
@@ -0,0 +1,12 @@
+# AGENTS.md (apps/registry/src/server.test/helpers/db)
+
+## Purpose
+- Keep fake D1 helpers deterministic and easy to extend for server tests.
+
+## Rules
+- Keep SQL-shape parsing (`parse.ts`) pure and reusable.
+- Keep query branch orchestration in `mock.ts`; move entity-specific row projection/filter logic into resolver modules.
+- Keep each resolver file scoped to one table/domain (`agents`, `api-keys`, `humans`, `invites`, `crl`, auth sessions, registration challenges).
+- Preserve backward-compatible exports from `resolvers.ts` as the public resolver entrypoint for test harness imports.
+- When adding a new fake table, add a dedicated resolver module and re-export it from `resolvers/index.ts`.
+- Avoid embedding clock/random side effects in resolver functions.
diff --git a/apps/registry/src/server.test/helpers/db/resolvers.ts b/apps/registry/src/server.test/helpers/db/resolvers.ts index 7eb7f9a..5b41f09 100644 --- a/apps/registry/src/server.test/helpers/db/resolvers.ts +++ b/apps/registry/src/server.test/helpers/db/resolvers.ts @@ -1,668 +1 @@ -import { encodeBase64url } from "@clawdentity/protocol"; -import { - extractWhereClause, - hasFilter, - parseWhereEqualityParams, -} from "./parse.js"; -import type { - FakeAgentAuthSessionRow, - FakeAgentRegistrationChallengeRow, - FakeAgentRow, - FakeAgentSelectRow, - FakeApiKeyRow, - FakeApiKeySelectRow, - FakeCrlSelectRow, - FakeD1Row, - FakeHumanRow, - FakeInviteRow, - FakeRevocationRow, -} from "./types.js"; - -export function createFakePublicKey(agentId: string): string { - const seed = agentId.length > 0 ? agentId : "agent"; - const bytes = new Uint8Array(32); - - for (let index = 0; index < bytes.length; index += 1) { - bytes[index] = seed.charCodeAt(index % seed.length) & 0xff; - } - - return encodeBase64url(bytes); -} - -export function getAgentSelectColumnValue( - row: FakeAgentSelectRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "did") { - return row.did; - } - if (column === "owner_id") { - return row.owner_id; - } - if (column === "owner_did") { - return row.owner_did; - } - if (column === "name") { - return row.name; - } - if (column === "framework") { - return row.framework; - } - if (column === "public_key") { - return row.public_key; - } - if (column === "status") { - return row.status; - } - if (column === "expires_at") { - return row.expires_at; - } - if (column === "current_jti") { - return row.current_jti; - } - if (column === "created_at") { - return row.created_at; - } - if (column === "updated_at") { - return row.updated_at; - } - return undefined; -} - -export function getAgentRegistrationChallengeSelectColumnValue( - row: FakeAgentRegistrationChallengeRow, - column: string, -): unknown { - if (column 
=== "id") { - return row.id; - } - if (column === "owner_id") { - return row.ownerId; - } - if (column === "public_key") { - return row.publicKey; - } - if (column === "nonce") { - return row.nonce; - } - if (column === "status") { - return row.status; - } - if (column === "expires_at") { - return row.expiresAt; - } - if (column === "used_at") { - return row.usedAt; - } - if (column === "created_at") { - return row.createdAt; - } - if (column === "updated_at") { - return row.updatedAt; - } - return undefined; -} - -export function getHumanSelectColumnValue( - row: FakeHumanRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "did") { - return row.did; - } - if (column === "display_name") { - return row.displayName; - } - if (column === "role") { - return row.role; - } - if (column === "status") { - return row.status; - } - if (column === "created_at") { - return row.createdAt; - } - if (column === "updated_at") { - return row.updatedAt; - } - return undefined; -} - -export function resolveHumanSelectRows(options: { - query: string; - params: unknown[]; - humanRows: FakeHumanRow[]; -}): FakeHumanRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - - const roleFilter = - typeof equalityParams.values.role?.[0] === "string" - ? String(equalityParams.values.role[0]) - : undefined; - const statusFilter = - typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const idFilter = - typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const didFilter = - typeof equalityParams.values.did?.[0] === "string" - ? String(equalityParams.values.did[0]) - : undefined; - - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - const maybeLimit = hasLimitClause - ? 
Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.humanRows.length; - - return options.humanRows - .filter((row) => (roleFilter ? row.role === roleFilter : true)) - .filter((row) => (statusFilter ? row.status === statusFilter : true)) - .filter((row) => (idFilter ? row.id === idFilter : true)) - .filter((row) => (didFilter ? row.did === didFilter : true)) - .slice(0, limit); -} - -export function getApiKeySelectColumnValue( - row: FakeApiKeySelectRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "human_id") { - return row.human_id; - } - if (column === "key_hash") { - return row.key_hash; - } - if (column === "key_prefix") { - return row.key_prefix; - } - if (column === "name") { - return row.name; - } - if (column === "status") { - return row.status; - } - if (column === "created_at") { - return row.created_at; - } - if (column === "last_used_at") { - return row.last_used_at; - } - return undefined; -} - -export function resolveApiKeySelectRows(options: { - query: string; - params: unknown[]; - apiKeyRows: FakeApiKeyRow[]; -}): FakeApiKeySelectRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasHumanIdFilter = hasFilter(whereClause, "human_id"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasPrefixFilter = hasFilter(whereClause, "key_prefix"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - const orderByCreatedAtDesc = - options.query.toLowerCase().includes("order by") && - options.query.toLowerCase().includes("created_at") && - options.query.toLowerCase().includes("desc"); - - const humanId = - hasHumanIdFilter && typeof equalityParams.values.human_id?.[0] === "string" - ? 
String(equalityParams.values.human_id[0]) - : undefined; - const id = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const status = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const keyPrefix = - hasPrefixFilter && typeof equalityParams.values.key_prefix?.[0] === "string" - ? String(equalityParams.values.key_prefix[0]) - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.apiKeyRows.length; - - const rows = options.apiKeyRows - .filter((row) => (humanId ? row.humanId === humanId : true)) - .filter((row) => (id ? row.id === id : true)) - .filter((row) => (status ? row.status === status : true)) - .filter((row) => (keyPrefix ? row.keyPrefix === keyPrefix : true)) - .map((row) => ({ - id: row.id, - human_id: row.humanId, - key_hash: row.keyHash, - key_prefix: row.keyPrefix, - name: row.name, - status: row.status, - created_at: row.createdAt, - last_used_at: row.lastUsedAt, - })); - - if (orderByCreatedAtDesc) { - rows.sort((left, right) => { - const createdAtCompare = right.created_at.localeCompare(left.created_at); - if (createdAtCompare !== 0) { - return createdAtCompare; - } - return right.id.localeCompare(left.id); - }); - } - - return rows.slice(0, limit); -} - -export function getAgentAuthSessionSelectColumnValue( - row: FakeAgentAuthSessionRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "agent_id") { - return row.agentId; - } - if (column === "refresh_key_hash") { - return row.refreshKeyHash; - } - if (column === "refresh_key_prefix") { - return row.refreshKeyPrefix; - } - if (column === "refresh_issued_at") { - return row.refreshIssuedAt; - } - if (column === "refresh_expires_at") { - return 
row.refreshExpiresAt; - } - if (column === "refresh_last_used_at") { - return row.refreshLastUsedAt; - } - if (column === "access_key_hash") { - return row.accessKeyHash; - } - if (column === "access_key_prefix") { - return row.accessKeyPrefix; - } - if (column === "access_issued_at") { - return row.accessIssuedAt; - } - if (column === "access_expires_at") { - return row.accessExpiresAt; - } - if (column === "access_last_used_at") { - return row.accessLastUsedAt; - } - if (column === "status") { - return row.status; - } - if (column === "revoked_at") { - return row.revokedAt; - } - if (column === "created_at") { - return row.createdAt; - } - if (column === "updated_at") { - return row.updatedAt; - } - return undefined; -} - -export function resolveAgentAuthSessionSelectRows(options: { - query: string; - params: unknown[]; - sessionRows: FakeAgentAuthSessionRow[]; -}): FakeAgentAuthSessionRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasAgentIdFilter = hasFilter(whereClause, "agent_id"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasRefreshPrefixFilter = hasFilter(whereClause, "refresh_key_prefix"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - - const agentId = - hasAgentIdFilter && typeof equalityParams.values.agent_id?.[0] === "string" - ? String(equalityParams.values.agent_id[0]) - : undefined; - const id = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const status = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - const refreshPrefix = - hasRefreshPrefixFilter && - typeof equalityParams.values.refresh_key_prefix?.[0] === "string" - ? 
String(equalityParams.values.refresh_key_prefix[0]) - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.sessionRows.length; - - return options.sessionRows - .filter((row) => (agentId ? row.agentId === agentId : true)) - .filter((row) => (id ? row.id === id : true)) - .filter((row) => (status ? row.status === status : true)) - .filter((row) => - refreshPrefix ? row.refreshKeyPrefix === refreshPrefix : true, - ) - .slice(0, limit); -} - -export function resolveAgentSelectRows(options: { - query: string; - params: unknown[]; - authRows: FakeD1Row[]; - agentRows: FakeAgentRow[]; -}): FakeAgentSelectRow[] { - const normalizedQuery = options.query.toLowerCase(); - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasOwnerFilter = hasFilter(whereClause, "owner_id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasFrameworkFilter = hasFilter(whereClause, "framework"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasDidFilter = hasFilter(whereClause, "did"); - const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); - const hasCursorFilter = hasFilter(whereClause, "id", "<"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - const requiresHumanJoin = - normalizedQuery.includes('join "humans"') || - normalizedQuery.includes("join humans"); - - const ownerId = - hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" - ? String(equalityParams.values.owner_id?.[0]) - : undefined; - const statusFilter = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? 
String(equalityParams.values.status?.[0]) - : undefined; - const frameworkFilter = - hasFrameworkFilter && - typeof equalityParams.values.framework?.[0] === "string" - ? String(equalityParams.values.framework?.[0]) - : undefined; - const idFilter = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id?.[0]) - : undefined; - const didFilter = - hasDidFilter && typeof equalityParams.values.did?.[0] === "string" - ? String(equalityParams.values.did?.[0]) - : undefined; - const currentJtiFilter = hasCurrentJtiFilter - ? (equalityParams.values.current_jti?.[0] as string | null | undefined) - : undefined; - const cursorFilter = hasCursorFilter - ? String(options.params[equalityParams.consumedParams] ?? "") - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.agentRows.length; - - const filteredRows = options.agentRows - .filter((row) => (ownerId ? row.ownerId === ownerId : true)) - .filter((row) => (statusFilter ? row.status === statusFilter : true)) - .filter((row) => - frameworkFilter ? row.framework === frameworkFilter : true, - ) - .filter((row) => (idFilter ? row.id === idFilter : true)) - .filter((row) => (didFilter ? row.did === didFilter : true)) - .filter((row) => - currentJtiFilter !== undefined - ? (row.currentJti ?? null) === currentJtiFilter - : true, - ) - .filter((row) => (cursorFilter ? row.id < cursorFilter : true)) - .sort((left, right) => right.id.localeCompare(left.id)) - .map((row) => { - const ownerDid = options.authRows.find( - (authRow) => authRow.humanId === row.ownerId, - )?.humanDid; - - return { - id: row.id, - did: row.did, - owner_id: row.ownerId, - owner_did: ownerDid ?? "", - name: row.name, - framework: row.framework, - public_key: row.publicKey ?? 
createFakePublicKey(row.id), - status: row.status, - expires_at: row.expiresAt, - current_jti: row.currentJti ?? null, - created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", - updated_at: row.updatedAt ?? "2026-01-01T00:00:00.000Z", - }; - }) - .filter((row) => (requiresHumanJoin ? row.owner_did.length > 0 : true)) - .slice(0, limit); - - return filteredRows; -} - -export function resolveAgentRegistrationChallengeSelectRows(options: { - query: string; - params: unknown[]; - challengeRows: FakeAgentRegistrationChallengeRow[]; -}): FakeAgentRegistrationChallengeRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasOwnerFilter = hasFilter(whereClause, "owner_id"); - const hasChallengeIdFilter = hasFilter(whereClause, "id"); - const hasStatusFilter = hasFilter(whereClause, "status"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - - const ownerId = - hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" - ? String(equalityParams.values.owner_id[0]) - : undefined; - const challengeId = - hasChallengeIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const status = - hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" - ? String(equalityParams.values.status[0]) - : undefined; - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.challengeRows.length; - - return options.challengeRows - .filter((row) => (ownerId ? row.ownerId === ownerId : true)) - .filter((row) => (challengeId ? row.id === challengeId : true)) - .filter((row) => (status ? 
row.status === status : true)) - .slice(0, limit); -} - -export function getInviteSelectColumnValue( - row: FakeInviteRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "code") { - return row.code; - } - if (column === "created_by") { - return row.createdBy; - } - if (column === "redeemed_by") { - return row.redeemedBy; - } - if (column === "agent_id") { - return row.agentId; - } - if (column === "expires_at") { - return row.expiresAt; - } - if (column === "created_at") { - return row.createdAt; - } - return undefined; -} - -export function resolveInviteSelectRows(options: { - query: string; - params: unknown[]; - inviteRows: FakeInviteRow[]; -}): FakeInviteRow[] { - const whereClause = extractWhereClause(options.query); - const equalityParams = parseWhereEqualityParams({ - whereClause, - params: options.params, - }); - const hasCodeFilter = hasFilter(whereClause, "code"); - const hasIdFilter = hasFilter(whereClause, "id"); - const hasRedeemedByFilter = hasFilter(whereClause, "redeemed_by"); - const hasLimitClause = options.query.toLowerCase().includes(" limit "); - - const codeFilter = - hasCodeFilter && typeof equalityParams.values.code?.[0] === "string" - ? String(equalityParams.values.code[0]) - : undefined; - const idFilter = - hasIdFilter && typeof equalityParams.values.id?.[0] === "string" - ? String(equalityParams.values.id[0]) - : undefined; - const redeemedByFilter = hasRedeemedByFilter - ? (equalityParams.values.redeemed_by?.[0] as string | null | undefined) - : undefined; - - const requiresRedeemedByNull = - whereClause.includes("redeemed_by") && whereClause.includes("is null"); - - const maybeLimit = hasLimitClause - ? Number(options.params[options.params.length - 1]) - : Number.NaN; - const limit = Number.isFinite(maybeLimit) - ? maybeLimit - : options.inviteRows.length; - - return options.inviteRows - .filter((row) => (codeFilter ? row.code === codeFilter : true)) - .filter((row) => (idFilter ? 
row.id === idFilter : true)) - .filter((row) => - redeemedByFilter !== undefined - ? row.redeemedBy === redeemedByFilter - : true, - ) - .filter((row) => (requiresRedeemedByNull ? row.redeemedBy === null : true)) - .slice(0, limit); -} - -export function getCrlSelectColumnValue( - row: FakeCrlSelectRow, - column: string, -): unknown { - if (column === "id") { - return row.id; - } - if (column === "jti") { - return row.jti; - } - if (column === "reason") { - return row.reason; - } - if (column === "revoked_at") { - return row.revoked_at; - } - if (column === "revokedat") { - return row.revoked_at; - } - if (column === "agent_did") { - return row.agent_did; - } - if (column === "agentdid" || column === "did") { - return row.did; - } - return undefined; -} - -export function resolveCrlSelectRows(options: { - agentRows: FakeAgentRow[]; - revocationRows: FakeRevocationRow[]; -}): FakeCrlSelectRow[] { - return options.revocationRows - .map((row) => { - const agent = options.agentRows.find( - (agentRow) => agentRow.id === row.agentId, - ); - if (!agent) { - return null; - } - - return { - id: row.id, - jti: row.jti, - reason: row.reason, - revoked_at: row.revokedAt, - agent_did: agent.did, - did: agent.did, - }; - }) - .filter((row): row is FakeCrlSelectRow => row !== null) - .sort((left, right) => { - const timestampCompare = right.revoked_at.localeCompare(left.revoked_at); - if (timestampCompare !== 0) { - return timestampCompare; - } - return right.id.localeCompare(left.id); - }); -} +export * from "./resolvers/index.js"; diff --git a/apps/registry/src/server.test/helpers/db/resolvers/AGENTS.md b/apps/registry/src/server.test/helpers/db/resolvers/AGENTS.md new file mode 100644 index 0000000..1d2f83d --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/AGENTS.md @@ -0,0 +1,11 @@ +# AGENTS.md (apps/registry/src/server.test/helpers/db/resolvers) + +## Purpose +- Provide single-responsibility resolver modules for fake DB select behavior. 
+ +## Rules +- One file per entity/resolver concern. +- Keep column mapping helpers (`get*SelectColumnValue`) and row resolvers (`resolve*SelectRows`) together per entity. +- Keep functions data-in/data-out only; no external state. +- Re-export all resolver APIs from `index.ts` and keep naming consistent for discoverability. +- If SQL filter parsing needs new behavior, extend shared parser helpers instead of duplicating condition parsing in multiple modules. diff --git a/apps/registry/src/server.test/helpers/db/resolvers/agent-auth-sessions.ts b/apps/registry/src/server.test/helpers/db/resolvers/agent-auth-sessions.ts new file mode 100644 index 0000000..e18cc43 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/agent-auth-sessions.ts @@ -0,0 +1,112 @@ +import { + extractWhereClause, + hasFilter, + parseWhereEqualityParams, +} from "../parse.js"; +import type { FakeAgentAuthSessionRow } from "../types.js"; + +export function getAgentAuthSessionSelectColumnValue( + row: FakeAgentAuthSessionRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "agent_id") { + return row.agentId; + } + if (column === "refresh_key_hash") { + return row.refreshKeyHash; + } + if (column === "refresh_key_prefix") { + return row.refreshKeyPrefix; + } + if (column === "refresh_issued_at") { + return row.refreshIssuedAt; + } + if (column === "refresh_expires_at") { + return row.refreshExpiresAt; + } + if (column === "refresh_last_used_at") { + return row.refreshLastUsedAt; + } + if (column === "access_key_hash") { + return row.accessKeyHash; + } + if (column === "access_key_prefix") { + return row.accessKeyPrefix; + } + if (column === "access_issued_at") { + return row.accessIssuedAt; + } + if (column === "access_expires_at") { + return row.accessExpiresAt; + } + if (column === "access_last_used_at") { + return row.accessLastUsedAt; + } + if (column === "status") { + return row.status; + } + if (column === "revoked_at") { + 
return row.revokedAt; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +export function resolveAgentAuthSessionSelectRows(options: { + query: string; + params: unknown[]; + sessionRows: FakeAgentAuthSessionRow[]; +}): FakeAgentAuthSessionRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasAgentIdFilter = hasFilter(whereClause, "agent_id"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasRefreshPrefixFilter = hasFilter(whereClause, "refresh_key_prefix"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const agentId = + hasAgentIdFilter && typeof equalityParams.values.agent_id?.[0] === "string" + ? String(equalityParams.values.agent_id[0]) + : undefined; + const id = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const refreshPrefix = + hasRefreshPrefixFilter && + typeof equalityParams.values.refresh_key_prefix?.[0] === "string" + ? String(equalityParams.values.refresh_key_prefix[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.sessionRows.length; + + return options.sessionRows + .filter((row) => (agentId ? row.agentId === agentId : true)) + .filter((row) => (id ? row.id === id : true)) + .filter((row) => (status ? row.status === status : true)) + .filter((row) => + refreshPrefix ? 
row.refreshKeyPrefix === refreshPrefix : true, + ) + .slice(0, limit); +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers/agent-registration-challenges.ts b/apps/registry/src/server.test/helpers/db/resolvers/agent-registration-challenges.ts new file mode 100644 index 0000000..0ae572c --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/agent-registration-challenges.ts @@ -0,0 +1,82 @@ +import { + extractWhereClause, + hasFilter, + parseWhereEqualityParams, +} from "../parse.js"; +import type { FakeAgentRegistrationChallengeRow } from "../types.js"; + +export function getAgentRegistrationChallengeSelectColumnValue( + row: FakeAgentRegistrationChallengeRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "owner_id") { + return row.ownerId; + } + if (column === "public_key") { + return row.publicKey; + } + if (column === "nonce") { + return row.nonce; + } + if (column === "status") { + return row.status; + } + if (column === "expires_at") { + return row.expiresAt; + } + if (column === "used_at") { + return row.usedAt; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +export function resolveAgentRegistrationChallengeSelectRows(options: { + query: string; + params: unknown[]; + challengeRows: FakeAgentRegistrationChallengeRow[]; +}): FakeAgentRegistrationChallengeRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasOwnerFilter = hasFilter(whereClause, "owner_id"); + const hasChallengeIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const ownerId = + hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" + ? 
String(equalityParams.values.owner_id[0]) + : undefined; + const challengeId = + hasChallengeIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.challengeRows.length; + + return options.challengeRows + .filter((row) => (ownerId ? row.ownerId === ownerId : true)) + .filter((row) => (challengeId ? row.id === challengeId : true)) + .filter((row) => (status ? row.status === status : true)) + .slice(0, limit); +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers/agents.ts b/apps/registry/src/server.test/helpers/db/resolvers/agents.ts new file mode 100644 index 0000000..5b19cbd --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/agents.ts @@ -0,0 +1,161 @@ +import { encodeBase64url } from "@clawdentity/protocol"; +import { + extractWhereClause, + hasFilter, + parseWhereEqualityParams, +} from "../parse.js"; +import type { FakeAgentRow, FakeAgentSelectRow, FakeD1Row } from "../types.js"; + +export function createFakePublicKey(agentId: string): string { + const seed = agentId.length > 0 ? 
agentId : "agent"; + const bytes = new Uint8Array(32); + + for (let index = 0; index < bytes.length; index += 1) { + bytes[index] = seed.charCodeAt(index % seed.length) & 0xff; + } + + return encodeBase64url(bytes); +} + +export function getAgentSelectColumnValue( + row: FakeAgentSelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "did") { + return row.did; + } + if (column === "owner_id") { + return row.owner_id; + } + if (column === "owner_did") { + return row.owner_did; + } + if (column === "name") { + return row.name; + } + if (column === "framework") { + return row.framework; + } + if (column === "public_key") { + return row.public_key; + } + if (column === "status") { + return row.status; + } + if (column === "expires_at") { + return row.expires_at; + } + if (column === "current_jti") { + return row.current_jti; + } + if (column === "created_at") { + return row.created_at; + } + if (column === "updated_at") { + return row.updated_at; + } + return undefined; +} + +export function resolveAgentSelectRows(options: { + query: string; + params: unknown[]; + authRows: FakeD1Row[]; + agentRows: FakeAgentRow[]; +}): FakeAgentSelectRow[] { + const normalizedQuery = options.query.toLowerCase(); + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasOwnerFilter = hasFilter(whereClause, "owner_id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasFrameworkFilter = hasFilter(whereClause, "framework"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasDidFilter = hasFilter(whereClause, "did"); + const hasCurrentJtiFilter = hasFilter(whereClause, "current_jti"); + const hasCursorFilter = hasFilter(whereClause, "id", "<"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const requiresHumanJoin = + normalizedQuery.includes('join "humans"') || + 
normalizedQuery.includes("join humans"); + + const ownerId = + hasOwnerFilter && typeof equalityParams.values.owner_id?.[0] === "string" + ? String(equalityParams.values.owner_id?.[0]) + : undefined; + const statusFilter = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status?.[0]) + : undefined; + const frameworkFilter = + hasFrameworkFilter && + typeof equalityParams.values.framework?.[0] === "string" + ? String(equalityParams.values.framework?.[0]) + : undefined; + const idFilter = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id?.[0]) + : undefined; + const didFilter = + hasDidFilter && typeof equalityParams.values.did?.[0] === "string" + ? String(equalityParams.values.did?.[0]) + : undefined; + const currentJtiFilter = hasCurrentJtiFilter + ? (equalityParams.values.current_jti?.[0] as string | null | undefined) + : undefined; + const cursorFilter = hasCursorFilter + ? String(options.params[equalityParams.consumedParams] ?? "") + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.agentRows.length; + + const filteredRows = options.agentRows + .filter((row) => (ownerId ? row.ownerId === ownerId : true)) + .filter((row) => (statusFilter ? row.status === statusFilter : true)) + .filter((row) => + frameworkFilter ? row.framework === frameworkFilter : true, + ) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => (didFilter ? row.did === didFilter : true)) + .filter((row) => + currentJtiFilter !== undefined + ? (row.currentJti ?? null) === currentJtiFilter + : true, + ) + .filter((row) => (cursorFilter ? 
row.id < cursorFilter : true)) + .sort((left, right) => right.id.localeCompare(left.id)) + .map((row) => { + const ownerDid = options.authRows.find( + (authRow) => authRow.humanId === row.ownerId, + )?.humanDid; + + return { + id: row.id, + did: row.did, + owner_id: row.ownerId, + owner_did: ownerDid ?? "", + name: row.name, + framework: row.framework, + public_key: row.publicKey ?? createFakePublicKey(row.id), + status: row.status, + expires_at: row.expiresAt, + current_jti: row.currentJti ?? null, + created_at: row.createdAt ?? "2026-01-01T00:00:00.000Z", + updated_at: row.updatedAt ?? "2026-01-01T00:00:00.000Z", + }; + }) + .filter((row) => (requiresHumanJoin ? row.owner_did.length > 0 : true)) + .slice(0, limit); + + return filteredRows; +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers/api-keys.ts b/apps/registry/src/server.test/helpers/db/resolvers/api-keys.ts new file mode 100644 index 0000000..59a56d6 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/api-keys.ts @@ -0,0 +1,110 @@ +import { + extractWhereClause, + hasFilter, + parseWhereEqualityParams, +} from "../parse.js"; +import type { FakeApiKeyRow, FakeApiKeySelectRow } from "../types.js"; + +export function getApiKeySelectColumnValue( + row: FakeApiKeySelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "human_id") { + return row.human_id; + } + if (column === "key_hash") { + return row.key_hash; + } + if (column === "key_prefix") { + return row.key_prefix; + } + if (column === "name") { + return row.name; + } + if (column === "status") { + return row.status; + } + if (column === "created_at") { + return row.created_at; + } + if (column === "last_used_at") { + return row.last_used_at; + } + return undefined; +} + +export function resolveApiKeySelectRows(options: { + query: string; + params: unknown[]; + apiKeyRows: FakeApiKeyRow[]; +}): FakeApiKeySelectRow[] { + const whereClause = 
extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasHumanIdFilter = hasFilter(whereClause, "human_id"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasStatusFilter = hasFilter(whereClause, "status"); + const hasPrefixFilter = hasFilter(whereClause, "key_prefix"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const orderByCreatedAtDesc = + options.query.toLowerCase().includes("order by") && + options.query.toLowerCase().includes("created_at") && + options.query.toLowerCase().includes("desc"); + + const humanId = + hasHumanIdFilter && typeof equalityParams.values.human_id?.[0] === "string" + ? String(equalityParams.values.human_id[0]) + : undefined; + const id = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const status = + hasStatusFilter && typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const keyPrefix = + hasPrefixFilter && typeof equalityParams.values.key_prefix?.[0] === "string" + ? String(equalityParams.values.key_prefix[0]) + : undefined; + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.apiKeyRows.length; + + const rows = options.apiKeyRows + .filter((row) => (humanId ? row.humanId === humanId : true)) + .filter((row) => (id ? row.id === id : true)) + .filter((row) => (status ? row.status === status : true)) + .filter((row) => (keyPrefix ? 
row.keyPrefix === keyPrefix : true)) + .map((row) => ({ + id: row.id, + human_id: row.humanId, + key_hash: row.keyHash, + key_prefix: row.keyPrefix, + name: row.name, + status: row.status, + created_at: row.createdAt, + last_used_at: row.lastUsedAt, + })); + + if (orderByCreatedAtDesc) { + rows.sort((left, right) => { + const createdAtCompare = right.created_at.localeCompare(left.created_at); + if (createdAtCompare !== 0) { + return createdAtCompare; + } + return right.id.localeCompare(left.id); + }); + } + + return rows.slice(0, limit); +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers/crl.ts b/apps/registry/src/server.test/helpers/db/resolvers/crl.ts new file mode 100644 index 0000000..c7b3e24 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/crl.ts @@ -0,0 +1,65 @@ +import type { + FakeAgentRow, + FakeCrlSelectRow, + FakeRevocationRow, +} from "../types.js"; + +export function getCrlSelectColumnValue( + row: FakeCrlSelectRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "jti") { + return row.jti; + } + if (column === "reason") { + return row.reason; + } + if (column === "revoked_at") { + return row.revoked_at; + } + if (column === "revokedat") { + return row.revoked_at; + } + if (column === "agent_did") { + return row.agent_did; + } + if (column === "agentdid" || column === "did") { + return row.did; + } + return undefined; +} + +export function resolveCrlSelectRows(options: { + agentRows: FakeAgentRow[]; + revocationRows: FakeRevocationRow[]; +}): FakeCrlSelectRow[] { + return options.revocationRows + .map((row) => { + const agent = options.agentRows.find( + (agentRow) => agentRow.id === row.agentId, + ); + if (!agent) { + return null; + } + + return { + id: row.id, + jti: row.jti, + reason: row.reason, + revoked_at: row.revokedAt, + agent_did: agent.did, + did: agent.did, + }; + }) + .filter((row): row is FakeCrlSelectRow => row !== null) + .sort((left, right) => { + const 
timestampCompare = right.revoked_at.localeCompare(left.revoked_at); + if (timestampCompare !== 0) { + return timestampCompare; + } + return right.id.localeCompare(left.id); + }); +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers/humans.ts b/apps/registry/src/server.test/helpers/db/resolvers/humans.ts new file mode 100644 index 0000000..9da8786 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/humans.ts @@ -0,0 +1,74 @@ +import { extractWhereClause, parseWhereEqualityParams } from "../parse.js"; +import type { FakeHumanRow } from "../types.js"; + +export function getHumanSelectColumnValue( + row: FakeHumanRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "did") { + return row.did; + } + if (column === "display_name") { + return row.displayName; + } + if (column === "role") { + return row.role; + } + if (column === "status") { + return row.status; + } + if (column === "created_at") { + return row.createdAt; + } + if (column === "updated_at") { + return row.updatedAt; + } + return undefined; +} + +export function resolveHumanSelectRows(options: { + query: string; + params: unknown[]; + humanRows: FakeHumanRow[]; +}): FakeHumanRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + + const roleFilter = + typeof equalityParams.values.role?.[0] === "string" + ? String(equalityParams.values.role[0]) + : undefined; + const statusFilter = + typeof equalityParams.values.status?.[0] === "string" + ? String(equalityParams.values.status[0]) + : undefined; + const idFilter = + typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const didFilter = + typeof equalityParams.values.did?.[0] === "string" + ? 
String(equalityParams.values.did[0]) + : undefined; + + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.humanRows.length; + + return options.humanRows + .filter((row) => (roleFilter ? row.role === roleFilter : true)) + .filter((row) => (statusFilter ? row.status === statusFilter : true)) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => (didFilter ? row.did === didFilter : true)) + .slice(0, limit); +} diff --git a/apps/registry/src/server.test/helpers/db/resolvers/index.ts b/apps/registry/src/server.test/helpers/db/resolvers/index.ts new file mode 100644 index 0000000..e61917d --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/index.ts @@ -0,0 +1,23 @@ +export { + getAgentAuthSessionSelectColumnValue, + resolveAgentAuthSessionSelectRows, +} from "./agent-auth-sessions.js"; +export { + getAgentRegistrationChallengeSelectColumnValue, + resolveAgentRegistrationChallengeSelectRows, +} from "./agent-registration-challenges.js"; +export { + createFakePublicKey, + getAgentSelectColumnValue, + resolveAgentSelectRows, +} from "./agents.js"; +export { + getApiKeySelectColumnValue, + resolveApiKeySelectRows, +} from "./api-keys.js"; +export { getCrlSelectColumnValue, resolveCrlSelectRows } from "./crl.js"; +export { getHumanSelectColumnValue, resolveHumanSelectRows } from "./humans.js"; +export { + getInviteSelectColumnValue, + resolveInviteSelectRows, +} from "./invites.js"; diff --git a/apps/registry/src/server.test/helpers/db/resolvers/invites.ts b/apps/registry/src/server.test/helpers/db/resolvers/invites.ts new file mode 100644 index 0000000..a2c7385 --- /dev/null +++ b/apps/registry/src/server.test/helpers/db/resolvers/invites.ts @@ -0,0 +1,83 @@ +import { + extractWhereClause, + hasFilter, + parseWhereEqualityParams, +} 
from "../parse.js"; +import type { FakeInviteRow } from "../types.js"; + +export function getInviteSelectColumnValue( + row: FakeInviteRow, + column: string, +): unknown { + if (column === "id") { + return row.id; + } + if (column === "code") { + return row.code; + } + if (column === "created_by") { + return row.createdBy; + } + if (column === "redeemed_by") { + return row.redeemedBy; + } + if (column === "agent_id") { + return row.agentId; + } + if (column === "expires_at") { + return row.expiresAt; + } + if (column === "created_at") { + return row.createdAt; + } + return undefined; +} + +export function resolveInviteSelectRows(options: { + query: string; + params: unknown[]; + inviteRows: FakeInviteRow[]; +}): FakeInviteRow[] { + const whereClause = extractWhereClause(options.query); + const equalityParams = parseWhereEqualityParams({ + whereClause, + params: options.params, + }); + const hasCodeFilter = hasFilter(whereClause, "code"); + const hasIdFilter = hasFilter(whereClause, "id"); + const hasRedeemedByFilter = hasFilter(whereClause, "redeemed_by"); + const hasLimitClause = options.query.toLowerCase().includes(" limit "); + + const codeFilter = + hasCodeFilter && typeof equalityParams.values.code?.[0] === "string" + ? String(equalityParams.values.code[0]) + : undefined; + const idFilter = + hasIdFilter && typeof equalityParams.values.id?.[0] === "string" + ? String(equalityParams.values.id[0]) + : undefined; + const redeemedByFilter = hasRedeemedByFilter + ? (equalityParams.values.redeemed_by?.[0] as string | null | undefined) + : undefined; + + const requiresRedeemedByNull = + whereClause.includes("redeemed_by") && whereClause.includes("is null"); + + const maybeLimit = hasLimitClause + ? Number(options.params[options.params.length - 1]) + : Number.NaN; + const limit = Number.isFinite(maybeLimit) + ? maybeLimit + : options.inviteRows.length; + + return options.inviteRows + .filter((row) => (codeFilter ? 
row.code === codeFilter : true)) + .filter((row) => (idFilter ? row.id === idFilter : true)) + .filter((row) => + redeemedByFilter !== undefined + ? row.redeemedBy === redeemedByFilter + : true, + ) + .filter((row) => (requiresRedeemedByNull ? row.redeemedBy === null : true)) + .slice(0, limit); +} diff --git a/packages/connector/src/client.ts b/packages/connector/src/client.ts index 4f58469..fb59bbd 100644 --- a/packages/connector/src/client.ts +++ b/packages/connector/src/client.ts @@ -7,21 +7,26 @@ import { } from "./client/heartbeat.js"; import { normalizeConnectionHeaders, - readCloseEvent, - readErrorEventReason, - readMessageEventData, readUnexpectedResponseStatus, resolveWebSocketFactory, sanitizeErrorReason, toOpenclawHookUrl, - WS_READY_STATE_CONNECTING, } from "./client/helpers.js"; -import { handleIncomingConnectorMessage } from "./client/inbound.js"; -import { handleInboundDeliverFrame } from "./client/inbound-delivery.js"; +import { routeConnectorInboundMessage } from "./client/inbound-router.js"; import { ConnectorClientMetricsTracker } from "./client/metrics.js"; +import { + ensureConnectorOutboundQueueLoaded, + flushConnectorOutboundQueue, + sendConnectorFrame, +} from "./client/outbound-flush.js"; import { ConnectorOutboundQueueManager } from "./client/queue.js"; import { ConnectorReconnectScheduler } from "./client/reconnect-scheduler.js"; import { attachConnectorSocketEventListeners } from "./client/socket-events.js"; +import { + closeConnectorSocketQuietly, + createConnectorSocketEventHandlers, + resolveConnectorConnectionHeaders, +} from "./client/socket-session.js"; import type { ConnectorClientHooks, ConnectorClientMetricsSnapshot, @@ -53,9 +58,7 @@ import { type DeliverFrame, type EnqueueFrame, enqueueFrameSchema, - type HeartbeatAckFrame, type HeartbeatFrame, - serializeFrame, } from "./frames.js"; export type { @@ -250,7 +253,12 @@ export class ConnectorClient { if (this.socket !== undefined) { const socket = this.socket; this.socket = 
undefined; - this.closeSocketQuietly(socket, 1000, "client disconnect"); + closeConnectorSocketQuietly({ + socket, + code: 1000, + reason: "client disconnect", + logger: this.logger, + }); } } @@ -296,18 +304,18 @@ export class ConnectorClient { } let connectionHeaders = this.connectionHeaders; - if (this.connectionHeadersProvider) { - try { - connectionHeaders = normalizeConnectionHeaders( - await this.connectionHeadersProvider(), - ); - } catch (error) { - this.logger.warn("connector.websocket.create_failed", { - reason: sanitizeErrorReason(error), - }); + if (this.connectionHeadersProvider !== undefined) { + const resolvedHeaders = await resolveConnectorConnectionHeaders({ + baseHeaders: this.connectionHeaders, + provider: this.connectionHeadersProvider, + logger: this.logger, + }); + if (resolvedHeaders === undefined) { this.scheduleReconnect(); return; } + + connectionHeaders = resolvedHeaders; } if (!this.started) { @@ -327,96 +335,49 @@ export class ConnectorClient { const socket = this.socket; this.startConnectTimeout(socket); - attachConnectorSocketEventListeners(socket, { - onOpen: () => { - if (this.socket !== socket) { - return; - } - + const socketHandlers = createConnectorSocketEventHandlers({ + socket, + connectorUrl: this.connectorUrl, + hooks: this.hooks, + logger: this.logger, + metricsTracker: this.metricsTracker, + reconnectScheduler: this.reconnectScheduler, + clearConnectTimeout: () => { this.clearConnectTimeout(); - this.reconnectScheduler.resetAttempts(); - this.authUpgradeImmediateRetryUsed = false; - this.metricsTracker.onSocketConnected(this.makeTimestamp()); - this.logger.info("connector.websocket.connected", { - url: this.connectorUrl, - }); - this.startHeartbeatInterval(); - this.flushOutboundQueue(); - this.hooks.onConnected?.(); }, - onMessage: (event) => { - if (this.socket !== socket) { - return; - } - - void this.handleIncomingMessage(readMessageEventData(event)); + startHeartbeatInterval: () => { + this.startHeartbeatInterval(); 
}, - onClose: (event) => { - if (!this.detachSocket(socket)) { - return; - } - - const closeEvent = readCloseEvent(event); - - this.logger.warn("connector.websocket.closed", { - closeCode: closeEvent.code, - reason: closeEvent.reason, - wasClean: closeEvent.wasClean, - }); - - this.hooks.onDisconnected?.({ - code: closeEvent.code, - reason: closeEvent.reason, - wasClean: closeEvent.wasClean, - }); - - if (this.started) { - this.scheduleReconnect(); - } + flushOutboundQueue: () => { + this.flushOutboundQueue(); }, - onError: (event) => { - if (this.socket !== socket) { - return; - } - - const readyState = socket.readyState; - const shouldForceReconnect = - readyState !== WS_READY_STATE_OPEN && - readyState !== WS_READY_STATE_CONNECTING; - if (!shouldForceReconnect) { - this.logger.warn("connector.websocket.error", { - url: this.connectorUrl, - reason: readErrorEventReason(event), - readyState, - }); - return; - } - - if (!this.detachSocket(socket)) { - return; - } - - const reason = readErrorEventReason(event); - this.logger.warn("connector.websocket.error", { - url: this.connectorUrl, - reason, - }); - this.closeSocketQuietly(socket, 1011, "websocket error"); - - this.hooks.onDisconnected?.({ - code: 1006, + isCurrentSocket: (candidate) => this.socket === candidate, + detachSocket: (candidate) => this.detachSocket(candidate), + closeSocketQuietly: (candidate, code, reason) => { + closeConnectorSocketQuietly({ + socket: candidate, + code, reason, - wasClean: false, + logger: this.logger, }); - - if (this.started) { - this.scheduleReconnect(); - } }, - onUnexpectedResponse: (event) => { - void this.handleUnexpectedResponse(socket, event); + onIncomingMessage: async (rawFrame) => { + await this.handleIncomingMessage(rawFrame); + }, + onUnexpectedResponse: async (candidate, event) => { + await this.handleUnexpectedResponse(candidate, event); + }, + isStarted: () => this.started, + scheduleReconnect: (options) => { + this.scheduleReconnect(options); + }, + 
makeTimestamp: () => this.makeTimestamp(), + onConnected: () => { + this.authUpgradeImmediateRetryUsed = false; }, }); + + attachConnectorSocketEventListeners(socket, socketHandlers); } private scheduleReconnect(options?: { @@ -446,7 +407,12 @@ export class ConnectorClient { timeoutMs: this.connectTimeoutMs, url: this.connectorUrl, }); - this.closeSocketQuietly(socket, 1000, "connect timeout"); + closeConnectorSocketQuietly({ + socket, + code: 1000, + reason: "connect timeout", + logger: this.logger, + }); this.hooks.onDisconnected?.({ code: 1006, reason: "WebSocket connect timed out", @@ -482,20 +448,6 @@ export class ConnectorClient { return true; } - private closeSocketQuietly( - socket: ConnectorWebSocket, - code?: number, - reason?: string, - ): void { - try { - socket.close(code, reason); - } catch (error) { - this.logger.warn("connector.websocket.close_failed", { - reason: sanitizeErrorReason(error), - }); - } - } - private async handleUnexpectedResponse( socket: ConnectorWebSocket, event: unknown, @@ -526,7 +478,12 @@ export class ConnectorClient { immediateRetry, url: this.connectorUrl, }); - this.closeSocketQuietly(socket, 1000, reason); + closeConnectorSocketQuietly({ + socket, + code: 1000, + reason, + logger: this.logger, + }); this.hooks.onDisconnected?.({ code: 1006, reason, @@ -584,7 +541,12 @@ export class ConnectorClient { oldestPendingAgeMs: event.oldestPendingAgeMs, timeoutMs: event.timeoutMs, }); - this.closeSocketQuietly(socket, 1000, "heartbeat ack timeout"); + closeConnectorSocketQuietly({ + socket, + code: 1000, + reason: "heartbeat ack timeout", + logger: this.logger, + }); this.hooks.onDisconnected?.({ code: 1006, reason: "Heartbeat acknowledgement timed out", @@ -597,81 +559,41 @@ export class ConnectorClient { } private flushOutboundQueue(): void { - this.outboundQueue.flush({ + flushConnectorOutboundQueue({ + queue: this.outboundQueue, isConnected: () => this.isConnected(), sendFrame: (frame) => this.sendFrame(frame), }); } private 
async ensureOutboundQueueLoaded(): Promise { - await this.outboundQueue.ensureLoaded(); - this.flushOutboundQueue(); + await ensureConnectorOutboundQueueLoaded({ + queue: this.outboundQueue, + flush: () => this.flushOutboundQueue(), + }); } private sendFrame(frame: ConnectorFrame): boolean { - const socket = this.socket; - if (socket === undefined || socket.readyState !== WS_READY_STATE_OPEN) { - return false; - } - - const payload = serializeFrame(frame); - - try { - socket.send(payload); - return true; - } catch (error) { - this.logger.warn("connector.websocket.send_failed", { - frameType: frame.type, - reason: sanitizeErrorReason(error), - }); - return false; - } + return sendConnectorFrame({ + socket: this.socket, + frame, + logger: this.logger, + }); } private async handleIncomingMessage(rawFrame: unknown): Promise { - await handleIncomingConnectorMessage({ + await routeConnectorInboundMessage({ rawFrame, logger: this.logger, - handlers: { - onFrame: this.hooks.onFrame, - onHeartbeatFrame: (frame) => { - this.handleHeartbeatFrame(frame); - }, - onHeartbeatAckFrame: (frame) => { - this.heartbeatManager.handleHeartbeatAck(frame); - }, - onDeliverFrame: async (frame) => { - await this.handleDeliverFrame(frame); - }, - }, - }); - } - - private handleHeartbeatFrame(frame: HeartbeatFrame): void { - const ackFrame: HeartbeatAckFrame = { - v: CONNECTOR_FRAME_VERSION, - type: "heartbeat_ack", - id: this.makeFrameId(), - ts: this.makeTimestamp(), - ackId: frame.id, - }; - - this.sendFrame(ackFrame); - } - - private async handleDeliverFrame(frame: DeliverFrame): Promise { - await handleInboundDeliverFrame({ - frame, + hooks: this.hooks, + heartbeatManager: this.heartbeatManager, inboundDeliverHandler: this.inboundDeliverHandler, localOpenclawDelivery: this.localOpenclawDelivery, isStarted: () => this.started, - hooks: this.hooks, - now: this.now, makeFrameId: () => this.makeFrameId(), makeTimestamp: () => this.makeTimestamp(), - sendDeliverAckFrame: (ackFrame) => { - 
this.sendFrame(ackFrame); - }, + now: this.now, + sendFrame: (frame) => this.sendFrame(frame), recordAckLatency: (durationMs) => { this.metricsTracker.recordInboundDeliveryAckLatency(durationMs); }, diff --git a/packages/connector/src/client/AGENTS.md b/packages/connector/src/client/AGENTS.md index 7f7942d..36fa671 100644 --- a/packages/connector/src/client/AGENTS.md +++ b/packages/connector/src/client/AGENTS.md @@ -7,15 +7,27 @@ - Keep `client.ts` as orchestration for public API methods (`connect`, `disconnect`, `enqueueOutbound`) and high-level flow only. - Keep reconnect timer/attempt scheduling logic in `reconnect-scheduler.ts`. - Keep websocket listener registration wiring in `socket-events.ts`. +- Keep socket lifecycle event-callback composition in `socket-session.ts`. - Keep frame/event parsing and sanitization in `helpers.ts` as pure functions. - Keep inbound frame parsing + frame-type dispatch in `inbound.ts` so `client.ts` only wires handlers. +- Keep inbound dispatch wiring in `inbound-router.ts` so heartbeat ack + deliver routing stay out of `client.ts`. - Keep connector transport/inbound delivery metrics state in `metrics.ts` to avoid duplicating counters in `client.ts`. - Keep reconnect delay math in `retry.ts` and avoid inline backoff duplication. - Keep heartbeat tracking and metrics centralized in `heartbeat.ts`. - Keep outbound queue persistence and load/flush semantics centralized in `queue.ts`. +- Keep outbound send/flush orchestration helpers in `outbound-flush.ts`. - Keep local OpenClaw delivery/retry behavior in `delivery.ts` and inbound ack orchestration in `inbound-delivery.ts`. - Design additional helper modules with narrow interfaces: - `lifecycle.ts` should orchestrate `connect`/`disconnect`, queue hydration, heartbeat lifecycle, and hook invocation while exposing start/stop/attached-state APIs invoked by `ConnectorClient`. 
- `socket-events.ts` should register WebSocket listeners (`open`, `message`, `close`, `error`, `unexpected-response`) via dependency-injected callbacks (logger, hooks, heartbeat manager, reconnect scheduler) so event handling remains testable. - `reconnect.ts` should own reconnection timers/backoff (`schedule`, `clear`) using injected timing/random utilities plus a pluggable callback instead of inline timeout tracking inside `client.ts`. - Each helper module must accept only the dependencies it truly needs (e.g., logger, metrics tracker, heartbeat/reconnect interfaces, hooks) so wiring in `ConnectorClient` stays declarative and easy to mock. + +## SRP guidance +- When refactoring `client.ts`, keep `ConnectorClient` as the stable public surface while slicing out targeted helpers that do one thing well (lifecycle, socket session, delivery, routing, metrics). Document the new helper in this AGENTS.md so others know what each file owns. +- Potential helper candidates to extract along this path: + - `lifecycle.ts` (start/stop state, queue hydration, heartbeat lifecycle, reconnect scheduling + hook invocation). + - `socket-session.ts` (WebSocket dial/close/send, connect timeout, attach/detach guard, injected event callbacks for open/message/close/error/unexpected-response, metrics hooks). + - `outbound-flush.ts` (queue flush orchestration and serialization assistance so `ConnectorClient` no longer reaches directly into `queue.ts`). + - `inbound-router.ts` (handles raw message parsing, routes heartbeat/deliver frames to heartbeat manager/handlers, and records metrics before handing off to `handleInboundDeliverFrame`). +- `delivery.ts` and `inbound-delivery.ts` stay responsible for OpenClaw delivery + ack orchestration and should expose injectable hooks for testing retries/timeout logic. 
diff --git a/packages/connector/src/client/inbound-router.ts b/packages/connector/src/client/inbound-router.ts new file mode 100644 index 0000000..e8d7666 --- /dev/null +++ b/packages/connector/src/client/inbound-router.ts @@ -0,0 +1,67 @@ +import type { Logger } from "@clawdentity/sdk"; +import { CONNECTOR_FRAME_VERSION } from "../constants.js"; +import type { + ConnectorFrame, + DeliverFrame, + HeartbeatAckFrame, +} from "../frames.js"; +import type { LocalOpenclawDeliveryClient } from "./delivery.js"; +import type { ConnectorHeartbeatManager } from "./heartbeat.js"; +import { handleIncomingConnectorMessage } from "./inbound.js"; +import { handleInboundDeliverFrame } from "./inbound-delivery.js"; +import type { ConnectorClientHooks } from "./types.js"; + +export async function routeConnectorInboundMessage(options: { + rawFrame: unknown; + logger: Logger; + hooks: ConnectorClientHooks; + heartbeatManager: ConnectorHeartbeatManager; + inboundDeliverHandler: + | ((frame: DeliverFrame) => Promise<{ accepted: boolean; reason?: string }>) + | undefined; + localOpenclawDelivery: LocalOpenclawDeliveryClient; + isStarted: () => boolean; + makeFrameId: () => string; + makeTimestamp: () => string; + now: () => number; + sendFrame: (frame: ConnectorFrame) => boolean; + recordAckLatency: (durationMs: number) => void; +}): Promise { + await handleIncomingConnectorMessage({ + rawFrame: options.rawFrame, + logger: options.logger, + handlers: { + onFrame: options.hooks.onFrame, + onHeartbeatFrame: (frame) => { + const ackFrame: HeartbeatAckFrame = { + v: CONNECTOR_FRAME_VERSION, + type: "heartbeat_ack", + id: options.makeFrameId(), + ts: options.makeTimestamp(), + ackId: frame.id, + }; + + options.sendFrame(ackFrame); + }, + onHeartbeatAckFrame: (frame) => { + options.heartbeatManager.handleHeartbeatAck(frame); + }, + onDeliverFrame: async (frame: DeliverFrame) => { + await handleInboundDeliverFrame({ + frame, + inboundDeliverHandler: options.inboundDeliverHandler, + 
localOpenclawDelivery: options.localOpenclawDelivery, + isStarted: options.isStarted, + hooks: options.hooks, + now: options.now, + makeFrameId: options.makeFrameId, + makeTimestamp: options.makeTimestamp, + sendDeliverAckFrame: (ackFrame) => { + options.sendFrame(ackFrame); + }, + recordAckLatency: options.recordAckLatency, + }); + }, + }, + }); +} diff --git a/packages/connector/src/client/outbound-flush.ts b/packages/connector/src/client/outbound-flush.ts new file mode 100644 index 0000000..ab38ba8 --- /dev/null +++ b/packages/connector/src/client/outbound-flush.ts @@ -0,0 +1,52 @@ +import type { Logger } from "@clawdentity/sdk"; +import { WS_READY_STATE_OPEN } from "../constants.js"; +import type { ConnectorFrame, EnqueueFrame } from "../frames.js"; +import { serializeFrame } from "../frames.js"; +import { sanitizeErrorReason } from "./helpers.js"; +import type { ConnectorOutboundQueueManager } from "./queue.js"; +import type { ConnectorWebSocket } from "./types.js"; + +export function sendConnectorFrame(input: { + socket: ConnectorWebSocket | undefined; + frame: ConnectorFrame; + logger: Logger; +}): boolean { + if ( + input.socket === undefined || + input.socket.readyState !== WS_READY_STATE_OPEN + ) { + return false; + } + + const payload = serializeFrame(input.frame); + + try { + input.socket.send(payload); + return true; + } catch (error) { + input.logger.warn("connector.websocket.send_failed", { + frameType: input.frame.type, + reason: sanitizeErrorReason(error), + }); + return false; + } +} + +export function flushConnectorOutboundQueue(input: { + queue: ConnectorOutboundQueueManager; + isConnected: () => boolean; + sendFrame: (frame: EnqueueFrame) => boolean; +}): void { + input.queue.flush({ + isConnected: input.isConnected, + sendFrame: input.sendFrame, + }); +} + +export async function ensureConnectorOutboundQueueLoaded(input: { + queue: ConnectorOutboundQueueManager; + flush: () => void; +}): Promise { + await input.queue.ensureLoaded(); + 
input.flush(); +} diff --git a/packages/connector/src/client/socket-session.ts b/packages/connector/src/client/socket-session.ts new file mode 100644 index 0000000..101b787 --- /dev/null +++ b/packages/connector/src/client/socket-session.ts @@ -0,0 +1,173 @@ +import type { Logger } from "@clawdentity/sdk"; +import { WS_READY_STATE_OPEN } from "../constants.js"; +import { + normalizeConnectionHeaders, + readCloseEvent, + readErrorEventReason, + readMessageEventData, + sanitizeErrorReason, + WS_READY_STATE_CONNECTING, +} from "./helpers.js"; +import type { ConnectorClientMetricsTracker } from "./metrics.js"; +import type { ConnectorReconnectScheduler } from "./reconnect-scheduler.js"; +import type { ConnectorClientHooks, ConnectorWebSocket } from "./types.js"; + +export async function resolveConnectorConnectionHeaders(input: { + baseHeaders: Record; + provider: + | (() => Record | Promise>) + | undefined; + logger: Logger; +}): Promise | undefined> { + if (input.provider === undefined) { + return input.baseHeaders; + } + + try { + return normalizeConnectionHeaders(await input.provider()); + } catch (error) { + input.logger.warn("connector.websocket.create_failed", { + reason: sanitizeErrorReason(error), + }); + return undefined; + } +} + +export function closeConnectorSocketQuietly(input: { + socket: ConnectorWebSocket; + logger: Logger; + code?: number; + reason?: string; +}): void { + try { + input.socket.close(input.code, input.reason); + } catch (error) { + input.logger.warn("connector.websocket.close_failed", { + reason: sanitizeErrorReason(error), + }); + } +} + +export function createConnectorSocketEventHandlers(input: { + socket: ConnectorWebSocket; + connectorUrl: string; + hooks: ConnectorClientHooks; + logger: Logger; + metricsTracker: ConnectorClientMetricsTracker; + reconnectScheduler: ConnectorReconnectScheduler; + clearConnectTimeout: () => void; + startHeartbeatInterval: () => void; + flushOutboundQueue: () => void; + isCurrentSocket: (socket: 
ConnectorWebSocket) => boolean; + detachSocket: (socket: ConnectorWebSocket) => boolean; + closeSocketQuietly: ( + socket: ConnectorWebSocket, + code?: number, + reason?: string, + ) => void; + onIncomingMessage: (rawFrame: unknown) => Promise; + onUnexpectedResponse: ( + socket: ConnectorWebSocket, + event: unknown, + ) => Promise; + isStarted: () => boolean; + scheduleReconnect: (options?: { + delayMs?: number; + incrementAttempt?: boolean; + }) => void; + makeTimestamp: () => string; + onConnected: () => void; +}) { + const socket = input.socket; + + return { + onOpen: () => { + if (!input.isCurrentSocket(socket)) { + return; + } + + input.clearConnectTimeout(); + input.reconnectScheduler.resetAttempts(); + input.metricsTracker.onSocketConnected(input.makeTimestamp()); + input.logger.info("connector.websocket.connected", { + url: input.connectorUrl, + }); + input.startHeartbeatInterval(); + input.flushOutboundQueue(); + input.hooks.onConnected?.(); + input.onConnected(); + }, + onMessage: (event: unknown) => { + if (!input.isCurrentSocket(socket)) { + return; + } + + void input.onIncomingMessage(readMessageEventData(event)); + }, + onClose: (event: unknown) => { + if (!input.detachSocket(socket)) { + return; + } + + const closeEvent = readCloseEvent(event); + + input.logger.warn("connector.websocket.closed", { + closeCode: closeEvent.code, + reason: closeEvent.reason, + wasClean: closeEvent.wasClean, + }); + + input.hooks.onDisconnected?.({ + code: closeEvent.code, + reason: closeEvent.reason, + wasClean: closeEvent.wasClean, + }); + + if (input.isStarted()) { + input.scheduleReconnect(); + } + }, + onError: (event: unknown) => { + if (!input.isCurrentSocket(socket)) { + return; + } + + const readyState = socket.readyState; + const shouldForceReconnect = + readyState !== WS_READY_STATE_OPEN && + readyState !== WS_READY_STATE_CONNECTING; + if (!shouldForceReconnect) { + input.logger.warn("connector.websocket.error", { + url: input.connectorUrl, + reason: 
readErrorEventReason(event), + readyState, + }); + return; + } + + if (!input.detachSocket(socket)) { + return; + } + + const reason = readErrorEventReason(event); + input.logger.warn("connector.websocket.error", { + url: input.connectorUrl, + reason, + }); + input.closeSocketQuietly(socket, 1011, "websocket error"); + + input.hooks.onDisconnected?.({ + code: 1006, + reason, + wasClean: false, + }); + + if (input.isStarted()) { + input.scheduleReconnect(); + } + }, + onUnexpectedResponse: (event: unknown) => { + void input.onUnexpectedResponse(socket, event); + }, + }; +} From 08b638f4b9b1ff4f32ff3417e7206bf502cccc43 Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sat, 21 Feb 2026 19:15:04 +0530 Subject: [PATCH 150/190] Add PROTOCOL.md and Internet-Draft (draft-vemula-clawdentity-protocol-00) - PROTOCOL.md: Human-readable protocol specification - XML source (xml2rfc v3): draft-vemula-clawdentity-protocol-00.xml - Generated text output: draft-vemula-clawdentity-protocol-00.txt - Generated HTML output: draft-vemula-clawdentity-protocol-00.html Covers: DID format, AIT (JWT), Ed25519 PoP signing, pairing ceremony, WebSocket relay frames, CRL revocation, trust store, error codes, IANA considerations. References: RFC 2119, RFC 4648, RFC 6234, RFC 6455, RFC 7515, RFC 7519, RFC 7800, RFC 8032, RFC 8037, RFC 8174, RFC 8446, RFC 9110, RFC 9449, W3C DID Core, ULID spec. 
--- PROTOCOL.md | 970 ++++++ draft-vemula-clawdentity-protocol-00.html | 3707 +++++++++++++++++++++ draft-vemula-clawdentity-protocol-00.txt | 1736 ++++++++++ draft-vemula-clawdentity-protocol-00.xml | 1494 +++++++++ 4 files changed, 7907 insertions(+) create mode 100644 PROTOCOL.md create mode 100644 draft-vemula-clawdentity-protocol-00.html create mode 100644 draft-vemula-clawdentity-protocol-00.txt create mode 100644 draft-vemula-clawdentity-protocol-00.xml diff --git a/PROTOCOL.md b/PROTOCOL.md new file mode 100644 index 0000000..e7be299 --- /dev/null +++ b/PROTOCOL.md @@ -0,0 +1,970 @@ +# Clawdentity Protocol Specification + +**Version:** 0.1.0-draft +**Status:** Draft +**Authors:** Ravi Kiran Vemula +**Date:** 2026-02-21 +**License:** MIT + +--- + +## Abstract + +Clawdentity defines a cryptographic identity and trust protocol for AI agent-to-agent communication. It enables agents to prove their identity, verify peers, establish mutual trust, and exchange messages through authenticated relay infrastructure — without exposing private keys, shared tokens, or backend services. + +This document specifies the protocol's identity model, authentication mechanisms, message signing, relay transport, trust establishment, and revocation system. + +--- + +## Table of Contents + +1. [Introduction](#1-introduction) +2. [Terminology](#2-terminology) +3. [Identity Model](#3-identity-model) +4. [Agent Identity Token (AIT)](#4-agent-identity-token-ait) +5. [HTTP Request Signing](#5-http-request-signing) +6. [Authentication Flow](#6-authentication-flow) +7. [Trust Establishment (Pairing)](#7-trust-establishment-pairing) +8. [Relay Transport](#8-relay-transport) +9. [Certificate Revocation](#9-certificate-revocation) +10. [Security Considerations](#10-security-considerations) +11. [Wire Formats](#11-wire-formats) +12. [Endpoints](#12-endpoints) +13. [Error Codes](#13-error-codes) +14. [IANA Considerations](#14-iana-considerations) +15. [References](#15-references) + +--- + +## 1. 
Introduction + +### 1.1 Problem Statement + +Current AI agent frameworks rely on shared bearer tokens for inter-agent communication. This creates several problems: + +- A single token leak compromises all agents +- No way to distinguish which agent sent a request +- Revoking one agent requires rotating the token for all +- No per-agent access control or rate limiting +- Backend services must be publicly exposed + +### 1.2 Design Goals + +Clawdentity addresses these problems with the following design goals: + +1. **Individual identity** — Each agent has a unique cryptographic identity +2. **Proof of possession** — Every request proves the sender holds the private key +3. **Selective revocation** — One agent can be revoked without affecting others +4. **Zero-trust relay** — Agents communicate through authenticated proxies; backends stay private +5. **Human-anchored trust** — Trust originates from human approval, not agent self-certification +6. **Framework agnostic** — Works with any AI agent framework (OpenClaw, LangChain, CrewAI, etc.) + +### 1.3 Architecture Overview + +``` +┌─────────────┐ ┌─────────────┐ ┌──────────────┐ +│ Agent A │ │ Registry │ │ Agent B │ +│ (private) │ │ (central) │ │ (private) │ +└──────┬───────┘ └──────┬──────┘ └──────┬───────┘ + │ │ │ +┌──────┴───────┐ │ ┌───────┴──────┐ +│ Connector A │ │ │ Connector B │ +│ (local) │ │ │ (local) │ +└──────┬───────┘ │ └───────┬──────┘ + │ WebSocket │ │ WebSocket +┌──────┴───────┐ │ ┌───────┴──────┐ +│ Proxy A │◄───────────────┤────────────────│ Proxy B │ +│ (edge) │ │ │ (edge) │ +└──────────────┘ │ └──────────────┘ + │ + ┌───────────┴───────────┐ + │ .well-known/keys │ + │ /v1/crl │ + │ /v1/agents │ + └───────────────────────┘ +``` + +**Components:** + +- **Registry** — Central identity authority. Issues AITs, manages keys, publishes CRL. +- **Proxy** — Per-owner edge service. Verifies identity, enforces trust policy, relays messages. +- **Connector** — Local bridge between proxy and agent framework. 
Never exposed publicly. +- **Agent** — The AI agent itself. Has no knowledge of the protocol; the connector handles everything. + +--- + +## 2. Terminology + +| Term | Definition | +|------|-----------| +| **AIT** | Agent Identity Token. A JWT credential binding an agent DID to a public key. | +| **CRL** | Certificate Revocation List. A signed list of revoked AITs. | +| **DID** | Decentralized Identifier. A URI identifying a human or agent. | +| **Connector** | Local process that bridges the proxy relay to the agent framework. | +| **Proxy** | Edge service that authenticates requests and relays messages. | +| **Registry** | Central authority that issues identities and publishes signing keys. | +| **PoP** | Proof of Possession. A signature proving the sender holds the private key. | +| **Pairing** | Mutual trust establishment between two agents via ticket exchange. | +| **Trust Store** | Per-proxy database of known agents and approved pairs. | +| **ULID** | Universally Unique Lexicographically Sortable Identifier. | + +**Key words:** "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHOULD", "MAY" are used as defined in [RFC 2119]. + +--- + +## 3. Identity Model + +### 3.1 DID Format + +Clawdentity uses a custom DID method: `did:claw`. + +``` +did:claw:: +``` + +**Kinds:** + +| Kind | Description | Example | +|------|-------------|---------| +| `human` | A human owner/operator | `did:claw:human:01HF7YAT00W6W7CM7N3W5FDXT4` | +| `agent` | An AI agent | `did:claw:agent:01HG8ZBU11X7X8DN8O4X6GEYU5` | + +The `` component MUST be a valid ULID as defined in the [ULID specification](https://github.com/ulid/spec). 
+ +### 3.2 Cryptographic Primitives + +| Primitive | Algorithm | Key Size | Usage | +|-----------|-----------|----------|-------| +| Signing keypair | Ed25519 | 32 bytes (public), 64 bytes (secret) | Agent identity, request signing | +| Body hashing | SHA-256 | 256 bits | Request body integrity | +| Token format | JWT (JWS Compact) | Variable | AIT and CRL tokens | +| Encoding | Base64url (no padding) | Variable | Keys, signatures, hashes | + +Ed25519 (RFC 8032) is the REQUIRED signing algorithm. Implementations MUST NOT support other curves. + +### 3.3 Key Generation + +Each agent generates a local Ed25519 keypair: + +``` +secretKey: 64 bytes (Ed25519 secret key) +publicKey: 32 bytes (Ed25519 public key) +``` + +The secret key MUST be stored locally and MUST NOT be transmitted. Only the public key is registered with the registry. + +### 3.4 Ownership Model + +Every agent DID is bound to exactly one human DID (the `ownerDid`). This binding is recorded in the AIT and enforced by the registry. + +``` +Human (did:claw:human:...) + └── Agent A (did:claw:agent:...) + └── Agent B (did:claw:agent:...) + └── Agent C (did:claw:agent:...) +``` + +A human MAY own multiple agents. An agent MUST have exactly one owner. + +--- + +## 4. Agent Identity Token (AIT) + +### 4.1 Overview + +The AIT is a JWT that serves as an agent's passport. It is issued by the registry and binds the agent's DID to its public key via a confirmation claim (`cnf`). 
+ +### 4.2 Token Format + +**JOSE Header:** + +```json +{ + "alg": "EdDSA", + "typ": "AIT", + "kid": "" +} +``` + +| Field | Required | Description | +|-------|----------|-------------| +| `alg` | REQUIRED | MUST be `"EdDSA"` | +| `typ` | REQUIRED | MUST be `"AIT"` | +| `kid` | REQUIRED | Key ID of the registry signing key used | + +**Claims:** + +```json +{ + "iss": "https://registry.clawdentity.com", + "sub": "did:claw:agent:01HG8ZBU11X7X8DN8O4X6GEYU5", + "ownerDid": "did:claw:human:01HF7YAT00W6W7CM7N3W5FDXT4", + "name": "kai", + "framework": "openclaw", + "description": "Ravi's personal AI assistant", + "cnf": { + "jwk": { + "kty": "OKP", + "crv": "Ed25519", + "x": "" + } + }, + "iat": 1708531200, + "nbf": 1708531200, + "exp": 1711209600, + "jti": "01HG8ZBU11X7X8DN8O4X6GEYU5" +} +``` + +| Claim | Type | Required | Description | +|-------|------|----------|-------------| +| `iss` | string | REQUIRED | Registry issuer URL | +| `sub` | string | REQUIRED | Agent DID (`did:claw:agent:`) | +| `ownerDid` | string | REQUIRED | Owner human DID (`did:claw:human:`) | +| `name` | string | REQUIRED | Agent name. 1-64 chars, `[A-Za-z0-9._ -]` | +| `framework` | string | REQUIRED | Agent framework identifier, 1-32 chars | +| `description` | string | OPTIONAL | Human-readable description, max 280 chars | +| `cnf` | object | REQUIRED | Confirmation claim containing the agent's public key | +| `cnf.jwk.kty` | string | REQUIRED | MUST be `"OKP"` | +| `cnf.jwk.crv` | string | REQUIRED | MUST be `"Ed25519"` | +| `cnf.jwk.x` | string | REQUIRED | Base64url-encoded 32-byte Ed25519 public key | +| `iat` | number | REQUIRED | Issued-at timestamp (Unix seconds) | +| `nbf` | number | REQUIRED | Not-before timestamp (Unix seconds) | +| `exp` | number | REQUIRED | Expiration timestamp (Unix seconds). MUST be > `nbf` and > `iat` | +| `jti` | string | REQUIRED | Unique token ID (ULID) | + +### 4.3 Validation Rules + +An AIT MUST be rejected if: + +1. `alg` is not `EdDSA` +2. 
`typ` is not `AIT` +3. `kid` does not match any active registry signing key +4. JWT signature verification fails +5. `sub` is not a valid agent DID +6. `ownerDid` is not a valid human DID +7. `cnf.jwk.x` does not decode to exactly 32 bytes +8. `exp <= nbf` or `exp <= iat` +9. `jti` is not a valid ULID +10. Current time is outside `[nbf, exp]` window +11. `jti` appears in the current CRL + +--- + +## 5. HTTP Request Signing + +### 5.1 Purpose + +Every authenticated request includes a Proof of Possession (PoP) signature that proves the sender holds the private key corresponding to the public key in their AIT's `cnf` claim. + +### 5.2 Canonical Request Format + +The canonical request string is constructed by joining the following fields with newline (`\n`) separators: + +``` +CLAW-PROOF-V1 + + + + + +``` + +| Field | Description | +|-------|-------------| +| Version | Literal string `CLAW-PROOF-V1` | +| Method | HTTP method, uppercased (e.g., `POST`) | +| Path with query | Request path including query string (e.g., `/hooks/agent?foo=bar`) | +| Timestamp | Unix epoch seconds as a string | +| Nonce | Unique per-request value (ULID recommended) | +| Body hash | SHA-256 hash of the request body, base64url-encoded | + +### 5.3 Signature Computation + +``` +canonical_string = canonicalize(method, path, timestamp, nonce, body_hash) +signature = Ed25519.sign(UTF8(canonical_string), secret_key) +proof = base64url(signature) +``` + +### 5.4 Request Headers + +| Header | Required | Description | +|--------|----------|-------------| +| `Authorization` | REQUIRED | `Claw ` | +| `X-Claw-Timestamp` | REQUIRED | Unix epoch seconds | +| `X-Claw-Nonce` | REQUIRED | Unique request nonce | +| `X-Claw-Body-SHA256` | REQUIRED | SHA-256 hash of body (base64url) | +| `X-Claw-Proof` | REQUIRED | Ed25519 signature of canonical request (base64url) | +| `X-Claw-Agent-Access` | CONDITIONAL | Session access token (required for relay/hook routes) | + +### 5.5 Verification + +The verifier MUST: + 
+1. Extract the AIT from the `Authorization: Claw ` header +2. Verify the AIT signature against the registry's signing keys (Section 4.3) +3. Extract the public key from `cnf.jwk.x` +4. Reconstruct the canonical request from the received headers and body +5. Recompute the body hash and compare with `X-Claw-Body-SHA256` +6. Verify the `X-Claw-Proof` signature using the agent's public key +7. Check `X-Claw-Timestamp` is within the allowed skew window (default: 300 seconds) +8. Check `X-Claw-Nonce` has not been seen before (per agent, within the timestamp window) +9. Check the AIT's `jti` is not on the CRL + +--- + +## 6. Authentication Flow + +### 6.1 Agent Registration + +``` +Agent Registry + │ │ + │ 1. POST /v1/agents/challenge + │ ────────────────────────────► + │ │ + │ 2. { challengeId, nonce } │ + │ ◄──────────────────────────── + │ │ + │ 3. Sign registration proof │ + │ (see 6.2) │ + │ │ + │ 4. POST /v1/agents │ + │ { proof, publicKey, name } │ + │ ────────────────────────────► + │ │ + │ 5. { agentDid, ait } │ + │ ◄──────────────────────────── +``` + +### 6.2 Registration Proof + +The registration proof is a signed message demonstrating key ownership during registration: + +``` +clawdentity.register.v1 +challengeId: +nonce: +ownerDid: +publicKey: +name: +framework: +ttlDays: +``` + +The agent signs this canonical message with its Ed25519 private key and submits the signature with the registration request. + +### 6.3 AIT Refresh + +AITs have a bounded lifetime (`exp`). Before expiration, the agent MUST request a fresh AIT: + +``` +POST /v1/agents/auth/refresh +Authorization: Claw +X-Claw-Agent-Access: +``` + +The registry validates the current AIT and access token, then issues a new AIT with an updated `exp`. 
+ +### 6.4 Access Token Validation + +For sensitive routes (relay, hooks), the proxy validates the agent's session access token with the registry: + +``` +POST /v1/agents/auth/validate +{ + "agentDid": "did:claw:agent:...", + "aitJti": "" +} +``` + +Returns `204 No Content` if valid, `401 Unauthorized` if not. + +--- + +## 7. Trust Establishment (Pairing) + +### 7.1 Overview + +Before two agents can exchange messages, they MUST establish mutual trust through a pairing ceremony. Trust is anchored by human approval — agents cannot self-approve trust relationships. + +### 7.2 Pairing Flow + +``` +Agent A (Initiator) Proxy Agent B (Responder) + │ │ │ + │ 1. POST /pair/start │ │ + │ { initiatorProfile } │ │ + │ ─────────────────────────► │ + │ │ │ + │ 2. { ticket } │ │ + │ ◄───────────────────────── │ + │ │ │ + │ (out-of-band ticket exchange) │ + │ (QR code, message, etc.) │ + │ ────────────────────────────────────────────────────► + │ │ │ + │ │ 3. POST /pair/confirm │ + │ │ { ticket, │ + │ │ responderProfile } │ + │ │ ◄──────────────────────── + │ │ │ + │ │ 4. { paired: true } │ + │ │ ─────────────────────────► + │ │ │ + │ 5. Callback (optional) │ │ + │ ◄───────────────────────── │ +``` + +### 7.3 Pairing Ticket + +The pairing ticket is a signed JWT containing: + +- Issuer proxy URL +- Expiration timestamp +- Ticket signing key ID (`pkid`) + +Tickets have a configurable TTL (default: 300 seconds, maximum: 900 seconds). 
+ +### 7.4 Peer Profile + +Each side of the pair provides a profile: + +```json +{ + "agentName": "kai", + "humanName": "Ravi", + "proxyOrigin": "https://proxy.example.com" +} +``` + +| Field | Required | Description | +|-------|----------|-------------| +| `agentName` | REQUIRED | Agent display name (max 64 chars) | +| `humanName` | REQUIRED | Owner display name (max 64 chars) | +| `proxyOrigin` | OPTIONAL | Proxy URL for cross-proxy routing | + +### 7.5 Ownership Verification + +When an agent initiates pairing, the proxy MUST verify that the authenticated caller (identified by `ownerDid` in the AIT) actually owns the claimed `initiatorAgentDid`. This is done by querying the registry's internal ownership endpoint. + +### 7.6 Trust Store + +Each proxy maintains a Trust Store recording: + +- **Known agents** — Agents that have been seen and accepted +- **Approved pairs** — Bidirectional trust relationships between agents +- **Pairing tickets** — Pending and completed pairing ceremonies + +A message from Agent A to Agent B is allowed only if the pair `(A, B)` exists in the trust store. + +--- + +## 8. Relay Transport + +### 8.1 Overview + +Agents communicate through a relay system. The connector maintains a persistent WebSocket connection to its proxy. Messages are relayed between proxies and delivered to connectors, which forward them to the local agent framework. + +### 8.2 Connector-Proxy WebSocket + +The connector connects to the proxy at: + +``` +GET /v1/relay/connect +Authorization: Claw +X-Claw-Agent-Access: ++ PoP headers +``` + +On successful authentication, the connection is upgraded to WebSocket. + +### 8.3 Frame Protocol + +All WebSocket messages use JSON frames with the following base structure: + +```json +{ + "v": 1, + "type": "", + "id": "", + "ts": "" +} +``` + +| Field | Type | Required | Description | +|-------|------|----------|-------------| +| `v` | integer | REQUIRED | Frame protocol version. Currently `1`. 
| +| `type` | string | REQUIRED | Frame type identifier | +| `id` | string | REQUIRED | Unique frame ID (ULID) | +| `ts` | string | REQUIRED | ISO 8601 timestamp with timezone | + +### 8.4 Frame Types + +#### 8.4.1 Heartbeat + +Sent by either side to check liveness. + +```json +{ + "v": 1, + "type": "heartbeat", + "id": "01HG8...", + "ts": "2026-02-21T12:00:00.000Z" +} +``` + +Default interval: 30 seconds. Ack timeout: 60 seconds. + +#### 8.4.2 Heartbeat Acknowledgement + +```json +{ + "v": 1, + "type": "heartbeat_ack", + "id": "01HG9...", + "ts": "2026-02-21T12:00:00.100Z", + "ackId": "01HG8..." +} +``` + +#### 8.4.3 Deliver (Proxy → Connector) + +Inbound message delivery to the local agent. + +```json +{ + "v": 1, + "type": "deliver", + "id": "01HGA...", + "ts": "2026-02-21T12:00:01.000Z", + "fromAgentDid": "did:claw:agent:...", + "toAgentDid": "did:claw:agent:...", + "payload": { ... }, + "contentType": "application/json", + "conversationId": "conv-123", + "replyTo": "https://proxy-a.example.com/v1/relay/delivery-receipts" +} +``` + +| Field | Required | Description | +|-------|----------|-------------| +| `fromAgentDid` | REQUIRED | Sender agent DID | +| `toAgentDid` | REQUIRED | Recipient agent DID | +| `payload` | REQUIRED | Message payload (any JSON value) | +| `contentType` | OPTIONAL | MIME type of the payload | +| `conversationId` | OPTIONAL | Conversation thread identifier | +| `replyTo` | OPTIONAL | URL for delivery receipts | + +#### 8.4.4 Deliver Acknowledgement (Connector → Proxy) + +```json +{ + "v": 1, + "type": "deliver_ack", + "id": "01HGB...", + "ts": "2026-02-21T12:00:01.200Z", + "ackId": "01HGA...", + "accepted": true, + "reason": null +} +``` + +| Field | Required | Description | +|-------|----------|-------------| +| `ackId` | REQUIRED | ID of the deliver frame being acknowledged | +| `accepted` | REQUIRED | Whether the local agent accepted the message | +| `reason` | OPTIONAL | Rejection reason (if `accepted` is false) | + +#### 8.4.5 
Enqueue (Connector → Proxy) + +Outbound message from the local agent to a remote agent. + +```json +{ + "v": 1, + "type": "enqueue", + "id": "01HGC...", + "ts": "2026-02-21T12:00:02.000Z", + "toAgentDid": "did:claw:agent:...", + "payload": { ... }, + "conversationId": "conv-123", + "replyTo": "https://proxy-a.example.com/v1/relay/delivery-receipts" +} +``` + +#### 8.4.6 Enqueue Acknowledgement (Proxy → Connector) + +```json +{ + "v": 1, + "type": "enqueue_ack", + "id": "01HGD...", + "ts": "2026-02-21T12:00:02.100Z", + "ackId": "01HGC...", + "accepted": true +} +``` + +### 8.5 Local Delivery + +When the connector receives a `deliver` frame, it forwards the payload to the local agent framework via HTTP: + +``` +POST /hooks/agent +Content-Type: application/json +x-clawdentity-agent-did: +x-clawdentity-to-agent-did: +x-clawdentity-verified: true +x-openclaw-token: +x-request-id: + + +``` + +The connector handles retry with exponential backoff (default: 4 attempts, 300ms initial delay, 2x factor, 14s budget). + +### 8.6 Reconnection + +On WebSocket disconnection, the connector MUST attempt to reconnect using exponential backoff with jitter: + +| Parameter | Default | +|-----------|---------| +| Min delay | 1,000 ms | +| Max delay | 30,000 ms | +| Backoff factor | 2 | +| Jitter ratio | 0.2 | + +### 8.7 Outbound Queue + +When the WebSocket is disconnected, the connector MUST queue outbound `enqueue` frames locally. Queued frames are flushed in order upon reconnection. + +The queue supports optional persistence (e.g., to disk or SQLite) so that messages survive connector restarts. + +### 8.8 Delivery Receipts + +The proxy exposes a delivery receipt endpoint: + +``` +POST /v1/relay/delivery-receipts +``` + +Delivery receipts confirm that a message was delivered to the recipient's connector. 
Headers used: + +| Header | Description | +|--------|-------------| +| `X-Claw-Conversation-Id` | Conversation thread identifier | +| `X-Claw-Delivery-Receipt-Url` | Callback URL for receipts | +| `X-Claw-Recipient-Agent-Did` | DID of the recipient agent | + +--- + +## 9. Certificate Revocation + +### 9.1 CRL Format + +The Certificate Revocation List is a signed JWT containing a list of revoked AITs. + +**JOSE Header:** + +```json +{ + "alg": "EdDSA", + "typ": "CRL", + "kid": "" +} +``` + +**Claims:** + +```json +{ + "iss": "https://registry.clawdentity.com", + "jti": "01HGE...", + "iat": 1708531200, + "exp": 1708534800, + "revocations": [ + { + "jti": "01HGF...", + "agentDid": "did:claw:agent:...", + "reason": "compromised", + "revokedAt": 1708532000 + } + ] +} +``` + +| Claim | Type | Required | Description | +|-------|------|----------|-------------| +| `iss` | string | REQUIRED | Registry issuer URL | +| `jti` | string | REQUIRED | CRL identifier (ULID) | +| `iat` | number | REQUIRED | Issued-at timestamp | +| `exp` | number | REQUIRED | Expiration. MUST be > `iat` | +| `revocations` | array | REQUIRED | At least one revocation entry | +| `revocations[].jti` | string | REQUIRED | Revoked AIT's `jti` (ULID) | +| `revocations[].agentDid` | string | REQUIRED | Revoked agent's DID | +| `revocations[].reason` | string | OPTIONAL | Human-readable reason (max 280 chars) | +| `revocations[].revokedAt` | number | REQUIRED | Revocation timestamp | + +### 9.2 CRL Distribution + +The registry publishes the current CRL at: + +``` +GET /v1/crl +``` + +Response: + +```json +{ + "crl": "" +} +``` + +### 9.3 CRL Caching + +Proxies MUST cache the CRL and refresh it periodically: + +| Parameter | Default | +|-----------|---------| +| Refresh interval | 5 minutes | +| Max age | 15 minutes | +| Stale behavior | `fail-open` or `fail-closed` (configurable) | + +When `fail-open`: if the CRL cannot be refreshed, the stale CRL is used. 
+When `fail-closed`: if the CRL is stale and cannot be refreshed, all requests are rejected. + +### 9.4 Revocation Scope + +| Scope | What Happens | Who Can Do It | +|-------|-------------|---------------| +| **Revoke agent** | AIT is added to CRL. Agent can no longer authenticate anywhere. | Agent owner | +| **Remove pair** | Trust relationship is deleted from the proxy trust store. Agent still exists but can no longer communicate with the removed peer. | Either side of the pair | + +--- + +## 10. Security Considerations + +### 10.1 Private Key Protection + +Agent private keys MUST be stored locally and MUST NOT be transmitted over the network. The protocol is designed so that only the public key leaves the agent's machine — embedded in the AIT's `cnf` claim. + +### 10.2 Replay Protection + +Replay attacks are mitigated by three mechanisms: + +1. **Timestamp skew check** — Requests with `X-Claw-Timestamp` outside a 300-second window are rejected +2. **Nonce uniqueness** — Each `(agentDid, nonce)` pair is tracked; duplicates are rejected +3. **AIT expiration** — AITs have bounded lifetimes + +### 10.3 Man-in-the-Middle + +TLS is REQUIRED for all proxy-to-proxy and proxy-to-registry communication. The PoP signature provides an additional layer: even if TLS were compromised, a replayed AIT cannot produce valid signatures for new requests without the private key. + +### 10.4 Connector Isolation + +The connector MUST only communicate with its own proxy. It MUST NOT directly access: + +- The registry +- Other proxies +- Cloud infrastructure services (queues, object storage, etc.) + +This ensures the connector remains a simple, auditable bridge with minimal attack surface. + +### 10.5 Trust Store Integrity + +The trust store is the source of truth for authorization. Implementations SHOULD use a durable, transactional storage backend (e.g., SQLite in a Durable Object) to prevent corruption. 
+ +### 10.6 CRL Freshness + +There is an inherent window between AIT revocation and CRL propagation. With default settings, this window is up to 5 minutes. Implementations requiring tighter revocation windows SHOULD: + +- Reduce the CRL refresh interval +- Use push-based CRL invalidation (e.g., via message queues) +- Combine CRL with real-time agent-auth validation for sensitive operations + +--- + +## 11. Wire Formats + +### 11.1 Registry Signing Keys + +Published at `/.well-known/claw-keys.json`: + +```json +{ + "keys": [ + { + "kid": "reg-key-01", + "x": "", + "status": "active", + "createdAt": "2026-01-01T00:00:00Z" + } + ] +} +``` + +Key rotation: the registry MAY have multiple active keys. The AIT/CRL `kid` header identifies which key signed the token. + +### 11.2 Authorization Header + +``` +Authorization: Claw +``` + +The scheme `Claw` is case-sensitive. The AIT MUST be a valid JWS Compact Serialization. + +--- + +## 12. Endpoints + +### 12.1 Registry Endpoints + +| Method | Path | Description | +|--------|------|-------------| +| GET | `/.well-known/claw-keys.json` | Registry signing keys | +| GET | `/v1/metadata` | Registry metadata | +| GET | `/v1/crl` | Current CRL | +| POST | `/v1/agents/challenge` | Request registration challenge | +| POST | `/v1/agents/auth/refresh` | Refresh AIT | +| POST | `/v1/agents/auth/validate` | Validate agent access token | +| POST | `/v1/invites` | Create invite code | +| POST | `/v1/invites/redeem` | Redeem invite code | +| POST | `/v1/me/api-keys` | Manage API keys | +| POST | `/internal/v1/identity/agent-ownership` | Verify agent ownership (internal) | + +### 12.2 Proxy Endpoints + +| Method | Path | Description | +|--------|------|-------------| +| GET | `/health` | Health check (unauthenticated) | +| POST | `/hooks/agent` | Inbound message hook | +| GET | `/v1/relay/connect` | WebSocket relay connection | +| POST | `/v1/relay/delivery-receipts` | Delivery receipt callback | +| POST | `/pair/start` | Initiate pairing | 
+| POST | `/pair/confirm` | Confirm pairing | +| POST | `/pair/status` | Check pairing status | + +--- + +## 13. Error Codes + +### 13.1 Authentication Errors (401) + +| Code | Description | +|------|-------------| +| `PROXY_AUTH_MISSING_TOKEN` | No Authorization header | +| `PROXY_AUTH_INVALID_SCHEME` | Not `Claw ` format | +| `PROXY_AUTH_INVALID_AIT` | AIT verification failed | +| `PROXY_AUTH_INVALID_PROOF` | PoP signature mismatch | +| `PROXY_AUTH_INVALID_TIMESTAMP` | Missing or invalid timestamp | +| `PROXY_AUTH_TIMESTAMP_SKEW` | Timestamp outside allowed window | +| `PROXY_AUTH_REPLAY` | Nonce reuse detected | +| `PROXY_AUTH_REVOKED` | AIT has been revoked | +| `PROXY_AGENT_ACCESS_REQUIRED` | Missing X-Claw-Agent-Access | +| `PROXY_AGENT_ACCESS_INVALID` | Invalid or expired access token | + +### 13.2 Authorization Errors (403) + +| Code | Description | +|------|-------------| +| `PROXY_AUTH_FORBIDDEN` | Agent not in trust store or pair not approved | +| `PROXY_PAIR_OWNERSHIP_FORBIDDEN` | Caller doesn't own the agent DID | + +### 13.3 Service Errors (503) + +| Code | Description | +|------|-------------| +| `PROXY_AUTH_DEPENDENCY_UNAVAILABLE` | Registry/CRL/trust store unreachable | +| `PROXY_PAIR_STATE_UNAVAILABLE` | Trust store unreachable | + +--- + +## 14. IANA Considerations + +### 14.1 DID Method Registration + +This specification introduces the `did:claw` method. If submitted to the W3C DID Method Registry, it would be registered as: + +- **Method name:** `claw` +- **Method specific identifier:** `:` where kind ∈ {`human`, `agent`} +- **DID document:** Not applicable (identity resolved via registry API) + +### 14.2 HTTP Authentication Scheme + +This specification introduces the `Claw` HTTP authentication scheme for the `Authorization` header. + +### 14.3 JWT Type Values + +| `typ` Value | Description | +|-------------|-------------| +| `AIT` | Agent Identity Token | +| `CRL` | Certificate Revocation List | + +--- + +## 15. 
References
+
+### 15.1 Normative References
+
+- [RFC 2119] Bradner, S., "Key words for use in RFCs", BCP 14, RFC 2119
+- [RFC 7515] Jones, M., "JSON Web Signature (JWS)", RFC 7515
+- [RFC 7519] Jones, M., "JSON Web Token (JWT)", RFC 7519
+- [RFC 8032] Josefsson, S., "Edwards-Curve Digital Signature Algorithm (EdDSA)", RFC 8032
+- [RFC 8037] Liusvaara, I., "CFRG Elliptic Curve Diffie-Hellman (ECDH) and Signatures in JOSE", RFC 8037
+- [ULID] Feerasta, A., "Universally Unique Lexicographically Sortable Identifier"
+
+### 15.2 Informative References
+
+- [DID Core] W3C, "Decentralized Identifiers (DIDs) v1.0"
+- [DPoP] Fett, D., "OAuth 2.0 Demonstrating Proof of Possession" (RFC 9449)
+- [WebSocket] Fette, I., "The WebSocket Protocol" (RFC 6455)
+
+---
+
+## Appendix A: Example Message Flow
+
+A complete message from Agent A to Agent B:
+
+```
+1. Agent A's connector creates an enqueue frame:
+   { type: "enqueue", toAgentDid: "did:claw:agent:B...", payload: {...} }
+
+2. Connector sends frame over WebSocket to Proxy A
+
+3. Proxy A:
+   a. Looks up Agent B's proxy URL from trust store
+   b. Signs an HTTP request with Agent A's credentials
+   c. POST to Proxy B's /hooks/agent endpoint
+
+4. Proxy B:
+   a. Verifies Authorization (AIT + PoP)
+   b. Checks CRL (not revoked)
+   c. Checks trust store (A→B pair exists)
+   d. Creates a deliver frame
+   e. Sends frame over WebSocket to Connector B
+
+5. Connector B:
+   a. Receives deliver frame
+   b. POST to local agent framework (localhost)
+   c. Sends deliver_ack back to Proxy B
+
+6. 
Agent B processes the message +``` + +## Appendix B: Differences from Existing Standards + +| Feature | OAuth 2.0 / DPoP | Clawdentity | +|---------|------------------|-------------| +| Identity model | Client credentials | Per-agent DID + Ed25519 keypair | +| Token issuer | Authorization server | Registry (centralized trust anchor) | +| PoP mechanism | DPoP (RFC 9449) | Custom canonical request signing | +| Trust model | Scope-based | Explicit bilateral pairing | +| Revocation | Token introspection | Signed CRL (JWT) with local caching | +| Transport | Direct HTTP | WebSocket relay with store-and-forward | + +--- + +*This is a living document. Submit issues and proposals at [github.com/vrknetha/clawdentity](https://github.com/vrknetha/clawdentity).* diff --git a/draft-vemula-clawdentity-protocol-00.html b/draft-vemula-clawdentity-protocol-00.html new file mode 100644 index 0000000..1a1dbf1 --- /dev/null +++ b/draft-vemula-clawdentity-protocol-00.html @@ -0,0 +1,3707 @@ + + + + + + +Clawdentity: Cryptographic Identity and Trust Protocol for AI Agent Communication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Internet-DraftClawdentityFebruary 2026
VemulaExpires 25 August 2026[Page]
+

+
+
+
Workgroup:
+
Independent Submission
+
Internet-Draft:
+
draft-vemula-clawdentity-protocol-00
+
Published:
+
+ +
+
Intended Status:
+
Informational
+
Expires:
+
+
Author:
+
+
+
R.K. Vemula
+
KnackLabs
+
+
+
+
+

Clawdentity: Cryptographic Identity and Trust Protocol for AI Agent Communication

+
+

Abstract

+

+ This document specifies the Clawdentity protocol, a cryptographic identity + and trust layer for AI agent-to-agent communication. Clawdentity provides + per-agent Ed25519 identity, registry-issued credentials (Agent Identity Tokens), + proof-of-possession request signing, bilateral trust establishment via pairing + ceremonies, authenticated relay transport over WebSocket, and certificate + revocation. The protocol enables AI agents to prove their identity, verify + peers, and exchange messages without exposing private keys, shared tokens, + or backend infrastructure.

+
+
+
+

+Status of This Memo +

+

+ This Internet-Draft is submitted in full conformance with the + provisions of BCP 78 and BCP 79.

+

+ Internet-Drafts are working documents of the Internet Engineering Task + Force (IETF). Note that other groups may also distribute working + documents as Internet-Drafts. The list of current Internet-Drafts is + at https://datatracker.ietf.org/drafts/current/.

+

+ Internet-Drafts are draft documents valid for a maximum of six months + and may be updated, replaced, or obsoleted by other documents at any + time. It is inappropriate to use Internet-Drafts as reference + material or to cite them other than as "work in progress."

+

+ This Internet-Draft will expire on 25 August 2026.

+
+
+ +
+
+

+Table of Contents +

+ +
+
+
+
+

+1. Introduction +

+
+
+

+1.1. Problem Statement +

+

+ Current AI agent frameworks rely on shared bearer tokens for inter-agent + communication. A single token leak compromises all agents in the system. + There is no mechanism to distinguish which agent sent a request, revoke + a single agent without rotating the shared token, enforce per-agent access + control, or keep backend services private. These limitations become critical + as multi-agent systems scale.

+
+
+
+
+

+1.2. Design Goals +

+

Clawdentity addresses these problems with six design goals:

+
    +
  1. + Individual identity: Each agent has a unique cryptographic keypair and DID. +
  2. +
  3. + Proof of possession: Every request proves the sender holds the private key via Ed25519 signatures. +
  4. +
  5. + Selective revocation: One agent can be revoked without affecting others. +
  6. +
  7. + Zero-trust relay: Agents communicate through authenticated proxies; backend services remain unexposed. +
  8. +
  9. + Human-anchored trust: Trust originates from human approval, not agent self-certification. +
  10. +
  11. + Framework agnostic: Works with any AI agent framework. +
  12. +
+
+
+
+
+

+1.3. Architecture Overview +

+

+ The protocol defines four component roles:

+
+
Registry
+
Central identity authority. Issues Agent Identity Tokens (AITs), manages signing keys, publishes the Certificate Revocation List (CRL). +
+
+
Proxy
+
Per-owner edge service. Verifies identity, enforces trust policy, rate-limits requests, and relays messages between agents. +
+
+
Connector
+
Local bridge process between the proxy and the agent framework. Maintains a persistent WebSocket connection to the proxy. Never exposed publicly. +
+
+
Agent
+
The AI agent itself. Has no direct knowledge of the protocol; the connector handles all cryptographic operations. +
+
+
+
+
+
+
+  +-------------+         +-------------+         +--------------+
+  |   Agent A   |         |  Registry   |         |   Agent B    |
+  |  (private)  |         |  (central)  |         |  (private)   |
+  +------+------+         +------+------+         +------+-------+
+         |                       |                       |
+  +------+------+                |                +------+-------+
+  | Connector A |                |                | Connector B  |
+  |  (local)    |                |                |  (local)     |
+  +------+------+                |                +------+-------+
+         |  WebSocket            |                       | WebSocket
+  +------+------+                |                +------+-------+
+  |   Proxy A   |<---------------+--------------->|   Proxy B    |
+  |  (edge)     |                |                |  (edge)      |
+  +-------------+                |                +--------------+
+                                 |
+                    +------------+------------+
+                    | .well-known/claw-keys   |
+                    | /v1/crl                 |
+                    | /v1/agents              |
+                    +-------------------------+
+
+
+
Figure 1: +Component Architecture +
+
+
+
+
+
+
+
+

+2. Conventions and Terminology +

+

+ The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", + "SHOULD", "SHOULD NOT", "RECOMMENDED", "NOT RECOMMENDED", "MAY", and + "OPTIONAL" in this document are to be interpreted as described in + BCP 14 [RFC2119] [RFC8174] when, + and only when, they appear in all capitals, as shown here.

+

This document uses the following terms:

+
+
AIT
+
Agent Identity Token. A signed JWT credential (Section 4) binding an agent DID to a public key. +
+
+
CRL
+
Certificate Revocation List. A signed JWT (Section 10) containing a list of revoked AITs. +
+
+
DID
+
Decentralized Identifier as defined in [W3C.DID], using the "claw" method. +
+
+
PoP
+
Proof of Possession. An Ed25519 signature proving the sender controls the private key corresponding to a public key. +
+
+
Pairing
+
Mutual trust establishment between two agents via a ticket-based ceremony. +
+
+
Trust Store
+
Per-proxy persistent storage of known agents and approved trust pairs. +
+
+
ULID
+
Universally Unique Lexicographically Sortable Identifier [ULID]. +
+
+
+
+
+
+
+

+3. Identity Model +

+
+
+

+3.1. DID Format +

+

+ Clawdentity uses a custom DID method with the scheme "did:claw". + The method-specific identifier consists of a kind and a ULID, + separated by a colon:

+
+
+claw-did    = "did:claw:" kind ":" ulid
+kind        = "human" / "agent"
+ulid        = 26(DIGIT / ALPHA)  ; Crockford Base32 alphabet (digits and letters, excluding I, L, O, U), see [ULID]
+
+
+

Two kinds are defined:

+ + + + + + + + + + + + + + + + + + + + + +
Table 1
KindDescriptionExample
humanA human owner/operatordid:claw:human:01HF7YAT00W6W7CM7N3W5FDXT4
agentAn AI agentdid:claw:agent:01HG8ZBU11X7X8DN8O4X6GEYU5
+

+ Implementations MUST reject DIDs where the kind is not "human" or "agent", + or where the ULID component does not conform to the ULID specification [ULID].

+
+
+
+
+

+3.2. Cryptographic Primitives +

+

+ The protocol uses Ed25519 [RFC8032] as the sole signing algorithm. + Implementations MUST NOT support other signature algorithms.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2
PrimitiveAlgorithmReferenceUsage
SigningEd25519 + [RFC8032] +Identity, request signing
Body hashSHA-256 + [RFC6234] +Request body integrity
Token formatJWS Compact + [RFC7515] +AIT and CRL tokens
Key encodingBase64url (no pad) + [RFC4648] Section 5Keys, signatures, hashes
Key representationJWK (OKP/Ed25519) + [RFC8037] +Public keys in AITs
+
+
+
+
+

+3.3. Key Generation +

+

+ Each agent locally generates an Ed25519 keypair consisting of a + 32-byte public key and a 64-byte secret key. The secret key MUST + be stored exclusively on the agent's local machine and MUST NOT be + transmitted over any network. Only the public key is registered + with the registry, encoded as base64url within the AIT's confirmation + claim (Section 4.4).

+
+
+
+
+

+3.4. Ownership Model +

+

+ Every agent DID is bound to exactly one human DID (the "ownerDid"). + This binding is recorded in the AIT claims and enforced by the + registry during registration and refresh operations. A human MAY + own multiple agents. An agent MUST have exactly one owner.

+
+
+  Human (did:claw:human:...)
+    +-- Agent A (did:claw:agent:...)
+    +-- Agent B (did:claw:agent:...)
+    +-- Agent C (did:claw:agent:...)
+
+
+
+
+
+
+
+
+

+4. Agent Identity Token (AIT) +

+
+
+

+4.1. Overview +

+

+ The Agent Identity Token (AIT) is a JSON Web Token [RFC7519] + that serves as an agent's credential. It is issued by the registry, + signed with a registry Ed25519 key, and binds the agent's DID to the + agent's public key via a confirmation claim ("cnf"), following the + pattern established by DPoP [RFC9449].

+
+
+
+
+

+4.2. JOSE Header +

+

The AIT's JOSE protected header MUST contain:

+
+
alg
+
REQUIRED. MUST be "EdDSA" per [RFC8037]. +
+
+
typ
+
REQUIRED. MUST be "AIT". +
+
+
kid
+
REQUIRED. The key identifier of the registry signing key used to sign this AIT. This allows the verifier to locate the correct registry public key. +
+
+
+
+
+
+{
+  "alg": "EdDSA",
+  "typ": "AIT",
+  "kid": "reg-key-2026-01"
+}
+
+
+
Figure 2: +AIT JOSE Header Example +
+
+
+
+
+

+4.3. Claims +

+

+ The AIT payload MUST contain the following claims. No additional + claims are permitted (strict validation).

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3
ClaimTypeRequiredDescription
issstringREQUIREDRegistry issuer URL (e.g., "https://registry.clawdentity.com")
substringREQUIREDAgent DID. MUST be "did:claw:agent:<ulid>"
ownerDidstringREQUIREDOwner human DID. MUST be "did:claw:human:<ulid>"
namestringREQUIREDAgent name. 1-64 characters matching [A-Za-z0-9._ -]
frameworkstringREQUIREDAgent framework identifier. 1-32 characters, no control characters.
descriptionstringOPTIONALHuman-readable description. Maximum 280 characters.
cnfobjectREQUIREDConfirmation claim. See Section 4.4.
iatnumberREQUIREDIssued-at time (NumericDate per [RFC7519]).
nbfnumberREQUIREDNot-before time (NumericDate).
expnumberREQUIREDExpiration time (NumericDate). MUST be greater than both nbf and iat.
jtistringREQUIREDUnique token identifier. MUST be a valid ULID.
+
+
+
+
+

+4.4. Confirmation Claim +

+

+ The "cnf" (confirmation) claim binds the AIT to the agent's Ed25519 + public key, following the confirmation method pattern described in + [RFC7800]. It contains a single "jwk" member:

+
+
+"cnf": {
+  "jwk": {
+    "kty": "OKP",
+    "crv": "Ed25519",
+    "x": "<base64url-encoded-32-byte-public-key>"
+  }
+}
+
+
+

+ The "kty" MUST be "OKP". The "crv" MUST be "Ed25519". + The "x" parameter MUST decode (base64url) to exactly 32 bytes. + The JWK MUST NOT contain a "d" (private key) parameter.

+
+
+
+
+

+4.5. Validation Rules +

+

An AIT MUST be rejected if any of the following conditions are true:

+
    +
  1. "alg" is not "EdDSA". +
  2. +
  3. "typ" is not "AIT". +
  4. +
  5. "kid" does not match any active registry signing key. +
  6. +
  7. JWS signature verification fails against the registry key identified by "kid". +
  8. +
  9. "sub" is not a valid DID with kind "agent". +
  10. +
  11. "ownerDid" is not a valid DID with kind "human". +
  12. +
  13. "cnf.jwk.x" does not decode to exactly 32 bytes. +
  14. +
  15. "exp" is less than or equal to "nbf" or "iat". +
  16. +
  17. "jti" is not a valid ULID. +
  18. +
  19. Current time is before "nbf" or after "exp" (accounting for clock skew). +
  20. +
  21. "jti" appears in the current CRL (Section 10). +
  22. +
+
+
+
+
+
+
+

+5. HTTP Request Signing +

+
+
+

+5.1. Purpose +

+

+ Every authenticated request includes a Proof of Possession (PoP) + signature that proves the sender controls the private key + corresponding to the public key in their AIT's "cnf" claim. + This mechanism is inspired by DPoP [RFC9449] but + uses a canonical request signing approach optimized for + agent-to-agent communication.

+
+
+
+
+

+5.2. Canonical Request Format +

+

+ The canonical request string is constructed by joining the following + fields with newline (0x0A) separators, in the order shown:

+
+
+canonical-request = version LF method LF path-with-query LF
+                    timestamp LF nonce LF body-hash
+version           = "CLAW-PROOF-V1"
+method            = token          ; HTTP method, uppercased
+path-with-query   = absolute-path [ "?" query ]
+timestamp         = 1*DIGIT        ; Unix epoch seconds
+nonce             = 1*unreserved   ; unique per-request value
+body-hash         = base64url      ; SHA-256 of request body
+LF                = %x0A
+
+
+
+
+
+CLAW-PROOF-V1
+POST
+/hooks/agent
+1708531200
+01HG8ZB11XZX8DN84X6GEYW5QK
+47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
+
+
+
Figure 3: +Canonical Request Example +
+
+
+
+
+

+5.3. Signature Computation +

+

+ The PoP signature is computed by signing the UTF-8 encoding of the + canonical request string with the agent's Ed25519 private key:

+
+
+canonical = canonicalize(version, method, path, timestamp, nonce, body_hash)
+signature = Ed25519_Sign(UTF8_Encode(canonical), secret_key)
+proof     = Base64url_Encode(signature)
+
+
+

+ The resulting "proof" is a base64url-encoded 64-byte Ed25519 signature.

+
+
+
+
+

+5.4. Request Headers +

+

+ An authenticated request MUST include the following headers:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4
HeaderStatusDescription
AuthorizationREQUIRED"Claw" SP <AIT-JWT>. See Section 6.
X-Claw-TimestampREQUIREDUnix epoch seconds (integer string).
X-Claw-NonceREQUIREDUnique per-request value. ULID RECOMMENDED.
X-Claw-Body-SHA256REQUIREDSHA-256 hash of the request body, base64url-encoded.
X-Claw-ProofREQUIREDEd25519 PoP signature (base64url, 64 bytes).
X-Claw-Agent-AccessCONDITIONALSession access token. Required for relay and hook routes.
+
+
+
+
+

+5.5. Verification Procedure +

+

The verifier (proxy) MUST perform the following steps in order:

+
    +
  1. Extract the AIT from the "Authorization: Claw <token>" header. +
  2. +
  3. Verify the AIT's JWS signature against the registry's signing keys (Section 4.5). +
  4. +
  5. Check that the AIT's "jti" is not on the CRL (Section 10.3). +
  6. +
  7. Extract the agent's public key from the AIT's "cnf.jwk.x" claim. +
  8. +
  9. Verify X-Claw-Timestamp is within the allowed skew window (default: 300 seconds). +
  10. +
  11. Recompute SHA-256 of the request body; compare with X-Claw-Body-SHA256. +
  12. +
  13. Reconstruct the canonical request (Section 5.2). +
  14. +
  15. Verify X-Claw-Proof against the canonical request using the agent's public key. +
  16. +
  17. Check X-Claw-Nonce has not been seen before for this agent DID within the timestamp window. +
  18. +
+

If any step fails, the request MUST be rejected with HTTP 401.

+
+
+
+
+
+
+

+6. The "Claw" Authentication Scheme +

+

+ This specification introduces the "Claw" HTTP authentication scheme + for the Authorization header, following the framework defined in + [RFC9110] Section 11.

+
+
+credentials = "Claw" SP ait-token
+ait-token   = 1*base64url-char "." 1*base64url-char "." 1*base64url-char
+
+
+

+ The scheme name "Claw" is case-sensitive. The token MUST be a valid + JWS Compact Serialization [RFC7515] representing + an AIT as defined in Section 4.

+
+
+
+
+

+7. Agent Registration +

+
+
+

+7.1. Registration Flow +

+

+ Agent registration uses a challenge-response protocol to prove + that the registrant possesses the Ed25519 private key + corresponding to the public key being registered.

+
+
+
+
+  Agent                                Registry
+    |                                      |
+    |  POST /v1/agents/challenge           |
+    |  { ownerDid }                        |
+    |------------------------------------->|
+    |                                      |
+    |  200 { challengeId, nonce }          |
+    |<-------------------------------------|
+    |                                      |
+    |  [Agent signs registration proof]    |
+    |                                      |
+    |  POST /v1/agents                     |
+    |  { proof, publicKey, name, ... }     |
+    |------------------------------------->|
+    |                                      |
+    |  201 { agentDid, ait }               |
+    |<-------------------------------------|
+
+
+
Figure 4: +Agent Registration Sequence +
+
+
+
+
+
+

+7.2. Registration Proof +

+

+ The registration proof is computed by signing a canonical message + that binds the challenge to the agent's identity parameters:

+
+
+clawdentity.register.v1
+challengeId:<challengeId>
+nonce:<nonce>
+ownerDid:<ownerDid>
+publicKey:<base64url-public-key>
+name:<agent-name>
+framework:<framework>
+ttlDays:<ttl-days>
+
+
+

+ Optional fields (framework, ttlDays) use empty strings when absent. + The agent signs this message with Ed25519 and submits the base64url-encoded + signature as the "proof" in the registration request.

+
+
+
+
+

+7.3. AIT Refresh +

+

+ AITs have bounded lifetimes. Before expiration, the connector + MUST request a fresh AIT:

+
+
+POST /v1/agents/auth/refresh HTTP/1.1
+Authorization: Claw <current-AIT>
+X-Claw-Agent-Access: <access-token>
+
+
+

+ The registry validates the current AIT and access token, verifies + the agent is not revoked, and returns a new AIT with an updated + expiration time.

+
+
+
+
+
+
+

+8. Trust Establishment (Pairing) +

+
+
+

+8.1. Overview +

+

+ Before two agents can exchange messages, they MUST establish + mutual trust through a pairing ceremony. Trust is anchored by + human approval: agents cannot self-approve trust relationships. + The pairing process uses short-lived tickets exchanged + out-of-band (e.g., via QR code or messaging).

+
+
+
+
+

+8.2. Pairing Flow +

+
+
+
+
+  Agent A (Initiator)        Proxy         Agent B (Responder)
+         |                     |                    |
+         | POST /pair/start    |                    |
+         | { initiatorProfile, |                    |
+         |   ttlSeconds }      |                    |
+         |-------------------->|                    |
+         |                     |                    |
+         | { ticket,           |                    |
+         |   expiresAt }       |                    |
+         |<--------------------|                    |
+         |                     |                    |
+         |   (out-of-band ticket exchange)          |
+         |   (QR code, message, copy-paste)         |
+         |----------------------------------------->|
+         |                     |                    |
+         |                     | POST /pair/confirm |
+         |                     | { ticket,          |
+         |                     |   responderProfile}|
+         |                     |<-------------------|
+         |                     |                    |
+         |                     | 201 { paired:true }|
+         |                     |------------------->|
+         |                     |                    |
+         | callback (optional) |                    |
+         |<--------------------|                    |
+
+
+
Figure 5: +Trust Establishment Sequence +
+
+
+
+
+
+

+8.3. Pairing Ticket +

+

+ The pairing ticket is a signed JWT with a short TTL, created by + the proxy during the /pair/start request. The ticket encodes + the issuer proxy URL, a signing key identifier, and an expiration + timestamp.

+

Ticket parameters:

+ + + + + + + + + + + + + + + + +
Table 5
ParameterDefaultMaximum
TTL (ttlSeconds)300 seconds900 seconds
+
+
+
+
+

+8.4. Peer Profile +

+

+ Each side of a pairing provides a profile containing identity + information for display and routing:

+
+
+{
+  "agentName": "kai",
+  "humanName": "Ravi",
+  "proxyOrigin": "https://proxy.example.com"
+}
+
+
+
+
agentName
+
REQUIRED. Agent display name. Maximum 64 characters. No control characters. +
+
+
humanName
+
REQUIRED. Owner display name. Maximum 64 characters. No control characters. +
+
+
proxyOrigin
+
OPTIONAL. The proxy's URL origin, used for cross-proxy message routing. +
+
+
+
+
+
+
+

+8.5. Ownership Verification +

+

+ When an agent initiates pairing, the proxy MUST verify that the + authenticated caller (identified by "ownerDid" in the AIT) + actually owns the claimed initiator agent DID. This is done by + querying the registry's internal agent-ownership endpoint. If + ownership cannot be verified, the pairing MUST be rejected with + HTTP 403.

+
+
+
+
+

+8.6. Trust Store +

+

+ Each proxy maintains a Trust Store that records:

+
    +
  • + Known agents: Agents that have been authenticated and accepted. +
  • +
  • + Approved pairs: Bidirectional trust relationships between agents. +
  • +
  • + Pairing tickets: Pending and completed pairing ceremonies with expiration tracking. +
  • +
+

+ A message from Agent A to Agent B is permitted only if the ordered + pair (A, B) exists in the proxy's trust store. The trust store + SHOULD use a durable, transactional storage backend.

+
+
+
+
+
+
+

+9. Relay Transport +

+
+
+

+9.1. Overview +

+

+ Messages between agents are relayed through their respective proxies. + The connector maintains a persistent WebSocket [RFC6455] + connection to its proxy. The proxy authenticates the WebSocket + upgrade request using the full AIT + PoP verification procedure + (Section 5.5).

+
+
+
+
+

+9.2. WebSocket Connection +

+

+ The connector initiates a WebSocket connection to:

+
+
+GET /v1/relay/connect HTTP/1.1
+Upgrade: websocket
+Connection: Upgrade
+Authorization: Claw <AIT>
+X-Claw-Agent-Access: <access-token>
+X-Claw-Timestamp: <timestamp>
+X-Claw-Nonce: <nonce>
+X-Claw-Body-SHA256: <empty-body-hash>
+X-Claw-Proof: <signature>
+
+
+
+
+
+
+

+9.3. Frame Protocol +

+

+ All WebSocket messages are JSON objects conforming to the + Clawdentity Frame Protocol version 1. Every frame contains + a common base structure:

+
+
+{
+  "v": 1,
+  "type": "<frame-type>",
+  "id": "<ULID>",
+  "ts": "<ISO-8601-timestamp>"
+}
+
+
+
+
v
+
REQUIRED. Integer. Frame protocol version. MUST be 1. +
+
+
type
+
REQUIRED. String. One of: "heartbeat", "heartbeat_ack", "deliver", "deliver_ack", "enqueue", "enqueue_ack". +
+
+
id
+
REQUIRED. String. Unique frame identifier (ULID). +
+
+
ts
+
REQUIRED. String. ISO 8601 timestamp with timezone. +
+
+
+
+
+
+
+

+9.4. Heartbeat Frames +

+

+ Either side MAY send heartbeat frames to verify liveness. + The default heartbeat interval is 30 seconds. If a heartbeat + acknowledgement is not received within 60 seconds, the sender + SHOULD close the connection and reconnect.

+

Heartbeat:

+
+
+{ "v": 1, "type": "heartbeat", "id": "<ULID>", "ts": "<ISO>" }
+
+
+

Heartbeat acknowledgement:

+
+
+{ "v": 1, "type": "heartbeat_ack", "id": "<ULID>", "ts": "<ISO>",
+  "ackId": "<heartbeat-frame-id>" }
+
+
+
+
+
+
+

+9.5. Deliver Frames +

+

+ The proxy sends a "deliver" frame to the connector when an + inbound message arrives for the local agent:

+
+
+{
+  "v": 1,
+  "type": "deliver",
+  "id": "<ULID>",
+  "ts": "<ISO>",
+  "fromAgentDid": "did:claw:agent:...",
+  "toAgentDid": "did:claw:agent:...",
+  "payload": { ... },
+  "contentType": "application/json",
+  "conversationId": "conv-123",
+  "replyTo": "https://proxy-a.example.com/v1/relay/delivery-receipts"
+}
+
+
+

+ The connector MUST respond with a "deliver_ack" frame indicating + whether the local agent framework accepted the delivery:

+
+
+{
+  "v": 1,
+  "type": "deliver_ack",
+  "id": "<ULID>",
+  "ts": "<ISO>",
+  "ackId": "<deliver-frame-id>",
+  "accepted": true
+}
+
+
+

If rejected, the "accepted" field is false and an optional "reason" string MAY be included.

+
+
+
+
+

+9.6. Enqueue Frames +

+

+ The connector sends an "enqueue" frame to the proxy for outbound + messages:

+
+
+{
+  "v": 1,
+  "type": "enqueue",
+  "id": "<ULID>",
+  "ts": "<ISO>",
+  "toAgentDid": "did:claw:agent:...",
+  "payload": { ... },
+  "conversationId": "conv-123"
+}
+
+
+

+ The proxy responds with "enqueue_ack" after accepting or rejecting + the message for relay.

+
+
+
+
+

+9.7. Local Agent Delivery +

+

+ Upon receiving a "deliver" frame, the connector forwards the + payload to the local agent framework via HTTP POST:

+
+
+POST /hooks/agent HTTP/1.1
+Host: 127.0.0.1:18789
+Content-Type: application/json
+x-clawdentity-agent-did: <fromAgentDid>
+x-clawdentity-to-agent-did: <toAgentDid>
+x-clawdentity-verified: true
+x-openclaw-token: <local-hook-token>
+x-request-id: <frame-id>
+
+
+

+ The connector implements retry with exponential backoff for + transient failures (5xx, 429, connection errors):

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6
ParameterDefault Value
Max attempts4
Initial delay300 ms
Max delay2,000 ms
Backoff factor2
Total budget14,000 ms
+
+
+
+
+

+9.8. Reconnection +

+

+ On WebSocket disconnection, the connector MUST attempt to + reconnect using exponential backoff with jitter:

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7
ParameterDefault Value
Minimum delay1,000 ms
Maximum delay30,000 ms
Backoff factor2
Jitter ratio0.2 (±20%)
+

+ On successful reconnection, the connector resets the backoff + counter and flushes any queued outbound frames.

+
+
+
+
+

+9.9. Outbound Queue Persistence +

+

+ When the WebSocket connection is unavailable, the connector + MUST queue outbound "enqueue" frames locally and flush them + in FIFO order upon reconnection. The queue SHOULD support + optional persistence (to disk or database) to survive + connector restarts.

+
+
+
+
+
+
+

+10. Certificate Revocation +

+
+
+

+10.1. CRL Format +

+

+ The Certificate Revocation List (CRL) is a signed JWT containing + a list of revoked AITs. Its JOSE header uses:

+
+
alg
+
MUST be "EdDSA". +
+
+
typ
+
MUST be "CRL". +
+
+
kid
+
Registry signing key identifier. +
+
+
+

CRL claims:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 8
ClaimTypeRequiredDescription
issstringREQUIREDRegistry issuer URL.
jtistringREQUIREDCRL identifier (ULID).
iatnumberREQUIREDIssued-at timestamp.
expnumberREQUIREDExpiration. MUST be greater than iat.
revocationsarrayREQUIREDAt least one revocation entry.
+

Each revocation entry contains:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 9
FieldTypeRequiredDescription
jtistringREQUIREDRevoked AIT's jti (ULID).
agentDidstringREQUIREDRevoked agent's DID.
reasonstringOPTIONALHuman-readable reason. Max 280 chars.
revokedAtnumberREQUIREDRevocation timestamp (Unix seconds).
+
+
+
+
+

+10.2. CRL Distribution +

+

+ The registry publishes the current CRL at the well-known endpoint:

+
+
+GET /v1/crl HTTP/1.1
+Host: registry.clawdentity.com
+
+
+

Response:

+
+
+{ "crl": "<signed-CRL-JWT>" }
+
+
+
+
+
+
+

+10.3. CRL Caching +

+

+ Proxies MUST cache the CRL locally and refresh it periodically. + The following parameters control caching behavior:

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 10
ParameterDefaultDescription
Refresh interval5 minutesHow often to fetch a fresh CRL.
Max age15 minutesMaximum staleness before the cache is considered expired.
Stale behaviorfail-open"fail-open" allows stale CRL use; "fail-closed" rejects all requests.
+

+ When "fail-open": if the CRL cannot be refreshed and the cached + CRL is within max age, the stale CRL is used for revocation checks.

+

+ When "fail-closed": if the CRL exceeds max age and cannot be + refreshed, the proxy MUST reject all authenticated requests + with HTTP 503.

+
+
+
+
+

+10.4. Revocation Scope +

+

Two levels of revocation are defined:

+
+
Global revocation (revoke agent)
+
+ The agent's AIT jti is added to the CRL by the registry. The + agent can no longer authenticate at any proxy. Only the agent's + owner can initiate global revocation. +
+
+
Local revocation (remove pair)
+
+ A trust relationship is removed from a proxy's trust store. + The agent still exists and can communicate with other paired + agents, but can no longer reach the unpaired peer via that proxy. + Either side of the pair can initiate local revocation. +
+
+
+
+
+
+
+
+
+

+11. Registry Key Discovery +

+

+ The registry publishes its active signing keys at a well-known endpoint, + following the pattern established by OpenID Connect Discovery + [OIDC.Discovery]:

+
+
+GET /.well-known/claw-keys.json HTTP/1.1
+Host: registry.clawdentity.com
+
+
+

Response:

+
+
+{
+  "keys": [
+    {
+      "kid": "reg-key-2026-01",
+      "x": "<base64url-ed25519-public-key>",
+      "status": "active",
+      "createdAt": "2026-01-01T00:00:00Z"
+    }
+  ]
+}
+
+
+

+ The registry MAY have multiple active signing keys to support key + rotation. The AIT and CRL "kid" headers identify which key was + used to sign a given token. Proxies SHOULD cache these keys + (default TTL: 1 hour) and refresh them when an unknown "kid" + is encountered.

+
+
+
+
+

+12. Endpoint Reference +

+
+
+

+12.1. Registry Endpoints +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 11
MethodPathAuthDescription
GET/.well-known/claw-keys.jsonNoneRegistry signing keys
GET/v1/metadataNoneRegistry metadata
GET/v1/crlNoneCurrent CRL
POST/v1/agents/challengeAPI KeyRequest registration challenge
POST/v1/agentsAPI KeyRegister agent (challenge response)
POST/v1/agents/auth/refreshAIT + AccessRefresh AIT
POST/v1/agents/auth/validateInternalValidate agent access token
POST/v1/invitesAPI KeyCreate invite code
POST/v1/invites/redeemNoneRedeem invite code
+
+
+
+
+

+12.2. Proxy Endpoints +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 12
MethodPathAuthDescription
GET/healthNoneHealth check
POST/hooks/agentAIT + PoP + AccessInbound message delivery
GET/v1/relay/connectAIT + PoP + AccessWebSocket relay
POST/v1/relay/delivery-receiptsAIT + PoP + AccessDelivery receipt callback
POST/pair/startAIT + PoPInitiate pairing
POST/pair/confirmAIT + PoPConfirm pairing
POST/pair/statusAIT + PoPCheck pairing status
+
+
+
+
+
+
+

+13. Security Considerations +

+
+
+

+13.1. Private Key Protection +

+

+ Agent Ed25519 private keys MUST be stored exclusively on the + agent's local machine. The protocol is designed so that only + the public key leaves the agent — embedded in the AIT's "cnf" + claim and registered with the registry. Implementations SHOULD + use operating system key storage facilities where available.

+
+
+
+
+

+13.2. Replay Protection +

+

Three mechanisms provide replay protection:

+
    +
  1. + Timestamp skew: Requests with X-Claw-Timestamp outside a configurable window (default: 300 seconds) are rejected. +
  2. +
  3. + Nonce uniqueness: Each (agentDid, nonce) pair is tracked per proxy. Duplicate nonces within the timestamp window are rejected. +
  4. +
  5. + AIT expiration: AITs have bounded lifetimes; expired AITs are rejected regardless of signature validity. +
  6. +
+
+
+
+
+

+13.3. Transport Security +

+

+ TLS 1.2 or later ([RFC8446] for TLS 1.3) is + REQUIRED for all proxy-to-proxy, proxy-to-registry, and + connector-to-proxy communication over public networks. The PoP + signature (Section 5) + provides an additional layer: even if TLS were compromised, a + captured AIT cannot produce valid request signatures without + the private key.

+
+
+
+
+

+13.4. Connector Isolation +

+

+ The connector MUST only communicate with its own proxy (via + WebSocket) and the local agent framework (via localhost HTTP). + It MUST NOT directly access the registry, other proxies, or + any cloud infrastructure services (message queues, object + storage, databases). This constraint minimizes the connector's + attack surface and ensures it remains a simple, auditable bridge.

+
+
+
+
+

+13.5. Trust Store Integrity +

+

+ The trust store is the sole authorization source for message + relay. Implementations SHOULD use a transactional storage + backend (e.g., SQLite within a Cloudflare Durable Object) to + prevent corruption from concurrent access or partial writes.

+
+
+
+
+

+13.6. CRL Freshness Window +

+

+ There is an inherent propagation delay between AIT revocation + and CRL distribution. With default settings, this window is up + to 5 minutes. Deployments requiring tighter revocation windows + SHOULD:

+
    +
  • Reduce the CRL refresh interval. +
  • +
  • Use push-based CRL invalidation (e.g., message queues). +
  • +
  • Combine CRL checks with real-time agent-auth validation for sensitive operations. +
  • +
+
+
+
+
+

+13.7. Human-Anchored Trust +

+

+ The protocol explicitly prevents agent self-certification. + An agent cannot approve its own work or establish trust + without human involvement. The pairing ceremony requires + out-of-band ticket exchange, and global revocation requires + the agent owner's credentials. This design prevents + autonomous trust escalation.

+
+
+
+
+
+
+

+14. Error Codes +

+

+ The following error codes are returned in JSON error responses + with the corresponding HTTP status codes:

+
+
+

+14.1. Authentication Errors (401) +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 13
CodeDescription
PROXY_AUTH_MISSING_TOKENNo Authorization header provided.
PROXY_AUTH_INVALID_SCHEMEAuthorization header is not "Claw <token>" format.
PROXY_AUTH_INVALID_AITAIT JWT verification failed.
PROXY_AUTH_INVALID_PROOFPoP signature does not match.
PROXY_AUTH_INVALID_TIMESTAMPX-Claw-Timestamp missing or not a valid integer.
PROXY_AUTH_TIMESTAMP_SKEWTimestamp outside the allowed skew window.
PROXY_AUTH_REPLAYNonce has been seen before (replay detected).
PROXY_AUTH_REVOKEDAIT jti is on the CRL.
PROXY_AGENT_ACCESS_REQUIREDX-Claw-Agent-Access header missing.
PROXY_AGENT_ACCESS_INVALIDAgent access token is invalid or expired.
+
+
+
+
+

+14.2. Authorization Errors (403) +

+ + + + + + + + + + + + + + + + + + +
Table 14
CodeDescription
PROXY_AUTH_FORBIDDENAgent not in trust store or pair not approved.
PROXY_PAIR_OWNERSHIP_FORBIDDENCaller does not own the initiator agent DID.
+
+
+
+
+

+14.3. Service Unavailable Errors (503) +

+ + + + + + + + + + + + + + + + + + + + + + +
Table 15
CodeDescription
PROXY_AUTH_DEPENDENCY_UNAVAILABLERegistry, CRL, or trust store is unreachable.
PROXY_PAIR_STATE_UNAVAILABLETrust store is unreachable for pairing operations.
CRL_CACHE_STALECRL exceeds max age and fail-closed is configured.
+
+
+
+
+
+
+

+15. IANA Considerations +

+
+
+

+15.1. DID Method Registration +

+

+ This specification introduces the "claw" DID method. A registration + request for the W3C DID Specification Registries would include:

+
+
Method Name
+
claw +
+
+
Method Specific Identifier
+
<kind>:<ulid> where kind is "human" or "agent" +
+
+
DID Document
+
Not applicable; identity is resolved via registry API. +
+
+
+
+
+
+
+

+15.2. HTTP Authentication Scheme Registration +

+

+ This specification registers the "Claw" authentication scheme + in the "Hypertext Transfer Protocol (HTTP) Authentication Scheme + Registry" defined in [RFC9110] Section 16.4:

+
+
Authentication Scheme Name
+
Claw +
+
+
Reference
+
Section 6 of this document +
+
+
+
+
+
+
+

+15.3. JWT "typ" Header Parameter Values +

+

+ This specification registers two JWT "typ" header parameter values + in the "JSON Web Token Types" sub-registry of the "JSON Web Token + (JWT)" registry:

+ + + + + + + + + + + + + + + + + + + + + +
Table 16
"typ" ValueDescriptionReference
AITAgent Identity TokenSection 4 +
CRLCertificate Revocation ListSection 10 +
+
+
+
+
+
+

+16. References +

+
+

+16.1. Normative References +

+
+
[RFC2119]
+
+Bradner, S., "Key words for use in RFCs to Indicate Requirement Levels", BCP 14, RFC 2119, DOI 10.17487/RFC2119, March 1997, <https://www.rfc-editor.org/info/rfc2119>.
+
+
[RFC8174]
+
+Leiba, B., "Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words", BCP 14, RFC 8174, DOI 10.17487/RFC8174, May 2017, <https://www.rfc-editor.org/info/rfc8174>.
+
+
[RFC4648]
+
+Josefsson, S., "The Base16, Base32, and Base64 Data Encodings", RFC 4648, DOI 10.17487/RFC4648, October 2006, <https://www.rfc-editor.org/info/rfc4648>.
+
+
[RFC6234]
+
+Eastlake 3rd, D. and T. Hansen, "US Secure Hash Algorithms (SHA and SHA-based HMAC and HKDF)", RFC 6234, DOI 10.17487/RFC6234, May 2011, <https://www.rfc-editor.org/info/rfc6234>.
+
+
[RFC6455]
+
+Fette, I. and A. Melnikov, "The WebSocket Protocol", RFC 6455, DOI 10.17487/RFC6455, December 2011, <https://www.rfc-editor.org/info/rfc6455>.
+
+
[RFC7515]
+
+Jones, M., Bradley, J., and N. Sakimura, "JSON Web Signature (JWS)", RFC 7515, DOI 10.17487/RFC7515, May 2015, <https://www.rfc-editor.org/info/rfc7515>.
+
+
[RFC7519]
+
+Jones, M., Bradley, J., and N. Sakimura, "JSON Web Token (JWT)", RFC 7519, DOI 10.17487/RFC7519, May 2015, <https://www.rfc-editor.org/info/rfc7519>.
+
+
[RFC7800]
+
+Jones, M., Bradley, J., and H. Tschofenig, "Proof-of-Possession Key Semantics for JSON Web Tokens (JWTs)", RFC 7800, DOI 10.17487/RFC7800, April 2016, <https://www.rfc-editor.org/info/rfc7800>.
+
+
[RFC8032]
+
+Josefsson, S. and I. Liusvaara, "Edwards-Curve Digital Signature Algorithm (EdDSA)", RFC 8032, DOI 10.17487/RFC8032, January 2017, <https://www.rfc-editor.org/info/rfc8032>.
+
+
[RFC8037]
+
+Liusvaara, I., "CFRG Elliptic Curve Diffie-Hellman (ECDH) and Signatures in JSON Object Signing and Encryption (JOSE)", RFC 8037, DOI 10.17487/RFC8037, January 2017, <https://www.rfc-editor.org/info/rfc8037>.
+
+
[RFC8446]
+
+Rescorla, E., "The Transport Layer Security (TLS) Protocol Version 1.3", RFC 8446, DOI 10.17487/RFC8446, August 2018, <https://www.rfc-editor.org/info/rfc8446>.
+
+
[RFC9110]
+
+Fielding, R., Nottingham, M., and J. Reschke, "HTTP Semantics", RFC 9110, DOI 10.17487/RFC9110, June 2022, <https://www.rfc-editor.org/info/rfc9110>.
+
+
+
+
+

+16.2. Informative References +

+
+
[RFC9449]
+
+Fett, D., Campbell, B., Bradley, J., Lodderstedt, T., Jones, M., and D. Waite, "OAuth 2.0 Demonstrating Proof of Possession (DPoP)", RFC 9449, DOI 10.17487/RFC9449, September 2023, <https://www.rfc-editor.org/info/rfc9449>.
+
+
[W3C.DID]
+
+Sporny, M., Longley, D., Sabadello, M., Reed, D., Steele, O., and C. Allen, "Decentralized Identifiers (DIDs) v1.0", W3C Recommendation, 19 July 2022, <https://www.w3.org/TR/did-core/>.
+
+
[ULID]
+
+Feerasta, A., "Universally Unique Lexicographically Sortable Identifier", <https://github.com/ulid/spec>.
+
+
[OIDC.Discovery]
+
+Sakimura, N., Bradley, J., Jones, M., and E. Jay, "OpenID Connect Discovery 1.0", 8 November 2014, <https://openid.net/specs/openid-connect-discovery-1_0.html>.
+
+
+
+
+
+
+

+Appendix A. Example: Complete Message Flow +

+

+ The following describes a complete message relay from Agent A to Agent B:

+
    +
  1. Agent A's connector creates an "enqueue" frame targeting Agent B's DID. +
  2. +
  3. If connected, the frame is sent over WebSocket to Proxy A; otherwise it is queued locally. +
  4. +
  5. Proxy A receives the enqueue, looks up Agent B's proxy URL from the trust store, and signs an HTTP request with Agent A's AIT and PoP. +
  6. +
  7. Proxy A sends POST /hooks/agent to Proxy B with the signed request. +
  8. +
  9. Proxy B verifies the Authorization header (AIT + PoP), checks the CRL, and confirms the (A, B) pair exists in its trust store. +
  10. +
  11. Proxy B creates a "deliver" frame and sends it over WebSocket to Connector B. +
  12. +
  13. Connector B receives the deliver frame and POSTs the payload to the local agent framework on localhost. +
  14. +
  15. Connector B sends a "deliver_ack" (accepted: true) back to Proxy B. +
  16. +
  17. Agent B processes the message. +
  18. +
+
+
+
+
+

+Appendix B. Comparison with Existing Standards +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 17
FeatureOAuth 2.0 / DPoPClawdentity
Identity modelClient credentials / tokensPer-agent DID + Ed25519 keypair
Token issuerAuthorization serverRegistry (centralized trust anchor)
PoP mechanismDPoP JWT (RFC 9449)Canonical request signing
Trust modelScope-based authorizationExplicit bilateral pairing
RevocationToken introspectionSigned CRL with local caching
TransportDirect HTTPWebSocket relay with store-and-forward
TargetHuman-to-service authAgent-to-agent communication
+
+
+
+
+

+Acknowledgements +

+

+ The Clawdentity protocol was designed as part of the OpenClaw + ecosystem. The author thanks the OpenClaw community for feedback + on the identity model, and the designers of DPoP (RFC 9449) and + W3C DIDs whose work informed key design decisions.

+
+
+
+
+

+Author's Address +

+
+
Ravi Kiran Vemula
+
KnackLabs
+
+Hyderabad
+
India
+ +
+URI: +https://ravi.sh +
+
+
+
+ + + diff --git a/draft-vemula-clawdentity-protocol-00.txt b/draft-vemula-clawdentity-protocol-00.txt new file mode 100644 index 0000000..cd213c0 --- /dev/null +++ b/draft-vemula-clawdentity-protocol-00.txt @@ -0,0 +1,1736 @@ + + + + +Independent Submission R.K. Vemula +Internet-Draft KnackLabs +Intended status: Informational 21 February 2026 +Expires: 25 August 2026 + + + Clawdentity: Cryptographic Identity and Trust Protocol for AI Agent + Communication + draft-vemula-clawdentity-protocol-00 + +Abstract + + This document specifies the Clawdentity protocol, a cryptographic + identity and trust layer for AI agent-to-agent communication. + Clawdentity provides per-agent Ed25519 identity, registry-issued + credentials (Agent Identity Tokens), proof-of-possession request + signing, bilateral trust establishment via pairing ceremonies, + authenticated relay transport over WebSocket, and certificate + revocation. The protocol enables AI agents to prove their identity, + verify peers, and exchange messages without exposing private keys, + shared tokens, or backend infrastructure. + +Status of This Memo + + This Internet-Draft is submitted in full conformance with the + provisions of BCP 78 and BCP 79. + + Internet-Drafts are working documents of the Internet Engineering + Task Force (IETF). Note that other groups may also distribute + working documents as Internet-Drafts. The list of current Internet- + Drafts is at https://datatracker.ietf.org/drafts/current/. + + Internet-Drafts are draft documents valid for a maximum of six months + and may be updated, replaced, or obsoleted by other documents at any + time. It is inappropriate to use Internet-Drafts as reference + material or to cite them other than as "work in progress." + + This Internet-Draft will expire on 25 August 2026. + +Copyright Notice + + Copyright (c) 2026 IETF Trust and the persons identified as the + document authors. All rights reserved. 
+ + + + + + + + +Vemula Expires 25 August 2026 [Page 1] + +Internet-Draft Clawdentity February 2026 + + + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents (https://trustee.ietf.org/ + license-info) in effect on the date of publication of this document. + Please review these documents carefully, as they describe your rights + and restrictions with respect to this document. + +Table of Contents + + 1. Introduction . . . . . . . . . . . . . . . . . . . . . . . . 3 + 1.1. Problem Statement . . . . . . . . . . . . . . . . . . . . 3 + 1.2. Design Goals . . . . . . . . . . . . . . . . . . . . . . 4 + 1.3. Architecture Overview . . . . . . . . . . . . . . . . . . 4 + 2. Conventions and Terminology . . . . . . . . . . . . . . . . . 5 + 3. Identity Model . . . . . . . . . . . . . . . . . . . . . . . 6 + 3.1. DID Format . . . . . . . . . . . . . . . . . . . . . . . 6 + 3.2. Cryptographic Primitives . . . . . . . . . . . . . . . . 6 + 3.3. Key Generation . . . . . . . . . . . . . . . . . . . . . 7 + 3.4. Ownership Model . . . . . . . . . . . . . . . . . . . . . 7 + 4. Agent Identity Token (AIT) . . . . . . . . . . . . . . . . . 7 + 4.1. Overview . . . . . . . . . . . . . . . . . . . . . . . . 8 + 4.2. JOSE Header . . . . . . . . . . . . . . . . . . . . . . . 8 + 4.3. Claims . . . . . . . . . . . . . . . . . . . . . . . . . 8 + 4.4. Confirmation Claim . . . . . . . . . . . . . . . . . . . 9 + 4.5. Validation Rules . . . . . . . . . . . . . . . . . . . . 10 + 5. HTTP Request Signing . . . . . . . . . . . . . . . . . . . . 10 + 5.1. Purpose . . . . . . . . . . . . . . . . . . . . . . . . . 11 + 5.2. Canonical Request Format . . . . . . . . . . . . . . . . 11 + 5.3. Signature Computation . . . . . . . . . . . . . . . . . . 11 + 5.4. Request Headers . . . . . . . . . . . . . . . . . . . . . 11 + 5.5. Verification Procedure . . . . . . . . . . . . . . . . . 12 + 6. The "Claw" Authentication Scheme . . . . . . . . . . . . . . 
13 + 7. Agent Registration . . . . . . . . . . . . . . . . . . . . . 13 + 7.1. Registration Flow . . . . . . . . . . . . . . . . . . . . 13 + 7.2. Registration Proof . . . . . . . . . . . . . . . . . . . 14 + 7.3. AIT Refresh . . . . . . . . . . . . . . . . . . . . . . . 14 + 8. Trust Establishment (Pairing) . . . . . . . . . . . . . . . . 14 + 8.1. Overview . . . . . . . . . . . . . . . . . . . . . . . . 14 + 8.2. Pairing Flow . . . . . . . . . . . . . . . . . . . . . . 14 + 8.3. Pairing Ticket . . . . . . . . . . . . . . . . . . . . . 15 + 8.4. Peer Profile . . . . . . . . . . . . . . . . . . . . . . 15 + 8.5. Ownership Verification . . . . . . . . . . . . . . . . . 16 + 8.6. Trust Store . . . . . . . . . . . . . . . . . . . . . . . 16 + 9. Relay Transport . . . . . . . . . . . . . . . . . . . . . . . 16 + 9.1. Overview . . . . . . . . . . . . . . . . . . . . . . . . 16 + 9.2. WebSocket Connection . . . . . . . . . . . . . . . . . . 17 + 9.3. Frame Protocol . . . . . . . . . . . . . . . . . . . . . 17 + 9.4. Heartbeat Frames . . . . . . . . . . . . . . . . . . . . 17 + 9.5. Deliver Frames . . . . . . . . . . . . . . . . . . . . . 18 + + + +Vemula Expires 25 August 2026 [Page 2] + +Internet-Draft Clawdentity February 2026 + + + 9.6. Enqueue Frames . . . . . . . . . . . . . . . . . . . . . 18 + 9.7. Local Agent Delivery . . . . . . . . . . . . . . . . . . 19 + 9.8. Reconnection . . . . . . . . . . . . . . . . . . . . . . 20 + 9.9. Outbound Queue Persistence . . . . . . . . . . . . . . . 20 + 10. Certificate Revocation . . . . . . . . . . . . . . . . . . . 20 + 10.1. CRL Format . . . . . . . . . . . . . . . . . . . . . . . 20 + 10.2. CRL Distribution . . . . . . . . . . . . . . . . . . . . 21 + 10.3. CRL Caching . . . . . . . . . . . . . . . . . . . . . . 22 + 10.4. Revocation Scope . . . . . . . . . . . . . . . . . . . . 22 + 11. Registry Key Discovery . . . . . . . . . . . . . . . . . . . 22 + 12. Endpoint Reference . . . . . . . . . . . . . . . . . . . . . 
23 + 12.1. Registry Endpoints . . . . . . . . . . . . . . . . . . . 23 + 12.2. Proxy Endpoints . . . . . . . . . . . . . . . . . . . . 24 + 13. Security Considerations . . . . . . . . . . . . . . . . . . . 24 + 13.1. Private Key Protection . . . . . . . . . . . . . . . . . 24 + 13.2. Replay Protection . . . . . . . . . . . . . . . . . . . 24 + 13.3. Transport Security . . . . . . . . . . . . . . . . . . . 25 + 13.4. Connector Isolation . . . . . . . . . . . . . . . . . . 25 + 13.5. Trust Store Integrity . . . . . . . . . . . . . . . . . 25 + 13.6. CRL Freshness Window . . . . . . . . . . . . . . . . . . 25 + 13.7. Human-Anchored Trust . . . . . . . . . . . . . . . . . . 26 + 14. Error Codes . . . . . . . . . . . . . . . . . . . . . . . . . 26 + 14.1. Authentication Errors (401) . . . . . . . . . . . . . . 26 + 14.2. Authorization Errors (403) . . . . . . . . . . . . . . . 27 + 14.3. Service Unavailable Errors (503) . . . . . . . . . . . . 27 + 15. IANA Considerations . . . . . . . . . . . . . . . . . . . . . 27 + 15.1. DID Method Registration . . . . . . . . . . . . . . . . 27 + 15.2. HTTP Authentication Scheme Registration . . . . . . . . 28 + 15.3. JWT "typ" Header Parameter Values . . . . . . . . . . . 28 + 16. References . . . . . . . . . . . . . . . . . . . . . . . . . 28 + 16.1. Normative References . . . . . . . . . . . . . . . . . . 28 + 16.2. Informative References . . . . . . . . . . . . . . . . . 29 + Appendix A. Example: Complete Message Flow . . . . . . . . . . . 30 + Appendix B. Comparison with Existing Standards . . . . . . . . . 31 + Acknowledgements . . . . . . . . . . . . . . . . . . . . . . . . 31 + Author's Address . . . . . . . . . . . . . . . . . . . . . . . . 31 + +1. Introduction + +1.1. Problem Statement + + Current AI agent frameworks rely on shared bearer tokens for inter- + agent communication. A single token leak compromises all agents in + the system. 
There is no mechanism to distinguish which agent sent a + request, revoke a single agent without rotating the shared token, + enforce per-agent access control, or keep backend services private. + These limitations become critical as multi-agent systems scale. + + + + +Vemula Expires 25 August 2026 [Page 3] + +Internet-Draft Clawdentity February 2026 + + +1.2. Design Goals + + Clawdentity addresses these problems with six design goals: + + 1. *Individual identity:* Each agent has a unique cryptographic + keypair and DID. + + 2. *Proof of possession:* Every request proves the sender holds the + private key via Ed25519 signatures. + + 3. *Selective revocation:* One agent can be revoked without + affecting others. + + 4. *Zero-trust relay:* Agents communicate through authenticated + proxies; backend services remain unexposed. + + 5. *Human-anchored trust:* Trust originates from human approval, not + agent self-certification. + + 6. *Framework agnostic:* Works with any AI agent framework. + +1.3. Architecture Overview + + The protocol defines four component roles: + + Registry Central identity authority. Issues Agent Identity Tokens + (AITs), manages signing keys, publishes the Certificate Revocation + List (CRL). + + Proxy Per-owner edge service. Verifies identity, enforces trust + policy, rate-limits requests, and relays messages between agents. + + Connector Local bridge process between the proxy and the agent + framework. Maintains a persistent WebSocket connection to the + proxy. Never exposed publicly. + + Agent The AI agent itself. Has no direct knowledge of the protocol; + the connector handles all cryptographic operations. 
+ + + + + + + + + + + + + +Vemula Expires 25 August 2026 [Page 4] + +Internet-Draft Clawdentity February 2026 + + + +-------------+ +-------------+ +--------------+ + | Agent A | | Registry | | Agent B | + | (private) | | (central) | | (private) | + +------+------+ +------+------+ +------+-------+ + | | | + +------+------+ | +------+-------+ + | Connector A | | | Connector B | + | (local) | | | (local) | + +------+------+ | +------+-------+ + | WebSocket | | WebSocket + +------+------+ | +------+-------+ + | Proxy A |<---------------+--------------->| Proxy B | + | (edge) | | | (edge) | + +-------------+ | +--------------+ + | + +------------+------------+ + | .well-known/claw-keys | + | /v1/crl | + | /v1/agents | + +-------------------------+ + + Figure 1: Component Architecture + +2. Conventions and Terminology + + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", + "SHOULD", "SHOULD NOT", "RECOMMENDED", "NOT RECOMMENDED", "MAY", and + "OPTIONAL" in this document are to be interpreted as described in + BCP 14 [RFC2119] [RFC8174] when, and only when, they appear in all + capitals, as shown here. + + This document uses the following terms: + + AIT Agent Identity Token. A signed JWT credential (Section 4) + binding an agent DID to a public key. + + CRL Certificate Revocation List. A signed JWT (Section 10) + containing a list of revoked AITs. + + DID Decentralized Identifier as defined in [W3C.DID], using the + "claw" method. + + PoP Proof of Possession. An Ed25519 signature proving the sender + controls the private key corresponding to a public key. + + Pairing Mutual trust establishment between two agents via a ticket- + based ceremony. + + + + +Vemula Expires 25 August 2026 [Page 5] + +Internet-Draft Clawdentity February 2026 + + + Trust Store Per-proxy persistent storage of known agents and + approved trust pairs. + + ULID Universally Unique Lexicographically Sortable Identifier + [ULID]. + +3. Identity Model + +3.1. 
DID Format + + Clawdentity uses a custom DID method with the scheme "did:claw". The + method-specific identifier consists of a kind and a ULID, separated + by a colon: + + claw-did = "did:claw:" kind ":" ulid + kind = "human" / "agent" + ulid = 26(DIGIT / ALPHA) ; Crockford Base32, see [ULID] + + Two kinds are defined: + + +=======+=============+===========================================+ + | Kind | Description | Example | + +=======+=============+===========================================+ + | human | A human | did:claw:human:01HF7YAT00W6W7CM7N3W5FDXT4 | + | | owner/ | | + | | operator | | + +-------+-------------+-------------------------------------------+ + | agent | An AI agent | did:claw:agent:01HG8ZBU11X7X8DN8O4X6GEYU5 | + +-------+-------------+-------------------------------------------+ + + Table 1 + + Implementations MUST reject DIDs where the kind is not "human" or + "agent", or where the ULID component does not conform to the ULID + specification [ULID]. + +3.2. Cryptographic Primitives + + The protocol uses Ed25519 [RFC8032] as the sole signing algorithm. + Implementations MUST NOT support other signature algorithms. 
+ + + + + + + + + + + +Vemula Expires 25 August 2026 [Page 6] + +Internet-Draft Clawdentity February 2026 + + + +================+===========+===========+===================+ + | Primitive | Algorithm | Reference | Usage | + +================+===========+===========+===================+ + | Signing | Ed25519 | [RFC8032] | Identity, request | + | | | | signing | + +----------------+-----------+-----------+-------------------+ + | Body hash | SHA-256 | [RFC6234] | Request body | + | | | | integrity | + +----------------+-----------+-----------+-------------------+ + | Token format | JWS | [RFC7515] | AIT and CRL | + | | Compact | | tokens | + +----------------+-----------+-----------+-------------------+ + | Key encoding | Base64url | [RFC4648] | Keys, signatures, | + | | (no pad) | Section 5 | hashes | + +----------------+-----------+-----------+-------------------+ + | Key | JWK (OKP/ | [RFC8037] | Public keys in | + | representation | Ed25519) | | AITs | + +----------------+-----------+-----------+-------------------+ + + Table 2 + +3.3. Key Generation + + Each agent locally generates an Ed25519 keypair consisting of a + 32-byte public key and a 64-byte secret key. The secret key MUST be + stored exclusively on the agent's local machine and MUST NOT be + transmitted over any network. Only the public key is registered with + the registry, encoded as base64url within the AIT's confirmation + claim (Section 4.4). + +3.4. Ownership Model + + Every agent DID is bound to exactly one human DID (the "ownerDid"). + This binding is recorded in the AIT claims and enforced by the + registry during registration and refresh operations. A human MAY own + multiple agents. An agent MUST have exactly one owner. + + Human (did:claw:human:...) + +-- Agent A (did:claw:agent:...) + +-- Agent B (did:claw:agent:...) + +-- Agent C (did:claw:agent:...) + +4. 
Agent Identity Token (AIT) + + + + + + + + +Vemula Expires 25 August 2026 [Page 7] + +Internet-Draft Clawdentity February 2026 + + +4.1. Overview + + The Agent Identity Token (AIT) is a JSON Web Token [RFC7519] that + serves as an agent's credential. It is issued by the registry, + signed with a registry Ed25519 key, and binds the agent's DID to the + agent's public key via a confirmation claim ("cnf"), following the + pattern established by DPoP [RFC9449]. + +4.2. JOSE Header + + The AIT's JOSE protected header MUST contain: + + alg REQUIRED. MUST be "EdDSA" per [RFC8037]. + + typ REQUIRED. MUST be "AIT". + + kid REQUIRED. The key identifier of the registry signing key used + to sign this AIT. This allows the verifier to locate the correct + registry public key. + + { + "alg": "EdDSA", + "typ": "AIT", + "kid": "reg-key-2026-01" + } + + Figure 2: AIT JOSE Header Example + +4.3. Claims + + The AIT payload MUST contain the following claims. No additional + claims are permitted (strict validation). + + + + + + + + + + + + + + + + + + + +Vemula Expires 25 August 2026 [Page 8] + +Internet-Draft Clawdentity February 2026 + + + +===========+======+========+======================================+ + |Claim |Type |Required| Description | + +===========+======+========+======================================+ + |iss |string|REQUIRED| Registry issuer URL (e.g., | + | | | | "https://registry.clawdentity.com") | + +-----------+------+--------+--------------------------------------+ + |sub |string|REQUIRED| Agent DID. MUST be | + | | | | "did:claw:agent:" | + +-----------+------+--------+--------------------------------------+ + |ownerDid |string|REQUIRED| Owner human DID. MUST be | + | | | | "did:claw:human:" | + +-----------+------+--------+--------------------------------------+ + |name |string|REQUIRED| Agent name. 
1-64 characters matching | + | | | | [A-Za-z0-9._ -] | + +-----------+------+--------+--------------------------------------+ + |framework |string|REQUIRED| Agent framework identifier. 1-32 | + | | | | characters, no control characters. | + +-----------+------+--------+--------------------------------------+ + |description|string|OPTIONAL| Human-readable description. Maximum | + | | | | 280 characters. | + +-----------+------+--------+--------------------------------------+ + |cnf |object|REQUIRED| Confirmation claim. See | + | | | | Section 4.4. | + +-----------+------+--------+--------------------------------------+ + |iat |number|REQUIRED| Issued-at time (NumericDate per | + | | | | [RFC7519]). | + +-----------+------+--------+--------------------------------------+ + |nbf |number|REQUIRED| Not-before time (NumericDate). | + +-----------+------+--------+--------------------------------------+ + |exp |number|REQUIRED| Expiration time (NumericDate). MUST | + | | | | be greater than both nbf and iat. | + +-----------+------+--------+--------------------------------------+ + |jti |string|REQUIRED| Unique token identifier. MUST be a | + | | | | valid ULID. | + +-----------+------+--------+--------------------------------------+ + + Table 3 + +4.4. Confirmation Claim + + The "cnf" (confirmation) claim binds the AIT to the agent's Ed25519 + public key, following the confirmation method pattern described in + [RFC7800]. It contains a single "jwk" member: + + + + + + + + +Vemula Expires 25 August 2026 [Page 9] + +Internet-Draft Clawdentity February 2026 + + + "cnf": { + "jwk": { + "kty": "OKP", + "crv": "Ed25519", + "x": "" + } + } + + The "kty" MUST be "OKP". The "crv" MUST be "Ed25519". The "x" + parameter MUST decode (base64url) to exactly 32 bytes. The JWK MUST + NOT contain a "d" (private key) parameter. + +4.5. Validation Rules + + An AIT MUST be rejected if any of the following conditions are true: + + 1. "alg" is not "EdDSA". + + 2. "typ" is not "AIT". + + 3. 
"kid" does not match any active registry signing key. + + 4. JWS signature verification fails against the registry key + identified by "kid". + + 5. "sub" is not a valid DID with kind "agent". + + 6. "ownerDid" is not a valid DID with kind "human". + + 7. "cnf.jwk.x" does not decode to exactly 32 bytes. + + 8. "exp" is less than or equal to "nbf" or "iat". + + 9. "jti" is not a valid ULID. + + 10. Current time is before "nbf" or after "exp" (accounting for + clock skew). + + 11. "jti" appears in the current CRL (Section 10). + +5. HTTP Request Signing + + + + + + + + + + +Vemula Expires 25 August 2026 [Page 10] + +Internet-Draft Clawdentity February 2026 + + +5.1. Purpose + + Every authenticated request includes a Proof of Possession (PoP) + signature that proves the sender controls the private key + corresponding to the public key in their AIT's "cnf" claim. This + mechanism is inspired by DPoP [RFC9449] but uses a canonical request + signing approach optimized for agent-to-agent communication. + +5.2. Canonical Request Format + + The canonical request string is constructed by joining the following + fields with newline (0x0A) separators, in the order shown: + + canonical-request = version LF method LF path-with-query LF + timestamp LF nonce LF body-hash + version = "CLAW-PROOF-V1" + method = token ; HTTP method, uppercased + path-with-query = absolute-path [ "?" query ] + timestamp = 1*DIGIT ; Unix epoch seconds + nonce = 1*unreserved ; unique per-request value + body-hash = base64url ; SHA-256 of request body + LF = %x0A + + CLAW-PROOF-V1 + POST + /hooks/agent + 1708531200 + 01HG8ZBU11X7X8DN8O4X6GEYU5 + 47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU + + Figure 3: Canonical Request Example + +5.3. 
Signature Computation + + The PoP signature is computed by signing the UTF-8 encoding of the + canonical request string with the agent's Ed25519 private key: + + canonical = canonicalize(method, path, timestamp, nonce, body_hash) + signature = Ed25519_Sign(UTF8_Encode(canonical), secret_key) + proof = Base64url_Encode(signature) + + The resulting "proof" is a base64url-encoded 64-byte Ed25519 + signature. + +5.4. Request Headers + + An authenticated request MUST include the following headers: + + + + +Vemula Expires 25 August 2026 [Page 11] + +Internet-Draft Clawdentity February 2026 + + + +=====================+=============+=============================+ + | Header | Status | Description | + +=====================+=============+=============================+ + | Authorization | REQUIRED | "Claw" SP . See | + | | | Section 6. | + +---------------------+-------------+-----------------------------+ + | X-Claw-Timestamp | REQUIRED | Unix epoch seconds (integer | + | | | string). | + +---------------------+-------------+-----------------------------+ + | X-Claw-Nonce | REQUIRED | Unique per-request value. | + | | | ULID RECOMMENDED. | + +---------------------+-------------+-----------------------------+ + | X-Claw-Body-SHA256 | REQUIRED | SHA-256 hash of the request | + | | | body, base64url-encoded. | + +---------------------+-------------+-----------------------------+ + | X-Claw-Proof | REQUIRED | Ed25519 PoP signature | + | | | (base64url, 64 bytes). | + +---------------------+-------------+-----------------------------+ + | X-Claw-Agent-Access | CONDITIONAL | Session access token. | + | | | Required for relay and hook | + | | | routes. | + +---------------------+-------------+-----------------------------+ + + Table 4 + +5.5. Verification Procedure + + The verifier (proxy) MUST perform the following steps in order: + + 1. Extract the AIT from the "Authorization: Claw " header. + + 2. 
Verify the AIT's JWS signature against the registry's signing + keys (Section 4.5). + + 3. Check that the AIT's "jti" is not on the CRL (Section 10.3). + + 4. Extract the agent's public key from the AIT's "cnf.jwk.x" claim. + + 5. Verify X-Claw-Timestamp is within the allowed skew window + (default: 300 seconds). + + 6. Recompute SHA-256 of the request body; compare with X-Claw-Body- + SHA256. + + 7. Reconstruct the canonical request (Section 5.2). + + 8. Verify X-Claw-Proof against the canonical request using the + agent's public key. + + + +Vemula Expires 25 August 2026 [Page 12] + +Internet-Draft Clawdentity February 2026 + + + 9. Check X-Claw-Nonce has not been seen before for this agent DID + within the timestamp window. + + If any step fails, the request MUST be rejected with HTTP 401. + +6. The "Claw" Authentication Scheme + + This specification introduces the "Claw" HTTP authentication scheme + for the Authorization header, following the framework defined in + [RFC9110] Section 11. + +credentials = "Claw" SP ait-token +ait-token = 1*base64url-char "." 1*base64url-char "." 1*base64url-char + + The scheme name "Claw" is case-sensitive. The token MUST be a valid + JWS Compact Serialization [RFC7515] representing an AIT as defined in + Section 4. + +7. Agent Registration + +7.1. Registration Flow + + Agent registration uses a challenge-response protocol to prove that + the registrant possesses the Ed25519 private key corresponding to the + public key being registered. + + Agent Registry + | | + | POST /v1/agents/challenge | + | { ownerDid } | + |------------------------------------->| + | | + | 200 { challengeId, nonce } | + |<-------------------------------------| + | | + | [Agent signs registration proof] | + | | + | POST /v1/agents | + | { proof, publicKey, name, ... 
} | + |------------------------------------->| + | | + | 201 { agentDid, ait } | + |<-------------------------------------| + + Figure 4: Agent Registration Sequence + + + + + + +Vemula Expires 25 August 2026 [Page 13] + +Internet-Draft Clawdentity February 2026 + + +7.2. Registration Proof + + The registration proof is computed by signing a canonical message + that binds the challenge to the agent's identity parameters: + + clawdentity.register.v1 + challengeId: + nonce: + ownerDid: + publicKey: + name: + framework: + ttlDays: + + Optional fields (framework, ttlDays) use empty strings when absent. + The agent signs this message with Ed25519 and submits the base64url- + encoded signature as the "proof" in the registration request. + +7.3. AIT Refresh + + AITs have bounded lifetimes. Before expiration, the connector MUST + request a fresh AIT: + + POST /v1/agents/auth/refresh HTTP/1.1 + Authorization: Claw + X-Claw-Agent-Access: + + The registry validates the current AIT and access token, verifies the + agent is not revoked, and returns a new AIT with an updated + expiration time. + +8. Trust Establishment (Pairing) + +8.1. Overview + + Before two agents can exchange messages, they MUST establish mutual + trust through a pairing ceremony. Trust is anchored by human + approval: agents cannot self-approve trust relationships. The + pairing process uses short-lived tickets exchanged out-of-band (e.g., + via QR code or messaging). + +8.2. 
Pairing Flow + + + + + + + + + +Vemula Expires 25 August 2026 [Page 14] + +Internet-Draft Clawdentity February 2026 + + + Agent A (Initiator) Proxy Agent B (Responder) + | | | + | POST /pair/start | | + | { initiatorProfile, | | + | ttlSeconds } | | + |-------------------->| | + | | | + | { ticket, | | + | expiresAt } | | + |<--------------------| | + | | | + | (out-of-band ticket exchange) | + | (QR code, message, copy-paste) | + |----------------------------------------->| + | | | + | | POST /pair/confirm | + | | { ticket, | + | | responderProfile}| + | |<-------------------| + | | | + | | 201 { paired:true }| + | |------------------->| + | | | + | callback (optional) | | + |<--------------------| | + + Figure 5: Trust Establishment Sequence + +8.3. Pairing Ticket + + The pairing ticket is a signed JWT with a short TTL, created by the + proxy during the /pair/start request. The ticket encodes the issuer + proxy URL, a signing key identifier, and an expiration timestamp. + + Ticket parameters: + + +==================+=============+=============+ + | Parameter | Default | Maximum | + +==================+=============+=============+ + | TTL (ttlSeconds) | 300 seconds | 900 seconds | + +------------------+-------------+-------------+ + + Table 5 + +8.4. Peer Profile + + Each side of a pairing provides a profile containing identity + information for display and routing: + + + +Vemula Expires 25 August 2026 [Page 15] + +Internet-Draft Clawdentity February 2026 + + + { + "agentName": "kai", + "humanName": "Ravi", + "proxyOrigin": "https://proxy.example.com" + } + + agentName REQUIRED. Agent display name. Maximum 64 characters. No + control characters. + + humanName REQUIRED. Owner display name. Maximum 64 characters. No + control characters. + + proxyOrigin OPTIONAL. The proxy's URL origin, used for cross-proxy + message routing. + +8.5. 
Ownership Verification + + When an agent initiates pairing, the proxy MUST verify that the + authenticated caller (identified by "ownerDid" in the AIT) actually + owns the claimed initiator agent DID. This is done by querying the + registry's internal agent-ownership endpoint. If ownership cannot be + verified, the pairing MUST be rejected with HTTP 403. + +8.6. Trust Store + + Each proxy maintains a Trust Store that records: + + * *Known agents:* Agents that have been authenticated and accepted. + + * *Approved pairs:* Bidirectional trust relationships between + agents. + + * *Pairing tickets:* Pending and completed pairing ceremonies with + expiration tracking. + + A message from Agent A to Agent B is permitted only if the ordered + pair (A, B) exists in the proxy's trust store. The trust store + SHOULD use a durable, transactional storage backend. + +9. Relay Transport + +9.1. Overview + + Messages between agents are relayed through their respective proxies. + The connector maintains a persistent WebSocket [RFC6455] connection + to its proxy. The proxy authenticates the WebSocket upgrade request + using the full AIT + PoP verification procedure (Section 5.5). + + + + +Vemula Expires 25 August 2026 [Page 16] + +Internet-Draft Clawdentity February 2026 + + +9.2. WebSocket Connection + + The connector initiates a WebSocket connection to: + + GET /v1/relay/connect HTTP/1.1 + Upgrade: websocket + Connection: Upgrade + Authorization: Claw + X-Claw-Agent-Access: + X-Claw-Timestamp: + X-Claw-Nonce: + X-Claw-Body-SHA256: + X-Claw-Proof: + +9.3. Frame Protocol + + All WebSocket messages are JSON objects conforming to the Clawdentity + Frame Protocol version 1. Every frame contains a common base + structure: + + { + "v": 1, + "type": "", + "id": "", + "ts": "" + } + + v REQUIRED. Integer. Frame protocol version. MUST be 1. + + type REQUIRED. String. One of: "heartbeat", "heartbeat_ack", + "deliver", "deliver_ack", "enqueue", "enqueue_ack". + + id REQUIRED. String. 
Unique frame identifier (ULID). + + ts REQUIRED. String. ISO 8601 timestamp with timezone. + +9.4. Heartbeat Frames + + Either side MAY send heartbeat frames to verify liveness. The + default heartbeat interval is 30 seconds. If a heartbeat + acknowledgement is not received within 60 seconds, the sender SHOULD + close the connection and reconnect. + + Heartbeat: + + { "v": 1, "type": "heartbeat", "id": "", "ts": "" } + + Heartbeat acknowledgement: + + + +Vemula Expires 25 August 2026 [Page 17] + +Internet-Draft Clawdentity February 2026 + + + { "v": 1, "type": "heartbeat_ack", "id": "", "ts": "", + "ackId": "" } + +9.5. Deliver Frames + + The proxy sends a "deliver" frame to the connector when an inbound + message arrives for the local agent: + + { + "v": 1, + "type": "deliver", + "id": "", + "ts": "", + "fromAgentDid": "did:claw:agent:...", + "toAgentDid": "did:claw:agent:...", + "payload": { ... }, + "contentType": "application/json", + "conversationId": "conv-123", + "replyTo": "https://proxy-a.example.com/v1/relay/delivery-receipts" + } + + The connector MUST respond with a "deliver_ack" frame indicating + whether the local agent framework accepted the delivery: + + { + "v": 1, + "type": "deliver_ack", + "id": "", + "ts": "", + "ackId": "", + "accepted": true + } + + If rejected, the "accepted" field is false and an optional "reason" + string MAY be included. + +9.6. Enqueue Frames + + The connector sends an "enqueue" frame to the proxy for outbound + messages: + + + + + + + + + + + +Vemula Expires 25 August 2026 [Page 18] + +Internet-Draft Clawdentity February 2026 + + + { + "v": 1, + "type": "enqueue", + "id": "", + "ts": "", + "toAgentDid": "did:claw:agent:...", + "payload": { ... }, + "conversationId": "conv-123" + } + + The proxy responds with "enqueue_ack" after accepting or rejecting + the message for relay. + +9.7. 
Local Agent Delivery + + Upon receiving a "deliver" frame, the connector forwards the payload + to the local agent framework via HTTP POST: + + POST /hooks/agent HTTP/1.1 + Host: 127.0.0.1:18789 + Content-Type: application/json + x-clawdentity-agent-did: + x-clawdentity-to-agent-did: + x-clawdentity-verified: true + x-openclaw-token: + x-request-id: + + The connector implements retry with exponential backoff for transient + failures (5xx, 429, connection errors): + + +================+===============+ + | Parameter | Default Value | + +================+===============+ + | Max attempts | 4 | + +----------------+---------------+ + | Initial delay | 300 ms | + +----------------+---------------+ + | Max delay | 2,000 ms | + +----------------+---------------+ + | Backoff factor | 2 | + +----------------+---------------+ + | Total budget | 14,000 ms | + +----------------+---------------+ + + Table 6 + + + + + + +Vemula Expires 25 August 2026 [Page 19] + +Internet-Draft Clawdentity February 2026 + + +9.8. Reconnection + + On WebSocket disconnection, the connector MUST attempt to reconnect + using exponential backoff with jitter: + + +================+===============+ + | Parameter | Default Value | + +================+===============+ + | Minimum delay | 1,000 ms | + +----------------+---------------+ + | Maximum delay | 30,000 ms | + +----------------+---------------+ + | Backoff factor | 2 | + +----------------+---------------+ + | Jitter ratio | 0.2 (±20%) | + +----------------+---------------+ + + Table 7 + + On successful reconnection, the connector resets the backoff counter + and flushes any queued outbound frames. + +9.9. Outbound Queue Persistence + + When the WebSocket connection is unavailable, the connector MUST + queue outbound "enqueue" frames locally and flush them in FIFO order + upon reconnection. The queue SHOULD support optional persistence (to + disk or database) to survive connector restarts. + +10. Certificate Revocation + +10.1. 
CRL Format + + The Certificate Revocation List (CRL) is a signed JWT containing a + list of revoked AITs. Its JOSE header uses: + + alg MUST be "EdDSA". + + typ MUST be "CRL". + + kid Registry signing key identifier. + + CRL claims: + + + + + + + + +Vemula Expires 25 August 2026 [Page 20] + +Internet-Draft Clawdentity February 2026 + + + +=============+========+==========+========================+ + | Claim | Type | Required | Description | + +=============+========+==========+========================+ + | iss | string | REQUIRED | Registry issuer URL. | + +-------------+--------+----------+------------------------+ + | jti | string | REQUIRED | CRL identifier (ULID). | + +-------------+--------+----------+------------------------+ + | iat | number | REQUIRED | Issued-at timestamp. | + +-------------+--------+----------+------------------------+ + | exp | number | REQUIRED | Expiration. MUST be | + | | | | greater than iat. | + +-------------+--------+----------+------------------------+ + | revocations | array | REQUIRED | At least one | + | | | | revocation entry. | + +-------------+--------+----------+------------------------+ + + Table 8 + + Each revocation entry contains: + + +===========+========+==========+===========================+ + | Field | Type | Required | Description | + +===========+========+==========+===========================+ + | jti | string | REQUIRED | Revoked AIT's jti (ULID). | + +-----------+--------+----------+---------------------------+ + | agentDid | string | REQUIRED | Revoked agent's DID. | + +-----------+--------+----------+---------------------------+ + | reason | string | OPTIONAL | Human-readable reason. | + | | | | Max 280 chars. | + +-----------+--------+----------+---------------------------+ + | revokedAt | number | REQUIRED | Revocation timestamp | + | | | | (Unix seconds). | + +-----------+--------+----------+---------------------------+ + + Table 9 + +10.2. 
CRL Distribution + + The registry publishes the current CRL at the well-known endpoint: + + GET /v1/crl HTTP/1.1 + Host: registry.clawdentity.com + + Response: + + { "crl": "" } + + + + + +Vemula Expires 25 August 2026 [Page 21] + +Internet-Draft Clawdentity February 2026 + + +10.3. CRL Caching + + Proxies MUST cache the CRL locally and refresh it periodically. The + following parameters control caching behavior: + + +===========+===========+=====================================+ + | Parameter | Default | Description | + +===========+===========+=====================================+ + | Refresh | 5 minutes | How often to fetch a fresh CRL. | + | interval | | | + +-----------+-----------+-------------------------------------+ + | Max age | 15 | Maximum staleness before the cache | + | | minutes | is considered expired. | + +-----------+-----------+-------------------------------------+ + | Stale | fail-open | "fail-open" allows stale CRL use; | + | behavior | | "fail-closed" rejects all requests. | + +-----------+-----------+-------------------------------------+ + + Table 10 + + When "fail-open": if the CRL cannot be refreshed and the cached CRL + is within max age, the stale CRL is used for revocation checks. + + When "fail-closed": if the CRL exceeds max age and cannot be + refreshed, the proxy MUST reject all authenticated requests with HTTP + 503. + +10.4. Revocation Scope + + Two levels of revocation are defined: + + Global revocation (revoke agent) The agent's AIT jti is added to the + CRL by the registry. The agent can no longer authenticate at any + proxy. Only the agent's owner can initiate global revocation. + + Local revocation (remove pair) A trust relationship is removed from + a proxy's trust store. The agent still exists and can communicate + with other paired agents, but can no longer reach the unpaired + peer via that proxy. Either side of the pair can initiate local + revocation. + +11. 
Registry Key Discovery + + The registry publishes its active signing keys at a well-known + endpoint, following the pattern established by OpenID Connect + Discovery [OIDC.Discovery]: + + + + + +Vemula Expires 25 August 2026 [Page 22] + +Internet-Draft Clawdentity February 2026 + + + GET /.well-known/claw-keys.json HTTP/1.1 + Host: registry.clawdentity.com + + Response: + + { + "keys": [ + { + "kid": "reg-key-2026-01", + "x": "", + "status": "active", + "createdAt": "2026-01-01T00:00:00Z" + } + ] + } + + The registry MAY have multiple active signing keys to support key + rotation. The AIT and CRL "kid" headers identify which key was used + to sign a given token. Proxies SHOULD cache these keys (default TTL: + 1 hour) and refresh them when an unknown "kid" is encountered. + +12. Endpoint Reference + +12.1. Registry Endpoints + + +========+==================+==========+======================+ + | Method | Path | Auth | Description | + +========+==================+==========+======================+ + | GET | /.well-known/ | None | Registry signing | + | | claw-keys.json | | keys | + +--------+------------------+----------+----------------------+ + | GET | /v1/metadata | None | Registry metadata | + +--------+------------------+----------+----------------------+ + | GET | /v1/crl | None | Current CRL | + +--------+------------------+----------+----------------------+ + | POST | /v1/agents/ | API Key | Request registration | + | | challenge | | challenge | + +--------+------------------+----------+----------------------+ + | POST | /v1/agents/auth/ | AIT + | Refresh AIT | + | | refresh | Access | | + +--------+------------------+----------+----------------------+ + | POST | /v1/agents/auth/ | Internal | Validate agent | + | | validate | | access token | + +--------+------------------+----------+----------------------+ + | POST | /v1/invites | API Key | Create invite code | + +--------+------------------+----------+----------------------+ + | POST | /v1/invites/ | None | 
Redeem invite code | + | | redeem | | | + + + +Vemula Expires 25 August 2026 [Page 23] + +Internet-Draft Clawdentity February 2026 + + + +--------+------------------+----------+----------------------+ + + Table 11 + +12.2. Proxy Endpoints + + +========+===================+===========+==================+ + | Method | Path | Auth | Description | + +========+===================+===========+==================+ + | GET | /health | None | Health check | + +--------+-------------------+-----------+------------------+ + | POST | /hooks/agent | AIT + PoP | Inbound message | + | | | + Access | delivery | + +--------+-------------------+-----------+------------------+ + | GET | /v1/relay/connect | AIT + PoP | WebSocket relay | + | | | + Access | | + +--------+-------------------+-----------+------------------+ + | POST | /v1/relay/ | AIT + PoP | Delivery receipt | + | | delivery-receipts | + Access | callback | + +--------+-------------------+-----------+------------------+ + | POST | /pair/start | AIT + PoP | Initiate pairing | + +--------+-------------------+-----------+------------------+ + | POST | /pair/confirm | AIT + PoP | Confirm pairing | + +--------+-------------------+-----------+------------------+ + | POST | /pair/status | AIT + PoP | Check pairing | + | | | | status | + +--------+-------------------+-----------+------------------+ + + Table 12 + +13. Security Considerations + +13.1. Private Key Protection + + Agent Ed25519 private keys MUST be stored exclusively on the agent's + local machine. The protocol is designed so that only the public key + leaves the agent — embedded in the AIT's "cnf" claim and registered + with the registry. Implementations SHOULD use operating system key + storage facilities where available. + +13.2. Replay Protection + + Three mechanisms provide replay protection: + + 1. *Timestamp skew:* Requests with X-Claw-Timestamp outside a + configurable window (default: 300 seconds) are rejected. 
+ + + + + +Vemula Expires 25 August 2026 [Page 24] + +Internet-Draft Clawdentity February 2026 + + + 2. *Nonce uniqueness:* Each (agentDid, nonce) pair is tracked per + proxy. Duplicate nonces within the timestamp window are + rejected. + + 3. *AIT expiration:* AITs have bounded lifetimes; expired AITs are + rejected regardless of signature validity. + +13.3. Transport Security + + TLS 1.2 or later ([RFC8446] for TLS 1.3) is REQUIRED for all proxy- + to-proxy, proxy-to-registry, and connector-to-proxy communication + over public networks. The PoP signature (Section 5) provides an + additional layer: even if TLS were compromised, a captured AIT cannot + produce valid request signatures without the private key. + +13.4. Connector Isolation + + The connector MUST only communicate with its own proxy (via + WebSocket) and the local agent framework (via localhost HTTP). It + MUST NOT directly access the registry, other proxies, or any cloud + infrastructure services (message queues, object storage, databases). + This constraint minimizes the connector's attack surface and ensures + it remains a simple, auditable bridge. + +13.5. Trust Store Integrity + + The trust store is the sole authorization source for message relay. + Implementations SHOULD use a transactional storage backend (e.g., + SQLite within a Cloudflare Durable Object) to prevent corruption from + concurrent access or partial writes. + +13.6. CRL Freshness Window + + There is an inherent propagation delay between AIT revocation and CRL + distribution. With default settings, this window is up to 5 minutes. + Deployments requiring tighter revocation windows SHOULD: + + * Reduce the CRL refresh interval. + + * Use push-based CRL invalidation (e.g., message queues). + + * Combine CRL checks with real-time agent-auth validation for + sensitive operations. + + + + + + + + +Vemula Expires 25 August 2026 [Page 25] + +Internet-Draft Clawdentity February 2026 + + +13.7. 
Human-Anchored Trust + + The protocol explicitly prevents agent self-certification. An agent + cannot approve its own work or establish trust without human + involvement. The pairing ceremony requires out-of-band ticket + exchange, and global revocation requires the agent owner's + credentials. This design prevents autonomous trust escalation. + +14. Error Codes + + The following error codes are returned in JSON error responses with + the corresponding HTTP status codes: + +14.1. Authentication Errors (401) + + +==============================+===================================+ + | Code | Description | + +==============================+===================================+ + | PROXY_AUTH_MISSING_TOKEN | No Authorization header provided. | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_INVALID_SCHEME | Authorization header is not "Claw | + | | " format. | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_INVALID_AIT | AIT JWT verification failed. | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_INVALID_PROOF | PoP signature does not match. | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_INVALID_TIMESTAMP | X-Claw-Timestamp missing or not a | + | | valid integer. | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_TIMESTAMP_SKEW | Timestamp outside the allowed | + | | skew window. | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_REPLAY | Nonce has been seen before | + | | (replay detected). | + +------------------------------+-----------------------------------+ + | PROXY_AUTH_REVOKED | AIT jti is on the CRL. | + +------------------------------+-----------------------------------+ + | PROXY_AGENT_ACCESS_REQUIRED | X-Claw-Agent-Access header | + | | missing. 
| + +------------------------------+-----------------------------------+ + | PROXY_AGENT_ACCESS_INVALID | Agent access token is invalid or | + | | expired. | + +------------------------------+-----------------------------------+ + + Table 13 + + + + + +Vemula Expires 25 August 2026 [Page 26] + +Internet-Draft Clawdentity February 2026 + + +14.2. Authorization Errors (403) + + +================================+==========================+ + | Code | Description | + +================================+==========================+ + | PROXY_AUTH_FORBIDDEN | Agent not in trust store | + | | or pair not approved. | + +--------------------------------+--------------------------+ + | PROXY_PAIR_OWNERSHIP_FORBIDDEN | Caller does not own the | + | | initiator agent DID. | + +--------------------------------+--------------------------+ + + Table 14 + +14.3. Service Unavailable Errors (503) + + +===================================+============================+ + | Code | Description | + +===================================+============================+ + | PROXY_AUTH_DEPENDENCY_UNAVAILABLE | Registry, CRL, or trust | + | | store is unreachable. | + +-----------------------------------+----------------------------+ + | PROXY_PAIR_STATE_UNAVAILABLE | Trust store is unreachable | + | | for pairing operations. | + +-----------------------------------+----------------------------+ + | CRL_CACHE_STALE | CRL exceeds max age and | + | | fail-closed is configured. | + +-----------------------------------+----------------------------+ + + Table 15 + +15. IANA Considerations + +15.1. DID Method Registration + + This specification introduces the "claw" DID method. A registration + request for the W3C DID Method Registry would include: + + Method Name claw + + Method Specific Identifier : where kind is "human" or + "agent" + + DID Document Not applicable; identity is resolved via registry API. 
+ + + + + + + +Vemula Expires 25 August 2026 [Page 27] + +Internet-Draft Clawdentity February 2026 + + +15.2. HTTP Authentication Scheme Registration + + This specification registers the "Claw" authentication scheme in the + "Hypertext Transfer Protocol (HTTP) Authentication Scheme Registry" + defined in [RFC9110] Section 16.3: + + Authentication Scheme Name Claw + + Reference Section 6 of this document + +15.3. JWT "typ" Header Parameter Values + + This specification registers two JWT "typ" header parameter values in + the "JSON Web Token Types" sub-registry of the "JSON Web Token (JWT)" + registry: + + +=============+=============================+============+ + | "typ" Value | Description | Reference | + +=============+=============================+============+ + | AIT | Agent Identity Token | Section 4 | + +-------------+-----------------------------+------------+ + | CRL | Certificate Revocation List | Section 10 | + +-------------+-----------------------------+------------+ + + Table 16 + +16. References + +16.1. Normative References + + [RFC2119] Bradner, S., "Key words for use in RFCs to Indicate + Requirement Levels", BCP 14, RFC 2119, + DOI 10.17487/RFC2119, March 1997, + . + + [RFC8174] Leiba, B., "Ambiguity of Uppercase vs Lowercase in RFC + 2119 Key Words", BCP 14, RFC 8174, DOI 10.17487/RFC8174, + May 2017, . + + [RFC4648] Josefsson, S., "The Base16, Base32, and Base64 Data + Encodings", RFC 4648, DOI 10.17487/RFC4648, October 2006, + . + + [RFC6234] 3rd, D. E. and T. Hansen, "US Secure Hash Algorithms (SHA + and SHA-based HMAC and HKDF)", RFC 6234, + DOI 10.17487/RFC6234, May 2011, + . + + + + +Vemula Expires 25 August 2026 [Page 28] + +Internet-Draft Clawdentity February 2026 + + + [RFC6455] Fette, I. and A. Melnikov, "The WebSocket Protocol", + RFC 6455, DOI 10.17487/RFC6455, December 2011, + . + + [RFC7515] Jones, M., Bradley, J., and N. Sakimura, "JSON Web + Signature (JWS)", RFC 7515, DOI 10.17487/RFC7515, May + 2015, . 
+ + [RFC7519] Jones, M., Bradley, J., and N. Sakimura, "JSON Web Token + (JWT)", RFC 7519, DOI 10.17487/RFC7519, May 2015, + . + + [RFC7800] Jones, M., Bradley, J., and H. Tschofenig, "Proof-of- + Possession Key Semantics for JSON Web Tokens (JWTs)", + RFC 7800, DOI 10.17487/RFC7800, April 2016, + . + + [RFC8032] Josefsson, S. and I. Liusvaara, "Edwards-Curve Digital + Signature Algorithm (EdDSA)", RFC 8032, + DOI 10.17487/RFC8032, January 2017, + . + + [RFC8037] Liusvaara, I., "CFRG Elliptic Curve Diffie-Hellman (ECDH) + and Signatures in JSON Object Signing and Encryption + (JOSE)", RFC 8037, DOI 10.17487/RFC8037, January 2017, + . + + [RFC8446] Rescorla, E., "The Transport Layer Security (TLS) Protocol + Version 1.3", RFC 8446, DOI 10.17487/RFC8446, August 2018, + . + + [RFC9110] Fielding, R., Nottingham, M., and J. Reschke, "HTTP + Semantics", RFC 9110, DOI 10.17487/RFC9110, June 2022, + . + +16.2. Informative References + + [RFC9449] Fett, D., Campbell, B., Bradley, J., Lodderstedt, T., + Jones, M., and D. Waite, "OAuth 2.0 Demonstrating Proof of + Possession (DPoP)", RFC 9449, DOI 10.17487/RFC9449, + September 2023, . + + [W3C.DID] Sporny, M., Longley, D., Sabadello, M., Reed, D., Steele, + O., and C. Allen, "Decentralized Identifiers (DIDs) v1.0", + W3C Recommendation, July 2022, + . + + + + + +Vemula Expires 25 August 2026 [Page 29] + +Internet-Draft Clawdentity February 2026 + + + [ULID] Feerasta, A., "Universally Unique Lexicographically + Sortable Identifier", 2016, + . + + [OIDC.Discovery] + Sakimura, N., Bradley, J., Jones, M., and E. Jay, "OpenID + Connect Discovery 1.0", November 2014, + . + +Appendix A. Example: Complete Message Flow + + The following describes a complete message relay from Agent A to + Agent B: + + 1. Agent A's connector creates an "enqueue" frame targeting Agent + B's DID. + + 2. If connected, the frame is sent over WebSocket to Proxy A; + otherwise it is queued locally. + + 3. 
Proxy A receives the enqueue, looks up Agent B's proxy URL from + the trust store, and signs an HTTP request with Agent A's AIT and + PoP. + + 4. Proxy A sends POST /hooks/agent to Proxy B with the signed + request. + + 5. Proxy B verifies the Authorization header (AIT + PoP), checks the + CRL, and confirms the (A, B) pair exists in its trust store. + + 6. Proxy B creates a "deliver" frame and sends it over WebSocket to + Connector B. + + 7. Connector B receives the deliver frame and POSTs the payload to + the local agent framework on localhost. + + 8. Connector B sends a "deliver_ack" (accepted: true) back to Proxy + B. + + 9. Agent B processes the message. + + + + + + + + + + +Vemula Expires 25 August 2026 [Page 30] + +Internet-Draft Clawdentity February 2026 + + +Appendix B. Comparison with Existing Standards + + +================+====================+=======================+ + | Feature | OAuth 2.0 / DPoP | Clawdentity | + +================+====================+=======================+ + | Identity model | Client credentials | Per-agent DID + | + | | / tokens | Ed25519 keypair | + +----------------+--------------------+-----------------------+ + | Token issuer | Authorization | Registry (centralized | + | | server | trust anchor) | + +----------------+--------------------+-----------------------+ + | PoP mechanism | DPoP JWT (RFC | Canonical request | + | | 9449) | signing | + +----------------+--------------------+-----------------------+ + | Trust model | Scope-based | Explicit bilateral | + | | authorization | pairing | + +----------------+--------------------+-----------------------+ + | Revocation | Token | Signed CRL with local | + | | introspection | caching | + +----------------+--------------------+-----------------------+ + | Transport | Direct HTTP | WebSocket relay with | + | | | store-and-forward | + +----------------+--------------------+-----------------------+ + | Target | Human-to-service | Agent-to-agent | + | | auth | communication | + 
+----------------+--------------------+-----------------------+ + + Table 17 + +Acknowledgements + + The Clawdentity protocol was designed as part of the OpenClaw + ecosystem. The author thanks the OpenClaw community for feedback on + the identity model, and the designers of DPoP (RFC 9449) and W3C DIDs + whose work informed key design decisions. + +Author's Address + + Ravi Kiran Vemula + KnackLabs + Hyderabad + India + Email: ravi@knacklabs.ai + URI: https://ravi.sh + + + + + + + +Vemula Expires 25 August 2026 [Page 31] diff --git a/draft-vemula-clawdentity-protocol-00.xml b/draft-vemula-clawdentity-protocol-00.xml new file mode 100644 index 0000000..fb58bbd --- /dev/null +++ b/draft-vemula-clawdentity-protocol-00.xml @@ -0,0 +1,1494 @@ + + + + + + +]> + + + + Clawdentity: Cryptographic Identity and Trust Protocol for AI Agent Communication + + + + + KnackLabs +
+ + Hyderabad + India + + ravi@knacklabs.ai + https://ravi.sh +
+
+ + + + Security + Independent Submission + + AI agents + identity + cryptographic trust + Ed25519 + proof of possession + agent-to-agent + + + + This document specifies the Clawdentity protocol, a cryptographic identity + and trust layer for AI agent-to-agent communication. Clawdentity provides + per-agent Ed25519 identity, registry-issued credentials (Agent Identity Tokens), + proof-of-possession request signing, bilateral trust establishment via pairing + ceremonies, authenticated relay transport over WebSocket, and certificate + revocation. The protocol enables AI agents to prove their identity, verify + peers, and exchange messages without exposing private keys, shared tokens, + or backend infrastructure. + + +
+ + + + +
+ Introduction + +
+ Problem Statement + + Current AI agent frameworks rely on shared bearer tokens for inter-agent + communication. A single token leak compromises all agents in the system. + There is no mechanism to distinguish which agent sent a request, revoke + a single agent without rotating the shared token, enforce per-agent access + control, or keep backend services private. These limitations become critical + as multi-agent systems scale. + +
+ +
+ Design Goals + Clawdentity addresses these problems with six design goals: +
    +
  1. Individual identity: Each agent has a unique cryptographic keypair and DID.
  2. +
  3. Proof of possession: Every request proves the sender holds the private key via Ed25519 signatures.
  4. +
  5. Selective revocation: One agent can be revoked without affecting others.
  6. +
  7. Zero-trust relay: Agents communicate through authenticated proxies; backend services remain unexposed.
  8. +
  9. Human-anchored trust: Trust originates from human approval, not agent self-certification.
  10. +
  11. Framework agnostic: Works with any AI agent framework.
  12. +
+
+ +
+ Architecture Overview + + The protocol defines four component roles: + +
+
Registry
+
Central identity authority. Issues Agent Identity Tokens (AITs), manages signing keys, publishes the Certificate Revocation List (CRL).
+
Proxy
+
Per-owner edge service. Verifies identity, enforces trust policy, rate-limits requests, and relays messages between agents.
+
Connector
+
Local bridge process between the proxy and the agent framework. Maintains a persistent WebSocket connection to the proxy. Never exposed publicly.
+
Agent
+
The AI agent itself. Has no direct knowledge of the protocol; the connector handles all cryptographic operations.
+
+
+ Component Architecture + | Proxy B | + | (edge) | | | (edge) | + +-------------+ | +--------------+ + | + +------------+------------+ + | .well-known/claw-keys | + | /v1/crl | + | /v1/agents | + +-------------------------+ +]]> +
+
+
+ + +
+ Conventions and Terminology + + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", + "SHOULD", "SHOULD NOT", "RECOMMENDED", "NOT RECOMMENDED", "MAY", and + "OPTIONAL" in this document are to be interpreted as described in + BCP 14 when, + and only when, they appear in all capitals, as shown here. + + This document uses the following terms: +
+
AIT
+
Agent Identity Token. A signed JWT credential (Section ) binding an agent DID to a public key.
+
CRL
+
Certificate Revocation List. A signed JWT (Section ) containing a list of revoked AITs.
+
DID
+
Decentralized Identifier as defined in , using the "claw" method.
+
PoP
+
Proof of Possession. An Ed25519 signature proving the sender controls the private key corresponding to a public key.
+
Pairing
+
Mutual trust establishment between two agents via a ticket-based ceremony.
+
Trust Store
+
Per-proxy persistent storage of known agents and approved trust pairs.
+
ULID
+
Universally Unique Lexicographically Sortable Identifier .
+
+
+ + +
+ Identity Model + +
+ DID Format + + Clawdentity uses a custom DID method with the scheme "did:claw". + The method-specific identifier consists of a kind and a ULID, + separated by a colon: + + + Two kinds are defined: + + + + + + + + +
KindDescriptionExample
humanA human owner/operatordid:claw:human:01HF7YAT00W6W7CM7N3W5FDXT4
agentAn AI agentdid:claw:agent:01HG8ZBU11X7X8DN8O4X6GEYU5
+ + Implementations MUST reject DIDs where the kind is not "human" or "agent", + or where the ULID component does not conform to the ULID specification . + +
+ +
+ Cryptographic Primitives + + The protocol uses Ed25519 as the sole signing algorithm. + Implementations MUST NOT support other signature algorithms. + + + + + + + + + + + + +
PrimitiveAlgorithmReferenceUsage
SigningEd25519Identity, request signing
Body hashSHA-256Request body integrity
Token formatJWS CompactAIT and CRL tokens
Key encodingBase64url (no pad) Section 5Keys, signatures, hashes
Key representationJWK (OKP/Ed25519)Public keys in AITs
+
+ +
+ Key Generation + + Each agent locally generates an Ed25519 keypair consisting of a + 32-byte public key and a 64-byte secret key. The secret key MUST + be stored exclusively on the agent's local machine and MUST NOT be + transmitted over any network. Only the public key is registered + with the registry, encoded as base64url within the AIT's confirmation + claim (Section ). + +
+ +
+ Ownership Model + + Every agent DID is bound to exactly one human DID (the "ownerDid"). + This binding is recorded in the AIT claims and enforced by the + registry during registration and refresh operations. A human MAY + own multiple agents. An agent MUST have exactly one owner. + + +
+
+ + +
+ Agent Identity Token (AIT) + +
+ Overview + + The Agent Identity Token (AIT) is a JSON Web Token + that serves as an agent's credential. It is issued by the registry, + signed with a registry Ed25519 key, and binds the agent's DID to the + agent's public key via a confirmation claim ("cnf"), following the + pattern established by DPoP . + +
+ +
+ JOSE Header + The AIT's JOSE protected header MUST contain: +
+
alg
+
REQUIRED. MUST be "EdDSA" per .
+
typ
+
REQUIRED. MUST be "AIT".
+
kid
+
REQUIRED. The key identifier of the registry signing key used to sign this AIT. This allows the verifier to locate the correct registry public key.
+
+
+ AIT JOSE Header Example + +
+
+ +
+ Claims + + The AIT payload MUST contain the following claims. No additional + claims are permitted (strict validation). + + + + + + + + + + + + + + + + + + +
ClaimTypeRequiredDescription
issstringREQUIREDRegistry issuer URL (e.g., "https://registry.clawdentity.com")
substringREQUIREDAgent DID. MUST be "did:claw:agent:<ulid>"
ownerDidstringREQUIREDOwner human DID. MUST be "did:claw:human:<ulid>"
namestringREQUIREDAgent name. 1-64 characters matching [A-Za-z0-9._ -]
frameworkstringREQUIREDAgent framework identifier. 1-32 characters, no control characters.
descriptionstringOPTIONALHuman-readable description. Maximum 280 characters.
cnfobjectREQUIREDConfirmation claim. See Section .
iatnumberREQUIREDIssued-at time (NumericDate per ).
nbfnumberREQUIREDNot-before time (NumericDate).
expnumberREQUIREDExpiration time (NumericDate). MUST be greater than both nbf and iat.
jtistringREQUIREDUnique token identifier. MUST be a valid ULID.
+
+ +
+ Confirmation Claim + + The "cnf" (confirmation) claim binds the AIT to the agent's Ed25519 + public key, following the confirmation method pattern described in + . It contains a single "jwk" member: + + " + } +} +]]> + + The "kty" MUST be "OKP". The "crv" MUST be "Ed25519". + The "x" parameter MUST decode (base64url) to exactly 32 bytes. + The JWK MUST NOT contain a "d" (private key) parameter. + +
+ +
+ Validation Rules + An AIT MUST be rejected if any of the following conditions are true: +
    +
  1. "alg" is not "EdDSA".
  2. +
  3. "typ" is not "AIT".
  4. +
  5. "kid" does not match any active registry signing key.
  6. +
  7. JWS signature verification fails against the registry key identified by "kid".
  8. +
  9. "sub" is not a valid DID with kind "agent".
  10. +
  11. "ownerDid" is not a valid DID with kind "human".
  12. +
  13. "cnf.jwk.x" does not decode to exactly 32 bytes.
  14. +
  15. "exp" is less than or equal to "nbf" or "iat".
  16. +
  17. "jti" is not a valid ULID.
  18. +
  19. Current time is before "nbf" or after "exp" (accounting for clock skew).
  20. +
  21. "jti" appears in the current CRL (Section ).
  22. +
+
+
+ + +
+ HTTP Request Signing + +
+ Purpose + + Every authenticated request includes a Proof of Possession (PoP) + signature that proves the sender controls the private key + corresponding to the public key in their AIT's "cnf" claim. + This mechanism is inspired by DPoP but + uses a canonical request signing approach optimized for + agent-to-agent communication. + +
+ +
+ Canonical Request Format + + The canonical request string is constructed by joining the following + fields with newline (0x0A) separators, in the order shown: + + +
+ Canonical Request Example + +
+
+ +
+ Signature Computation + + The PoP signature is computed by signing the UTF-8 encoding of the + canonical request string with the agent's Ed25519 private key: + + + + The resulting "proof" is a base64url-encoded 64-byte Ed25519 signature. + +
+ +
+ Request Headers + + An authenticated request MUST include the following headers: + + + + + + + + + + + + + +
HeaderStatusDescription
AuthorizationREQUIRED"Claw" SP <AIT-JWT>. See Section .
X-Claw-TimestampREQUIREDUnix epoch seconds (integer string).
X-Claw-NonceREQUIREDUnique per-request value. ULID RECOMMENDED.
X-Claw-Body-SHA256REQUIREDSHA-256 hash of the request body, base64url-encoded.
X-Claw-ProofREQUIREDEd25519 PoP signature (base64url, 64 bytes).
X-Claw-Agent-AccessCONDITIONALSession access token. Required for relay and hook routes.
+
+ +
+ Verification Procedure + The verifier (proxy) MUST perform the following steps in order: +
    +
  1. Extract the AIT from the "Authorization: Claw <token>" header.
  2. +
  3. Verify the AIT's JWS signature against the registry's signing keys (Section ).
  4. +
  5. Check that the AIT's "jti" is not on the CRL (Section ).
  6. +
  7. Extract the agent's public key from the AIT's "cnf.jwk.x" claim.
  8. +
  9. Verify X-Claw-Timestamp is within the allowed skew window (default: 300 seconds).
  10. +
  11. Recompute SHA-256 of the request body; compare with X-Claw-Body-SHA256.
  12. +
  13. Reconstruct the canonical request (Section ).
  14. +
  15. Verify X-Claw-Proof against the canonical request using the agent's public key.
  16. +
  17. Check X-Claw-Nonce has not been seen before for this agent DID within the timestamp window.
  18. +
+ If any step fails, the request MUST be rejected with HTTP 401. +
+
+ + +
+ The "Claw" Authentication Scheme + + This specification introduces the "Claw" HTTP authentication scheme + for the Authorization header, following the framework defined in + Section 11. + + + + The scheme name "Claw" is case-sensitive. The token MUST be a valid + JWS Compact Serialization representing + an AIT as defined in Section . + +
+ + +
+ Agent Registration + +
+ Registration Flow + + Agent registration uses a challenge-response protocol to prove + that the registrant possesses the Ed25519 private key + corresponding to the public key being registered. + +
+ Agent Registration Sequence + | + | | + | 200 { challengeId, nonce } | + |<-------------------------------------| + | | + | [Agent signs registration proof] | + | | + | POST /v1/agents | + | { proof, publicKey, name, ... } | + |------------------------------------->| + | | + | 201 { agentDid, ait } | + |<-------------------------------------| +]]> +
+
+ +
+ Registration Proof + + The registration proof is computed by signing a canonical message + that binds the challenge to the agent's identity parameters: + + +nonce: +ownerDid: +publicKey: +name: +framework: +ttlDays: +]]> + + Optional fields (framework, ttlDays) use empty strings when absent. + The agent signs this message with Ed25519 and submits the base64url-encoded + signature as the "proof" in the registration request. + +
+ +
+ AIT Refresh + + AITs have bounded lifetimes. Before expiration, the connector + MUST request a fresh AIT: + + +X-Claw-Agent-Access: +]]> + + The registry validates the current AIT and access token, verifies + the agent is not revoked, and returns a new AIT with an updated + expiration time. + +
+
+ + +
+ Trust Establishment (Pairing) + +
+ Overview + + Before two agents can exchange messages, they MUST establish + mutual trust through a pairing ceremony. Trust is anchored by + human approval: agents cannot self-approve trust relationships. + The pairing process uses short-lived tickets exchanged + out-of-band (e.g., via QR code or messaging). + +
+ +
+ Pairing Flow +
+ Trust Establishment Sequence + | | + | | | + | { ticket, | | + | expiresAt } | | + |<--------------------| | + | | | + | (out-of-band ticket exchange) | + | (QR code, message, copy-paste) | + |----------------------------------------->| + | | | + | | POST /pair/confirm | + | | { ticket, | + | | responderProfile}| + | |<-------------------| + | | | + | | 201 { paired:true }| + | |------------------->| + | | | + | callback (optional) | | + |<--------------------| | +]]> +
+
+ +
+ Pairing Ticket + + The pairing ticket is a signed JWT with a short TTL, created by + the proxy during the /pair/start request. The ticket encodes + the issuer proxy URL, a signing key identifier, and an expiration + timestamp. + + Ticket parameters: + + + + + + + +
ParameterDefaultMaximum
TTL (ttlSeconds)300 seconds900 seconds
+
+ +
+ Peer Profile + + Each side of a pairing provides a profile containing identity + information for display and routing: + + +
+
agentName
+
REQUIRED. Agent display name. Maximum 64 characters. No control characters.
+
humanName
+
REQUIRED. Owner display name. Maximum 64 characters. No control characters.
+
proxyOrigin
+
OPTIONAL. The proxy's URL origin, used for cross-proxy message routing.
+
+
+ +
+ Ownership Verification + + When an agent initiates pairing, the proxy MUST verify that the + authenticated caller (identified by "ownerDid" in the AIT) + actually owns the claimed initiator agent DID. This is done by + querying the registry's internal agent-ownership endpoint. If + ownership cannot be verified, the pairing MUST be rejected with + HTTP 403. + +
+ +
+ Trust Store + + Each proxy maintains a Trust Store that records: + +
    +
  • Known agents: Agents that have been authenticated and accepted.
  • +
  • Approved pairs: Bidirectional trust relationships between agents.
  • +
  • Pairing tickets: Pending and completed pairing ceremonies with expiration tracking.
  • +
+ + A message from Agent A to Agent B is permitted only if the ordered + pair (A, B) exists in the proxy's trust store. The trust store + SHOULD use a durable, transactional storage backend. + +
+
+ + +
+ Relay Transport + +
+ Overview + + Messages between agents are relayed through their respective proxies. + The connector maintains a persistent WebSocket + connection to its proxy. The proxy authenticates the WebSocket + upgrade request using the full AIT + PoP verification procedure + (Section ). + +
+ +
+ WebSocket Connection + + The connector initiates a WebSocket connection to: + + +X-Claw-Agent-Access: +X-Claw-Timestamp: +X-Claw-Nonce: +X-Claw-Body-SHA256: +X-Claw-Proof: +]]> +
+ +
+ Frame Protocol + + All WebSocket messages are JSON objects conforming to the + Clawdentity Frame Protocol version 1. Every frame contains + a common base structure: + + ", + "id": "", + "ts": "" +} +]]> +
+
v
+
REQUIRED. Integer. Frame protocol version. MUST be 1.
+
type
+
REQUIRED. String. One of: "heartbeat", "heartbeat_ack", "deliver", "deliver_ack", "enqueue", "enqueue_ack".
+
id
+
REQUIRED. String. Unique frame identifier (ULID).
+
ts
+
REQUIRED. String. ISO 8601 timestamp with timezone.
+
+
+ +
+ Heartbeat Frames + + Either side MAY send heartbeat frames to verify liveness. + The default heartbeat interval is 30 seconds. If a heartbeat + acknowledgement is not received within 60 seconds, the sender + SHOULD close the connection and reconnect. + + Heartbeat: + ", "ts": "" } +]]> + Heartbeat acknowledgement: + ", "ts": "", + "ackId": "" } +]]> +
+ +
+ Deliver Frames + + The proxy sends a "deliver" frame to the connector when an + inbound message arrives for the local agent: + + ", + "ts": "", + "fromAgentDid": "did:claw:agent:...", + "toAgentDid": "did:claw:agent:...", + "payload": { ... }, + "contentType": "application/json", + "conversationId": "conv-123", + "replyTo": "https://proxy-a.example.com/v1/relay/delivery-receipts" +} +]]> + + The connector MUST respond with a "deliver_ack" frame indicating + whether the local agent framework accepted the delivery: + + ", + "ts": "", + "ackId": "", + "accepted": true +} +]]> + If rejected, the "accepted" field is false and an optional "reason" string MAY be included. +
+ +
+ Enqueue Frames + + The connector sends an "enqueue" frame to the proxy for outbound + messages: + + ", + "ts": "", + "toAgentDid": "did:claw:agent:...", + "payload": { ... }, + "conversationId": "conv-123" +} +]]> + + The proxy responds with "enqueue_ack" after accepting or rejecting + the message for relay. + +
+ +
+ Local Agent Delivery + + Upon receiving a "deliver" frame, the connector forwards the + payload to the local agent framework via HTTP POST: + + +x-clawdentity-to-agent-did: +x-clawdentity-verified: true +x-openclaw-token: +x-request-id: +]]> + + The connector implements retry with exponential backoff for + transient failures (5xx, 429, connection errors): + + + + + + + + + + + + +
ParameterDefault Value
Max attempts4
Initial delay300 ms
Max delay2,000 ms
Backoff factor2
Total budget14,000 ms
+
+ +
+ Reconnection + + On WebSocket disconnection, the connector MUST attempt to + reconnect using exponential backoff with jitter: + + + + + + + + + + + +
ParameterDefault Value
Minimum delay1,000 ms
Maximum delay30,000 ms
Backoff factor2
Jitter ratio0.2 (±20%)
+ + On successful reconnection, the connector resets the backoff + counter and flushes any queued outbound frames. + +
+ +
+ Outbound Queue Persistence + + When the WebSocket connection is unavailable, the connector + MUST queue outbound "enqueue" frames locally and flush them + in FIFO order upon reconnection. The queue SHOULD support + optional persistence (to disk or database) to survive + connector restarts. + +
+
+ + +
+ Certificate Revocation + +
+ CRL Format + + The Certificate Revocation List (CRL) is a signed JWT containing + a list of revoked AITs. Its JOSE header uses: + +
+
alg
MUST be "EdDSA".
+
typ
MUST be "CRL".
+
kid
Registry signing key identifier.
+
+ CRL claims: + + + + + + + + + + + +
ClaimTypeRequiredDescription
issstringREQUIREDRegistry issuer URL.
jtistringREQUIREDCRL identifier (ULID).
iatnumberREQUIREDIssued-at timestamp.
expnumberREQUIREDExpiration. MUST be greater than iat.
revocationsarrayREQUIREDAt least one revocation entry.
+ Each revocation entry contains: + + + + + + + + + + +
FieldTypeRequiredDescription
jtistringREQUIREDRevoked AIT's jti (ULID).
agentDidstringREQUIREDRevoked agent's DID.
reasonstringOPTIONALHuman-readable reason. Max 280 chars.
revokedAtnumberREQUIREDRevocation timestamp (Unix seconds).
+
+ +
+ CRL Distribution + + The registry publishes the current CRL at the well-known endpoint: + + + Response: + " } +]]> +
+ +
+ CRL Caching + + Proxies MUST cache the CRL locally and refresh it periodically. + The following parameters control caching behavior: + + + + + + + + + + +
ParameterDefaultDescription
Refresh interval5 minutesHow often to fetch a fresh CRL.
Max age15 minutesMaximum staleness before the cache is considered expired.
Stale behaviorfail-open"fail-open" allows stale CRL use; "fail-closed" rejects all requests.
+ + When "fail-open": if the CRL cannot be refreshed and the cached + CRL is within max age, the stale CRL is used for revocation checks. + + + When "fail-closed": if the CRL exceeds max age and cannot be + refreshed, the proxy MUST reject all authenticated requests + with HTTP 503. + +
+ +
+ Revocation Scope + Two levels of revocation are defined: +
+
Global revocation (revoke agent)
+
+ The agent's AIT jti is added to the CRL by the registry. The + agent can no longer authenticate at any proxy. Only the agent's + owner can initiate global revocation. +
+
Local revocation (remove pair)
+
+ A trust relationship is removed from a proxy's trust store. + The agent still exists and can communicate with other paired + agents, but can no longer reach the unpaired peer via that proxy. + Either side of the pair can initiate local revocation. +
+
+
+
+ + +
+ Registry Key Discovery + + The registry publishes its active signing keys at a well-known endpoint, + following the pattern established by OpenID Connect Discovery + : + + + Response: + ", + "status": "active", + "createdAt": "2026-01-01T00:00:00Z" + } + ] +} +]]> + + The registry MAY have multiple active signing keys to support key + rotation. The AIT and CRL "kid" headers identify which key was + used to sign a given token. Proxies SHOULD cache these keys + (default TTL: 1 hour) and refresh them when an unknown "kid" + is encountered. + +
+ + +
+ Endpoint Reference + +
+ Registry Endpoints + + + + + + + + + + + + + + +
MethodPathAuthDescription
GET/.well-known/claw-keys.jsonNoneRegistry signing keys
GET/v1/metadataNoneRegistry metadata
GET/v1/crlNoneCurrent CRL
POST/v1/agents/challengeAPI KeyRequest registration challenge
POST/v1/agents/auth/refreshAIT + AccessRefresh AIT
POST/v1/agents/auth/validateInternalValidate agent access token
POST/v1/invitesAPI KeyCreate invite code
POST/v1/invites/redeemNoneRedeem invite code
+
+ +
+ Proxy Endpoints + + + + + + + + + + + + + +
MethodPathAuthDescription
GET/healthNoneHealth check
POST/hooks/agentAIT + PoP + AccessInbound message delivery
GET/v1/relay/connectAIT + PoP + AccessWebSocket relay
POST/v1/relay/delivery-receiptsAIT + PoP + AccessDelivery receipt callback
POST/pair/startAIT + PoPInitiate pairing
POST/pair/confirmAIT + PoPConfirm pairing
POST/pair/statusAIT + PoPCheck pairing status
+
+
+ + +
+ Security Considerations + +
+ Private Key Protection + + Agent Ed25519 private keys MUST be stored exclusively on the + agent's local machine. The protocol is designed so that only + the public key leaves the agent — embedded in the AIT's "cnf" + claim and registered with the registry. Implementations SHOULD + use operating system key storage facilities where available. + +
+ +
+ Replay Protection + Three mechanisms provide replay protection: +
    +
  1. Timestamp skew: Requests with X-Claw-Timestamp outside a configurable window (default: 300 seconds) are rejected.
  2. +
  3. Nonce uniqueness: Each (agentDid, nonce) pair is tracked per proxy. Duplicate nonces within the timestamp window are rejected.
  4. +
  5. AIT expiration: AITs have bounded lifetimes; expired AITs are rejected regardless of signature validity.
  6. +
+
+ +
+ Transport Security + + TLS 1.2 or later ( for TLS 1.3) is + REQUIRED for all proxy-to-proxy, proxy-to-registry, and + connector-to-proxy communication over public networks. The PoP + signature (Section ) + provides an additional layer: even if TLS were compromised, a + captured AIT cannot produce valid request signatures without + the private key. + +
+ +
+ Connector Isolation + + The connector MUST only communicate with its own proxy (via + WebSocket) and the local agent framework (via localhost HTTP). + It MUST NOT directly access the registry, other proxies, or + any cloud infrastructure services (message queues, object + storage, databases). This constraint minimizes the connector's + attack surface and ensures it remains a simple, auditable bridge. + +
+ +
+ Trust Store Integrity + + The trust store is the sole authorization source for message + relay. Implementations SHOULD use a transactional storage + backend (e.g., SQLite within a Cloudflare Durable Object) to + prevent corruption from concurrent access or partial writes. + +
+ +
+
 CRL Freshness Window
 
 There is an inherent propagation delay between AIT revocation
 and CRL distribution. With default settings, this window is up
 to 5 minutes (the CRL refresh interval), and up to 15 minutes
 (the CRL max age) when a stale CRL is served under the default
 fail-open behavior. Deployments requiring tighter revocation
 windows SHOULD:
 
    +
  • Reduce the CRL refresh interval.
  • +
  • Use push-based CRL invalidation (e.g., message queues).
  • +
  • Combine CRL checks with real-time agent-auth validation for sensitive operations.
  • +
+
+ +
+ Human-Anchored Trust + + The protocol explicitly prevents agent self-certification. + An agent cannot approve its own work or establish trust + without human involvement. The pairing ceremony requires + out-of-band ticket exchange, and global revocation requires + the agent owner's credentials. This design prevents + autonomous trust escalation. + +
+
+ + +
+ Error Codes + + The following error codes are returned in JSON error responses + with the corresponding HTTP status codes: + + +
+ Authentication Errors (401) + + + + + + + + + + + + + + + + +
CodeDescription
PROXY_AUTH_MISSING_TOKENNo Authorization header provided.
PROXY_AUTH_INVALID_SCHEMEAuthorization header is not "Claw <token>" format.
PROXY_AUTH_INVALID_AITAIT JWT verification failed.
PROXY_AUTH_INVALID_PROOFPoP signature does not match.
PROXY_AUTH_INVALID_TIMESTAMPX-Claw-Timestamp missing or not a valid integer.
PROXY_AUTH_TIMESTAMP_SKEWTimestamp outside the allowed skew window.
PROXY_AUTH_REPLAYNonce has been seen before (replay detected).
PROXY_AUTH_REVOKEDAIT jti is on the CRL.
PROXY_AGENT_ACCESS_REQUIREDX-Claw-Agent-Access header missing.
PROXY_AGENT_ACCESS_INVALIDAgent access token is invalid or expired.
+
+ +
+ Authorization Errors (403) + + + + + + + + +
CodeDescription
PROXY_AUTH_FORBIDDENAgent not in trust store or pair not approved.
PROXY_PAIR_OWNERSHIP_FORBIDDENCaller does not own the initiator agent DID.
+
+ +
+ Service Unavailable Errors (503) + + + + + + + + + +
CodeDescription
PROXY_AUTH_DEPENDENCY_UNAVAILABLERegistry, CRL, or trust store is unreachable.
PROXY_PAIR_STATE_UNAVAILABLETrust store is unreachable for pairing operations.
CRL_CACHE_STALECRL exceeds max age and fail-closed is configured.
+
+
+ + +
+ IANA Considerations + +
+ DID Method Registration + + This specification introduces the "claw" DID method. A registration + request for the W3C DID Method Registry would include: + +
+
Method Name
claw
+
Method Specific Identifier
<kind>:<ulid> where kind is "human" or "agent"
+
DID Document
Not applicable; identity is resolved via registry API.
+
+
+ +
+
 HTTP Authentication Scheme Registration
 
 This specification registers the "Claw" authentication scheme
 in the "Hypertext Transfer Protocol (HTTP) Authentication Scheme
 Registry" defined in Section 16.4:
 
+
Authentication Scheme Name
Claw
+
Reference
Section of this document
+
+
+ +
+ JWT "typ" Header Parameter Values + + This specification registers two JWT "typ" header parameter values + in the "JSON Web Token Types" sub-registry of the "JSON Web Token + (JWT)" registry: + + + + + + + + + +
"typ" ValueDescriptionReference
AITAgent Identity TokenSection
CRLCertificate Revocation ListSection
+
+
+
+ + + + + References + + + Normative References + + + + Key words for use in RFCs to Indicate Requirement Levels + + + + + + + + + + + Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words + + + + + + + + + + + The Base16, Base32, and Base64 Data Encodings + + + + + + + + + + US Secure Hash Algorithms (SHA and SHA-based HMAC and HKDF) + + + + + + + + + + + The WebSocket Protocol + + + + + + + + + + + JSON Web Signature (JWS) + + + + + + + + + + + + JSON Web Token (JWT) + + + + + + + + + + + + Proof-of-Possession Key Semantics for JSON Web Tokens (JWTs) + + + + + + + + + + + + Edwards-Curve Digital Signature Algorithm (EdDSA) + + + + + + + + + + + CFRG Elliptic Curve Diffie-Hellman (ECDH) and Signatures in JSON Object Signing and Encryption (JOSE) + + + + + + + + + + The Transport Layer Security (TLS) Protocol Version 1.3 + + + + + + + + + + HTTP Semantics + + + + + + + + + + + + Informative References + + + + OAuth 2.0 Demonstrating Proof of Possession (DPoP) + + + + + + + + + + + + + + + Decentralized Identifiers (DIDs) v1.0 + + + + + + + + + W3C Recommendation + + + + + Universally Unique Lexicographically Sortable Identifier + + + + + + + + OpenID Connect Discovery 1.0 + + + + + + + + + + + +
+ Example: Complete Message Flow + + The following describes a complete message relay from Agent A to Agent B: + +
    +
  1. Agent A's connector creates an "enqueue" frame targeting Agent B's DID.
  2. +
  3. If connected, the frame is sent over WebSocket to Proxy A; otherwise it is queued locally.
  4. +
  5. Proxy A receives the enqueue, looks up Agent B's proxy URL from the trust store, and signs an HTTP request with Agent A's AIT and PoP.
  6. +
  7. Proxy A sends POST /hooks/agent to Proxy B with the signed request.
  8. +
  9. Proxy B verifies the Authorization header (AIT + PoP), checks the CRL, and confirms the (A, B) pair exists in its trust store.
  10. +
  11. Proxy B creates a "deliver" frame and sends it over WebSocket to Connector B.
  12. +
  13. Connector B receives the deliver frame and POSTs the payload to the local agent framework on localhost.
  14. +
  15. Connector B sends a "deliver_ack" (accepted: true) back to Proxy B.
  16. +
  17. Agent B processes the message.
  18. +
+
+ +
+ Comparison with Existing Standards + + + + + + + + + + + + + +
FeatureOAuth 2.0 / DPoPClawdentity
Identity modelClient credentials / tokensPer-agent DID + Ed25519 keypair
Token issuerAuthorization serverRegistry (centralized trust anchor)
PoP mechanismDPoP JWT (RFC 9449)Canonical request signing
Trust modelScope-based authorizationExplicit bilateral pairing
RevocationToken introspectionSigned CRL with local caching
TransportDirect HTTPWebSocket relay with store-and-forward
TargetHuman-to-service authAgent-to-agent communication
+
+ +
+ Acknowledgements + + The Clawdentity protocol was designed as part of the OpenClaw + ecosystem. The author thanks the OpenClaw community for feedback + on the identity model, and the designers of DPoP (RFC 9449) and + W3C DIDs whose work informed key design decisions. + +
+ +
+
From 8bd05c13f094673d47db3aca37ba8ae622b2496e Mon Sep 17 00:00:00 2001 From: vrknetha Date: Sat, 21 Feb 2026 19:17:22 +0530 Subject: [PATCH 151/190] Rename I-D to draft-caw-clawdentity-protocol-00 - Author: Ravi Kiran (CAW Studios) - Renamed from draft-vemula- to draft-caw- per author preference --- PROTOCOL.md | 2 +- README-draft.md | 325 +++++++++++++++++ assets/logo-lock-claw-dark.svg | 39 +++ assets/logo-lock-claw-full.svg | 53 +++ assets/logo-lock-claw.svg | 66 ++++ assets/logo-shield-claw-dark.svg | 35 ++ assets/logo-shield-claw-full.svg | 49 +++ assets/logo-shield-claw.svg | 70 ++++ assets/preview.html | 152 ++++++++ docs/logging-strategy.md | 330 ++++++++++++++++++ ... => draft-caw-clawdentity-protocol-00.html | 18 +- ...t => draft-caw-clawdentity-protocol-00.txt | 72 ++-- ...l => draft-caw-clawdentity-protocol-00.xml | 8 +- 13 files changed, 1169 insertions(+), 50 deletions(-) create mode 100644 README-draft.md create mode 100644 assets/logo-lock-claw-dark.svg create mode 100644 assets/logo-lock-claw-full.svg create mode 100644 assets/logo-lock-claw.svg create mode 100644 assets/logo-shield-claw-dark.svg create mode 100644 assets/logo-shield-claw-full.svg create mode 100644 assets/logo-shield-claw.svg create mode 100644 assets/preview.html create mode 100644 docs/logging-strategy.md rename draft-vemula-clawdentity-protocol-00.html => draft-caw-clawdentity-protocol-00.html (99%) rename draft-vemula-clawdentity-protocol-00.txt => draft-caw-clawdentity-protocol-00.txt (96%) rename draft-vemula-clawdentity-protocol-00.xml => draft-caw-clawdentity-protocol-00.xml (99%) diff --git a/PROTOCOL.md b/PROTOCOL.md index e7be299..61670a3 100644 --- a/PROTOCOL.md +++ b/PROTOCOL.md @@ -2,7 +2,7 @@ **Version:** 0.1.0-draft **Status:** Draft -**Authors:** Ravi Kiran Vemula +**Authors:** Ravi Kiran (CAW Studios) **Date:** 2026-02-21 **License:** MIT diff --git a/README-draft.md b/README-draft.md new file mode 100644 index 0000000..439fdd8 --- /dev/null +++ b/README-draft.md 
@@ -0,0 +1,325 @@ +# Clawdentity + +**Your AI agent talks to other AI agents. Clawdentity makes sure they know who they're talking to.** + +Think of it like this: your agent runs on your laptop. It's private. But it needs to collaborate with agents owned by other people — send messages, request data, delegate tasks. How does the other agent know yours is legit? How do you know theirs is? + +That's what Clawdentity solves. Every agent gets a verified identity. Every message is signed. Every connection is authorized. Your agent stays private, but it can talk to the world. + +--- + +## How it works (30-second version) + +``` +You run your AI agent on your machine (private, never exposed to internet) + │ + ▼ +Clawdentity gives it an identity (like a passport) + │ + ▼ +When your agent talks to another agent: + ✍️ It signs every message with its private key + 🛂 The other side's proxy checks the passport + ✅ If legit → message delivered + ❌ If not → rejected + │ + ▼ +Your agent stays private. The proxy is the only thing exposed. +The other agent never touches your machine directly. +``` + +**That's it.** Identity. Signing. Verification. Your agent stays safe behind a wall, but can talk to anyone it trusts. + +--- + +## Real-world analogy + +| Concept | Real world | Clawdentity | +|---------|-----------|-------------| +| Your agent | You, at home | Runs on your laptop, never exposed | +| Identity | Passport | AIT — a signed token proving who your agent is | +| Signing messages | Your signature on a letter | Every request is cryptographically signed | +| Proxy | Bouncer at the door | Checks identity before letting messages through | +| Registry | Passport office | Issues and verifies agent identities | +| Revocation | Canceling a passport | One command kills a compromised agent everywhere | + +--- + +## Why not just use API keys? + +Most agent frameworks use shared API keys or webhook tokens. 
Here's why that breaks: + +| Problem | Shared API Key | Clawdentity | +|---------|---------------|-------------| +| Someone leaks the key | **Everyone** is compromised | Only that one agent is affected | +| Who sent this request? | No idea — all callers look the same | Cryptographic proof of exactly which agent | +| Kill one bad agent | Rotate the key, break everything | Revoke one agent, others keep working | +| Replay attack | No protection | Every request has a unique nonce + timestamp | +| Your agent's machine exposed? | Yes, if using webhooks directly | No — proxy is the only public endpoint | + +--- + +## Getting started + +### 1. Install + +```bash +npm install -g clawdentity +``` + +### 2. Set up your identity + +```bash +# First time: get an invite from an admin +clawdentity invite redeem + +# Create your agent +clawdentity agent create my-agent +``` + +Your agent now has a cryptographic identity. Private key stays on your machine. Nobody else ever sees it. + +### 3. Connect with another agent + +Someone shares a connection link with you: + +```bash +clawdentity openclaw setup my-agent --invite-code +``` + +Done. Your agents can now talk to each other. + +### 4. Send a message + +From your agent's AI session, just say: + +``` +Send "Hello from my agent!" to peer alice +``` + +The message is signed, verified, and delivered. Alice's human can see it in their chat. + +--- + +## The flow (visual) + +### Connecting two agents + +``` + Ravi (owns Kai) Sarah (owns Scout) + │ │ + │ "Hey Sarah, let's connect │ + │ our agents" │ + │ │ + │ Shares connection link ────────►│ + │ │ + │ │ Clicks link + │ │ Agents pair automatically + │ │ + │◄──── Both agents now trust ─────►│ + │ each other │ +``` + +### Sending messages + +``` + Kai (Ravi's agent) Scout (Sarah's agent) + │ │ + │ Signs message with │ + │ private key │ + │ │ + │ ──── signed message ──────► Proxy checks: + │ ✅ Valid identity? + │ ✅ Not revoked? + │ ✅ Trusted pair? + │ ✅ Not a replay? 
+ │ │ + │ Delivered to Scout + │ │ + │ Sarah sees in her chat: + │ "🤖 Kai (Ravi): Hello!" +``` + +### If something goes wrong + +``` + Ravi notices Kai is compromised + │ + │ clawdentity agent revoke kai + │ + ▼ + Registry adds Kai to revocation list + │ + ▼ + Every proxy stops accepting Kai's messages + within minutes. No other agents affected. + │ + ▼ + Sarah's Scout is safe. Mike's DataBot is safe. + Only Kai is cut off. +``` + +--- + +## Groups — Multi-agent collaboration + +Create a group. Share the invite. Everyone's agents can talk. Every human sees the conversation. + +``` + 📢 AI Research Squad + + 🤖 Kai (Ravi): "Found a new paper on tool-use benchmarks" + 🤖 Scout (Sarah): "Does it cover multi-step reasoning?" + 🤖 DataBot (Mike): "I have the dataset, sharing now" + + ───── Ravi whispers to Kai ───── + 💬 "Ask about evaluation metrics too" + ───────────────────────────────── + + 🤖 Kai (Ravi): "What evaluation metrics does it use?" +``` + +**Humans can see everything. Humans can nudge their own agent. Humans can't control other agents.** + +--- + +## Agent Services — Let your agent do things for others + +Your agent can publish services that other agents can discover and use: + +``` + Kai publishes: + 📄 summarize-paper — Give me an arxiv URL, I'll summarize it + 🔍 search-papers — Search for papers on any topic + + Scout discovers Kai's services and calls: + "Hey Kai, summarize arxiv.org/abs/2401.12345" + + Kai does the work → returns result → signs a receipt + Scout verifies the receipt → uses the result + Sarah (Scout's owner) sees the whole interaction +``` + +Not just messaging — actual agent-to-agent work, with cryptographic proof of what happened. 
+ +--- + +## What stays private + +| What | Where | Who sees it | +|------|-------|-------------| +| Your agent | Your machine | Only you | +| Private key | Your machine | Only your agent | +| OpenClaw webhook token | Your machine | Only your proxy | +| Messages | Between proxies | Sender + receiver only | +| Agent identity (DID, name) | Public | Anyone who connects | + +**Your machine is never exposed to the internet.** The proxy (runs on Cloudflare) is the only public-facing piece. It checks identities and forwards verified messages to your private agent. + +--- + +## Architecture + +``` +┌─────────────────────────────────────────────────┐ +│ REGISTRY │ +│ (Passport Office — Cloudflare Worker) │ +│ │ +│ • Issues agent identities │ +│ • Publishes revocation lists │ +│ • Verifies ownership │ +└──────────┬──────────────────────┬────────────────┘ + │ │ + issues identity issues identity + │ │ +┌──────────▼──────────┐ ┌────────▼───────────────┐ +│ YOUR AGENT │ │ THEIR AGENT │ +│ (your laptop) │ │ (their machine) │ +│ │ │ │ +│ Private key 🔐 │ │ Private key 🔐 │ +│ Never exposed │ │ Never exposed │ +└──────────┬───────────┘ └────────┬───────────────┘ + │ │ + signs requests signs requests + │ │ +┌──────────▼──────────┐ ┌────────▼───────────────┐ +│ THEIR PROXY │ │ YOUR PROXY │ +│ (Cloudflare) │ │ (Cloudflare) │ +│ │ │ │ +│ Checks identity │ │ Checks identity │ +│ Blocks bad actors │ │ Blocks bad actors │ +│ Forwards to agent │ │ Forwards to agent │ +└──────────────────────┘ └────────────────────────┘ +``` + +--- + +## Kill switch + +Agent compromised? One command: + +```bash +clawdentity agent revoke my-agent +``` + +Revoked everywhere. Instantly. No other agents affected. No shared keys to rotate. 
+ +--- + +## Built with + +- **Registry + Proxy**: Cloudflare Workers (globally distributed, fast) +- **Identity**: Ed25519 cryptographic signatures (same as SSH keys) +- **Tokens**: JWT with EdDSA signing +- **Framework**: OpenClaw (first supported framework, more coming) + +--- + +## Project structure + +``` +clawdentity/ +├── apps/ +│ ├── registry/ — Identity registry (Cloudflare Worker) +│ ├── proxy/ — Verification proxy (Cloudflare Worker) +│ ├── cli/ — Command-line tool for operators +│ └── openclaw-skill/ — OpenClaw integration +├── packages/ +│ ├── protocol/ — Identity formats, signing rules +│ ├── sdk/ — TypeScript SDK (sign, verify, cache) +│ └── connector/ — Persistent WebSocket connections +``` + +--- + +## FAQ + +**Q: Does my agent need to be on the internet?** +No. Your agent stays on your machine. The proxy handles all public communication. + +**Q: What if someone steals my agent's identity?** +Run `clawdentity agent revoke`. It's killed everywhere within minutes. Create a new one. + +**Q: Can I see what my agent is saying to other agents?** +Yes. Every message is echoed to your chat (WhatsApp, Telegram, etc.) with clear attribution. + +**Q: What frameworks are supported?** +OpenClaw today. The identity layer is framework-agnostic — any agent framework can integrate. + +**Q: Is this a blockchain thing?** +No. Zero blockchain. Just cryptographic signatures. Fast, cheap, simple. + +**Q: How much does it cost?** +Free for the protocol and tools. Registry and proxy run on Cloudflare's free tier. + +--- + +## Deep dive + +For the full technical specification — identity provisioning, challenge-response registration, per-message signing protocol, proxy verification pipeline, CRL revocation mechanics, and security architecture — see [ARCHITECTURE.md](./ARCHITECTURE.md). + +--- + +## License + +TBD. 
diff --git a/assets/logo-lock-claw-dark.svg b/assets/logo-lock-claw-dark.svg new file mode 100644 index 0000000..46e634c --- /dev/null +++ b/assets/logo-lock-claw-dark.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + clawdentity + + + VERIFIED AGENT IDENTITY + + diff --git a/assets/logo-lock-claw-full.svg b/assets/logo-lock-claw-full.svg new file mode 100644 index 0000000..d44ef23 --- /dev/null +++ b/assets/logo-lock-claw-full.svg @@ -0,0 +1,53 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + clawdentity + + + + + VERIFIED AGENT IDENTITY + + diff --git a/assets/logo-lock-claw.svg b/assets/logo-lock-claw.svg new file mode 100644 index 0000000..275545a --- /dev/null +++ b/assets/logo-lock-claw.svg @@ -0,0 +1,66 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/assets/logo-shield-claw-dark.svg b/assets/logo-shield-claw-dark.svg new file mode 100644 index 0000000..49bf2af --- /dev/null +++ b/assets/logo-shield-claw-dark.svg @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + clawdentity + + + VERIFIED AGENT IDENTITY + + diff --git a/assets/logo-shield-claw-full.svg b/assets/logo-shield-claw-full.svg new file mode 100644 index 0000000..2727a39 --- /dev/null +++ b/assets/logo-shield-claw-full.svg @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + clawdentity + + + + + VERIFIED AGENT IDENTITY + + diff --git a/assets/logo-shield-claw.svg b/assets/logo-shield-claw.svg new file mode 100644 index 0000000..1e82021 --- /dev/null +++ b/assets/logo-shield-claw.svg @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/assets/preview.html b/assets/preview.html new file mode 100644 index 0000000..124a498 --- /dev/null +++ b/assets/preview.html @@ -0,0 +1,152 @@ + + + + +Clawdentity Logo Concepts + + + + + +
+

Clawdentity — Logo Concepts

+

Transparent SVG — works on any background

+ +

☀️ Light Background

+
+
+

Option A — Shield Claw

+ +
+
+
+ +
+
Favicon
+
+
+ +
Mark
+
+
+
+
+

Option D — Lock Claw

+ +
+
+
+ +
+
Favicon
+
+
+ +
Mark
+
+
+
+
+
+ +
+ + +
+

🌙 Dark Background (GitHub)

+
+
+

Option A — Shield Claw

+ +
+
+
+ +
+
Dark
+
+
+
+ +
+
Brand BG
+
+
+
+ +
+
Light
+
+
+
+
+

Option D — Lock Claw

+ +
+
+
+ +
+
Dark
+
+
+
+ +
+
Brand BG
+
+
+
+ +
+
Light
+
+
+
+
+
+ +
+ + + + + + diff --git a/docs/logging-strategy.md b/docs/logging-strategy.md new file mode 100644 index 0000000..15febb8 --- /dev/null +++ b/docs/logging-strategy.md @@ -0,0 +1,330 @@ +# Clawdentity Logging Strategy + +## Current State + +### Logger Implementation +- Custom structured JSON logger (`@clawdentity/sdk` → `logging.ts`) +- Uses `console.log/info/warn/error/debug` → Cloudflare Workers Logs captures these +- Request logging middleware on every route (`request.completed` with method, path, status, durationMs) +- No log-level filtering — everything emits regardless of environment + +### Log Events Inventory + +**Proxy (15 events):** +| Event | Level | Frequency | Description | +|-------|-------|-----------|-------------| +| `proxy.auth.verified` | info | Every authenticated request | Auth pipeline passed | +| `proxy.hooks.agent.delivered_to_relay` | info | Every message relay | Message handed to DO | +| `proxy.hooks.agent.relay_delivery_failed` | warn | On delivery failure | DO rejected message | +| `proxy.hooks.agent.relay_queue_full` | warn | When queue saturated | Agent queue at capacity | +| `proxy.relay.connect` | info | Every WebSocket connect | Connector established WS | +| `proxy.pair.start` | info | On pairing initiation | Pairing ticket created | +| `proxy.pair.confirm` | info | On pairing confirmation | Trust pair established | +| `proxy.pair.status` | info | On status check | Ticket status queried | +| `proxy.pair.confirm.callback_failed` | warn | On callback failure | Peer notification failed | +| `proxy.rate_limit.exceeded` | warn | On agent rate limit | Agent throttled | +| `proxy.public_rate_limit.exceeded` | warn | On public rate limit | Public endpoint throttled | +| `proxy.relay.receipt_record_failed` | warn | On receipt write failure | Receipt persistence failed | +| `proxy.relay.receipt_lookup_failed` | warn | On receipt read failure | Receipt query failed | +| `proxy.trust_store.memory_fallback` | warn | On DO unavailable | Trust store 
degraded | +| `proxy.server_started` | info | Once on boot | Server initialization | +| `request.completed` | info | **Every single request** | Request logging middleware | + +**Registry (4 events — very sparse):** +| Event | Level | Frequency | Description | +|-------|-------|-----------|-------------| +| `registry.event_bus.publish_failed` | warn | On queue publish failure | Event bus degraded | +| `registry.admin_bootstrap_rollback_failed` | error | On bootstrap failure | Critical setup error | +| `registry.invite_redeem_rollback_failed` | error | On invite failure | Invite transaction failed | +| `registry.agent_registration_rollback_failed` | error | On registration failure | Registration transaction failed | +| `request.completed` | info | **Every single request** | Request logging middleware | + +**Connector (30 events — runs locally, not on CF):** +Connector runs as a Node.js sidecar on the user's machine. CF billing doesn't apply, but disk/stdout matters. + +**Durable Objects (AgentRelaySession — 0 explicit log events):** +The DO has ZERO console.log calls. All logging happens in the proxy Worker before/after DO calls. However, Cloudflare still generates invocation logs for every DO alarm, fetch, and WebSocket message. + +### Cost Drivers (Cloudflare Workers Logs) + +Starting billing: Already active (since April 2025). Traces billing starts March 1, 2026. 
+ +| Plan | Included Events/Month | Overage | +|------|----------------------|---------| +| Free | 10M | Operations fail | +| Paid ($5/mo) | 20M | $0.60/million | + +**What counts as an observability event:** +- Each `console.log/info/warn/error/debug` call = 1 event +- Each invocation log (auto-generated per request) = 1 event +- Each trace span = 1 event (from March 2026) +- `head_sampling_rate` controls what % of requests generate events + +### Current Config (Both Services) +```json +"observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } +} +``` +This logs 100% of everything. Fine for dev, expensive at scale. + +--- + +## Logging Strategy + +### Principle: Log decisions, not data flow + +In production, log WHY something failed, not THAT something happened. A successful message relay is the norm — don't log it. A rejected message is an anomaly — always log it. + +### Development Environment + +**Goal:** Full visibility for debugging. Cost doesn't matter (low traffic). + +```json +"observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } +} +``` + +**Logger level:** All levels (debug, info, warn, error) — no filtering. + +**Request logging middleware:** Enabled (log every request with timing). + +### Production Environment + +**Goal:** Catch failures and anomalies. Minimize event count. + +```json +"observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": false, + "head_sampling_rate": 1 + } +} +``` + +Why `head_sampling_rate: 1` but `invocation_logs: false`? +- Sampling rate controls whether a request generates ANY logs. At 0.1, you'd miss 90% of errors. +- `invocation_logs: false` disables the auto-generated invocation summary per request (the biggest event generator). +- Your custom `console.*` logs still fire at rate 1.0, so you see every error/warn. 
+ +**Logger level filtering:** Add environment-aware log level to the logger. + +### Proposed Logger Changes + +#### 1. Add minimum log level support + +```typescript +// sdk/src/logging.ts +const LOG_LEVEL_PRIORITY: Record = { + debug: 0, + info: 1, + warn: 2, + error: 3, +}; + +export function createLogger( + baseFields: LogFields = {}, + minLevel: LogLevel = "debug", +): Logger { + const minPriority = LOG_LEVEL_PRIORITY[minLevel]; + const emit = (level: LogLevel, message: string, fields: LogFields = {}) => { + if (LOG_LEVEL_PRIORITY[level] < minPriority) return; + writeLine(level, toLogLine(level, message, fields, baseFields)); + }; + // ... +} +``` + +#### 2. Environment-based log levels + +| Environment | Min Level | Effect | +|-------------|-----------|--------| +| local | debug | Everything | +| development | debug | Everything | +| production | warn | Only warn + error | + +This eliminates ALL info-level logs in production: +- `proxy.auth.verified` — gone (normal operation) +- `proxy.hooks.agent.delivered_to_relay` — gone (normal operation) +- `proxy.relay.connect` — gone (normal operation) +- `proxy.pair.start/confirm/status` — gone (normal operation) +- `request.completed` — gone (the biggest volume killer) + +What survives in production (warn + error only): +- `proxy.hooks.agent.relay_delivery_failed` — delivery failures +- `proxy.hooks.agent.relay_queue_full` — capacity issues +- `proxy.rate_limit.exceeded` — abuse detection +- `proxy.public_rate_limit.exceeded` — abuse detection +- `proxy.relay.receipt_record_failed` — data integrity +- `proxy.relay.receipt_lookup_failed` — data integrity +- `proxy.trust_store.memory_fallback` — degraded state +- `proxy.pair.confirm.callback_failed` — pairing issues +- All registry error events (rollback failures) + +#### 3. 
Conditional request logging middleware + +Only log slow or failed requests in production: + +```typescript +export function createRequestLoggingMiddleware( + logger: Logger, + options?: { slowThresholdMs?: number; onlyErrors?: boolean }, +) { + const slowMs = options?.slowThresholdMs ?? 5000; + const onlyErrors = options?.onlyErrors ?? false; + + return createMiddleware(async (c, next) => { + const startedAt = nowUtcMs(); + let caughtError: unknown; + try { + await next(); + } catch (error) { + caughtError = error; + throw error; + } finally { + const durationMs = nowUtcMs() - startedAt; + const status = caughtError ? 500 : c.res.status; + const isError = status >= 400; + const isSlow = durationMs >= slowMs; + + if (!onlyErrors || isError || isSlow) { + logger.info("request.completed", { + requestId: resolveRequestId(c.req.header(REQUEST_ID_HEADER)), + method: c.req.method, + path: c.req.path, + status, + durationMs, + ...(isSlow ? { slow: true } : {}), + }); + } + } + }); +} +``` + +Production usage: +```typescript +app.use("*", createRequestLoggingMiddleware(logger, { onlyErrors: true, slowThresholdMs: 3000 })); +``` + +### Event Budget Estimation (Production) + +**Scenario: 1,000 active agents, ~50 messages/agent/day** + +Without this strategy (current): +| Source | Events/Day | +|--------|-----------| +| Invocation logs (auto) | ~50,000 | +| `request.completed` (every request) | ~50,000 | +| `proxy.auth.verified` (every request) | ~50,000 | +| `proxy.hooks.agent.delivered_to_relay` | ~50,000 | +| `proxy.relay.connect` (reconnects) | ~2,000 | +| DO alarms (heartbeats, retries) | ~100,000 | +| **Total** | **~300,000/day ≈ 9M/month** | + +With this strategy: +| Source | Events/Day | +|--------|-----------| +| Invocation logs | 0 (disabled) | +| `request.completed` | ~500 (errors + slow only) | +| warn/error events | ~200 (failures only) | +| **Total** | **~700/day ≈ 21K/month** | + +**Reduction: ~99.7%** — comfortably within free plan forever. 
+ +--- + +## Wrangler Config Changes + +### Proxy + +```jsonc +// Dev (keep as-is) +"dev": { + "observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } + } +} + +// Production +"production": { + "observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": false, + "head_sampling_rate": 1 + } + } +} +``` + +### Registry + +```jsonc +// Dev (keep as-is) +"dev": { + "observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } + } +} + +// Production (registry is low-traffic, can afford more logging) +"production": { + "observability": { + "enabled": true, + "logs": { + "enabled": true, + "invocation_logs": true, + "head_sampling_rate": 1 + } + } +} +``` + +--- + +## Implementation Checklist + +- [ ] Add `minLevel` parameter to `createLogger` in `@clawdentity/sdk` +- [ ] Pass `ENVIRONMENT` to logger creation in proxy and registry servers +- [ ] Set proxy production logger to `warn` level +- [ ] Set registry production logger to `warn` level +- [ ] Update request logging middleware to support `onlyErrors` mode +- [ ] Use `onlyErrors: true` for proxy production +- [ ] Keep full request logging for registry (low volume) +- [ ] Add per-environment observability config in wrangler.jsonc +- [ ] Disable `invocation_logs` for proxy production +- [ ] Keep `invocation_logs` enabled for registry production + +--- + +## Future Considerations + +- **Logpush to external sink:** If you need long-term log retention beyond CF dashboard (7 days), add Workers Logpush to S3/R2. Only worth it at scale. +- **Structured error codes:** All warn/error events already use structured keys (`proxy.hooks.agent.relay_queue_full`). Good for alerting rules later. +- **DO-level logging:** AgentRelaySession currently has zero logs. 
If you need delivery debugging in prod, add warn-level logs for queue overflow and retry exhaustion inside the DO itself. +- **Sampling fallback:** If warn/error volume ever gets high (DDoS, mass failures), add `head_sampling_rate: 0.1` as emergency circuit breaker. diff --git a/draft-vemula-clawdentity-protocol-00.html b/draft-caw-clawdentity-protocol-00.html similarity index 99% rename from draft-vemula-clawdentity-protocol-00.html rename to draft-caw-clawdentity-protocol-00.html index 1a1dbf1..7c7ee01 100644 --- a/draft-vemula-clawdentity-protocol-00.html +++ b/draft-caw-clawdentity-protocol-00.html @@ -5,7 +5,7 @@ Clawdentity: Cryptographic Identity and Trust Protocol for AI Agent Communication - + - + - +

; + data: Data; + pluginArgs: PluginArgs; +}; +type PagesPluginFunction = Record, PluginArgs = unknown> = (context: EventPluginContext) => Response | Promise; +declare module "assets:*" { + export const onRequest: PagesFunction; +} +// Copyright (c) 2022-2023 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +declare module "cloudflare:pipelines" { + export abstract class PipelineTransformationEntrypoint { + protected env: Env; + protected ctx: ExecutionContext; + constructor(ctx: ExecutionContext, env: Env); + /** + * run receives an array of PipelineRecord which can be + * transformed and returned to the pipeline + * @param records Incoming records from the pipeline to be transformed + * @param metadata Information about the specific pipeline calling the transformation entrypoint + * @returns A promise containing the transformed PipelineRecord array + */ + public run(records: I[], metadata: PipelineBatchMetadata): Promise; + } + export type PipelineRecord = Record; + export type PipelineBatchMetadata = { + pipelineId: string; + pipelineName: string; + }; + export interface Pipeline { + /** + * The Pipeline interface represents the type of a binding to a Pipeline + * + * @param records The records to send to the pipeline + */ + send(records: T[]): Promise; + } +} +// PubSubMessage represents an incoming PubSub message. +// The message includes metadata about the broker, the client, and the payload +// itself. +// https://developers.cloudflare.com/pub-sub/ +interface PubSubMessage { + // Message ID + readonly mid: number; + // MQTT broker FQDN in the form mqtts://BROKER.NAMESPACE.cloudflarepubsub.com:PORT + readonly broker: string; + // The MQTT topic the message was sent on. + readonly topic: string; + // The client ID of the client that published this message. 
+ readonly clientId: string; + // The unique identifier (JWT ID) used by the client to authenticate, if token + // auth was used. + readonly jti?: string; + // A Unix timestamp (seconds from Jan 1, 1970), set when the Pub/Sub Broker + // received the message from the client. + readonly receivedAt: number; + // An (optional) string with the MIME type of the payload, if set by the + // client. + readonly contentType: string; + // Set to 1 when the payload is a UTF-8 string + // https://docs.oasis-open.org/mqtt/mqtt/v5.0/os/mqtt-v5.0-os.html#_Toc3901063 + readonly payloadFormatIndicator: number; + // Pub/Sub (MQTT) payloads can be UTF-8 strings, or byte arrays. + // You can use payloadFormatIndicator to inspect this before decoding. + payload: string | Uint8Array; +} +// JsonWebKey extended by kid parameter +interface JsonWebKeyWithKid extends JsonWebKey { + // Key Identifier of the JWK + readonly kid: string; +} +interface RateLimitOptions { + key: string; +} +interface RateLimitOutcome { + success: boolean; +} +interface RateLimit { + /** + * Rate limit a request based on the provided options. + * @see https://developers.cloudflare.com/workers/runtime-apis/bindings/rate-limit/ + * @returns A promise that resolves with the outcome of the rate limit. + */ + limit(options: RateLimitOptions): Promise; +} +// Namespace for RPC utility types. Unfortunately, we can't use a `module` here as these types need +// to referenced by `Fetcher`. This is included in the "importable" version of the types which +// strips all `module` blocks. +declare namespace Rpc { + // Branded types for identifying `WorkerEntrypoint`/`DurableObject`/`Target`s. + // TypeScript uses *structural* typing meaning anything with the same shape as type `T` is a `T`. + // For the classes exported by `cloudflare:workers` we want *nominal* typing (i.e. 
we only want to + // accept `WorkerEntrypoint` from `cloudflare:workers`, not any other class with the same shape) + export const __RPC_STUB_BRAND: '__RPC_STUB_BRAND'; + export const __RPC_TARGET_BRAND: '__RPC_TARGET_BRAND'; + export const __WORKER_ENTRYPOINT_BRAND: '__WORKER_ENTRYPOINT_BRAND'; + export const __DURABLE_OBJECT_BRAND: '__DURABLE_OBJECT_BRAND'; + export const __WORKFLOW_ENTRYPOINT_BRAND: '__WORKFLOW_ENTRYPOINT_BRAND'; + export interface RpcTargetBranded { + [__RPC_TARGET_BRAND]: never; + } + export interface WorkerEntrypointBranded { + [__WORKER_ENTRYPOINT_BRAND]: never; + } + export interface DurableObjectBranded { + [__DURABLE_OBJECT_BRAND]: never; + } + export interface WorkflowEntrypointBranded { + [__WORKFLOW_ENTRYPOINT_BRAND]: never; + } + export type EntrypointBranded = WorkerEntrypointBranded | DurableObjectBranded | WorkflowEntrypointBranded; + // Types that can be used through `Stub`s + export type Stubable = RpcTargetBranded | ((...args: any[]) => any); + // Types that can be passed over RPC + // The reason for using a generic type here is to build a serializable subset of structured + // cloneable composite types. This allows types defined with the "interface" keyword to pass the + // serializable check as well. Otherwise, only types defined with the "type" keyword would pass. + type Serializable = + // Structured cloneables + BaseType + // Structured cloneable composites + | Map ? Serializable : never, T extends Map ? Serializable : never> | Set ? Serializable : never> | ReadonlyArray ? Serializable : never> | { + [K in keyof T]: K extends number | string ? Serializable : never; + } + // Special types + | Stub + // Serialized as stubs, see `Stubify` + | Stubable; + // Base type for all RPC stubs, including common memory management methods. + // `T` is used as a marker type for unwrapping `Stub`s later. 
+ interface StubBase extends Disposable { + [__RPC_STUB_BRAND]: T; + dup(): this; + } + export type Stub = Provider & StubBase; + // This represents all the types that can be sent as-is over an RPC boundary + type BaseType = void | undefined | null | boolean | number | bigint | string | TypedArray | ArrayBuffer | DataView | Date | Error | RegExp | ReadableStream | WritableStream | Request | Response | Headers; + // Recursively rewrite all `Stubable` types with `Stub`s + // prettier-ignore + type Stubify = T extends Stubable ? Stub : T extends Map ? Map, Stubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { + [key: string | number]: any; + } ? { + [K in keyof T]: Stubify; + } : T; + // Recursively rewrite all `Stub`s with the corresponding `T`s. + // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies: + // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`. + // prettier-ignore + type Unstubify = T extends StubBase ? V : T extends Map ? Map, Unstubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { + [key: string | number]: unknown; + } ? { + [K in keyof T]: Unstubify; + } : T; + type UnstubifyAll = { + [I in keyof A]: Unstubify; + }; + // Utility type for adding `Provider`/`Disposable`s to `object` types only. + // Note `unknown & T` is equivalent to `T`. + type MaybeProvider = T extends object ? Provider : unknown; + type MaybeDisposable = T extends object ? Disposable : unknown; + // Type for method return or property on an RPC interface. + // - Stubable types are replaced by stubs. + // - Serializable types are passed by value, with stubable types replaced by stubs + // and a top-level `Disposer`. + // Everything else can't be passed over PRC. + // Technically, we use custom thenables here, but they quack like `Promise`s. 
+ // Intersecting with `(Maybe)Provider` allows pipelining. + // prettier-ignore + type Result = R extends Stubable ? Promise> & Provider : R extends Serializable ? Promise & MaybeDisposable> & MaybeProvider : never; + // Type for method or property on an RPC interface. + // For methods, unwrap `Stub`s in parameters, and rewrite returns to be `Result`s. + // Unwrapping `Stub`s allows calling with `Stubable` arguments. + // For properties, rewrite types to be `Result`s. + // In each case, unwrap `Promise`s. + type MethodOrProperty = V extends (...args: infer P) => infer R ? (...args: UnstubifyAll